[CalendarServer-changes] [14423] CalendarServer/branches/users/cdaboo/pod2pod-migration

source_changes at macosforge.org source_changes at macosforge.org
Tue Feb 17 09:51:14 PST 2015


Revision: 14423
          http://trac.calendarserver.org//changeset/14423
Author:   cdaboo at apple.com
Date:     2015-02-17 09:51:13 -0800 (Tue, 17 Feb 2015)
Log Message:
-----------
Checkpoint: migration final sync of delegates.

Modified Paths:
--------------
    CalendarServer/branches/users/cdaboo/pod2pod-migration/requirements-stable.txt
    CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/directory.py
    CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/migration/home_sync.py
    CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/migration/test/test_home_sync.py
    CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/sql_directory.py

Modified: CalendarServer/branches/users/cdaboo/pod2pod-migration/requirements-stable.txt
===================================================================
--- CalendarServer/branches/users/cdaboo/pod2pod-migration/requirements-stable.txt	2015-02-17 17:47:33 UTC (rev 14422)
+++ CalendarServer/branches/users/cdaboo/pod2pod-migration/requirements-stable.txt	2015-02-17 17:51:13 UTC (rev 14423)
@@ -36,7 +36,7 @@
             #pyOpenSSL
         pycrypto==2.6.1
 
-    --editable svn+http://svn.calendarserver.org/repository/calendarserver/twext/branches/users/cdaboo/pod2pod-migration@14416#egg=twextpy
+    --editable svn+http://svn.calendarserver.org/repository/calendarserver/twext/branches/users/cdaboo/pod2pod-migration@14422#egg=twextpy
         cffi==0.8.6
             pycparser==2.10
         #twisted

Modified: CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/directory.py
===================================================================
--- CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/directory.py	2015-02-17 17:47:33 UTC (rev 14422)
+++ CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/directory.py	2015-02-17 17:51:13 UTC (rev 14423)
@@ -191,3 +191,126 @@
         delegators = yield Delegates._delegatedToUIDs(txn, delegate, request["read-write"], onlyThisServer=True)
 
         returnValue(list(delegators))
+
+
+    @inlineCallbacks
+    def send_dump_individual_delegates(self, txn, delegator):
+        """
+        Get L{DelegateRecord} items from another pod.
+
+        @param txn: transaction to use
+        @type txn: L{CommonStoreTransaction}
+        @param delegator: delegator to look up
+        @type delegator: L{DirectoryRecord}
+        @return: a list of serialized L{DelegateRecord} maps from the remote pod
+        @rtype: C{list}
+        """
+        if delegator.thisServer():
+            raise FailedCrossPodRequestError("Cross-pod destination on this server: {}".format(delegator.uid))
+
+        request = {
+            "action": "dump-individual-delegates",
+            "uid": delegator.uid,
+        }
+        response = yield self.sendRequestToServer(txn, delegator.server(), request)
+        returnValue(response)
+
+
+    @inlineCallbacks
+    def recv_dump_individual_delegates(self, txn, request):
+        """
+        Process a dump-individual-delegates cross-pod request. Request arguments as per L{send_dump_individual_delegates}.
+
+        @param request: request arguments
+        @type request: C{dict}
+        """
+
+        delegator = yield txn.directoryService().recordWithUID(request["uid"])
+        if delegator is None or not delegator.thisServer():
+            raise FailedCrossPodRequestError("Cross-pod delegator missing or not hosted on this server: {}".format(request["uid"]))
+
+        delegates = yield txn.dumpIndividualDelegatesLocal(delegator.uid)
+
+        returnValue(self._to_serialize_list(delegates))
+
+
+    @inlineCallbacks
+    def send_dump_group_delegates(self, txn, delegator):
+        """
+        Get L{DelegateGroupsRecord}, L{GroupsRecord} pairs from another pod.
+
+        @param txn: transaction to use
+        @type txn: L{CommonStoreTransaction}
+        @param delegator: delegator to look up
+        @type delegator: L{DirectoryRecord}
+        @return: a list of serialized (L{DelegateGroupsRecord}, L{GroupsRecord}) pairs from the remote pod
+        @rtype: C{list}
+        """
+        if delegator.thisServer():
+            raise FailedCrossPodRequestError("Cross-pod destination on this server: {}".format(delegator.uid))
+
+        request = {
+            "action": "dump-group-delegates",
+            "uid": delegator.uid,
+        }
+        response = yield self.sendRequestToServer(txn, delegator.server(), request)
+        returnValue(response)
+
+
+    @inlineCallbacks
+    def recv_dump_group_delegates(self, txn, request):
+        """
+        Process a dump-group-delegates cross-pod request. Request arguments as per L{send_dump_group_delegates}.
+
+        @param request: request arguments
+        @type request: C{dict}
+        """
+
+        delegator = yield txn.directoryService().recordWithUID(request["uid"])
+        if delegator is None or not delegator.thisServer():
+            raise FailedCrossPodRequestError("Cross-pod delegator missing or not hosted on this server: {}".format(request["uid"]))
+
+        results = yield txn.dumpGroupDelegatesLocal(delegator.uid)
+
+        returnValue([[delegateGroup.serialize(), group.serialize()] for delegateGroup, group in results])
+
+
+    @inlineCallbacks
+    def send_dump_external_delegates(self, txn, delegator):
+        """
+        Get L{ExternalDelegateGroupsRecord} items from another pod.
+
+        @param txn: transaction to use
+        @type txn: L{CommonStoreTransaction}
+        @param delegator: delegator to look up
+        @type delegator: L{DirectoryRecord}
+        @return: a list of serialized L{ExternalDelegateGroupsRecord} maps from the remote pod
+        @rtype: C{list}
+        """
+        if delegator.thisServer():
+            raise FailedCrossPodRequestError("Cross-pod destination on this server: {}".format(delegator.uid))
+
+        request = {
+            "action": "dump-external-delegates",
+            "uid": delegator.uid,
+        }
+        response = yield self.sendRequestToServer(txn, delegator.server(), request)
+        returnValue(response)
+
+
+    @inlineCallbacks
+    def recv_dump_external_delegates(self, txn, request):
+        """
+        Process a dump-external-delegates cross-pod request. Request arguments as per L{send_dump_external_delegates}.
+
+        @param request: request arguments
+        @type request: C{dict}
+        """
+
+        delegator = yield txn.directoryService().recordWithUID(request["uid"])
+        if delegator is None or not delegator.thisServer():
+            raise FailedCrossPodRequestError("Cross-pod delegator missing or not hosted on this server: {}".format(request["uid"]))
+
+        delegates = yield txn.dumpExternalDelegatesLocal(delegator.uid)
+
+        returnValue(self._to_serialize_list(delegates))

Modified: CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/migration/home_sync.py
===================================================================
--- CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/migration/home_sync.py	2015-02-17 17:47:33 UTC (rev 14422)
+++ CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/migration/home_sync.py	2015-02-17 17:51:13 UTC (rev 14423)
@@ -150,12 +150,13 @@
         yield self.loadRecord()
         self.homeId = yield self.prepareCalendarHome()
 
+        # Calendar list and calendar data
         yield self.syncCalendarList()
 
-        # sync home metadata such as alarms, default calendars, etc
+        # Sync home metadata such as alarms, default calendars, etc
         yield self.syncCalendarHomeMetaData()
 
-        # TODO: sync attachments
+        # Sync attachments
         yield self.syncAttachments()
 
 
@@ -166,7 +167,7 @@
         rows, recalculate quota etc.
         """
 
-        # TODO: link attachments to resources: ATTACHMENT_CALENDAR_OBJECT table
+        # Link attachments to resources: ATTACHMENT_CALENDAR_OBJECT table
         yield self.linkAttachments()
 
         # TODO: Re-write attachment URIs - not sure if we need this as reverse proxy may take care of it
@@ -182,7 +183,7 @@
         pass
 
         # TODO: delegates reconcile
-        pass
+        yield self.delegateReconcile()
 
         # TODO: notifications
         pass
@@ -780,3 +781,54 @@
             link._calendarObjectID = objectIDMap[link._calendarObjectID].localResourceID
 
             yield link.insert()
+
+
+    @inlineCallbacks
+    def delegateReconcile(self):
+        """
+        Sync all delegate assignments (individual, group, and external) from the remote
+        home to the local home. We won't use a fake directory UID locally.
+        """
+
+        yield self.individualDelegateReconcile()
+        yield self.groupDelegateReconcile()
+        yield self.externalDelegateReconcile()
+
+
+    @inTransactionWrapper
+    @inlineCallbacks
+    def individualDelegateReconcile(self, txn):
+        """
+        Sync the individual delegate assignments from the remote home to the local home.
+        We won't use a fake directory UID locally.
+        """
+        remote_records = yield txn.dumpIndividualDelegatesExternal(self.record)
+        for record in remote_records:
+            yield record.insert(txn)
+
+
+    @inTransactionWrapper
+    @inlineCallbacks
+    def groupDelegateReconcile(self, txn):
+        """
+        Sync the group delegate assignments from the remote home to the local home.
+        We won't use a fake directory UID locally.
+        """
+        remote_records = yield txn.dumpGroupDelegatesExternal(self.record)
+        for delegator, group in remote_records:
+            # We need to make sure the group exists locally first and map the groupID to the local one
+            local_group = yield txn.groupByUID(group.groupUID)
+            delegator.groupID = local_group.groupID
+            yield delegator.insert(txn)
+
+
+    @inTransactionWrapper
+    @inlineCallbacks
+    def externalDelegateReconcile(self, txn):
+        """
+        Sync the external delegate assignments from the remote home to the local home. We won't use
+        a fake directory UID locally.
+        """
+        remote_records = yield txn.dumpExternalDelegatesExternal(self.record)
+        for record in remote_records:
+            yield record.insert(txn)

Modified: CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/migration/test/test_home_sync.py
===================================================================
--- CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/migration/test/test_home_sync.py	2015-02-17 17:47:33 UTC (rev 14422)
+++ CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/podding/migration/test/test_home_sync.py	2015-02-17 17:51:13 UTC (rev 14423)
@@ -22,14 +22,17 @@
 from txdav.common.datastore.podding.migration.home_sync import CrossPodHomeSync
 from txdav.common.datastore.podding.migration.sync_metadata import CalendarMigrationRecord
 from txdav.common.datastore.podding.test.util import MultiStoreConduitTest
+from txdav.common.datastore.sql_directory import DelegateRecord, \
+    ExternalDelegateGroupsRecord, DelegateGroupsRecord
 from txdav.common.datastore.sql_tables import schema
+from txdav.who.delegates import Delegates
 from txweb2.http_headers import MimeType
 from txweb2.stream import MemoryStream
 
 
-class TestConduitAPI(MultiStoreConduitTest):
+class TestCrossPodHomeSync(MultiStoreConduitTest):
     """
-    Test that the conduit api works.
+    Test that L{CrossPodHomeSync} works.
     """
 
     nowYear = {"now": DateTime.getToday().getYear()}
@@ -698,3 +701,89 @@
         attachment = yield object1.attachmentWithManagedID(managedid0_1)
         self.assertTrue(attachment is not None)
         self.assertEqual(attachment.md5(), md50_1)
+
+
+    @inlineCallbacks
+    def test_delegate_reconcile(self):
+        """
+        Test that L{delegateReconcile} copies over the full set of delegates and caches the associated groups.
+        """
+
+        # Create remote home
+        yield self.homeUnderTest(txn=self.theTransactionUnderTest(0), name="user01", create=True)
+        yield self.commitTransaction(0)
+
+        # Add some delegates
+        txn = self.theTransactionUnderTest(0)
+        record01 = yield txn.directoryService().recordWithUID(u"user01")
+        record02 = yield txn.directoryService().recordWithUID(u"user02")
+        record03 = yield txn.directoryService().recordWithUID(u"user03")
+
+        group01 = yield txn.directoryService().recordWithUID(u"__top_group_1__")
+        group02 = yield txn.directoryService().recordWithUID(u"right_coast")
+
+        # Add user02 and user03 as individual delegates
+        yield Delegates.addDelegate(txn, record01, record02, True)
+        yield Delegates.addDelegate(txn, record01, record03, False)
+
+        # Add group delegates
+        yield Delegates.addDelegate(txn, record01, group01, True)
+        yield Delegates.addDelegate(txn, record01, group02, False)
+
+        # Add external delegates
+        yield txn.assignExternalDelegates(u"user01", None, None, u"external1", u"external2")
+
+        yield self.commitTransaction(0)
+
+
+        # Initially no local delegates
+        txn = self.theTransactionUnderTest(1)
+        delegates = yield txn.dumpIndividualDelegatesLocal(u"user01")
+        self.assertEqual(len(delegates), 0)
+        delegates = yield txn.dumpGroupDelegatesLocal(u"user01")
+        self.assertEqual(len(delegates), 0)
+        externals = yield txn.dumpExternalDelegatesLocal(u"user01")
+        self.assertEqual(len(externals), 0)
+        yield self.commitTransaction(1)
+
+        # Sync from remote side
+        syncer = CrossPodHomeSync(self.theStoreUnderTest(1), "user01")
+        yield syncer.loadRecord()
+        yield syncer.delegateReconcile()
+
+        # Now have local delegates
+        txn = self.theTransactionUnderTest(1)
+
+        delegates = yield txn.dumpIndividualDelegatesLocal(u"user01")
+        self.assertEqual(
+            set(delegates),
+            set((
+                DelegateRecord.make(delegator="user01", delegate="user02", readWrite=1),
+                DelegateRecord.make(delegator="user01", delegate="user03", readWrite=0),
+            )),
+        )
+
+        delegateGroups = yield txn.dumpGroupDelegatesLocal(u"user01")
+        group_top = yield txn.groupByUID(u"__top_group_1__")
+        group_right = yield txn.groupByUID(u"right_coast")
+        self.assertEqual(
+            set([item[0] for item in delegateGroups]),
+            set((
+                DelegateGroupsRecord.make(delegator="user01", groupID=group_top.groupID, readWrite=1, isExternal=False),
+                DelegateGroupsRecord.make(delegator="user01", groupID=group_right.groupID, readWrite=0, isExternal=False),
+            )),
+        )
+
+        externals = yield txn.dumpExternalDelegatesLocal(u"user01")
+        self.assertEqual(
+            set(externals),
+            set((
+                ExternalDelegateGroupsRecord.make(
+                    delegator="user01",
+                    groupUIDRead="external1",
+                    groupUIDWrite="external2",
+                ),
+            )),
+        )
+
+        yield self.commitTransaction(1)

Modified: CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/sql_directory.py
===================================================================
--- CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/sql_directory.py	2015-02-17 17:47:33 UTC (rev 14422)
+++ CalendarServer/branches/users/cdaboo/pod2pod-migration/txdav/common/datastore/sql_directory.py	2015-02-17 17:51:13 UTC (rev 14423)
@@ -16,7 +16,7 @@
 ##
 
 from twext.enterprise.dal.record import SerializableRecord, fromTable
-from twext.enterprise.dal.syntax import SavepointAction
+from twext.enterprise.dal.syntax import SavepointAction, Select
 from twext.python.log import Logger
 from twisted.internet.defer import inlineCallbacks, returnValue
 from txdav.common.datastore.sql_tables import schema
@@ -149,7 +149,39 @@
         )
 
 
+    @classmethod
+    @inlineCallbacks
+    def delegatorGroups(cls, txn, delegator):
+        """
+        Get delegator/group pairs for the specified delegator.
+        """
 
+        # Join DELEGATE_GROUPS to GROUPS so both records come back from a single query
+        rows = yield Select(
+            list(DelegateGroupsRecord.table) + list(GroupsRecord.table),
+            From=DelegateGroupsRecord.table.join(GroupsRecord.table, DelegateGroupsRecord.groupID == GroupsRecord.groupID),
+            Where=(DelegateGroupsRecord.delegator == delegator.encode("utf-8"))
+        ).on(txn)
+
+        results = []
+        delegatorNames = [DelegateGroupsRecord.__colmap__[column] for column in list(DelegateGroupsRecord.table)]
+        groupsNames = [GroupsRecord.__colmap__[column] for column in list(GroupsRecord.table)]
+        split_point = len(delegatorNames)
+        for row in rows:
+            delegatorRow = row[:split_point]
+            delegatorRecord = DelegateGroupsRecord()
+            delegatorRecord._attributesFromRow(zip(delegatorNames, delegatorRow))
+            delegatorRecord.transaction = txn
+            groupsRow = row[split_point:]
+            groupsRecord = GroupsRecord()
+            groupsRecord._attributesFromRow(zip(groupsNames, groupsRow))
+            groupsRecord.transaction = txn
+            results.append((delegatorRecord, groupsRecord,))
+
+        returnValue(results)
+
+
+
 class ExternalDelegateGroupsRecord(SerializableRecord, fromTable(schema.EXTERNAL_DELEGATE_GROUPS)):
     """
     @DynamicAttrs
@@ -766,3 +798,51 @@
             yield self.addDelegateGroup(
                 delegator, writeDelegateGroupID, True, isExternal=True
             )
+
+
+    def dumpIndividualDelegatesLocal(self, delegator):
+        """
+        Get the L{DelegateRecord} entries for all individual delegates associated with this delegator.
+        """
+        return DelegateRecord.querysimple(self, delegator=delegator.encode("utf-8"))
+
+
+    @inlineCallbacks
+    def dumpIndividualDelegatesExternal(self, delegator):
+        """
+        Get the L{DelegateRecord} entries for all individual delegates associated with this delegator.
+        """
+        raw_results = yield self.store().conduit.send_dump_individual_delegates(self, delegator)
+        returnValue([DelegateRecord.deserialize(row) for row in raw_results])
+
+
+    def dumpGroupDelegatesLocal(self, delegator):
+        """
+        Get the L{DelegateGroupsRecord}, L{GroupsRecord} pairs for all group delegates associated with this delegator.
+        """
+        return DelegateGroupsRecord.delegatorGroups(self, delegator)
+
+
+    @inlineCallbacks
+    def dumpGroupDelegatesExternal(self, delegator):
+        """
+        Get the L{DelegateGroupsRecord}, L{GroupsRecord} pairs for all group delegates associated with this delegator.
+        """
+        raw_results = yield self.store().conduit.send_dump_group_delegates(self, delegator)
+        returnValue([(DelegateGroupsRecord.deserialize(row[0]), GroupsRecord.deserialize(row[1]),) for row in raw_results])
+
+
+    def dumpExternalDelegatesLocal(self, delegator):
+        """
+        Get the L{ExternalDelegateGroupsRecord} entries for the external delegate groups assigned to this delegator.
+        """
+        return ExternalDelegateGroupsRecord.querysimple(self, delegator=delegator.encode("utf-8"))
+
+
+    @inlineCallbacks
+    def dumpExternalDelegatesExternal(self, delegator):
+        """
+        Get the L{ExternalDelegateGroupsRecord} entries for the external delegate groups assigned to this delegator.
+        """
+        raw_results = yield self.store().conduit.send_dump_external_delegates(self, delegator)
+        returnValue([ExternalDelegateGroupsRecord.deserialize(row) for row in raw_results])