[CalendarServer-changes] [6818] CalendarServer/trunk

source_changes at macosforge.org source_changes at macosforge.org
Thu Jan 27 13:48:25 PST 2011


Revision: 6818
          http://trac.macosforge.org/projects/calendarserver/changeset/6818
Author:   cdaboo at apple.com
Date:     2011-01-27 13:48:24 -0800 (Thu, 27 Jan 2011)
Log Message:
-----------
Make the propfind cache depend on child resources as well as the home resource. That way when a shared calendar is
changed, the propfind caches of all homes of sharer and sharees are invalidated. Added more documentation of the
caching mechanism as far as I understand it.

Modified Paths:
--------------
    CalendarServer/trunk/calendarserver/provision/root.py
    CalendarServer/trunk/twistedcaldav/cache.py
    CalendarServer/trunk/twistedcaldav/method/propfind.py
    CalendarServer/trunk/twistedcaldav/resource.py
    CalendarServer/trunk/twistedcaldav/storebridge.py
    CalendarServer/trunk/twistedcaldav/test/test_cache.py

Modified: CalendarServer/trunk/calendarserver/provision/root.py
===================================================================
--- CalendarServer/trunk/calendarserver/provision/root.py	2011-01-27 19:05:18 UTC (rev 6817)
+++ CalendarServer/trunk/calendarserver/provision/root.py	2011-01-27 21:48:24 UTC (rev 6818)
@@ -1,6 +1,6 @@
 # -*- test-case-name: calendarserver.provision.test.test_root -*-
 ##
-# Copyright (c) 2005-2010 Apple Inc. All rights reserved.
+# Copyright (c) 2005-2011 Apple Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -39,6 +39,8 @@
 from twistedcaldav.resource import CalDAVComplianceMixIn
 from twistedcaldav.resource import CalendarHomeResource, AddressBookHomeResource
 from twistedcaldav.directory.principal import DirectoryPrincipalResource
+from twistedcaldav.storebridge import CalendarCollectionResource,\
+    AddressBookCollectionResource
 
 log = Logger()
 
@@ -76,9 +78,12 @@
         if config.EnableResponseCache and config.Memcached.Pools.Default.ClientEnabled:
             self.responseCache = MemcacheResponseCache(self.fp)
 
+            # These class attributes need to be setup with our memcache notifier
             CalendarHomeResource.cacheNotifierFactory = MemcacheChangeNotifier
             AddressBookHomeResource.cacheNotifierFactory = MemcacheChangeNotifier
             DirectoryPrincipalResource.cacheNotifierFactory = MemcacheChangeNotifier
+            CalendarCollectionResource.cacheNotifierFactory = MemcacheChangeNotifier
+            AddressBookCollectionResource.cacheNotifierFactory = MemcacheChangeNotifier
         else:
             self.responseCache = DisabledCache()
 

Modified: CalendarServer/trunk/twistedcaldav/cache.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/cache.py	2011-01-27 19:05:18 UTC (rev 6817)
+++ CalendarServer/trunk/twistedcaldav/cache.py	2011-01-27 21:48:24 UTC (rev 6818)
@@ -1,5 +1,5 @@
 ##
-# Copyright (c) 2008-2010 Apple Inc. All rights reserved.
+# Copyright (c) 2008-2011 Apple Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -20,7 +20,8 @@
 
 from zope.interface import implements
 
-from twisted.internet.defer import succeed, maybeDeferred
+from twisted.internet.defer import succeed, maybeDeferred, inlineCallbacks,\
+    returnValue
 from twext.web2.dav.util import allDataFromStream
 from twext.web2.http import Response
 from twext.web2.iweb import IResource
@@ -31,7 +32,42 @@
 from twistedcaldav.memcachepool import CachePoolUserMixIn, defaultCachePool
 from twistedcaldav.config import config
 
+"""
+The basic principles of the PROPFIND cache are these:
 
+(1) In RootResource.locateChild we "intercept" request processing at a very early stage (before traversing the resource
+hierarchy for the request URI). If the request is a PROPFIND we check to see whether a cache entry exists and if so immediately
+return that. If no cache entry exists, normal PROPFIND processing occurs.
+
+(2) The PropfindCacheMixin class is mixed into calendar/address book homes. That causes all valid PROPFIND responses to be
+cached, and also provides a cache invalidation API to allow signaling of changes that need to invalidate the cache. The main
+and child resources need to call that API when appropriate changes occur.
+
+(3) The response cache entries consist of a key, derived from the request only, and a value. The value contains the set of tokens
+in effect at the time the entry was cached, together with the response that was cached. The tokens are:
+
+  - principalToken - a token for the authenticated user's principal
+  - directoryToken - a hash of that principal's directory record
+  - uriToken - a token for the request uri
+  - childTokens - tokens for any child resources the request uri depends on (for depth:1)
+  
+  The current principalToken, uriToken and childTokens values are themselves stored in the cache using the key prefix 'cacheToken:'.
+When the 'changeCache' api is called the cached value for the matching token is updated.
+  
+(4) When a request is being checked in the cache, the response cache entry key is first computed and any value extracted. The
+tokens in the value are then checked against the current set of tokens in the cache. If there is any mismatch between tokens, the
+cache entry is considered invalid and the cached response is not returned. If everything matches up, the cached response is returned
+to the caller and ultimately sent directly back to the client.
+ 
+(5) Because of shared calendars/address books that can affect the calendar/address book homes of several different users at once, we
+need to keep track of the separate childTokens for each child resource. The tokens for shared resources are keyed off the sharer's URI,
+so sharees' homes use that token. That way a single token for all shared instances is used and changed just once.
+
+(6) Principals and directory records need to be included as tokens to take account of variations in access control based on who
+is making the request (including proxy state changes etc).
+
+"""
+
 class DisabledCacheNotifier(object):
     def __init__(self, *args, **kwargs):
         pass
@@ -96,6 +132,9 @@
 
 
     def _getRecordForURI(self, uri, request):
+        """
+        Return the directory record for the specified principal uri.
+        """
         def _getRecord(resrc):
             if hasattr(resrc, 'record'):
                 return resrc.record
@@ -108,6 +147,9 @@
 
 
     def _canonicalizeURIForRequest(self, uri, request):
+        """
+        Always use canonicalized forms of the URIs for caching (i.e. __uids__ paths).
+        """
         try:
             return request.locateResource(uri).addCallback(
                 lambda resrc: resrc.url()).addErrback(self._uriNotFound, uri)
@@ -116,6 +158,9 @@
 
 
     def _getURIs(self, request):
+        """
+        Get principal and resource URIs from the request.
+        """
         def _getSecondURI(rURI):
             return self._canonicalizeURIForRequest(
                 self._principalURI(request.authnUser),
@@ -126,30 +171,32 @@
 
         return d
 
-
+    @inlineCallbacks
     def _requestKey(self, request):
-        def _getBody(uris):
-            return allDataFromStream(request.stream).addCallback(
-                lambda body: (body, uris))
+        """
+        Get a key for this request. This depends on the method, Depth: header, authn user principal,
+        request uri and a hash of the request body (the body being normalized for property order).
+        """
+        requestBody = (yield allDataFromStream(request.stream))
+        if requestBody is not None:
+            # Give it back to the request so it can be read again
+            request.stream = MemoryStream(requestBody)
+            request.stream.doStartReading = None
+            
+            # Normalize the property order by doing a "dumb" sort on lines
+            requestLines = requestBody.splitlines()
+            requestLines.sort()
+            requestBody = "\n".join(requestLines)
 
-        def _getKey((requestBody, (pURI, rURI))):
-            if requestBody is not None:
-                request.stream = MemoryStream(requestBody)
-                request.stream.doStartReading = None
+        request.cacheKey = (request.method,
+                            self._principalURI(request.authnUser),
+                            request.uri,
+                            request.headers.getHeader('depth'),
+                            hash(requestBody))
 
-            request.cacheKey = (request.method,
-                                pURI,
-                                rURI,
-                                request.headers.getHeader('depth'),
-                                hash(requestBody))
+        returnValue(request.cacheKey)
 
-            return request.cacheKey
 
-        d = _getBody((self._principalURI(request.authnUser), request.uri))
-        d.addCallback(_getKey)
-        return d
-
-
     def _getResponseBody(self, key, response):
         d1 = allDataFromStream(response.stream)
         d1.addCallback(lambda responseBody: (key, responseBody))
@@ -164,10 +211,7 @@
 
     def _tokenForURI(self, uri, cachePoolHandle=None):
         """
-        Get a property store for the given C{uri}.
-
-        @param uri: The URI we'd like the token for.
-        @return: A C{str} representing the token for the URI.
+        Get the current token for a particular URI.
         """
 
         if cachePoolHandle:
@@ -175,146 +219,155 @@
         else:
             return self.getCachePool().get('cacheToken:%s' % (uri,))
 
-
+    @inlineCallbacks
+    def _tokensForChildren(self, rURI, request):
+        """
+        Create a dict of child resource tokens for any child resources "recorded" during this request in the childCacheURIs attribute.
+        """
+        
+        if hasattr(request, "childCacheURIs"):
+            tokens = dict([(uri, (yield self._tokenForURI(uri)),) for uri in request.childCacheURIs])
+            returnValue(tokens)
+        else:
+            returnValue({})
+    
+    @inlineCallbacks 
     def _getTokens(self, request):
-        def _tokensForURIs((pURI, rURI)):
-            tokens = []
-            d1 = self._tokenForURI(pURI, "PrincipalToken")
-            d1.addCallback(tokens.append)
-            d1.addCallback(lambda _ign: self._getRecordForURI(pURI, request))
-            d1.addCallback(lambda dToken: tokens.append(hash(dToken)))
-            d1.addCallback(lambda _ign: self._tokenForURI(rURI))
-            d1.addCallback(tokens.append)
-            d1.addCallback(lambda _ign: tokens)
-            return d1
+        """
+        Tokens are a principal token, directory record token, resource token and list
+        of child resource tokens. A change to any one of those will cause cache invalidation.
+        """
+        tokens = []
+        pURI, rURI = (yield self._getURIs(request))
+        tokens.append((yield self._tokenForURI(pURI, "PrincipalToken")))
+        tokens.append(hash((yield self._getRecordForURI(pURI, request))))
+        tokens.append((yield self._tokenForURI(rURI)))
+        tokens.append((yield self._tokensForChildren(rURI, request)))
+        returnValue(tokens)
 
-        d = self._getURIs(request)
-        d.addCallback(_tokensForURIs)
-        return d
 
-
+    @inlineCallbacks
     def _hashedRequestKey(self, request):
-        def _hashKey(key):
-            oldkey = key
-            request.cacheKey = key = hashlib.md5(
-                ':'.join([str(t) for t in key])).hexdigest()
-            self.log_debug("hashing key for get: %r to %r" % (oldkey, key))
-            return request.cacheKey
+        """
+        Make a key for a response cache entry. This depends on various request parameters
+        (see _requestKey for details).
+        """
+        oldkey = (yield self._requestKey(request))
+        request.cacheKey = key = hashlib.md5(
+            ':'.join([str(t) for t in oldkey])).hexdigest()
+        self.log_debug("hashing key for get: %r to %r" % (oldkey, key))
+        returnValue(request.cacheKey)
 
-        d = self._requestKey(request)
-        d.addCallback(_hashKey)
-        return d
 
-
+    @inlineCallbacks
     def getResponseForRequest(self, request):
-        def _checkTokens(curTokens, expectedTokens, (code, headers, body)):
-            if curTokens[0] != expectedTokens[0]:
+        """
+        Try to match a request and a response cache entry. We first get the request key and match that, then pull
+        the cache entry and decompose it into tokens and response. We then compare the cached tokens with their current values.
+        If all match, we can return the cached response data.
+        """
+        try:
+            key = (yield self._hashedRequestKey(request))
+    
+            self.log_debug("Checking cache for: %r" % (key,))
+            _ignore_flags, value = (yield self.getCachePool().get(key))
+    
+            if value is None:
+                self.log_debug("Not in cache: %r" % (key,))
+                returnValue(None)
+    
+            self.log_debug("Found in cache: %r = %r" % (key, value))
+    
+            (principalToken, directoryToken, uriToken, childTokens, (code, headers, body)) = cPickle.loads(value)
+            currentTokens = (yield self._getTokens(request))
+    
+            if currentTokens[0] != principalToken:
                 self.log_debug(
                     "Principal token doesn't match for %r: %r != %r" % (
                         request.cacheKey,
-                        curTokens[0],
-                        expectedTokens[0]))
-                return None
-
-            if curTokens[1] != expectedTokens[1]:
+                        currentTokens[0],
+                        principalToken))
+                returnValue(None)
+    
+            if currentTokens[1] != directoryToken:
                 self.log_debug(
                     "Directory Record Token doesn't match for %r: %r != %r" % (
                         request.cacheKey,
-                        curTokens[1],
-                        expectedTokens[1]))
-                return None
-
-            if curTokens[2] != expectedTokens[2]:
+                        currentTokens[1],
+                        directoryToken))
+                returnValue(None)
+    
+            if currentTokens[2] != uriToken:
                 self.log_debug(
                     "URI token doesn't match for %r: %r != %r" % (
                         request.cacheKey,
-                        curTokens[1],
-                        expectedTokens[1]))
-                return None
-
+                        currentTokens[2],
+                        uriToken))
+                returnValue(None)
+    
+            for childuri, token in childTokens.items():
+                currentToken = (yield self._tokenForURI(childuri))
+                if currentToken != token:
+                    self.log_debug(
+                        "Child %s token doesn't match for %r: %r != %r" % (
+                            childuri,
+                            request.cacheKey,
+                            currentToken,
+                            token))
+                    returnValue(None)
+                     
             r = Response(code,
                          stream=MemoryStream(body))
-
+    
             for key, value in headers.iteritems():
                 r.headers.setRawHeaders(key, value)
+    
+            returnValue(r)
 
-            return r
+        except URINotFoundException, e:
+            self.log_debug("Could not locate URI: %r" % (e,))
+            returnValue(None)
 
-        def _unpickleResponse((flags, value), key):
-            if value is None:
-                self.log_debug("Not in cache: %r" % (key,))
-                return None
-
-            self.log_debug("Found in cache: %r = %r" % (key, value))
-
-            (principalToken, directoryToken, uriToken,
-             resp) = cPickle.loads(value)
-            d2 = self._getTokens(request)
-
-            d2.addCallback(_checkTokens,
-                           (principalToken,
-                            directoryToken,
-                            uriToken),
-                           resp)
-
-            return d2
-
-        def _getCached(key):
-            self.log_debug("Checking cache for: %r" % (key,))
-            d1 = self.getCachePool().get(key)
-            return d1.addCallback(_unpickleResponse, key)
-
-        def _handleExceptions(f):
-            f.trap(URINotFoundException)
-            self.log_debug("Could not locate URI: %r" % (f.value,))
-            return None
-
-        d = self._hashedRequestKey(request)
-        d.addCallback(_getCached)
-        d.addErrback(_handleExceptions)
-        return d
-
-
+    @inlineCallbacks
     def cacheResponseForRequest(self, request, response):
-        def _makeCacheEntry((pToken, dToken, uToken), (key, responseBody)):
-            cacheEntry = cPickle.dumps(
-                (pToken,
-                 dToken,
-                 uToken,
-                 (response.code,
-                  dict(list(response.headers.getAllRawHeaders())),
-                  responseBody)))
-
-            self.log_debug("Adding to cache: %r = %r" % (key, cacheEntry))
-            return self.getCachePool().set(key, cacheEntry,
-                expireTime=config.ResponseCacheTimeout*60).addCallback(
-                lambda _: response)
-
-        def _cacheResponse((key, responseBody)):
-
+        """
+        Given a request and its response, make a response cache entry that encodes the response and various
+        cache tokens. Later, when getResponseForRequest is called we retrieve this entry and compare the
+        old cache tokens with the current ones. If any have changed the response cache entry is removed.
+        """
+        try:
+            if hasattr(request, 'cacheKey'):
+                key = request.cacheKey
+            else:
+                key = (yield self._hashedRequestKey(request))
+    
+            key, responseBody = (yield self._getResponseBody(key, response))
+    
             response.headers.removeHeader('date')
             response.stream = MemoryStream(responseBody)
+            pToken, dToken, uToken, cTokens = (yield self._getTokens(request))
 
-            d1 = self._getTokens(request)
-            d1.addCallback(_makeCacheEntry, (key, responseBody))
-            return d1
+            cacheEntry = cPickle.dumps((
+                pToken,
+                dToken,
+                uToken,
+                cTokens,
+                (
+                    response.code,
+                    dict(list(response.headers.getAllRawHeaders())),
+                    responseBody
+                )
+            ))
+            self.log_debug("Adding to cache: %r = %r" % (key, cacheEntry))
+            yield self.getCachePool().set(key, cacheEntry,
+                expireTime=config.ResponseCacheTimeout*60)
 
-        def _handleExceptions(f):
-            f.trap(URINotFoundException)
-            self.log_debug("Could not locate URI: %r" % (f.value,))
-            return response
+        except URINotFoundException, e:
+            self.log_debug("Could not locate URI: %r" % (e,))
 
-        if hasattr(request, 'cacheKey'):
-            d = succeed(request.cacheKey)
-        else:
-            d = self._hashedRequestKey(request)
+        returnValue(response)            
 
-        d.addCallback(self._getResponseBody, response)
-        d.addCallback(_cacheResponse)
-        d.addErrback(_handleExceptions)
-        return d
 
-
 class _CachedResponseResource(object):
     implements(IResource)
 
@@ -332,6 +385,10 @@
 
 
 class PropfindCacheMixin(object):
+    """
+    A mixin that causes a resource's PROPFIND response to be cached. It also adds an API to change the
+    resource's uriToken - this must be called whenever the resource changes so that the cache is invalidated.
+    """
     def renderHTTP(self, request):
         def _cacheResponse(responseCache, response):
             return responseCache.cacheResponseForRequest(request, response)
@@ -354,6 +411,18 @@
         else:
             self.log_debug("%r does not have a cacheNotifier but was changed" % (self,))
 
+class ResponseCacheMixin(object):
+    """
+    This is a mixin for a child resource that does not itself cache PROPFINDs, but needs to invalidate a parent
+    resource's PROPFIND cache by virtue of a change to its own childToken.
+    """
+
+    def changeCache(self):
+        if hasattr(self, 'cacheNotifier'):
+            return self.cacheNotifier.changed()
+        else:
+            self.log_debug("%r does not have a cacheNotifier but was changed" % (self,))
+
 class CacheStoreNotifier(object):
     
     def __init__(self, resource):

Modified: CalendarServer/trunk/twistedcaldav/method/propfind.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/propfind.py	2011-01-27 19:05:18 UTC (rev 6817)
+++ CalendarServer/trunk/twistedcaldav/method/propfind.py	2011-01-27 21:48:24 UTC (rev 6818)
@@ -127,6 +127,10 @@
         inherited_aces=filtered_aces,
     )
 
+    # This is needed for propfind cache tracking of child changes
+    if depth == "1":
+        request.childCacheURIs = []
+
     for readable, resource, uri in resources:
         if readable:
             if search_properties is "names":
@@ -183,6 +187,11 @@
                 propstats.append(xml_propstat)
     
             xml_response = davxml.PropertyStatusResponse(davxml.HRef(uri), *propstats)
+
+            # This is needed for propfind cache tracking of child changes
+            if depth == "1":
+                if resource != self and hasattr(resource, "url"):
+                    request.childCacheURIs.append(resource.url())
         else:
             xml_response = davxml.StatusResponse(davxml.HRef(uri), davxml.Status.fromResponseCode(responsecode.FORBIDDEN))
     

Modified: CalendarServer/trunk/twistedcaldav/resource.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/resource.py	2011-01-27 19:05:18 UTC (rev 6817)
+++ CalendarServer/trunk/twistedcaldav/resource.py	2011-01-27 21:48:24 UTC (rev 6818)
@@ -2438,7 +2438,7 @@
         newCalendar = yield self._newStoreHome.calendarWithName(name)
         from twistedcaldav.storebridge import CalendarCollectionResource
         similar = CalendarCollectionResource(
-            newCalendar, self._newStoreHome, name=name,
+            newCalendar, self, name=name,
             principalCollections=self.principalCollections()
         )
         self.propagateTransaction(similar)
@@ -2617,7 +2617,7 @@
 
         newAddressBook = yield self._newStoreHome.addressbookWithName(name)
         similar = mainCls(
-            newAddressBook, self._newStoreHome, name,
+            newAddressBook, self, name,
             principalCollections=self.principalCollections()
         )
         self.propagateTransaction(similar)

Modified: CalendarServer/trunk/twistedcaldav/storebridge.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/storebridge.py	2011-01-27 19:05:18 UTC (rev 6817)
+++ CalendarServer/trunk/twistedcaldav/storebridge.py	2011-01-27 21:48:24 UTC (rev 6818)
@@ -46,6 +46,8 @@
 )
 from twext.web2.stream import ProducerStream, readStream, MemoryStream
 
+from twistedcaldav.cache import CacheStoreNotifier, ResponseCacheMixin,\
+    DisabledCacheNotifier
 from twistedcaldav.caldavxml import caldav_namespace
 from twistedcaldav.config import config
 from twistedcaldav import customxml
@@ -193,12 +195,13 @@
 
 
 
-class _CommonHomeChildCollectionMixin(object):
+class _CommonHomeChildCollectionMixin(ResponseCacheMixin):
     """
     Methods for things which are like calendars.
     """
 
     _childClass = None
+    cacheNotifierFactory = DisabledCacheNotifier
 
     def _initializeWithHomeChild(self, child, home):
         """
@@ -211,10 +214,14 @@
         @type home: L{txdav.common._.CommonHome}
         """
         self._newStoreObject = child
-        self._newStoreParentHome = home
+        self._newStoreParentHome = home._newStoreHome
+        self._parentResource = home
         self._dead_properties = _NewStorePropertiesWrapper(
             self._newStoreObject.properties()
         ) if self._newStoreObject else NonePropertyStore(self)
+        if self._newStoreObject:
+            self.cacheNotifier = self.cacheNotifierFactory(self)
+            self._newStoreObject.addNotifier(CacheStoreNotifier(self))
 
 
     def liveProperties(self):
@@ -238,6 +245,9 @@
 
         returnValue((yield super(_CommonHomeChildCollectionMixin, self).readProperty(property, request)))
 
+    def url(self):
+        return joinURL(self._parentResource.url(), self._name, "/")
+
     def index(self):
         """
         Retrieve the new-style index wrapper.
@@ -343,7 +353,7 @@
         self._newStoreObject = (yield self._newStoreParentHome.createChildWithName(self._name))
         
         # Re-initialize to get stuff setup again now we have a "real" object
-        self._initializeWithHomeChild(self._newStoreObject, self._newStoreParentHome)
+        self._initializeWithHomeChild(self._newStoreObject, self._parentResource)
 
         returnValue(CREATED)
 
@@ -437,7 +447,7 @@
         yield self._newStoreObject.remove()
         
         # Re-initialize to get stuff setup again now we have no object
-        self._initializeWithHomeChild(None, self._newStoreParentHome)
+        self._initializeWithHomeChild(None, self._parentResource)
 
         # FIXME: handle exceptions, possibly like this:
 
@@ -682,7 +692,7 @@
             storage = yield home.calendarWithName("inbox")
         self._initializeWithHomeChild(
             storage,
-            self.parent._newStoreHome
+            self.parent
         )
         self._name = storage.name()
         returnValue(self)

Modified: CalendarServer/trunk/twistedcaldav/test/test_cache.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_cache.py	2011-01-27 19:05:18 UTC (rev 6817)
+++ CalendarServer/trunk/twistedcaldav/test/test_cache.py	2011-01-27 21:48:24 UTC (rev 6818)
@@ -231,6 +231,18 @@
         return d
 
 
+    def test_getResponseForRequestChildTokenChanged(self):
+        self.tokens['/calendars/__uids__/cdaboo/calendars/'] = 'childToken1'
+
+        d = self.rc.getResponseForRequest(StubRequest(
+                'PROPFIND',
+                '/calendars/__uids__/cdaboo/',
+                '/principals/__uids__/cdaboo/'))
+
+        d.addCallback(self.assertEquals, None)
+        return d
+
+
     def test_getResponseForDepthZero(self):
         d = self.rc.getResponseForRequest(StubRequest(
                 'PROPFIND',
@@ -333,6 +345,7 @@
         self.tokens = {}
 
         self.tokens['/calendars/__uids__/cdaboo/'] = 'uriToken0'
+        self.tokens['/calendars/__uids__/cdaboo/calendars/'] = 'childToken0'
         self.tokens['/principals/__uids__/cdaboo/'] = 'principalToken0'
         self.tokens['/principals/__uids__/dreid/'] = 'principalTokenX'
 
@@ -357,6 +370,7 @@
             'principalToken0',
             hash('directoryToken0'),
             'uriToken0',
+            {'/calendars/__uids__/cdaboo/calendars/':'childToken0'},
             (self.expected_response[0],
              dict(list(self.expected_response[1].getAllRawHeaders())),
              self.expected_response[2]))))
@@ -386,6 +400,7 @@
                     'principalToken0',
                     hash('directoryToken0'),
                     'uriToken0',
+                    {'/calendars/__uids__/cdaboo/calendars/':'childToken0'},
                     (expected_response[0],
                      dict(list(expected_response[1].getAllRawHeaders())),
                      expected_response[2]))))
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.macosforge.org/pipermail/calendarserver-changes/attachments/20110127/0eabab01/attachment-0001.html>


More information about the calendarserver-changes mailing list