[CalendarServer-changes] [10147] CalendarServer/trunk
source_changes at macosforge.org
source_changes at macosforge.org
Mon Dec 10 12:22:58 PST 2012
Revision: 10147
http://trac.calendarserver.org//changeset/10147
Author: cdaboo at apple.com
Date: 2012-12-10 12:22:58 -0800 (Mon, 10 Dec 2012)
Log Message:
-----------
Merge managed attachments support.
Modified Paths:
--------------
CalendarServer/trunk/calendarserver/tap/util.py
CalendarServer/trunk/twext/web2/http_headers.py
CalendarServer/trunk/twext/web2/test/test_http_headers.py
CalendarServer/trunk/twistedcaldav/caldavxml.py
CalendarServer/trunk/twistedcaldav/ical.py
CalendarServer/trunk/twistedcaldav/method/post.py
CalendarServer/trunk/twistedcaldav/method/put_addressbook_common.py
CalendarServer/trunk/twistedcaldav/method/put_common.py
CalendarServer/trunk/twistedcaldav/resource.py
CalendarServer/trunk/twistedcaldav/scheduling/implicit.py
CalendarServer/trunk/twistedcaldav/stdconfig.py
CalendarServer/trunk/twistedcaldav/storebridge.py
CalendarServer/trunk/twistedcaldav/test/test_icalendar.py
CalendarServer/trunk/txdav/caldav/datastore/file.py
CalendarServer/trunk/txdav/caldav/datastore/sql.py
CalendarServer/trunk/txdav/caldav/datastore/test/common.py
CalendarServer/trunk/txdav/caldav/datastore/test/test_sql.py
CalendarServer/trunk/txdav/caldav/datastore/test/test_util.py
CalendarServer/trunk/txdav/caldav/datastore/util.py
CalendarServer/trunk/txdav/caldav/icalendarstore.py
CalendarServer/trunk/txdav/common/datastore/file.py
CalendarServer/trunk/txdav/common/datastore/sql.py
CalendarServer/trunk/txdav/common/datastore/sql_schema/current-oracle-dialect.sql
CalendarServer/trunk/txdav/common/datastore/sql_schema/current.sql
CalendarServer/trunk/txdav/common/datastore/sql_tables.py
CalendarServer/trunk/txdav/common/datastore/test/util.py
Added Paths:
-----------
CalendarServer/trunk/txdav/common/datastore/sql_schema/old/oracle-dialect/v12.sql
CalendarServer/trunk/txdav/common/datastore/sql_schema/old/postgres-dialect/v12.sql
CalendarServer/trunk/txdav/common/datastore/sql_schema/upgrades/oracle-dialect/upgrade_from_12_to_13.sql
CalendarServer/trunk/txdav/common/datastore/sql_schema/upgrades/postgres-dialect/upgrade_from_12_to_13.sql
Property Changed:
----------------
CalendarServer/trunk/
Property changes on: CalendarServer/trunk
___________________________________________________________________
Modified: svn:mergeinfo
- /CalendarServer/branches/config-separation:4379-4443
/CalendarServer/branches/egg-info-351:4589-4625
/CalendarServer/branches/generic-sqlstore:6167-6191
/CalendarServer/branches/new-store:5594-5934
/CalendarServer/branches/new-store-no-caldavfile:5911-5935
/CalendarServer/branches/new-store-no-caldavfile-2:5936-5981
/CalendarServer/branches/users/cdaboo/batchupload-6699:6700-7198
/CalendarServer/branches/users/cdaboo/cached-subscription-calendars-5692:5693-5702
/CalendarServer/branches/users/cdaboo/component-set-fixes:8130-8346
/CalendarServer/branches/users/cdaboo/directory-cache-on-demand-3627:3628-3644
/CalendarServer/branches/users/cdaboo/implicituidrace:8137-8141
/CalendarServer/branches/users/cdaboo/ischedule-dkim:9747-9979
/CalendarServer/branches/users/cdaboo/more-sharing-5591:5592-5601
/CalendarServer/branches/users/cdaboo/partition-4464:4465-4957
/CalendarServer/branches/users/cdaboo/pods:7297-7377
/CalendarServer/branches/users/cdaboo/pycalendar:7085-7206
/CalendarServer/branches/users/cdaboo/pycard:7227-7237
/CalendarServer/branches/users/cdaboo/queued-attendee-refreshes:7740-8287
/CalendarServer/branches/users/cdaboo/relative-config-paths-5070:5071-5105
/CalendarServer/branches/users/cdaboo/shared-calendars-5187:5188-5440
/CalendarServer/branches/users/cdaboo/timezones:7443-7699
/CalendarServer/branches/users/cdaboo/txn-debugging:8730-8743
/CalendarServer/branches/users/glyph/always-abort-txn-on-error:9958-9969
/CalendarServer/branches/users/glyph/case-insensitive-uid:8772-8805
/CalendarServer/branches/users/glyph/conn-limit:6574-6577
/CalendarServer/branches/users/glyph/contacts-server-merge:4971-5080
/CalendarServer/branches/users/glyph/dalify:6932-7023
/CalendarServer/branches/users/glyph/db-reconnect:6824-6876
/CalendarServer/branches/users/glyph/deploybuild:7563-7572
/CalendarServer/branches/users/glyph/disable-quota:7718-7727
/CalendarServer/branches/users/glyph/dont-start-postgres:6592-6614
/CalendarServer/branches/users/glyph/imip-and-admin-html:7866-7984
/CalendarServer/branches/users/glyph/ipv6-client:9054-9105
/CalendarServer/branches/users/glyph/linux-tests:6893-6900
/CalendarServer/branches/users/glyph/migrate-merge:8690-8713
/CalendarServer/branches/users/glyph/misc-portability-fixes:7365-7374
/CalendarServer/branches/users/glyph/more-deferreds-6:6322-6368
/CalendarServer/branches/users/glyph/more-deferreds-7:6369-6445
/CalendarServer/branches/users/glyph/multiget-delete:8321-8330
/CalendarServer/branches/users/glyph/new-export:7444-7485
/CalendarServer/branches/users/glyph/one-home-list-api:10048-10073
/CalendarServer/branches/users/glyph/oracle:7106-7155
/CalendarServer/branches/users/glyph/oracle-nulls:7340-7351
/CalendarServer/branches/users/glyph/other-html:8062-8091
/CalendarServer/branches/users/glyph/parallel-sim:8240-8251
/CalendarServer/branches/users/glyph/parallel-upgrade:8376-8400
/CalendarServer/branches/users/glyph/parallel-upgrade_to_1:8571-8583
/CalendarServer/branches/users/glyph/q:9560-9688
/CalendarServer/branches/users/glyph/quota:7604-7637
/CalendarServer/branches/users/glyph/sendfdport:5388-5424
/CalendarServer/branches/users/glyph/shared-pool-fixes:8436-8443
/CalendarServer/branches/users/glyph/shared-pool-take2:8155-8174
/CalendarServer/branches/users/glyph/sharedpool:6490-6550
/CalendarServer/branches/users/glyph/sharing-api:9192-9205
/CalendarServer/branches/users/glyph/skip-lonely-vtimezones:8524-8535
/CalendarServer/branches/users/glyph/sql-store:5929-6073
/CalendarServer/branches/users/glyph/subtransactions:7248-7258
/CalendarServer/branches/users/glyph/table-alias:8651-8664
/CalendarServer/branches/users/glyph/uidexport:7673-7676
/CalendarServer/branches/users/glyph/use-system-twisted:5084-5149
/CalendarServer/branches/users/glyph/uuid-normalize:9268-9296
/CalendarServer/branches/users/glyph/xattrs-from-files:7757-7769
/CalendarServer/branches/users/sagen/applepush:8126-8184
/CalendarServer/branches/users/sagen/inboxitems:7380-7381
/CalendarServer/branches/users/sagen/locations-resources:5032-5051
/CalendarServer/branches/users/sagen/locations-resources-2:5052-5061
/CalendarServer/branches/users/sagen/purge_old_events:6735-6746
/CalendarServer/branches/users/sagen/resource-delegates-4038:4040-4067
/CalendarServer/branches/users/sagen/resource-delegates-4066:4068-4075
/CalendarServer/branches/users/sagen/resources-2:5084-5093
/CalendarServer/branches/users/wsanchez/transations:5515-5593
+ /CalendarServer/branches/config-separation:4379-4443
/CalendarServer/branches/egg-info-351:4589-4625
/CalendarServer/branches/generic-sqlstore:6167-6191
/CalendarServer/branches/new-store:5594-5934
/CalendarServer/branches/new-store-no-caldavfile:5911-5935
/CalendarServer/branches/new-store-no-caldavfile-2:5936-5981
/CalendarServer/branches/users/cdaboo/batchupload-6699:6700-7198
/CalendarServer/branches/users/cdaboo/cached-subscription-calendars-5692:5693-5702
/CalendarServer/branches/users/cdaboo/component-set-fixes:8130-8346
/CalendarServer/branches/users/cdaboo/directory-cache-on-demand-3627:3628-3644
/CalendarServer/branches/users/cdaboo/implicituidrace:8137-8141
/CalendarServer/branches/users/cdaboo/ischedule-dkim:9747-9979
/CalendarServer/branches/users/cdaboo/managed-attachments:9985-10145
/CalendarServer/branches/users/cdaboo/more-sharing-5591:5592-5601
/CalendarServer/branches/users/cdaboo/partition-4464:4465-4957
/CalendarServer/branches/users/cdaboo/pods:7297-7377
/CalendarServer/branches/users/cdaboo/pycalendar:7085-7206
/CalendarServer/branches/users/cdaboo/pycard:7227-7237
/CalendarServer/branches/users/cdaboo/queued-attendee-refreshes:7740-8287
/CalendarServer/branches/users/cdaboo/relative-config-paths-5070:5071-5105
/CalendarServer/branches/users/cdaboo/shared-calendars-5187:5188-5440
/CalendarServer/branches/users/cdaboo/timezones:7443-7699
/CalendarServer/branches/users/cdaboo/txn-debugging:8730-8743
/CalendarServer/branches/users/glyph/always-abort-txn-on-error:9958-9969
/CalendarServer/branches/users/glyph/case-insensitive-uid:8772-8805
/CalendarServer/branches/users/glyph/conn-limit:6574-6577
/CalendarServer/branches/users/glyph/contacts-server-merge:4971-5080
/CalendarServer/branches/users/glyph/dalify:6932-7023
/CalendarServer/branches/users/glyph/db-reconnect:6824-6876
/CalendarServer/branches/users/glyph/deploybuild:7563-7572
/CalendarServer/branches/users/glyph/disable-quota:7718-7727
/CalendarServer/branches/users/glyph/dont-start-postgres:6592-6614
/CalendarServer/branches/users/glyph/imip-and-admin-html:7866-7984
/CalendarServer/branches/users/glyph/ipv6-client:9054-9105
/CalendarServer/branches/users/glyph/linux-tests:6893-6900
/CalendarServer/branches/users/glyph/migrate-merge:8690-8713
/CalendarServer/branches/users/glyph/misc-portability-fixes:7365-7374
/CalendarServer/branches/users/glyph/more-deferreds-6:6322-6368
/CalendarServer/branches/users/glyph/more-deferreds-7:6369-6445
/CalendarServer/branches/users/glyph/multiget-delete:8321-8330
/CalendarServer/branches/users/glyph/new-export:7444-7485
/CalendarServer/branches/users/glyph/one-home-list-api:10048-10073
/CalendarServer/branches/users/glyph/oracle:7106-7155
/CalendarServer/branches/users/glyph/oracle-nulls:7340-7351
/CalendarServer/branches/users/glyph/other-html:8062-8091
/CalendarServer/branches/users/glyph/parallel-sim:8240-8251
/CalendarServer/branches/users/glyph/parallel-upgrade:8376-8400
/CalendarServer/branches/users/glyph/parallel-upgrade_to_1:8571-8583
/CalendarServer/branches/users/glyph/q:9560-9688
/CalendarServer/branches/users/glyph/quota:7604-7637
/CalendarServer/branches/users/glyph/sendfdport:5388-5424
/CalendarServer/branches/users/glyph/shared-pool-fixes:8436-8443
/CalendarServer/branches/users/glyph/shared-pool-take2:8155-8174
/CalendarServer/branches/users/glyph/sharedpool:6490-6550
/CalendarServer/branches/users/glyph/sharing-api:9192-9205
/CalendarServer/branches/users/glyph/skip-lonely-vtimezones:8524-8535
/CalendarServer/branches/users/glyph/sql-store:5929-6073
/CalendarServer/branches/users/glyph/subtransactions:7248-7258
/CalendarServer/branches/users/glyph/table-alias:8651-8664
/CalendarServer/branches/users/glyph/uidexport:7673-7676
/CalendarServer/branches/users/glyph/use-system-twisted:5084-5149
/CalendarServer/branches/users/glyph/uuid-normalize:9268-9296
/CalendarServer/branches/users/glyph/xattrs-from-files:7757-7769
/CalendarServer/branches/users/sagen/applepush:8126-8184
/CalendarServer/branches/users/sagen/inboxitems:7380-7381
/CalendarServer/branches/users/sagen/locations-resources:5032-5051
/CalendarServer/branches/users/sagen/locations-resources-2:5052-5061
/CalendarServer/branches/users/sagen/purge_old_events:6735-6746
/CalendarServer/branches/users/sagen/resource-delegates-4038:4040-4067
/CalendarServer/branches/users/sagen/resource-delegates-4066:4068-4075
/CalendarServer/branches/users/sagen/resources-2:5084-5093
/CalendarServer/branches/users/wsanchez/transations:5515-5593
Modified: CalendarServer/trunk/calendarserver/tap/util.py
===================================================================
--- CalendarServer/trunk/calendarserver/tap/util.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/calendarserver/tap/util.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -234,8 +234,14 @@
if quota == 0:
quota = None
if txnFactory is not None:
+ if config.EnableSSL:
+ uri = "https://%s:%s" % (config.ServerHostName, config.SSLPort,)
+ else:
+ uri = "http://%s:%s" % (config.ServerHostName, config.HTTPPort,)
+ attachments_uri = uri + "/calendars/__uids__/%(home)s/attachments/%(name)s"
return CommonSQLDataStore(
- txnFactory, notifierFactory, FilePath(config.AttachmentsRoot),
+ txnFactory, notifierFactory,
+ FilePath(config.AttachmentsRoot), attachments_uri,
config.EnableCalDAV, config.EnableCardDAV,
quota=quota,
logLabels=config.LogDatabase.LabelsInSQL,
Modified: CalendarServer/trunk/twext/web2/http_headers.py
===================================================================
--- CalendarServer/trunk/twext/web2/http_headers.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twext/web2/http_headers.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -1,5 +1,5 @@
# -*- test-case-name: twext.web2.test.test_http_headers -*-
-##
+# #
# Copyright (c) 2008 Twisted Matrix Laboratories.
# Copyright (c) 2010-2012 Apple Computer, Inc. All rights reserved.
#
@@ -21,7 +21,7 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
-##
+# #
"""
HTTP header representation, parsing, and serialization.
@@ -33,8 +33,8 @@
import re
def dashCapitalize(s):
- ''' Capitalize a string, making sure to treat - as a word seperator '''
- return '-'.join([ x.capitalize() for x in s.split('-')])
+ ''' Capitalize a string, making sure to treat - as a word separator '''
+ return '-'.join([x.capitalize() for x in s.split('-')])
# datetime parsing and formatting
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
@@ -50,13 +50,16 @@
def casemappingify(d):
global header_case_mapping
- newd = dict([(key.lower(),key) for key in d.keys()])
+ newd = dict([(key.lower(), key) for key in d.keys()])
header_case_mapping.update(newd)
+
+
def lowerify(d):
- return dict([(key.lower(),value) for key,value in d.items()])
+ return dict([(key.lower(), value) for key, value in d.items()])
+
class HeaderHandler(object):
"""HeaderHandler manages header generating and parsing functions.
"""
@@ -77,6 +80,7 @@
if generators:
self.HTTPGenerators.update(generators)
+
def parse(self, name, header):
"""
Parse the given header based on its given name.
@@ -101,10 +105,11 @@
# if isinstance(h, types.GeneratorType):
# h=list(h)
except ValueError:
- header=None
+ header = None
return header
+
def generate(self, name, header):
"""
Generate the given header based on its given name.
@@ -126,9 +131,10 @@
for g in generator:
header = g(header)
- #self._raw_headers[name] = h
+ # self._raw_headers[name] = h
return header
+
def updateParsers(self, parsers):
"""Update en masse the parser maps.
@@ -138,6 +144,7 @@
casemappingify(parsers)
self.HTTPParsers.update(lowerify(parsers))
+
def addParser(self, name, value):
"""Add an individual parser chain for the given header.
@@ -149,6 +156,7 @@
"""
self.updateParsers({name: value})
+
def updateGenerators(self, generators):
"""Update en masse the generator maps.
@@ -158,6 +166,7 @@
casemappingify(generators)
self.HTTPGenerators.update(lowerify(generators))
+
def addGenerators(self, name, value):
"""Add an individual generator chain for the given header.
@@ -169,6 +178,7 @@
"""
self.updateGenerators({name: value})
+
def update(self, parsers, generators):
"""Conveniently update parsers and generators all at once.
"""
@@ -179,7 +189,7 @@
DefaultHTTPHandler = HeaderHandler()
-## HTTP DateTime parser
+# # HTTP DateTime parser
def parseDateTime(dateString):
"""Convert an HTTP date string (one of three formats) to seconds since epoch."""
parts = dateString.split()
@@ -187,7 +197,7 @@
if not parts[0][0:3].lower() in weekdayname_lower:
# Weekday is stupid. Might have been omitted.
try:
- return parseDateTime("Sun, "+dateString)
+ return parseDateTime("Sun, " + dateString)
except ValueError:
# Guess not.
pass
@@ -209,7 +219,7 @@
# Two digit year, yucko.
day, month, year = parts[1].split('-')
time = parts[2]
- year=int(year)
+ year = int(year)
if year < 69:
year = year + 2000
elif year < 100:
@@ -231,9 +241,10 @@
return int(timegm((year, month, day, hour, min, sec)))
+
##### HTTP tokenizer
class Token(str):
- __slots__=[]
+ __slots__ = []
tokens = {}
def __new__(self, char):
token = Token.tokens.get(char)
@@ -241,6 +252,7 @@
Token.tokens[char] = token = str.__new__(self, char)
return token
+
def __repr__(self):
return "Token(%s)" % str.__repr__(self)
@@ -265,8 +277,8 @@
Takes a raw header value (list of strings), and
Returns a generator of strings and Token class instances.
"""
- tokens=http_tokens
- ctls=http_ctls
+ tokens = http_tokens
+ ctls = http_ctls
string = ",".join(header)
start = 0
@@ -280,15 +292,15 @@
if quoted:
if qpair:
qpair = False
- qstring = qstring+string[start:cur-1]+x
- start = cur+1
+ qstring = qstring + string[start:cur - 1] + x
+ start = cur + 1
elif x == '\\':
qpair = True
elif x == '"':
quoted = False
- yield qstring+string[start:cur]
- qstring=None
- start = cur+1
+ yield qstring + string[start:cur]
+ qstring = None
+ start = cur + 1
elif x in tokens:
if start != cur:
if foldCase:
@@ -296,7 +308,7 @@
else:
yield string[start:cur]
- start = cur+1
+ start = cur + 1
if x == '"':
quoted = True
qstring = ""
@@ -315,12 +327,12 @@
inSpaces = False
inSpaces = False
- cur = cur+1
+ cur = cur + 1
if qpair:
- raise ValueError, "Missing character after '\\'"
+ raise ValueError("Missing character after '\\'")
if quoted:
- raise ValueError, "Missing end quote"
+ raise ValueError("Missing end quote")
if start != cur:
if foldCase:
@@ -328,6 +340,8 @@
else:
yield string[start:cur]
+
+
def split(seq, delim):
"""The same as str.split but works on arbitrary sequences.
Too bad it's not builtin to python!"""
@@ -350,6 +364,7 @@
# return -1
+
def filterTokens(seq):
"""Filter out instances of Token, leaving only a list of strings.
@@ -361,31 +376,39 @@
hurt anything, in any case.
"""
- l=[]
+ l = []
for x in seq:
if not isinstance(x, Token):
l.append(x)
return l
+
+
##### parser utilities:
def checkSingleToken(tokens):
if len(tokens) != 1:
- raise ValueError, "Expected single token, not %s." % (tokens,)
+ raise ValueError("Expected single token, not %s." % (tokens,))
return tokens[0]
+
+
def parseKeyValue(val):
if len(val) == 1:
- return val[0],None
+ return val[0], None
elif len(val) == 3 and val[1] == Token('='):
- return val[0],val[2]
- raise ValueError, "Expected key or key=value, but got %s." % (val,)
+ return val[0], val[2]
+ raise ValueError("Expected key or key=value, but got %s." % (val,))
+
+
def parseArgs(field):
- args=split(field, Token(';'))
+ args = split(field, Token(';'))
val = args.next()
args = [parseKeyValue(arg) for arg in args]
- return val,args
+ return val, args
+
+
def listParser(fun):
"""Return a function which applies 'fun' to every element in the
comma-separated list"""
@@ -397,11 +420,15 @@
return listParserHelper
+
+
def last(seq):
"""Return seq[-1]"""
return seq[-1]
+
+
##### Generation utilities
def quoteString(s):
"""
@@ -413,6 +440,8 @@
"""
return '"%s"' % s.replace('\\', '\\\\').replace('"', '\\"')
+
+
def listGenerator(fun):
"""Return a function which applies 'fun' to every element in
the given list, then joins the result with generateList"""
@@ -421,9 +450,13 @@
return listGeneratorHelper
+
+
def generateList(seq):
return ", ".join(seq)
+
+
def singleHeader(item):
return [item]
@@ -456,8 +489,10 @@
return ";".join(l)
+
class MimeType(object):
- def fromString(klass, mimeTypeString):
+
+ def fromString(cls, mimeTypeString):
"""Generate a MimeType object from the given string.
@param mimeTypeString: The mimetype to parse
@@ -483,27 +518,82 @@
if kwargs:
self.params.update(kwargs)
+
def __eq__(self, other):
- if not isinstance(other, MimeType): return NotImplemented
+ if not isinstance(other, MimeType):
+ return NotImplemented
return (self.mediaType == other.mediaType and
self.mediaSubtype == other.mediaSubtype and
self.params == other.params)
+
def __ne__(self, other):
return not self.__eq__(other)
+
def __repr__(self):
return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params)
+
def __hash__(self):
- return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.iteritems()))
+ return hash(self.mediaType) ^ hash(self.mediaSubtype) ^ hash(tuple(self.params.iteritems()))
+
+
+class MimeDisposition(object):
+
+ def fromString(cls, dispositionString):
+ """Generate a MimeDisposition object from the given string.
+
+ @param dispositionString: The disposition to parse
+
+ @return: L{MimeDisposition}
+ """
+ return DefaultHTTPHandler.parse('content-disposition', [dispositionString])
+
+ fromString = classmethod(fromString)
+
+ def __init__(self, dispositionType, params={}, **kwargs):
+ """
+ @type mediaType: C{str}
+
+ @type mediaSubtype: C{str}
+
+ @type params: C{dict}
+ """
+ self.dispositionType = dispositionType
+ self.params = dict(params)
+
+ if kwargs:
+ self.params.update(kwargs)
+
+
+ def __eq__(self, other):
+ if not isinstance(other, MimeDisposition):
+ return NotImplemented
+ return (self.dispositionType == other.dispositionType and
+ self.params == other.params)
+
+
+ def __ne__(self, other):
+ return not self.__eq__(other)
+
+
+ def __repr__(self):
+ return "MimeDisposition(%r, %r)" % (self.dispositionType, self.params)
+
+
+ def __hash__(self):
+ return hash(self.dispositionType) ^ hash(tuple(self.params.iteritems()))
+
+
+
##### Specific header parsers.
def parseAccept(field):
- type,args = parseArgs(field)
+ atype, args = parseArgs(field)
- if len(type) != 3 or type[1] != Token('/'):
- raise ValueError, "MIME Type "+str(type)+" invalid."
+ if len(atype) != 3 or atype[1] != Token('/'):
+ raise ValueError("MIME Type " + str(atype) + " invalid.")
# okay, this spec is screwy. A 'q' parameter is used as the separator
# between MIME parameters and (as yet undefined) additional HTTP
@@ -512,44 +602,50 @@
num = 0
for arg in args:
if arg[0] == 'q':
- mimeparams=tuple(args[0:num])
- params=args[num:]
+ mimeparams = tuple(args[0:num])
+ params = args[num:]
break
num = num + 1
else:
- mimeparams=tuple(args)
- params=[]
+ mimeparams = tuple(args)
+ params = []
# Default values for parameters:
qval = 1.0
# Parse accept parameters:
for param in params:
- if param[0] =='q':
+ if param[0] == 'q':
qval = float(param[1])
else:
# Warn? ignored parameter.
pass
- ret = MimeType(type[0],type[2],mimeparams),qval
+ ret = MimeType(atype[0], atype[2], mimeparams), qval
return ret
+
+
def parseAcceptQvalue(field):
- type,args=parseArgs(field)
+ atype, args = parseArgs(field)
- type = checkSingleToken(type)
+ atype = checkSingleToken(atype)
qvalue = 1.0 # Default qvalue is 1
for arg in args:
if arg[0] == 'q':
qvalue = float(arg[1])
- return type,qvalue
+ return atype, qvalue
+
+
def addDefaultCharset(charsets):
if charsets.get('*') is None and charsets.get('iso-8859-1') is None:
charsets['iso-8859-1'] = 1.0
return charsets
+
+
def addDefaultEncoding(encodings):
if encodings.get('*') is None and encodings.get('identity') is None:
# RFC doesn't specify a default value for identity, only that it
@@ -558,26 +654,46 @@
return encodings
+
def parseContentType(header):
# Case folding is disabled for this header, because of use of
# Content-Type: multipart/form-data; boundary=CaSeFuLsTuFf
- # So, we need to explicitly .lower() the type/subtype and arg keys.
+ # So, we need to explicitly .lower() the ctype and arg keys.
- type,args = parseArgs(header)
+ ctype, args = parseArgs(header)
- if len(type) != 3 or type[1] != Token('/'):
- raise ValueError, "MIME Type "+str(type)+" invalid."
+ if len(ctype) != 3 or ctype[1] != Token('/'):
+ raise ValueError("MIME Type " + str(ctype) + " invalid.")
args = [(kv[0].lower(), kv[1]) for kv in args]
- return MimeType(type[0].lower(), type[2].lower(), tuple(args))
+ return MimeType(ctype[0].lower(), ctype[2].lower(), tuple(args))
+
+
+def parseContentDisposition(header):
+ # Case folding is disabled for this header, because of use of
+ # So, we need to explicitly .lower() the dtype and arg keys.
+
+ dtype, args = parseArgs(header)
+
+ if len(dtype) != 1:
+ raise ValueError("Content-Disposition " + str(dtype) + " invalid.")
+
+ args = [(kv[0].lower(), kv[1]) for kv in args]
+
+ return MimeDisposition(dtype[0].lower(), tuple(args))
+
+
+
def parseContentMD5(header):
try:
return base64.decodestring(header)
- except Exception,e:
+ except Exception, e:
raise ValueError(e)
+
+
def parseContentRange(header):
"""Parse a content-range header into (kind, start, end, realLength).
@@ -589,7 +705,7 @@
raise ValueError("a range of type %r is not supported")
startend, realLength = other.split("/")
if startend.strip() == '*':
- start,end=None,None
+ start, end = None, None
else:
start, end = map(int, startend.split("-"))
if realLength == "*":
@@ -598,12 +714,16 @@
realLength = int(realLength)
return (kind, start, end, realLength)
+
+
def parseExpect(field):
- type,args=parseArgs(field)
+ etype, args = parseArgs(field)
- type=parseKeyValue(type)
- return (type[0], (lambda *args:args)(type[1], *args))
+ etype = parseKeyValue(etype)
+ return (etype[0], (lambda *args: args)(etype[1], *args))
+
+
def parseExpires(header):
# """HTTP/1.1 clients and caches MUST treat other invalid date formats,
# especially including the value 0, as in the past (i.e., "already expired")."""
@@ -613,6 +733,8 @@
except ValueError:
return 0
+
+
def parseIfModifiedSince(header):
# Ancient versions of netscape and *current* versions of MSIE send
# If-Modified-Since: Thu, 05 Aug 2004 12:57:27 GMT; length=123
@@ -622,30 +744,34 @@
# So, we'll just strip off everything after a ';'.
return parseDateTime(header.split(';', 1)[0])
+
+
def parseIfRange(headers):
try:
return ETag.parse(tokenize(headers))
except ValueError:
return parseDateTime(last(headers))
-def parseRange(range):
- range = list(range)
- if len(range) < 3 or range[1] != Token('='):
- raise ValueError("Invalid range header format: %s" %(range,))
- type=range[0]
- if type != 'bytes':
- raise ValueError("Unknown range unit: %s." % (type,))
- rangeset=split(range[2:], Token(','))
+
+def parseRange(crange):
+ crange = list(crange)
+ if len(crange) < 3 or crange[1] != Token('='):
+ raise ValueError("Invalid range header format: %s" % (crange,))
+
+ rtype = crange[0]
+ if rtype != 'bytes':
+ raise ValueError("Unknown range unit: %s." % (rtype,))
+ rangeset = split(crange[2:], Token(','))
ranges = []
for byterangespec in rangeset:
if len(byterangespec) != 1:
- raise ValueError("Invalid range header format: %s" % (range,))
- start,end=byterangespec[0].split('-')
+ raise ValueError("Invalid range header format: %s" % (crange,))
+ start, end = byterangespec[0].split('-')
if not start and not end:
- raise ValueError("Invalid range header format: %s" % (range,))
+ raise ValueError("Invalid range header format: %s" % (crange,))
if start:
start = int(start)
@@ -658,10 +784,12 @@
end = None
if start and end and start > end:
- raise ValueError("Invalid range header, start > end: %s" % (range,))
- ranges.append((start,end))
- return type,ranges
+ raise ValueError("Invalid range header, start > end: %s" % (crange,))
+ ranges.append((start, end))
+ return rtype, ranges
+
+
def parseRetryAfter(header):
try:
# delta seconds
@@ -672,6 +800,8 @@
# WWW-Authenticate and Authorization
+
+
def parseWWWAuthenticate(tokenized):
headers = []
@@ -715,36 +845,46 @@
return headers
+
+
def parseAuthorization(header):
scheme, rest = header.split(' ', 1)
# this header isn't tokenized because it may eat characters
# in the unquoted base64 encoded credentials
return scheme.lower(), rest
+
+
#### Header generators
def generateAccept(accept):
- mimeType,q = accept
+ mimeType, q = accept
- out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
+ out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype)
if mimeType.params:
- out+=';'+generateKeyValues(mimeType.params.iteritems())
+ out += ';' + generateKeyValues(mimeType.params.iteritems())
if q != 1.0:
- out+=(';q=%.3f' % (q,)).rstrip('0').rstrip('.')
+ out += (';q=%.3f' % (q,)).rstrip('0').rstrip('.')
return out
+
+
def removeDefaultEncoding(seq):
for item in seq:
if item[0] != 'identity' or item[1] != .0001:
yield item
+
+
def generateAcceptQvalue(keyvalue):
if keyvalue[1] == 1.0:
return "%s" % keyvalue[0:1]
else:
return ("%s;q=%.3f" % keyvalue).rstrip('0').rstrip('.')
+
+
def parseCacheControl(kv):
k, v = parseKeyValue(kv)
if k == 'max-age' or k == 'min-fresh' or k == 's-maxage':
@@ -763,6 +903,8 @@
v = [field.strip().lower() for field in v.split(',')]
return k, v
+
+
def generateCacheControl((k, v)):
if v is None:
return str(k)
@@ -771,67 +913,89 @@
# quoted list of values
v = quoteString(generateList(
[header_case_mapping.get(name) or dashCapitalize(name) for name in v]))
- return '%s=%s' % (k,v)
+ return '%s=%s' % (k, v)
+
+
def generateContentRange(tup):
- """tup is (type, start, end, len)
- len can be None.
+ """tup is (rtype, start, end, rlen)
+ rlen can be None.
"""
- type, start, end, len = tup
- if len == None:
- len = '*'
+ rtype, start, end, rlen = tup
+ if rlen == None:
+ rlen = '*'
else:
- len = int(len)
+ rlen = int(rlen)
if start == None and end == None:
startend = '*'
else:
startend = '%d-%d' % (start, end)
- return '%s %s/%s' % (type, startend, len)
+ return '%s %s/%s' % (rtype, startend, rlen)
+
+
def generateDateTime(secSinceEpoch):
"""Convert seconds since epoch to HTTP datetime string."""
- year, month, day, hh, mm, ss, wd, y, z = time.gmtime(secSinceEpoch)
+ year, month, day, hh, mm, ss, wd, _ignore_y, _ignore_z = time.gmtime(secSinceEpoch)
s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
weekdayname[wd],
day, monthname[month], year,
hh, mm, ss)
return s
+
+
def generateExpect(item):
if item[1][0] is None:
out = '%s' % (item[0],)
else:
out = '%s=%s' % (item[0], item[1][0])
if len(item[1]) > 1:
- out += ';'+generateKeyValues(item[1][1:])
+ out += ';' + generateKeyValues(item[1][1:])
return out
-def generateRange(range):
+
+
+def generateRange(crange):
def noneOr(s):
if s is None:
return ''
return s
- type,ranges=range
+ rtype, ranges = crange
- if type != 'bytes':
- raise ValueError("Unknown range unit: "+type+".")
+ if rtype != 'bytes':
+ raise ValueError("Unknown range unit: " + rtype + ".")
- return (type+'='+
+ return (rtype + '=' +
','.join(['%s-%s' % (noneOr(startend[0]), noneOr(startend[1]))
for startend in ranges]))
+
+
def generateRetryAfter(when):
# always generate delta seconds format
return str(int(when - time.time()))
+
+
def generateContentType(mimeType):
- out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
+ out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype)
if mimeType.params:
- out+=';'+generateKeyValues(mimeType.params.iteritems())
+ out += ';' + generateKeyValues(mimeType.params.iteritems())
return out
+
+
+def generateContentDisposition(disposition):
+ out = disposition.dispositionType
+ if disposition.params:
+ out += ';' + generateKeyValues(disposition.params.iteritems())
+ return out
+
+
+
def generateIfRange(dateOrETag):
if isinstance(dateOrETag, ETag):
return dateOrETag.generate()
@@ -840,6 +1004,8 @@
# WWW-Authenticate and Authorization
+
+
def generateWWWAuthenticate(headers):
_generated = []
for seq in headers:
@@ -850,7 +1016,7 @@
try:
l = []
- for k,v in dict(challenge).iteritems():
+ for k, v in dict(challenge).iteritems():
l.append("%s=%s" % (k, quoteString(v)))
_generated.append("%s %s" % (scheme, ", ".join(l)))
@@ -859,16 +1025,20 @@
return _generated
+
+
def generateAuthorization(seq):
return [' '.join(seq)]
+
####
class ETag(object):
def __init__(self, tag, weak=False):
self.tag = str(tag)
self.weak = weak
+
def match(self, other, strongCompare):
# Sec 13.3.
# The strong comparison function: in order to be considered equal, both
@@ -885,17 +1055,21 @@
return False
return True
+
def __eq__(self, other):
return isinstance(other, ETag) and other.tag == self.tag and other.weak == self.weak
+
def __ne__(self, other):
return not self.__eq__(other)
+
def __repr__(self):
return "Etag(%r, weak=%r)" % (self.tag, self.weak)
+
def parse(tokens):
- tokens=tuple(tokens)
+ tokens = tuple(tokens)
if len(tokens) == 1 and not isinstance(tokens[0], Token):
return ETag(tokens[0])
@@ -905,57 +1079,73 @@
raise ValueError("Invalid ETag.")
- parse=staticmethod(parse)
+ parse = staticmethod(parse)
def generate(self):
if self.weak:
- return 'W/'+quoteString(self.tag)
+ return 'W/' + quoteString(self.tag)
else:
return quoteString(self.tag)
+
+
def parseStarOrETag(tokens):
- tokens=tuple(tokens)
+ tokens = tuple(tokens)
if tokens == ('*',):
return '*'
else:
return ETag.parse(tokens)
+
+
def generateStarOrETag(etag):
- if etag=='*':
+ if etag == '*':
return etag
else:
return etag.generate()
+
+
#### Cookies. Blech!
class Cookie(object):
# __slots__ = ['name', 'value', 'path', 'domain', 'ports', 'expires', 'discard', 'secure', 'comment', 'commenturl', 'version']
def __init__(self, name, value, path=None, domain=None, ports=None, expires=None, discard=False, secure=False, comment=None, commenturl=None, version=0):
- self.name=name
- self.value=value
- self.path=path
- self.domain=domain
- self.ports=ports
- self.expires=expires
- self.discard=discard
- self.secure=secure
- self.comment=comment
- self.commenturl=commenturl
- self.version=version
+ self.name = name
+ self.value = value
+ self.path = path
+ self.domain = domain
+ self.ports = ports
+ self.expires = expires
+ self.discard = discard
+ self.secure = secure
+ self.comment = comment
+ self.commenturl = commenturl
+ self.version = version
+
def __repr__(self):
- s="Cookie(%r=%r" % (self.name, self.value)
- if self.path is not None: s+=", path=%r" % (self.path,)
- if self.domain is not None: s+=", domain=%r" % (self.domain,)
- if self.ports is not None: s+=", ports=%r" % (self.ports,)
- if self.expires is not None: s+=", expires=%r" % (self.expires,)
- if self.secure is not False: s+=", secure=%r" % (self.secure,)
- if self.comment is not None: s+=", comment=%r" % (self.comment,)
- if self.commenturl is not None: s+=", commenturl=%r" % (self.commenturl,)
- if self.version != 0: s+=", version=%r" % (self.version,)
- s+=")"
+ s = "Cookie(%r=%r" % (self.name, self.value)
+ if self.path is not None:
+ s += ", path=%r" % (self.path,)
+ if self.domain is not None:
+ s += ", domain=%r" % (self.domain,)
+ if self.ports is not None:
+ s += ", ports=%r" % (self.ports,)
+ if self.expires is not None:
+ s += ", expires=%r" % (self.expires,)
+ if self.secure is not False:
+ s += ", secure=%r" % (self.secure,)
+ if self.comment is not None:
+ s += ", comment=%r" % (self.comment,)
+ if self.commenturl is not None:
+ s += ", commenturl=%r" % (self.commenturl,)
+ if self.version != 0:
+ s += ", version=%r" % (self.version,)
+ s += ")"
return s
+
def __eq__(self, other):
return (isinstance(other, Cookie) and
other.path == self.path and
@@ -967,10 +1157,12 @@
other.commenturl == self.commenturl and
other.version == self.version)
+
def __ne__(self, other):
return not self.__eq__(other)
+
def parseCookie(headers):
"""Bleargh, the cookie spec sucks.
This surely needs interoperability testing.
@@ -987,7 +1179,7 @@
header = ';'.join(headers)
if header[0:8].lower() == "$version":
# RFC2965 cookie
- h=tokenize([header], foldCase=False)
+ h = tokenize([header], foldCase=False)
r_cookies = split(h, Token(','))
for r_cookie in r_cookies:
last_cookie = None
@@ -1000,20 +1192,20 @@
(name,), = nameval
value = None
- name=name.lower()
+ name = name.lower()
if name == '$version':
continue
if name[0] == '$':
if last_cookie is not None:
if name == '$path':
- last_cookie.path=value
+ last_cookie.path = value
elif name == '$domain':
- last_cookie.domain=value
+ last_cookie.domain = value
elif name == '$port':
if value is None:
last_cookie.ports = ()
else:
- last_cookie.ports=tuple([int(s) for s in value.split(',')])
+ last_cookie.ports = tuple([int(s) for s in value.split(',')])
else:
last_cookie = Cookie(name, value, version=1)
cookies.append(last_cookie)
@@ -1024,19 +1216,21 @@
# however.
r_cookies = header.split(';')
for r_cookie in r_cookies:
- name,value = r_cookie.split('=', 1)
- name=name.strip(' \t')
- value=value.strip(' \t')
+ name, value = r_cookie.split('=', 1)
+ name = name.strip(' \t')
+ value = value.strip(' \t')
cookies.append(Cookie(name, value))
return cookies
-cookie_validname = "[^"+re.escape(http_tokens+http_ctls)+"]*$"
+cookie_validname = "[^" + re.escape(http_tokens + http_ctls) + "]*$"
cookie_validname_re = re.compile(cookie_validname)
-cookie_validvalue = cookie_validname+'|"([^"]|\\\\")*"$'
+cookie_validvalue = cookie_validname + '|"([^"]|\\\\")*"$'
cookie_validvalue_re = re.compile(cookie_validvalue)
+
+
def generateCookie(cookies):
# There's a fundamental problem with the two cookie specifications.
# They both use the "Cookie" header, and the RFC Cookie header only allows
@@ -1094,7 +1288,7 @@
if cookie_validname_re.match(cookie.name) is None:
continue
- value=cookie.value
+ value = cookie.value
if cookie_validvalue_re.match(cookie.value) is None:
value = quoteString(value)
@@ -1114,6 +1308,8 @@
str_cookies.append("$Port=%s" % quoteString(",".join([str(x) for x in cookie.ports])))
return ';'.join(str_cookies)
+
+
def parseSetCookie(headers):
setCookies = []
for header in headers:
@@ -1122,15 +1318,15 @@
l = []
for part in parts:
- namevalue = part.split('=',1)
+ namevalue = part.split('=', 1)
if len(namevalue) == 1:
- name=namevalue[0]
- value=None
+ name = namevalue[0]
+ value = None
else:
- name,value=namevalue
- value=value.strip(' \t')
+ name, value = namevalue
+ value = value.strip(' \t')
- name=name.strip(' \t')
+ name = name.strip(' \t')
l.append((name, value))
@@ -1141,6 +1337,8 @@
pass
return setCookies
+
+
def parseSetCookie2(toks):
outCookies = []
for cookie in [[parseKeyValue(x) for x in split(y, Token(';'))]
@@ -1152,6 +1350,8 @@
pass
return outCookies
+
+
def makeCookieFromList(tup, netscapeFormat):
name, value = tup[0]
if name is None or value is None:
@@ -1161,7 +1361,7 @@
cookie = Cookie(name, value)
hadMaxAge = False
- for name,value in tup[1:]:
+ for name, value in tup[1:]:
name = name.lower()
if value is None:
@@ -1195,6 +1395,7 @@
return cookie
+
def generateSetCookie(cookies):
setCookies = []
for cookie in cookies:
@@ -1211,6 +1412,8 @@
setCookies.append('; '.join(out))
return setCookies
+
+
def generateSetCookie2(cookies):
setCookies = []
for cookie in cookies:
@@ -1238,11 +1441,15 @@
setCookies.append('; '.join(out))
return setCookies
+
+
def parseDepth(depth):
if depth not in ("0", "1", "infinity"):
raise ValueError("Invalid depth header value: %s" % (depth,))
return depth
+
+
def parseOverWrite(overwrite):
if overwrite == "F":
return False
@@ -1250,12 +1457,16 @@
return True
raise ValueError("Invalid overwrite header value: %s" % (overwrite,))
+
+
def generateOverWrite(overwrite):
if overwrite:
return "T"
else:
return "F"
+
+
def parseBrief(brief):
# We accept upper or lower case
if brief.upper() == "F":
@@ -1264,6 +1475,8 @@
return True
raise ValueError("Invalid brief header value: %s" % (brief,))
+
+
def generateBrief(brief):
# MS definition uses lower case
return "t" if brief else "f"
@@ -1308,6 +1521,8 @@
# return accepts.get('*')
+
+
# Headers object
class __RecalcNeeded(object):
def __repr__(self):
@@ -1315,6 +1530,7 @@
_RecalcNeeded = __RecalcNeeded()
+
class Headers(object):
"""
This class stores the HTTP headers as both a parsed representation
@@ -1333,10 +1549,12 @@
for key, value in rawHeaders.iteritems():
self.setRawHeaders(key, value)
+
def _setRawHeaders(self, headers):
self._raw_headers = headers
self._headers = {}
+
def _toParsed(self, name):
r = self._raw_headers.get(name, None)
h = self.handler.parse(name, r)
@@ -1344,6 +1562,7 @@
self._headers[name] = h
return h
+
def _toRaw(self, name):
h = self._headers.get(name, None)
r = self.handler.generate(name, h)
@@ -1351,21 +1570,24 @@
self._raw_headers[name] = r
return r
+
def hasHeader(self, name):
"""Does a header with the given name exist?"""
- name=name.lower()
- return self._raw_headers.has_key(name)
+ name = name.lower()
+ return name in self._raw_headers
+
def getRawHeaders(self, name, default=None):
"""Returns a list of headers matching the given name as the raw string given."""
- name=name.lower()
+ name = name.lower()
raw_header = self._raw_headers.get(name, default)
if raw_header is not _RecalcNeeded:
return raw_header
return self._toRaw(name)
+
def getHeader(self, name, default=None):
"""Ret9urns the parsed representation of the given header.
The exact form of the return value depends on the header in question.
@@ -1374,37 +1596,40 @@
If the header doesn't exist, return default (or None if not specified)
"""
- name=name.lower()
+ name = name.lower()
parsed = self._headers.get(name, default)
if parsed is not _RecalcNeeded:
return parsed
return self._toParsed(name)
+
def setRawHeaders(self, name, value):
"""Sets the raw representation of the given header.
Value should be a list of strings, each being one header of the
given name.
"""
- name=name.lower()
+ name = name.lower()
self._raw_headers[name] = value
self._headers[name] = _RecalcNeeded
+
def setHeader(self, name, value):
"""Sets the parsed representation of the given header.
Value should be a list of objects whose exact form depends
on the header in question.
"""
- name=name.lower()
+ name = name.lower()
self._raw_headers[name] = _RecalcNeeded
self._headers[name] = value
+
def addRawHeader(self, name, value):
"""
Add a raw value to a header that may or may not already exist.
If it exists, add it as a separate header to output; do not
replace anything.
"""
- name=name.lower()
+ name = name.lower()
raw_header = self._raw_headers.get(name)
if raw_header is None:
# No header yet
@@ -1416,36 +1641,42 @@
raw_header.append(value)
self._headers[name] = _RecalcNeeded
+
def removeHeader(self, name):
"""Removes the header named."""
- name=name.lower()
- if self._raw_headers.has_key(name):
+ name = name.lower()
+ if name in self._raw_headers:
del self._raw_headers[name]
del self._headers[name]
+
def __repr__(self):
- return '<Headers: Raw: %s Parsed: %s>'% (self._raw_headers, self._headers)
+ return '<Headers: Raw: %s Parsed: %s>' % (self._raw_headers, self._headers)
+
def canonicalNameCaps(self, name):
"""Return the name with the canonical capitalization, if known,
otherwise, Caps-After-Dashes"""
return header_case_mapping.get(name) or dashCapitalize(name)
+
def getAllRawHeaders(self):
"""Return an iterator of key,value pairs of all headers
contained in this object, as strings. The keys are capitalized
in canonical capitalization."""
- for k,v in self._raw_headers.iteritems():
+ for k, v in self._raw_headers.iteritems():
if v is _RecalcNeeded:
v = self._toRaw(k)
yield self.canonicalNameCaps(k), v
+
def makeImmutable(self):
"""Make this header set immutable. All mutating operations will
raise an exception."""
self.setHeader = self.setRawHeaders = self.removeHeader = self._mutateRaise
+
def _mutateRaise(self, *args):
raise AttributeError("This header object is immutable as the headers have already been sent.")
@@ -1462,24 +1693,24 @@
parser_general_headers = {
- 'Cache-Control':(tokenize, listParser(parseCacheControl), dict),
- 'Connection':(tokenize,filterTokens),
- 'Date':(last,parseDateTime),
-# 'Pragma':tokenize
-# 'Trailer':tokenize
- 'Transfer-Encoding':(tokenize,filterTokens),
-# 'Upgrade':tokenize
-# 'Via':tokenize,stripComment
-# 'Warning':tokenize
+ 'Cache-Control': (tokenize, listParser(parseCacheControl), dict),
+ 'Connection': (tokenize, filterTokens),
+ 'Date': (last, parseDateTime),
+# 'Pragma': tokenize
+# 'Trailer': tokenize
+ 'Transfer-Encoding': (tokenize, filterTokens),
+# 'Upgrade': tokenize
+# 'Via': tokenize,stripComment
+# 'Warning': tokenize
}
generator_general_headers = {
- 'Cache-Control':(iteritems, listGenerator(generateCacheControl), singleHeader),
- 'Connection':(generateList,singleHeader),
- 'Date':(generateDateTime,singleHeader),
+ 'Cache-Control': (iteritems, listGenerator(generateCacheControl), singleHeader),
+ 'Connection': (generateList, singleHeader),
+ 'Date': (generateDateTime, singleHeader),
# 'Pragma':
# 'Trailer':
- 'Transfer-Encoding':(generateList,singleHeader),
+ 'Transfer-Encoding': (generateList, singleHeader),
# 'Upgrade':
# 'Via':
# 'Warning':
@@ -1488,104 +1719,106 @@
parser_request_headers = {
'Accept': (tokenize, listParser(parseAccept), dict),
'Accept-Charset': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultCharset),
- 'Accept-Encoding':(tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding),
- 'Accept-Language':(tokenize, listParser(parseAcceptQvalue), dict),
+ 'Accept-Encoding': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding),
+ 'Accept-Language': (tokenize, listParser(parseAcceptQvalue), dict),
'Authorization': (last, parseAuthorization),
- 'Cookie':(parseCookie,),
- 'Expect':(tokenize, listParser(parseExpect), dict),
- 'From':(last,),
- 'Host':(last,),
- 'If-Match':(tokenize, listParser(parseStarOrETag), list),
- 'If-Modified-Since':(last, parseIfModifiedSince),
- 'If-None-Match':(tokenize, listParser(parseStarOrETag), list),
- 'If-Range':(parseIfRange,),
- 'If-Unmodified-Since':(last,parseDateTime),
- 'Max-Forwards':(last,int),
- 'Prefer':(tokenize, listParser(parseExpect), dict), # Prefer like Expect
-# 'Proxy-Authorization':str, # what is "credentials"
- 'Range':(tokenize, parseRange),
- 'Referer':(last,str), # TODO: URI object?
- 'TE':(tokenize, listParser(parseAcceptQvalue), dict),
- 'User-Agent':(last,str),
+ 'Cookie': (parseCookie,),
+ 'Expect': (tokenize, listParser(parseExpect), dict),
+ 'From': (last,),
+ 'Host': (last,),
+ 'If-Match': (tokenize, listParser(parseStarOrETag), list),
+ 'If-Modified-Since': (last, parseIfModifiedSince),
+ 'If-None-Match': (tokenize, listParser(parseStarOrETag), list),
+ 'If-Range': (parseIfRange,),
+ 'If-Unmodified-Since': (last, parseDateTime),
+ 'Max-Forwards': (last, int),
+ 'Prefer': (tokenize, listParser(parseExpect), dict), # Prefer like Expect
+# 'Proxy-Authorization': str, # what is "credentials"
+ 'Range': (tokenize, parseRange),
+ 'Referer': (last, str), # TODO: URI object?
+ 'TE': (tokenize, listParser(parseAcceptQvalue), dict),
+ 'User-Agent': (last, str),
}
generator_request_headers = {
- 'Accept': (iteritems,listGenerator(generateAccept),singleHeader),
- 'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
- 'Accept-Encoding': (iteritems, removeDefaultEncoding, listGenerator(generateAcceptQvalue),singleHeader),
- 'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
+ 'Accept': (iteritems, listGenerator(generateAccept), singleHeader),
+ 'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue), singleHeader),
+ 'Accept-Encoding': (iteritems, removeDefaultEncoding, listGenerator(generateAcceptQvalue), singleHeader),
+ 'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue), singleHeader),
'Authorization': (generateAuthorization,), # what is "credentials"
- 'Cookie':(generateCookie,singleHeader),
- 'Expect':(iteritems, listGenerator(generateExpect), singleHeader),
- 'From':(str,singleHeader),
- 'Host':(str,singleHeader),
- 'If-Match':(listGenerator(generateStarOrETag), singleHeader),
- 'If-Modified-Since':(generateDateTime,singleHeader),
- 'If-None-Match':(listGenerator(generateStarOrETag), singleHeader),
- 'If-Range':(generateIfRange, singleHeader),
- 'If-Unmodified-Since':(generateDateTime,singleHeader),
- 'Max-Forwards':(str, singleHeader),
- 'Prefer':(iteritems, listGenerator(generateExpect), singleHeader), # Prefer like Expect
-# 'Proxy-Authorization':str, # what is "credentials"
- 'Range':(generateRange,singleHeader),
- 'Referer':(str,singleHeader),
- 'TE': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
- 'User-Agent':(str,singleHeader),
+ 'Cookie': (generateCookie, singleHeader),
+ 'Expect': (iteritems, listGenerator(generateExpect), singleHeader),
+ 'From': (str, singleHeader),
+ 'Host': (str, singleHeader),
+ 'If-Match': (listGenerator(generateStarOrETag), singleHeader),
+ 'If-Modified-Since': (generateDateTime, singleHeader),
+ 'If-None-Match': (listGenerator(generateStarOrETag), singleHeader),
+ 'If-Range': (generateIfRange, singleHeader),
+ 'If-Unmodified-Since': (generateDateTime, singleHeader),
+ 'Max-Forwards': (str, singleHeader),
+ 'Prefer': (iteritems, listGenerator(generateExpect), singleHeader), # Prefer like Expect
+# 'Proxy-Authorization': str, # what is "credentials"
+ 'Range': (generateRange, singleHeader),
+ 'Referer': (str, singleHeader),
+ 'TE': (iteritems, listGenerator(generateAcceptQvalue), singleHeader),
+ 'User-Agent': (str, singleHeader),
}
parser_response_headers = {
- 'Accept-Ranges':(tokenize, filterTokens),
- 'Age':(last,int),
- 'ETag':(tokenize, ETag.parse),
- 'Location':(last,), # TODO: URI object?
+ 'Accept-Ranges': (tokenize, filterTokens),
+ 'Age': (last, int),
+ 'ETag': (tokenize, ETag.parse),
+ 'Location': (last,), # TODO: URI object?
# 'Proxy-Authenticate'
- 'Retry-After':(last, parseRetryAfter),
- 'Server':(last,),
- 'Set-Cookie':(parseSetCookie,),
- 'Set-Cookie2':(tokenize, parseSetCookie2),
- 'Vary':(tokenize, filterTokens),
+ 'Retry-After': (last, parseRetryAfter),
+ 'Server': (last,),
+ 'Set-Cookie': (parseSetCookie,),
+ 'Set-Cookie2': (tokenize, parseSetCookie2),
+ 'Vary': (tokenize, filterTokens),
'WWW-Authenticate': (lambda h: tokenize(h, foldCase=False),
parseWWWAuthenticate,)
}
generator_response_headers = {
- 'Accept-Ranges':(generateList, singleHeader),
- 'Age':(str, singleHeader),
- 'ETag':(ETag.generate, singleHeader),
- 'Location':(str, singleHeader),
+ 'Accept-Ranges': (generateList, singleHeader),
+ 'Age': (str, singleHeader),
+ 'ETag': (ETag.generate, singleHeader),
+ 'Location': (str, singleHeader),
# 'Proxy-Authenticate'
- 'Retry-After':(generateRetryAfter, singleHeader),
- 'Server':(str, singleHeader),
- 'Set-Cookie':(generateSetCookie,),
- 'Set-Cookie2':(generateSetCookie2,),
- 'Vary':(generateList, singleHeader),
- 'WWW-Authenticate':(generateWWWAuthenticate,)
+ 'Retry-After': (generateRetryAfter, singleHeader),
+ 'Server': (str, singleHeader),
+ 'Set-Cookie': (generateSetCookie,),
+ 'Set-Cookie2': (generateSetCookie2,),
+ 'Vary': (generateList, singleHeader),
+ 'WWW-Authenticate': (generateWWWAuthenticate,)
}
parser_entity_headers = {
- 'Allow':(lambda str:tokenize(str, foldCase=False), filterTokens),
- 'Content-Encoding':(tokenize, filterTokens),
- 'Content-Language':(tokenize, filterTokens),
- 'Content-Length':(last, int),
- 'Content-Location':(last,), # TODO: URI object?
- 'Content-MD5':(last, parseContentMD5),
- 'Content-Range':(last, parseContentRange),
- 'Content-Type':(lambda str:tokenize(str, foldCase=False), parseContentType),
- 'Expires':(last, parseExpires),
- 'Last-Modified':(last, parseDateTime),
+ 'Allow': (lambda hdr: tokenize(hdr, foldCase=False), filterTokens),
+ 'Content-Disposition': (lambda hdr: tokenize(hdr, foldCase=False), parseContentDisposition),
+ 'Content-Encoding': (tokenize, filterTokens),
+ 'Content-Language': (tokenize, filterTokens),
+ 'Content-Length': (last, int),
+ 'Content-Location': (last,), # TODO: URI object?
+ 'Content-MD5': (last, parseContentMD5),
+ 'Content-Range': (last, parseContentRange),
+ 'Content-Type': (lambda hdr: tokenize(hdr, foldCase=False), parseContentType),
+ 'Expires': (last, parseExpires),
+ 'Last-Modified': (last, parseDateTime),
}
generator_entity_headers = {
- 'Allow':(generateList, singleHeader),
- 'Content-Encoding':(generateList, singleHeader),
- 'Content-Language':(generateList, singleHeader),
- 'Content-Length':(str, singleHeader),
- 'Content-Location':(str, singleHeader),
- 'Content-MD5':(base64.encodestring, lambda x: x.strip("\n"), singleHeader),
- 'Content-Range':(generateContentRange, singleHeader),
- 'Content-Type':(generateContentType, singleHeader),
- 'Expires':(generateDateTime, singleHeader),
- 'Last-Modified':(generateDateTime, singleHeader),
+ 'Allow': (generateList, singleHeader),
+ 'Content-Disposition': (generateContentDisposition, singleHeader),
+ 'Content-Encoding': (generateList, singleHeader),
+ 'Content-Language': (generateList, singleHeader),
+ 'Content-Length': (str, singleHeader),
+ 'Content-Location': (str, singleHeader),
+ 'Content-MD5': (base64.encodestring, lambda x: x.strip("\n"), singleHeader),
+ 'Content-Range': (generateContentRange, singleHeader),
+ 'Content-Type': (generateContentType, singleHeader),
+ 'Expires': (generateDateTime, singleHeader),
+ 'Last-Modified': (generateDateTime, singleHeader),
}
parser_dav_headers = {
@@ -1593,11 +1826,11 @@
'DAV' : (tokenize, list),
'Depth' : (last, parseDepth),
'Destination' : (last,), # TODO: URI object?
- #'If' : (),
- #'Lock-Token' : (),
+ # 'If' : (),
+ # 'Lock-Token' : (),
'Overwrite' : (last, parseOverWrite),
- #'Status-URI' : (),
- #'Timeout' : (),
+ # 'Status-URI' : (),
+ # 'Timeout' : (),
}
generator_dav_headers = {
@@ -1605,11 +1838,11 @@
'DAV' : (generateList, singleHeader),
'Depth' : (singleHeader),
'Destination' : (singleHeader),
- #'If' : (),
- #'Lock-Token' : (),
+ # 'If' : (),
+ # 'Lock-Token' : (),
'Overwrite' : (),
- #'Status-URI' : (),
- #'Timeout' : (),
+ # 'Status-URI' : (),
+ # 'Timeout' : (),
}
DefaultHTTPHandler.updateParsers(parser_general_headers)
Modified: CalendarServer/trunk/twext/web2/test/test_http_headers.py
===================================================================
--- CalendarServer/trunk/twext/web2/test/test_http_headers.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twext/web2/test/test_http_headers.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -6,7 +6,8 @@
"""
from twisted.trial import unittest
-import random, time
+import random
+import time
from twext.web2 import http_headers
from twext.web2.http_headers import Cookie, HeaderHandler, quoteString, generateKeyValues
@@ -17,9 +18,12 @@
def __init__(self, raw):
self.raw = raw
+
def __eq__(self, other):
return isinstance(other, parsedvalue) and other.raw == self.raw
+
+
class HeadersAPITest(unittest.TestCase):
"""Make sure the public API exists and works."""
def testRaw(self):
@@ -34,6 +38,7 @@
h.removeHeader("test")
self.assertEquals(h.getRawHeaders("test"), None)
+
def testParsed(self):
parsed = parsedvalue(("value1", "value2"))
h = http_headers.Headers(handler=HeaderHandler(parsers={}, generators={}))
@@ -46,17 +51,19 @@
h.removeHeader("test")
self.assertEquals(h.getHeader("test"), None)
+
def testParsedAndRaw(self):
def parse(raw):
return parsedvalue(raw)
+
def generate(parsed):
return parsed.raw
rawvalue = ("value1", "value2")
rawvalue2 = ("value3", "value4")
- handler = HeaderHandler(parsers={'test':(parse,)},
- generators={'test':(generate,)})
+ handler = HeaderHandler(parsers={'test': (parse,)},
+ generators={'test': (generate,)})
h = http_headers.Headers(handler=handler)
h.setRawHeaders("test", rawvalue)
@@ -74,6 +81,7 @@
handler=handler)
self.assertEquals(h.getRawHeaders("test"), rawvalue2)
+
def testImmutable(self):
h = http_headers.Headers(handler=HeaderHandler(parsers={}, generators={}))
@@ -82,11 +90,13 @@
self.assertRaises(AttributeError, h.setHeader, "test", 1)
self.assertRaises(AttributeError, h.removeHeader, "test")
+
+
class TokenizerTest(unittest.TestCase):
"""Test header list parsing functions."""
def testParse(self):
- parser = lambda val: list(http_headers.tokenize([val,]))
+ parser = lambda val: list(http_headers.tokenize([val, ]))
Token = http_headers.Token
tests = (('foo,bar', ['foo', Token(','), 'bar']),
('FOO,BAR', ['foo', Token(','), 'bar']),
@@ -97,17 +107,21 @@
raiseTests = ('"open quote', '"ending \\', "control character: \x127", "\x00", "\x1f")
- for test,result in tests:
+ for test, result in tests:
self.assertEquals(parser(test), result)
for test in raiseTests:
self.assertRaises(ValueError, parser, test)
+
def testGenerate(self):
pass
+
def testRoundtrip(self):
pass
+
+
def atSpecifiedTime(when, func):
def inner(*a, **kw):
orig = time.time
@@ -118,12 +132,16 @@
time.time = orig
return util.mergeFunctionMetadata(func, inner)
+
+
def parseHeader(name, val):
head = http_headers.Headers(handler=http_headers.DefaultHTTPHandler)
- head.setRawHeaders(name,val)
+ head.setRawHeaders(name, val)
return head.getHeader(name)
parseHeader = atSpecifiedTime(999999990, parseHeader) # Sun, 09 Sep 2001 01:46:30 GMT
+
+
def generateHeader(name, val):
head = http_headers.Headers(handler=http_headers.DefaultHTTPHandler)
head.setHeader(name, val)
@@ -131,6 +149,7 @@
generateHeader = atSpecifiedTime(999999990, generateHeader) # Sun, 09 Sep 2001 01:46:30 GMT
+
class HeaderParsingTestBase(unittest.TestCase):
def runRoundtripTest(self, headername, table):
"""
@@ -163,11 +182,10 @@
elif len(row) == 3:
rawHeaderInput, parsedHeaderData, requiredGeneratedElements = row
-
assert isinstance(requiredGeneratedElements, list)
# parser
- parsed = parseHeader(headername, [rawHeaderInput,])
+ parsed = parseHeader(headername, [rawHeaderInput, ])
self.assertEquals(parsed, parsedHeaderData)
regeneratedHeaderValue = generateHeader(headername, parsed)
@@ -193,33 +211,35 @@
parsed = parseHeader(headername, val)
self.assertEquals(parsed, None)
+
+
class GeneralHeaderParsingTests(HeaderParsingTestBase):
def testCacheControl(self):
table = (
("no-cache",
- {'no-cache':None}),
+ {'no-cache': None}),
("no-cache, no-store, max-age=5, max-stale=3, min-fresh=5, no-transform, only-if-cached, blahblah-extension-thingy",
{'no-cache': None,
'no-store': None,
- 'max-age':5,
- 'max-stale':3,
- 'min-fresh':5,
- 'no-transform':None,
- 'only-if-cached':None,
- 'blahblah-extension-thingy':None}),
+ 'max-age': 5,
+ 'max-stale': 3,
+ 'min-fresh': 5,
+ 'no-transform': None,
+ 'only-if-cached': None,
+ 'blahblah-extension-thingy': None}),
("max-stale",
- {'max-stale':None}),
+ {'max-stale': None}),
("public, private, no-cache, no-store, no-transform, must-revalidate, proxy-revalidate, max-age=5, s-maxage=10, blahblah-extension-thingy",
- {'public':None,
- 'private':None,
- 'no-cache':None,
- 'no-store':None,
- 'no-transform':None,
- 'must-revalidate':None,
- 'proxy-revalidate':None,
- 'max-age':5,
- 's-maxage':10,
- 'blahblah-extension-thingy':None}),
+ {'public': None,
+ 'private': None,
+ 'no-cache': None,
+ 'no-store': None,
+ 'no-transform': None,
+ 'must-revalidate': None,
+ 'proxy-revalidate': None,
+ 'max-age': 5,
+ 's-maxage': 10,
+ 'blahblah-extension-thingy': None}),
('private="Set-Cookie, Set-Cookie2", no-cache="PROXY-AUTHENTICATE"',
{'private': ['set-cookie', 'set-cookie2'],
'no-cache': ['proxy-authenticate']},
@@ -227,13 +247,15 @@
)
self.runRoundtripTest("Cache-Control", table)
+
def testConnection(self):
table = (
- ("close", ['close',]),
+ ("close", ['close', ]),
("close, foo-bar", ['close', 'foo-bar'])
)
self.runRoundtripTest("Connection", table)
+
def testDate(self):
# Don't need major tests since the datetime parser has its own tests
self.runRoundtripTest("Date", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
@@ -244,6 +266,7 @@
# def testTrailer(self):
# fail
+
def testTransferEncoding(self):
table = (
('chunked', ['chunked']),
@@ -260,8 +283,10 @@
# def testWarning(self):
# fail
+
+
class RequestHeaderParsingTests(HeaderParsingTestBase):
- #FIXME test ordering too.
+ # FIXME test ordering too.
def testAccept(self):
table = (
("audio/*;q=0.2, audio/basic",
@@ -307,6 +332,7 @@
)
self.runRoundtripTest("Accept-Charset", table)
+
def testAcceptEncoding(self):
table = (
("compress, gzip",
@@ -319,11 +345,12 @@
{'compress': 0.5, 'gzip': 1.0, 'identity': 0.0001},
["compress;q=0.5", "gzip"]),
("gzip;q=1.0, identity;q=0.5, *;q=0",
- {'gzip': 1.0, 'identity': 0.5, '*':0},
+ {'gzip': 1.0, 'identity': 0.5, '*': 0},
["gzip", "identity;q=0.5", "*;q=0"]),
)
self.runRoundtripTest("Accept-Encoding", table)
+
def testAcceptLanguage(self):
table = (
("da, en-gb;q=0.8, en;q=0.7",
@@ -333,6 +360,7 @@
)
self.runRoundtripTest("Accept-Language", table)
+
def testAuthorization(self):
table = (
("Basic dXNlcm5hbWU6cGFzc3dvcmQ=",
@@ -345,6 +373,7 @@
self.runRoundtripTest("Authorization", table)
+
def testCookie(self):
table = (
('name=value', [Cookie('name', 'value')]),
@@ -358,12 +387,12 @@
)
self.runRoundtripTest("Cookie", table)
- #newstyle RFC2965 Cookie
+ # newstyle RFC2965 Cookie
table2 = (
('$Version="1";'
'name="value";$Path="/foo";$Domain="www.local";$Port="80,8000";'
'name2="value"',
- [Cookie('name', 'value', path='/foo', domain='www.local', ports=(80,8000), version=1), Cookie('name2', 'value', version=1)]),
+ [Cookie('name', 'value', path='/foo', domain='www.local', ports=(80, 8000), version=1), Cookie('name2', 'value', version=1)]),
('$Version="1";'
'name="value";$Port',
[Cookie('name', 'value', ports=(), version=1)]),
@@ -386,10 +415,9 @@
'$Version="1";name="qq\\"qq";name2="value2"'),
)
for row in table3:
- self.assertEquals(generateHeader("Cookie", row[0]), [row[1],])
+ self.assertEquals(generateHeader("Cookie", row[0]), [row[1], ])
-
def testSetCookie(self):
table = (
('name,"blah=value,; expires=Sun, 09 Sep 2001 01:46:40 GMT; path=/foo; domain=bar.baz; secure',
@@ -400,30 +428,35 @@
)
self.runRoundtripTest("Set-Cookie", table)
+
def testSetCookie2(self):
table = (
('name="value"; Comment="YadaYada"; CommentURL="http://frobnotz/"; Discard; Domain="blah.blah"; Max-Age=10; Path="/foo"; Port="80,8080"; Secure; Version="1"',
- [Cookie("name", "value", comment="YadaYada", commenturl="http://frobnotz/", discard=True, domain="blah.blah", expires=1000000000, path="/foo", ports=(80,8080), secure=True, version=1)]),
+ [Cookie("name", "value", comment="YadaYada", commenturl="http://frobnotz/", discard=True, domain="blah.blah", expires=1000000000, path="/foo", ports=(80, 8080), secure=True, version=1)]),
)
self.runRoundtripTest("Set-Cookie2", table)
+
def testExpect(self):
table = (
("100-continue",
- {"100-continue":(None,)}),
+ {"100-continue": (None,)}),
('foobar=twiddle',
- {'foobar':('twiddle',)}),
+ {'foobar': ('twiddle',)}),
("foo=bar;a=b;c",
- {'foo':('bar',('a', 'b'), ('c', None))})
+ {'foo': ('bar', ('a', 'b'), ('c', None))})
)
self.runRoundtripTest("Expect", table)
+
def testFrom(self):
self.runRoundtripTest("From", (("webmaster at w3.org", "webmaster at w3.org"),))
+
def testHost(self):
self.runRoundtripTest("Host", (("www.w3.org", "www.w3.org"),))
+
def testIfMatch(self):
table = (
('"xyzzy"', [http_headers.ETag('xyzzy')]),
@@ -433,6 +466,8 @@
('*', ['*']),
)
self.runRoundtripTest("If-Match", table)
+
+
def testIfModifiedSince(self):
# Don't need major tests since the datetime parser has its own test
# Just test stupid ; length= brokenness.
@@ -443,6 +478,7 @@
self.runRoundtripTest("If-Modified-Since", table)
+
def testIfNoneMatch(self):
table = (
('"xyzzy"', [http_headers.ETag('xyzzy')]),
@@ -456,6 +492,7 @@
)
self.runRoundtripTest("If-None-Match", table)
+
def testIfRange(self):
table = (
('"xyzzy"', http_headers.ETag('xyzzy')),
@@ -465,9 +502,11 @@
)
self.runRoundtripTest("If-Range", table)
+
def testIfUnmodifiedSince(self):
self.runRoundtripTest("If-Unmodified-Since", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
+
def testMaxForwards(self):
self.runRoundtripTest("Max-Forwards", (("15", 15),))
@@ -475,13 +514,14 @@
# def testProxyAuthorize(self):
# fail
+
def testRange(self):
table = (
- ("bytes=0-499", ('bytes', [(0,499),])),
- ("bytes=500-999", ('bytes', [(500,999),])),
- ("bytes=-500",('bytes', [(None,500),])),
- ("bytes=9500-",('bytes', [(9500, None),])),
- ("bytes=0-0,-1", ('bytes', [(0,0),(None,1)])),
+ ("bytes=0-499", ('bytes', [(0, 499), ])),
+ ("bytes=500-999", ('bytes', [(500, 999), ])),
+ ("bytes=-500", ('bytes', [(None, 500), ])),
+ ("bytes=9500-", ('bytes', [(9500, None), ])),
+ ("bytes=0-0,-1", ('bytes', [(0, 0), (None, 1)])),
)
self.runRoundtripTest("Range", table)
@@ -493,23 +533,27 @@
def testTE(self):
table = (
- ("deflate", {'deflate':1}),
+ ("deflate", {'deflate': 1}),
("", {}),
- ("trailers, deflate;q=0.5", {'trailers':1, 'deflate':0.5}),
+ ("trailers, deflate;q=0.5", {'trailers': 1, 'deflate': 0.5}),
)
self.runRoundtripTest("TE", table)
+
def testUserAgent(self):
self.runRoundtripTest("User-Agent", (("CERN-LineMode/2.15 libwww/2.17b3", "CERN-LineMode/2.15 libwww/2.17b3"),))
+
class ResponseHeaderParsingTests(HeaderParsingTestBase):
def testAcceptRanges(self):
self.runRoundtripTest("Accept-Ranges", (("bytes", ["bytes"]), ("none", ["none"])))
+
def testAge(self):
self.runRoundtripTest("Age", (("15", 15),))
+
def testETag(self):
table = (
('"xyzzy"', http_headers.ETag('xyzzy')),
@@ -518,6 +562,7 @@
)
self.runRoundtripTest("ETag", table)
+
def testLocation(self):
self.runRoundtripTest("Location", (("http://www.w3.org/pub/WWW/People.htm",
"http://www.w3.org/pub/WWW/People.htm"),))
@@ -526,17 +571,20 @@
# def testProxyAuthenticate(self):
# fail
+
def testRetryAfter(self):
# time() is always 999999990 when being tested.
table = (
("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000, ["10"]),
- ("120", 999999990+120),
+ ("120", 999999990 + 120),
)
self.runRoundtripTest("Retry-After", table)
+
def testServer(self):
self.runRoundtripTest("Server", (("CERN/3.0 libwww/2.17", "CERN/3.0 libwww/2.17"),))
+
def testVary(self):
table = (
("*", ["*"]),
@@ -544,11 +592,12 @@
)
self.runRoundtripTest("Vary", table)
+
def testWWWAuthenticate(self):
digest = ('Digest realm="digest realm", nonce="bAr", qop="auth"',
- [('Digest', {'realm': 'digest realm', 'nonce': 'bAr',
+ [('Digest', {'realm': 'digest realm', 'nonce': 'bAr',
'qop': 'auth'})],
- ['Digest', 'realm="digest realm"',
+ ['Digest', 'realm="digest realm"',
'nonce="bAr"', 'qop="auth"'])
basic = ('Basic realm="foo"',
@@ -558,29 +607,29 @@
[('NTLM', {})], ['NTLM', ''])
negotiate = ('Negotiate SomeGssAPIData',
- [('Negotiate', 'SomeGssAPIData')],
+ [('Negotiate', 'SomeGssAPIData')],
['Negotiate', 'SomeGssAPIData'])
table = (digest,
basic,
- (digest[0]+', '+basic[0],
+ (digest[0] + ', ' + basic[0],
digest[1] + basic[1],
[digest[2], basic[2]]),
ntlm,
negotiate,
- (ntlm[0]+', '+basic[0],
+ (ntlm[0] + ', ' + basic[0],
ntlm[1] + basic[1],
[ntlm[2], basic[2]]),
- (digest[0]+', '+negotiate[0],
+ (digest[0] + ', ' + negotiate[0],
digest[1] + negotiate[1],
[digest[2], negotiate[2]]),
- (negotiate[0]+', '+negotiate[0],
+ (negotiate[0] + ', ' + negotiate[0],
negotiate[1] + negotiate[1],
[negotiate[2] + negotiate[2]]),
- (ntlm[0]+', '+ntlm[0],
+ (ntlm[0] + ', ' + ntlm[0],
ntlm[1] + ntlm[1],
[ntlm[2], ntlm[2]]),
- (basic[0]+', '+ntlm[0],
+ (basic[0] + ', ' + ntlm[0],
basic[1] + ntlm[1],
[basic[2], ntlm[2]]),
)
@@ -593,7 +642,7 @@
for row in table:
rawHeaderInput, parsedHeaderData, requiredGeneratedElements = row
- parsed = parseHeader(headername, [rawHeaderInput,])
+ parsed = parseHeader(headername, [rawHeaderInput, ])
self.assertEquals(parsed, parsedHeaderData)
regeneratedHeaderValue = generateHeader(headername, parsed)
@@ -617,6 +666,7 @@
self.assertEquals(parsed, reparsed)
+
class EntityHeaderParsingTests(HeaderParsingTestBase):
def testAllow(self):
# Allow is a silly case-sensitive header unlike all the rest
@@ -626,32 +676,38 @@
)
self.runRoundtripTest("Allow", table)
+
def testContentEncoding(self):
table = (
- ("gzip", ['gzip',]),
+ ("gzip", ['gzip', ]),
)
self.runRoundtripTest("Content-Encoding", table)
+
def testContentLanguage(self):
table = (
- ("da", ['da',]),
+ ("da", ['da', ]),
("mi, en", ['mi', 'en']),
)
self.runRoundtripTest("Content-Language", table)
+
def testContentLength(self):
self.runRoundtripTest("Content-Length", (("15", 15),))
self.invalidParseTest("Content-Length", ("asdf",))
+
def testContentLocation(self):
self.runRoundtripTest("Content-Location",
(("http://www.w3.org/pub/WWW/People.htm",
"http://www.w3.org/pub/WWW/People.htm"),))
+
def testContentMD5(self):
self.runRoundtripTest("Content-MD5", (("Q2hlY2sgSW50ZWdyaXR5IQ==", "Check Integrity!"),))
self.invalidParseTest("Content-MD5", ("sdlaksjdfhlkaj",))
+
def testContentRange(self):
table = (
("bytes 0-499/1234", ("bytes", 0, 499, 1234)),
@@ -664,13 +720,23 @@
)
self.runRoundtripTest("Content-Range", table)
+
def testContentType(self):
table = (
- ("text/html;charset=iso-8859-4", http_headers.MimeType('text', 'html', (('charset','iso-8859-4'),))),
+ ("text/html;charset=iso-8859-4", http_headers.MimeType('text', 'html', (('charset', 'iso-8859-4'),))),
("text/html", http_headers.MimeType('text', 'html')),
)
self.runRoundtripTest("Content-Type", table)
+
+ def testContentDisposition(self):
+ table = (
+ ("attachment;filename=foo.txt", http_headers.MimeDisposition('attachment', (('filename', 'foo.txt'),))),
+ ("inline", http_headers.MimeDisposition('inline')),
+ )
+ self.runRoundtripTest("Content-Disposition", table)
+
+
def testExpires(self):
self.runRoundtripTest("Expires", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
# Invalid expires MUST return date in the past.
@@ -682,6 +748,8 @@
# Don't need major tests since the datetime parser has its own test
self.runRoundtripTest("Last-Modified", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
+
+
class DateTimeTest(unittest.TestCase):
"""Test date parsing functions."""
@@ -716,15 +784,18 @@
def testGenerate(self):
self.assertEquals(http_headers.generateDateTime(784111777), 'Sun, 06 Nov 1994 08:49:37 GMT')
+
def testRoundtrip(self):
- for i in range(2000):
+ for _ignore in range(2000):
time = random.randint(0, 2000000000)
timestr = http_headers.generateDateTime(time)
time2 = http_headers.parseDateTime(timestr)
self.assertEquals(time, time2)
+
class TestMimeType(unittest.TestCase):
+
def testEquality(self):
"""Test that various uses of the constructer are equal
"""
@@ -748,6 +819,28 @@
+class TestMimeDisposition(unittest.TestCase):
+
+ def testEquality(self):
+ """Test that various uses of the constructer are equal
+ """
+
+ kwargMime = http_headers.MimeDisposition('attachment',
+ key='value')
+ dictMime = http_headers.MimeDisposition('attachment',
+ {'key': 'value'})
+ tupleMime = http_headers.MimeDisposition('attachment',
+ (('key', 'value'),))
+
+ stringMime = http_headers.MimeDisposition.fromString('attachment;key=value')
+
+ self.assertEquals(kwargMime, dictMime)
+ self.assertEquals(dictMime, tupleMime)
+ self.assertEquals(kwargMime, tupleMime)
+ self.assertEquals(kwargMime, stringMime)
+
+
+
class FormattingUtilityTests(unittest.TestCase):
"""
Tests for various string formatting functionality required to generate
Modified: CalendarServer/trunk/twistedcaldav/caldavxml.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/caldavxml.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/caldavxml.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -1,4 +1,4 @@
-##
+# #
# Copyright (c) 2005-2012 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-##
+# #
"""
CalDAV XML Support.
@@ -40,9 +40,9 @@
log = Logger()
-##
+#
# CalDAV objects
-##
+#
caldav_namespace = "urn:ietf:params:xml:ns:caldav"
@@ -73,6 +73,7 @@
namespace = caldav_namespace
+
class CalDAVEmptyElement (WebDAVEmptyElement):
"""
CalDAV element with no contents.
@@ -80,6 +81,7 @@
namespace = caldav_namespace
+
class CalDAVTextElement (WebDAVTextElement):
"""
CalDAV element containing PCDATA.
@@ -87,6 +89,7 @@
namespace = caldav_namespace
+
class CalDAVTimeRangeElement (CalDAVEmptyElement):
"""
CalDAV element containing a time range.
@@ -102,17 +105,18 @@
# One of start or end must be present
if "start" not in attributes and "end" not in attributes:
raise ValueError("One of 'start' or 'end' must be present in CALDAV:time-range")
-
+
self.start = PyCalendarDateTime.parseText(attributes["start"]) if "start" in attributes else None
self.end = PyCalendarDateTime.parseText(attributes["end"]) if "end" in attributes else None
+
def valid(self, level=0):
"""
Indicate whether the time-range is valid (must be date-time in UTC).
-
+
@return: True if valid, False otherwise
"""
-
+
if self.start is not None and self.start.isDateOnly():
log.msg("start attribute in <time-range> is not a date-time: %s" % (self.start,))
return False
@@ -130,6 +134,7 @@
return True
+
class CalDAVTimeZoneElement (CalDAVTextElement):
"""
CalDAV element containing iCalendar data with a single VTIMEZONE component.
@@ -141,6 +146,7 @@
"""
return iComponent.fromString(str(self))
+
def gettimezone(self):
"""
Get the timezone to use. If none, return UTC timezone.
@@ -156,13 +162,14 @@
# Default to using utc tzinfo
return PyCalendarTimezone(utc=True)
+
def valid(self):
"""
Determine whether the content of this element is a valid single VTIMEZONE component.
-
+
@return: True if valid, False if not.
"""
-
+
try:
calendar = self.calendar()
if calendar is None:
@@ -182,8 +189,9 @@
return False
return found
-
+
+
@registerElement
class CalendarHomeSet (CalDAVElement):
"""
@@ -193,9 +201,10 @@
name = "calendar-home-set"
hidden = True
- allowed_children = { (dav_namespace, "href"): (0, None) }
+ allowed_children = {(dav_namespace, "href"): (0, None)}
+
@registerElement
class CalendarDescription (CalDAVTextElement):
"""
@@ -208,6 +217,7 @@
# May be protected; but we'll let the client set this if they like.
+
@registerElement
class CalendarTimeZone (CalDAVTimeZoneElement):
"""
@@ -218,6 +228,7 @@
hidden = True
+
@registerElement
class SupportedCalendarComponentSets (CalDAVElement):
"""
@@ -229,9 +240,10 @@
hidden = True
protected = True
- allowed_children = { (caldav_namespace, "supported-calendar-component-set"): (0, None) }
+ allowed_children = {(caldav_namespace, "supported-calendar-component-set"): (0, None)}
+
@registerElement
class SupportedCalendarComponentSet (CalDAVElement):
"""
@@ -242,9 +254,10 @@
hidden = True
protected = True
- allowed_children = { (caldav_namespace, "comp"): (0, None) }
+ allowed_children = {(caldav_namespace, "comp"): (0, None)}
+
@registerElement
class SupportedCalendarData (CalDAVElement):
"""
@@ -255,9 +268,10 @@
hidden = True
protected = True
- allowed_children = { (caldav_namespace, "calendar-data"): (0, None) }
+ allowed_children = {(caldav_namespace, "calendar-data"): (0, None)}
+
@registerElement
class MaxResourceSize (CalDAVTextElement):
"""
@@ -269,6 +283,7 @@
protected = True
+
@registerElement
class MinDateTime (CalDAVTextElement):
"""
@@ -280,6 +295,7 @@
protected = True
+
@registerElement
class MaxDateTime (CalDAVTextElement):
"""
@@ -291,6 +307,7 @@
protected = True
+
@registerElement
class MaxInstances (CalDAVTextElement):
"""
@@ -302,6 +319,7 @@
protected = True
+
@registerElement
class MaxAttendeesPerInstance (CalDAVTextElement):
"""
@@ -313,6 +331,7 @@
protected = True
+
@registerElement
class Calendar (CalDAVEmptyElement):
"""
@@ -322,6 +341,7 @@
name = "calendar"
+
@registerElement
class MakeCalendar (CalDAVElement):
"""
@@ -330,11 +350,12 @@
"""
name = "mkcalendar"
- allowed_children = { (dav_namespace, "set"): (0, 1) }
+ allowed_children = {(dav_namespace, "set"): (0, 1)}
- child_types = { "WebDAVUnknownElement": (0, None) }
+ child_types = {"WebDAVUnknownElement": (0, None)}
+
@registerElement
class MakeCalendarResponse (CalDAVElement):
"""
@@ -343,9 +364,10 @@
"""
name = "mkcalendar-response"
- allowed_children = { WebDAVElement: (0, None) }
+ allowed_children = {WebDAVElement: (0, None)}
+
@registerElement
class CalendarQuery (CalDAVElement):
"""
@@ -355,13 +377,14 @@
name = "calendar-query"
allowed_children = {
- (dav_namespace, "allprop" ): (0, None),
- (dav_namespace, "propname"): (0, None),
- (dav_namespace, "prop" ): (0, None),
+ (dav_namespace, "allprop"): (0, None),
+ (dav_namespace, "propname"): (0, None),
+ (dav_namespace, "prop"): (0, None),
(caldav_namespace, "timezone"): (0, 1),
- (caldav_namespace, "filter" ): (0, 1), # Actually (1, 1) unless element is empty
+ (caldav_namespace, "filter"): (0, 1), # Actually (1, 1) unless element is empty
}
+
def __init__(self, *children, **attributes):
super(CalendarQuery, self).__init__(*children, **attributes)
@@ -373,9 +396,9 @@
qname = child.qname()
if qname in (
- (dav_namespace, "allprop" ),
+ (dav_namespace, "allprop"),
(dav_namespace, "propname"),
- (dav_namespace, "prop" ),
+ (dav_namespace, "prop"),
):
if props is not None:
raise ValueError("Only one of CalDAV:allprop, CalDAV:propname, CalDAV:prop allowed")
@@ -384,7 +407,7 @@
elif qname == (caldav_namespace, "filter"):
filter = child
- elif qname ==(caldav_namespace, "timezone"):
+ elif qname == (caldav_namespace, "timezone"):
timezone = child
else:
@@ -394,11 +417,12 @@
if filter is None:
raise ValueError("CALDAV:filter required")
- self.props = props
+ self.props = props
self.filter = filter
self.timezone = timezone
+
@registerElement
class CalendarData (CalDAVElement):
"""
@@ -409,10 +433,10 @@
name = "calendar-data"
allowed_children = {
- (caldav_namespace, "comp" ): (0, None),
- (caldav_namespace, "expand" ): (0, 1),
- (caldav_namespace, "limit-recurrence-set" ): (0, 1),
- (caldav_namespace, "limit-freebusy-set" ): (0, 1),
+ (caldav_namespace, "comp"): (0, None),
+ (caldav_namespace, "expand"): (0, 1),
+ (caldav_namespace, "limit-recurrence-set"): (0, 1),
+ (caldav_namespace, "limit-freebusy-set"): (0, 1),
PCDATAElement: (0, None),
}
allowed_attributes = {
@@ -434,13 +458,14 @@
fromTextData = fromCalendar
+
def __init__(self, *children, **attributes):
super(CalendarData, self).__init__(*children, **attributes)
- component = None
+ component = None
recurrence_set = None
- freebusy_set = None
- data = None
+ freebusy_set = None
+ data = None
for child in self.children:
qname = child.qname()
@@ -450,7 +475,7 @@
elif qname in (
(caldav_namespace, "expand"),
- (caldav_namespace, "limit-recurrence-set" ),
+ (caldav_namespace, "limit-recurrence-set"),
):
if recurrence_set is not None:
raise ValueError("Only one of CalDAV:expand, CalDAV:limit-recurrence-set allowed")
@@ -465,20 +490,22 @@
else:
data += child
- else: raise AssertionError("We shouldn't be here")
+ else:
+ raise AssertionError("We shouldn't be here")
- self.component = component
+ self.component = component
self.recurrence_set = recurrence_set
- self.freebusy_set = freebusy_set
+ self.freebusy_set = freebusy_set
if data is not None:
try:
if component is not None:
- raise ValueError("Only one of CalDAV:comp (%r) or PCDATA (%r) allowed"% (component, str(data)))
+ raise ValueError("Only one of CalDAV:comp (%r) or PCDATA (%r) allowed" % (component, str(data)))
if recurrence_set is not None:
- raise ValueError("%s not allowed with PCDATA (%r)"% (recurrence_set, str(data)))
+ raise ValueError("%s not allowed with PCDATA (%r)" % (recurrence_set, str(data)))
except ValueError:
- if not data.isWhitespace(): raise
+ if not data.isWhitespace():
+ raise
else:
# Since we've already combined PCDATA elements, we'd may as well
# optimize them originals away
@@ -494,19 +521,21 @@
else:
self.version = "2.0"
+
def verifyTypeVersion(self, types_and_versions):
"""
Make sure any content-type and version matches at least one of the supplied set.
-
+
@param types_and_versions: a list of (content-type, version) tuples to test against.
@return: True if there is at least one match, False otherwise.
"""
for item in types_and_versions:
if (item[0] == self.content_type) and (item[1] == self.version):
return True
-
+
return False
+
def calendar(self):
"""
Returns a calendar component derived from this element.
@@ -519,6 +548,7 @@
generateComponent = calendar
+
def calendarData(self):
"""
Returns the calendar data derived from this element.
@@ -535,6 +565,7 @@
textData = calendarData
+
@registerElement
class CalendarComponent (CalDAVElement):
"""
@@ -545,12 +576,13 @@
allowed_children = {
(caldav_namespace, "allcomp"): (0, 1),
- (caldav_namespace, "comp" ): (0, None),
+ (caldav_namespace, "comp"): (0, None),
(caldav_namespace, "allprop"): (0, 1),
- (caldav_namespace, "prop" ): (0, None),
+ (caldav_namespace, "prop"): (0, None),
}
- allowed_attributes = { "name": True }
+ allowed_attributes = {"name": True}
+
def __init__(self, *children, **attributes):
super(CalendarComponent, self).__init__(*children, **attributes)
@@ -597,6 +629,7 @@
self.properties = properties
self.type = self.attributes["name"]
+
def getFromICalendar(self, component):
"""
Returns a calendar component object containing the data in the given
@@ -639,6 +672,7 @@
return result
+
@registerElement
class AllComponents (CalDAVEmptyElement):
"""
@@ -648,6 +682,7 @@
name = "allcomp"
+
@registerElement
class AllProperties (CalDAVEmptyElement):
"""
@@ -657,6 +692,7 @@
name = "allprop"
+
@registerElement
class Property (CalDAVEmptyElement):
"""
@@ -670,6 +706,7 @@
"novalue": False,
}
+
def __init__(self, *children, **attributes):
super(Property, self).__init__(*children, **attributes)
@@ -687,6 +724,7 @@
self.novalue = False
+
@registerElement
class Expand (CalDAVTimeRangeElement):
"""
@@ -697,6 +735,7 @@
name = "expand"
+
@registerElement
class LimitRecurrenceSet (CalDAVTimeRangeElement):
"""
@@ -707,6 +746,7 @@
name = "limit-recurrence-set"
+
@registerElement
class LimitFreeBusySet (CalDAVTimeRangeElement):
"""
@@ -717,6 +757,7 @@
name = "limit-freebusy-set"
+
@registerElement
class Filter (CalDAVElement):
"""
@@ -725,9 +766,10 @@
"""
name = "filter"
- allowed_children = { (caldav_namespace, "comp-filter"): (1, 1) }
+ allowed_children = {(caldav_namespace, "comp-filter"): (1, 1)}
+
@registerElement
class ComponentFilter (CalDAVElement):
"""
@@ -737,10 +779,10 @@
name = "comp-filter"
allowed_children = {
- (caldav_namespace, "is-not-defined" ): (0, 1),
- (caldav_namespace, "time-range" ): (0, 1),
- (caldav_namespace, "comp-filter" ): (0, None),
- (caldav_namespace, "prop-filter" ): (0, None),
+ (caldav_namespace, "is-not-defined"): (0, 1),
+ (caldav_namespace, "time-range"): (0, 1),
+ (caldav_namespace, "comp-filter"): (0, None),
+ (caldav_namespace, "prop-filter"): (0, None),
}
allowed_attributes = {
"name": True,
@@ -748,6 +790,7 @@
}
+
@registerElement
class PropertyFilter (CalDAVElement):
"""
@@ -757,10 +800,10 @@
name = "prop-filter"
allowed_children = {
- (caldav_namespace, "is-not-defined" ): (0, 1),
- (caldav_namespace, "time-range" ): (0, 1),
- (caldav_namespace, "text-match" ): (0, 1),
- (caldav_namespace, "param-filter" ): (0, None),
+ (caldav_namespace, "is-not-defined"): (0, 1),
+ (caldav_namespace, "time-range"): (0, 1),
+ (caldav_namespace, "text-match"): (0, 1),
+ (caldav_namespace, "param-filter"): (0, None),
}
allowed_attributes = {
"name": True,
@@ -768,6 +811,7 @@
}
+
@registerElement
class ParameterFilter (CalDAVElement):
"""
@@ -777,12 +821,13 @@
name = "param-filter"
allowed_children = {
- (caldav_namespace, "is-not-defined" ): (0, 1),
- (caldav_namespace, "text-match" ): (0, 1),
+ (caldav_namespace, "is-not-defined"): (0, 1),
+ (caldav_namespace, "text-match"): (0, 1),
}
- allowed_attributes = { "name": True }
+ allowed_attributes = {"name": True}
+
@registerElement
class IsNotDefined (CalDAVEmptyElement):
"""
@@ -792,6 +837,7 @@
name = "is-not-defined"
+
@registerElement
class TextMatch (CalDAVTextElement):
"""
@@ -800,7 +846,8 @@
"""
name = "text-match"
- def fromString(clazz, string, caseless=False): #@NoSelf
+
+ def fromString(clazz, string, caseless=False): # @NoSelf
if caseless:
caseless = "yes"
else:
@@ -822,6 +869,7 @@
}
+
@registerElement
class TimeZone (CalDAVTimeZoneElement):
"""
@@ -831,6 +879,7 @@
name = "timezone"
+
@registerElement
class TimeRange (CalDAVTimeRangeElement):
"""
@@ -840,6 +889,7 @@
name = "time-range"
+
@registerElement
class CalendarMultiGet (CalDAVElement):
"""
@@ -852,12 +902,13 @@
# To allow for an empty element in a supported-report-set property we need
# to relax the child restrictions
allowed_children = {
- (dav_namespace, "allprop" ): (0, 1),
+ (dav_namespace, "allprop"): (0, 1),
(dav_namespace, "propname"): (0, 1),
- (dav_namespace, "prop" ): (0, 1),
- (dav_namespace, "href" ): (0, None), # Actually ought to be (1, None)
+ (dav_namespace, "prop"): (0, 1),
+ (dav_namespace, "href"): (0, None), # Actually ought to be (1, None)
}
+
def __init__(self, *children, **attributes):
super(CalendarMultiGet, self).__init__(*children, **attributes)
@@ -868,9 +919,9 @@
qname = child.qname()
if qname in (
- (dav_namespace, "allprop" ),
+ (dav_namespace, "allprop"),
(dav_namespace, "propname"),
- (dav_namespace, "prop" ),
+ (dav_namespace, "prop"),
):
if property is not None:
raise ValueError("Only one of DAV:allprop, DAV:propname, DAV:prop allowed")
@@ -879,10 +930,11 @@
elif qname == (dav_namespace, "href"):
resources.append(child)
- self.property = property
+ self.property = property
self.resources = resources
+
@registerElement
class FreeBusyQuery (CalDAVElement):
"""
@@ -894,8 +946,9 @@
# To allow for an empty element in a supported-report-set property we need
# to relax the child restrictions
- allowed_children = { (caldav_namespace, "time-range" ): (0, 1) } # Actually ought to be (1, 1)
+ allowed_children = {(caldav_namespace, "time-range"): (0, 1)} # Actually ought to be (1, 1)
+
def __init__(self, *children, **attributes):
super(FreeBusyQuery, self).__init__(*children, **attributes)
@@ -909,11 +962,12 @@
raise ValueError("Only one time-range element allowed in free-busy-query: %r" % (self,))
timerange = child
else:
- raise ValueError("Unknown element %r in free-busy-query: %r" % (child,self))
+ raise ValueError("Unknown element %r in free-busy-query: %r" % (child, self))
- self.timerange = timerange
+ self.timerange = timerange
+
@registerElement
class ReadFreeBusy(CalDAVEmptyElement):
"""
@@ -921,8 +975,9 @@
(CalDAV-access, RFC 4791 section 6.1.1)
"""
name = "read-free-busy"
-
+
+
@registerElement
class NoUIDConflict(CalDAVElement):
"""
@@ -931,9 +986,10 @@
"""
name = "no-uid-conflict"
- allowed_children = { (dav_namespace, "href"): (1, 1) }
-
+ allowed_children = {(dav_namespace, "href"): (1, 1)}
+
+
@registerElement
class SupportedFilter(CalDAVElement):
"""
@@ -944,15 +1000,17 @@
name = "supported-filter"
allowed_children = {
- (caldav_namespace, "comp-filter" ): (0, None),
- (caldav_namespace, "prop-filter" ): (0, None),
+ (caldav_namespace, "comp-filter"): (0, None),
+ (caldav_namespace, "prop-filter"): (0, None),
(caldav_namespace, "param-filter"): (0, None)
}
-
-##
+
+#
# CalDAV Schedule objects
-##
+#
+
+
@registerElement
class CalendarUserAddressSet (CalDAVElement):
"""
@@ -962,9 +1020,10 @@
name = "calendar-user-address-set"
hidden = True
- allowed_children = { (dav_namespace, "href"): (0, None) }
+ allowed_children = {(dav_namespace, "href"): (0, None)}
+
@registerElement
class CalendarFreeBusySet (CalDAVElement):
"""
@@ -975,9 +1034,10 @@
name = "calendar-free-busy-set"
hidden = True
- allowed_children = { (dav_namespace, "href"): (0, None) }
+ allowed_children = {(dav_namespace, "href"): (0, None)}
+
@registerElement
class ScheduleCalendarTransp (CalDAVElement):
"""
@@ -986,11 +1046,12 @@
name = "schedule-calendar-transp"
allowed_children = {
- (caldav_namespace, "opaque" ): (0, 1),
- (caldav_namespace, "transparent" ): (0, 1),
+ (caldav_namespace, "opaque"): (0, 1),
+ (caldav_namespace, "transparent"): (0, 1),
}
+
@registerElement
class Opaque (CalDAVEmptyElement):
"""
@@ -999,6 +1060,7 @@
name = "opaque"
+
@registerElement
class Transparent (CalDAVEmptyElement):
"""
@@ -1007,6 +1069,7 @@
name = "transparent"
+
@registerElement
class ScheduleDefaultCalendarURL (CalDAVElement):
"""
@@ -1014,9 +1077,10 @@
"""
name = "schedule-default-calendar-URL"
- allowed_children = { (dav_namespace, "href"): (0, 1) }
+ allowed_children = {(dav_namespace, "href"): (0, 1)}
+
@registerElement
class ScheduleInboxURL (CalDAVElement):
"""
@@ -1027,9 +1091,10 @@
hidden = True
protected = True
- allowed_children = { (dav_namespace, "href"): (0, 1) }
+ allowed_children = {(dav_namespace, "href"): (0, 1)}
+
@registerElement
class ScheduleOutboxURL (CalDAVElement):
"""
@@ -1040,9 +1105,10 @@
hidden = True
protected = True
- allowed_children = { (dav_namespace, "href"): (0, 1) }
+ allowed_children = {(dav_namespace, "href"): (0, 1)}
+
@registerElement
class Originator (CalDAVElement):
"""
@@ -1054,16 +1120,17 @@
hidden = True
protected = True
- allowed_children = { (dav_namespace, "href"): (0, 1) } # NB Minimum is zero because this is a property name
+ allowed_children = {(dav_namespace, "href"): (0, 1)} # NB Minimum is zero because this is a property name
+
@registerElement
class Recipient (CalDAVElement):
"""
A property on resources in schedule Inbox indicating the Recipients targeted
by the SCHEDULE operation.
(CalDAV-schedule, section x.x.x)
-
+
The recipient for whom this response is for.
(CalDAV-schedule, section x.x.x)
"""
@@ -1071,9 +1138,10 @@
hidden = True
protected = True
- allowed_children = { (dav_namespace, "href"): (0, None) } # NB Minimum is zero because this is a property name
+ allowed_children = {(dav_namespace, "href"): (0, None)} # NB Minimum is zero because this is a property name
+
@registerElement
class ScheduleTag (CalDAVTextElement):
"""
@@ -1085,6 +1153,7 @@
protected = True
+
@registerElement
class ScheduleInbox (CalDAVEmptyElement):
"""
@@ -1094,6 +1163,7 @@
name = "schedule-inbox"
+
@registerElement
class ScheduleOutbox (CalDAVEmptyElement):
"""
@@ -1103,6 +1173,7 @@
name = "schedule-outbox"
+
@registerElement
class ScheduleResponse (CalDAVElement):
"""
@@ -1111,9 +1182,10 @@
"""
name = "schedule-response"
- allowed_children = { (caldav_namespace, "response"): (0, None) }
+ allowed_children = {(caldav_namespace, "response"): (0, None)}
+
@registerElement
class Response (CalDAVElement):
"""
@@ -1123,14 +1195,15 @@
name = "response"
allowed_children = {
- (caldav_namespace, "recipient" ): (1, 1),
- (caldav_namespace, "request-status" ): (1, 1),
- (caldav_namespace, "calendar-data" ): (0, 1),
- (dav_namespace, "error" ): (0, 1), # 2518bis
- (dav_namespace, "responsedescription"): (0, 1)
+ (caldav_namespace, "recipient"): (1, 1),
+ (caldav_namespace, "request-status"): (1, 1),
+ (caldav_namespace, "calendar-data"): (0, 1),
+ (dav_namespace, "error"): (0, 1), # 2518bis
+ (dav_namespace, "responsedescription"): (0, 1)
}
+
@registerElement
class RequestStatus (CalDAVTextElement):
"""
@@ -1140,6 +1213,7 @@
name = "request-status"
+
@registerElement
class Schedule (CalDAVEmptyElement):
"""
@@ -1147,8 +1221,9 @@
(CalDAV-schedule, section x.x.x)
"""
name = "schedule"
-
+
+
@registerElement
class ScheduleDeliver (CalDAVEmptyElement):
"""
@@ -1156,8 +1231,9 @@
(CalDAV-schedule, section x.x.x)
"""
name = "schedule-deliver"
-
+
+
@registerElement
class ScheduleSend (CalDAVEmptyElement):
"""
@@ -1165,8 +1241,9 @@
(CalDAV-schedule, section x.x.x)
"""
name = "schedule-send"
-
+
+
@registerElement
class CalendarUserType (CalDAVTextElement):
"""
@@ -1176,9 +1253,9 @@
protected = True
-##
+#
# draft-daboo-valarm-extensions
-##
+#
caldav_default_alarms_compliance = (
"calendar-default-alarms",
@@ -1192,6 +1269,7 @@
calendartxt = None
+
def calendar(self):
"""
Returns a calendar component derived from this element, which contains
@@ -1200,13 +1278,14 @@
valarm = str(self)
return iComponent.fromString(self.calendartxt % str(self)) if valarm else None
+
def valid(self):
"""
Determine whether the content of this element is a valid single VALARM component or empty.
-
+
@return: True if valid, False if not.
"""
-
+
if str(self):
try:
calendar = self.calendar()
@@ -1214,7 +1293,7 @@
return False
except ValueError:
return False
-
+
# Make sure there is one alarm component
try:
valarm = tuple(tuple(calendar.subcomponents())[0].subcomponents())[0]
@@ -1222,10 +1301,11 @@
return False
if valarm.name().upper() != "VALARM":
return False
-
+
return True
+
@registerElement
class DefaultAlarmVEventDateTime (DefaultAlarmBase):
name = "default-alarm-vevent-datetime"
@@ -1242,8 +1322,9 @@
%sEND:VEVENT
END:VCALENDAR
"""
-
+
+
@registerElement
class DefaultAlarmVEventDate (DefaultAlarmBase):
name = "default-alarm-vevent-date"
@@ -1260,8 +1341,9 @@
%sEND:VEVENT
END:VCALENDAR
"""
-
+
+
@registerElement
class DefaultAlarmVToDoDateTime (DefaultAlarmBase):
name = "default-alarm-vtodo-datetime"
@@ -1279,6 +1361,7 @@
"""
+
@registerElement
class DefaultAlarmVToDoDate (DefaultAlarmBase):
name = "default-alarm-vtodo-date"
@@ -1296,13 +1379,37 @@
"""
-##
+
+#
+# draft-daboo-caldav-attachments
+#
+
+caldav_managed_attachments_compliance = (
+ "calendar-managed-attachments",
+)
+
+
+
+@registerElement
+class ManagedAttachmentsServerURL (CalDAVElement):
+ """
+ Zero or one href elements defining the base scheme/host for attachments.
+ """
+ name = "managed-attachments-server-URL"
+
+ allowed_children = {(dav_namespace, "href"): (0, 1)}
+
+
+
+#
# Extensions to ResourceType
-##
+#
-def _isCalendar(self): return bool(self.childrenOfType(Calendar))
+def _isCalendar(self):
+ return bool(self.childrenOfType(Calendar))
+
ResourceType.isCalendar = _isCalendar
-ResourceType.calendar = ResourceType(Collection(), Calendar())
-ResourceType.scheduleInbox = ResourceType(Collection(), ScheduleInbox())
+ResourceType.calendar = ResourceType(Collection(), Calendar())
+ResourceType.scheduleInbox = ResourceType(Collection(), ScheduleInbox())
ResourceType.scheduleOutbox = ResourceType(Collection(), ScheduleOutbox())
Modified: CalendarServer/trunk/twistedcaldav/ical.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/ical.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/ical.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -67,9 +67,9 @@
"VEVENT",
"VTODO",
"VTIMEZONE",
- #"VJOURNAL",
+ # "VJOURNAL",
"VFREEBUSY",
- #"VAVAILABILITY",
+ # "VAVAILABILITY",
)
# 2445 default values and parameters
@@ -172,7 +172,8 @@
self._pycalendar = pyobj
else:
# Convert params dictionary to list of lists format used by pycalendar
- self._pycalendar = PyCalendarProperty(name, value)
+ valuetype = kwargs.get("valuetype")
+ self._pycalendar = PyCalendarProperty(name, value, valuetype=valuetype)
for attrname, attrvalue in params.items():
self._pycalendar.addAttribute(PyCalendarAttribute(attrname, attrvalue))
@@ -1639,8 +1640,8 @@
timezone_refs = set()
timezones = set()
got_master = False
- #got_override = False
- #master_recurring = False
+ # got_override = False
+ # master_recurring = False
for subcomponent in self.subcomponents():
if subcomponent.name() == "VTIMEZONE":
@@ -1790,9 +1791,9 @@
else:
return None
- ##
+ # #
# iTIP stuff
- ##
+ # #
def isValidMethod(self):
@@ -2249,6 +2250,77 @@
component.replaceProperty(property)
+ def hasPropertyWithParameterMatch(self, propname, param_name, param_value, param_value_is_default=False):
+ """
+ See if property whose name, and parameter name, value match in any components.
+
+ @param property: the L{Property} to replace in this component.
+ @param param_name: the C{str} of parameter name to match.
+ @param param_value: the C{str} of parameter value to match, if C{None} then just match on the
+ presence of the parameter name.
+ @param param_value_is_default: C{bool} to indicate whether absence of the named parameter
+ also implies a match
+
+ @return: C{True} if matching property found, C{False} if not
+ @rtype: C{bool}
+ """
+
+ if self.name() == "VCALENDAR":
+ for component in self.subcomponents():
+ if component.name() in ignoredComponents:
+ continue
+ if component.hasPropertyWithParameterMatch(propname, param_name, param_value, param_value_is_default):
+ return True
+ else:
+ for oldprop in tuple(self.properties(propname)):
+ pvalue = oldprop.parameterValue(param_name)
+ if pvalue is None and param_value_is_default or pvalue == param_value or param_value is None:
+ return True
+
+ return False
+
+
+ def replaceAllPropertiesWithParameterMatch(self, property, param_name, param_value, param_value_is_default=False):
+ """
+ Replace a property whose name, and parameter name, value match in all components.
+
+ @param property: the L{Property} to replace in this component.
+ @param param_name: the C{str} of parameter name to match.
+ @param param_value: the C{str} of parameter value to match.
+ @param param_value_is_default: C{bool} to indicate whether absence of the named parameter
+ also implies a match
+ """
+
+ if self.name() == "VCALENDAR":
+ for component in self.subcomponents():
+ if component.name() in ignoredComponents:
+ continue
+ component.replaceAllPropertiesWithParameterMatch(property, param_name, param_value, param_value_is_default)
+ else:
+ for oldprop in tuple(self.properties(property.name())):
+ pvalue = oldprop.parameterValue(param_name)
+ if pvalue is None and param_value_is_default or pvalue == param_value:
+ self.removeProperty(oldprop)
+ self.addProperty(property)
+
+
+ def removeAllPropertiesWithParameterMatch(self, propname, param_name, param_value, param_value_is_default=False):
+ """
+ Remove all properties whose name, and parameter name, value match in all components.
+ """
+
+ if self.name() == "VCALENDAR":
+ for component in self.subcomponents():
+ if component.name() in ignoredComponents:
+ continue
+ component.removeAllPropertiesWithParameterMatch(propname, param_name, param_value, param_value_is_default)
+ else:
+ for oldprop in tuple(self.properties(propname)):
+ pvalue = oldprop.parameterValue(param_name)
+ if pvalue is None and param_value_is_default or pvalue == param_value:
+ self.removeProperty(oldprop)
+
+
def transferProperties(self, from_calendar, properties):
"""
Transfer specified properties from old calendar into all components
@@ -3054,9 +3126,9 @@
-##
+# #
# Timezones
-##
+# #
def tzexpand(tzdata, start, end):
"""
@@ -3160,9 +3232,9 @@
-##
+# #
# Utilities
-##
+# #
def normalizeCUAddress(cuaddr, lookupFunction, principalFunction, toUUID=True):
# Check that we can lookup this calendar user address - if not
Modified: CalendarServer/trunk/twistedcaldav/method/post.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/post.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/method/post.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -1,4 +1,4 @@
-##
+# #
# Copyright (c) 2005-2012 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +12,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-##
+# #
from hashlib import md5
@@ -44,7 +44,7 @@
def http_POST(self, request):
# POST can support many different APIs
-
+
# First look at query params
if request.params:
if request.params == "add-member":
@@ -52,22 +52,36 @@
result = (yield POST_handler_add_member(self, request))
returnValue(result)
- else:
- # Content-type handlers
- contentType = request.headers.getHeader("content-type")
- if contentType:
- if hasattr(self, "POST_handler_content_type"):
- result = (yield self.POST_handler_content_type(request, (contentType.mediaType, contentType.mediaSubtype)))
- returnValue(result)
+ # Look for query arguments
+ if request.args:
+ action = request.args.get("action", ("",))
+ if len(action) == 1:
+ action = action[0]
+ if action in ("attachment-add", "attachment-update", "attachment-remove") and \
+ hasattr(self, "POST_handler_attachment"):
+ if config.EnableManagedAttachments:
+ result = (yield self.POST_handler_attachment(request, action))
+ returnValue(result)
+ else:
+ returnValue(StatusResponse(responsecode.FORBIDDEN, "Managed Attachments not supported."))
+ # Content-type handlers
+ contentType = request.headers.getHeader("content-type")
+ if contentType:
+ if hasattr(self, "POST_handler_content_type"):
+ result = (yield self.POST_handler_content_type(request, (contentType.mediaType, contentType.mediaSubtype)))
+ returnValue(result)
+
returnValue(responsecode.FORBIDDEN)
+
+
@inlineCallbacks
def POST_handler_add_member(self, request):
# Handle ;add-member
if self.isCalendarCollection():
-
+
parentURL = request.path
parent = self
@@ -80,7 +94,7 @@
(caldav_namespace, "supported-calendar-data"),
"Wrong MIME type for calendar collection",
))
-
+
# Read the calendar component from the stream
try:
calendardata = (yield allDataFromStream(request.stream))
@@ -98,19 +112,19 @@
))
# Create a new name if one was not provided
- name = md5(str(calendardata) + str(time.time()) + request.path).hexdigest() + ".ics"
-
+ name = md5(str(calendardata) + str(time.time()) + request.path).hexdigest() + ".ics"
+
# Get a resource for the new item
newchildURL = joinURL(parentURL, name)
newchild = (yield request.locateResource(newchildURL))
storer = StoreCalendarObjectResource(
- request = request,
- destination = newchild,
- destination_uri = newchildURL,
- destinationcal = True,
- destinationparent = parent,
- calendar = calendardata,
+ request=request,
+ destination=newchild,
+ destination_uri=newchildURL,
+ destinationcal=True,
+ destinationparent=parent,
+ calendar=calendardata,
)
result = (yield storer.run())
@@ -134,7 +148,7 @@
raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, str(e)))
elif self.isAddressBookCollection():
-
+
parentURL = request.path
parent = self
@@ -147,7 +161,7 @@
(carddav_namespace, "supported-address-data"),
"Wrong MIME type for address book collection",
))
-
+
# Read the calendar component from the stream
try:
vcarddata = (yield allDataFromStream(request.stream))
@@ -165,20 +179,20 @@
))
# Create a new name if one was not provided
- name = md5(str(vcarddata) + str(time.time()) + request.path).hexdigest() + ".vcf"
-
+ name = md5(str(vcarddata) + str(time.time()) + request.path).hexdigest() + ".vcf"
+
# Get a resource for the new item
newchildURL = joinURL(parentURL, name)
newchild = (yield request.locateResource(newchildURL))
storer = StoreAddressObjectResource(
- request = request,
- sourceadbk = False,
- destination = newchild,
- destination_uri = newchildURL,
- destinationadbk = True,
- destinationparent = parent,
- vcard = vcarddata,
+ request=request,
+ sourceadbk=False,
+ destination=newchild,
+ destination_uri=newchildURL,
+ destinationadbk=True,
+ destinationparent=parent,
+ vcard=vcarddata,
)
result = (yield storer.run())
@@ -203,4 +217,3 @@
# Default behavior
returnValue(responsecode.FORBIDDEN)
-
Modified: CalendarServer/trunk/twistedcaldav/method/put_addressbook_common.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/put_addressbook_common.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/method/put_addressbook_common.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -45,18 +45,18 @@
log = Logger()
class StoreAddressObjectResource(object):
-
+
class UIDReservation(object):
-
+
def __init__(self, index, uid, uri):
self.reserved = False
self.index = index
self.uid = uid
self.uri = uri
-
+
@inlineCallbacks
def reserve(self):
-
+
# Lets use a deferred for this and loop a few times if we cannot reserve so that we give
# time to whoever has the reservation to finish and release it.
failure_count = 0
@@ -68,34 +68,35 @@
except ReservationError:
self.reserved = False
failure_count += 1
-
+
pause = Deferred()
def _timedDeferred():
pause.callback(True)
reactor.callLater(0.5, _timedDeferred) #@UndefinedVariable
yield pause
-
+
if self.uri and not self.reserved:
raise HTTPError(StatusResponse(responsecode.CONFLICT, "Resource: %s currently in use." % (self.uri,)))
-
+
@inlineCallbacks
def unreserve(self):
if self.reserved:
yield self.index.unreserveUID(self.uid)
self.reserved = False
+
def __init__(
self,
request,
source=None, source_uri=None, sourceparent=None, sourceadbk=False, deletesource=False,
destination=None, destination_uri=None, destinationparent=None, destinationadbk=True,
vcard=None,
- indexdestination = True,
+ indexdestination=True,
returnData=False,
):
"""
Function that does common PUT/COPY/MOVE behavior.
-
+
@param request: the L{twext.web2.server.Request} for the current HTTP request.
@param source: the L{CalDAVResource} for the source resource to copy from, or None if source data
is to be read from the request.
@@ -110,7 +111,7 @@
@param deletesource: True if the source resource is to be deleted on successful completion, False otherwise.
@param returnData: True if the caller wants the actual data written to the store returned
"""
-
+
# Check that all arguments are valid
try:
assert destination is not None and destinationparent is not None and destination_uri is not None
@@ -131,7 +132,7 @@
log.err("vcard=%s\n" % (vcard,))
log.err("deletesource=%s\n" % (deletesource,))
raise
-
+
self.request = request
self.sourceadbk = sourceadbk
self.destinationadbk = destinationadbk
@@ -146,7 +147,7 @@
self.deletesource = deletesource
self.indexdestination = indexdestination
self.returnData = returnData
-
+
self.access = None
@@ -161,7 +162,7 @@
result, message = self.validResourceName()
if not result:
log.err(message)
- raise HTTPError(StatusResponse(responsecode.FORBIDDEN, "Resource name not allowed"))
+ raise HTTPError(StatusResponse(responsecode.FORBIDDEN, message))
# Valid collection size check on the destination parent resource
result, message = (yield self.validCollectionSize())
@@ -182,9 +183,9 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(carddav_namespace, "supported-address-data"),
- "Invalid content-type",
+ message,
))
-
+
# At this point we need the calendar data to do more tests
self.vcard = (yield self.source.vCard())
else:
@@ -198,7 +199,7 @@
(carddav_namespace, "valid-address-data"),
"Could not parse vCard",
))
-
+
# Valid vcard data check
result, message = self.validAddressDataCheck()
if not result:
@@ -206,9 +207,9 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(carddav_namespace, "valid-address-data"),
- description=message
+ message
))
-
+
# Valid vcard data for CalDAV check
result, message = self.validCardDAVDataCheck()
if not result:
@@ -216,7 +217,7 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(carddav_namespace, "valid-addressbook-object-resource"),
- "Invalid vCard data",
+ message,
))
# Must have a valid UID at this point
@@ -244,12 +245,13 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(carddav_namespace, "max-resource-size"),
- "Address data too large",
+ message,
))
# Check access
returnValue(None)
-
+
+
def validResourceName(self):
"""
Make sure that the resource name for the new resource is valid.
@@ -259,10 +261,11 @@
filename = self.destination.name()
if filename.startswith("."):
result = False
- message = "File name %s not allowed in vcard collection" % (filename,)
+ message = "Resource name %s not allowed in vcard collection" % (filename,)
return result, message
-
+
+
def validContentType(self):
"""
Make sure that the content-type of the source resource is text/vcard.
@@ -276,7 +279,8 @@
message = "MIME type %s not allowed in vcard collection" % (content_type,)
return result, message
-
+
+
@inlineCallbacks
def validCollectionSize(self):
"""
@@ -291,7 +295,8 @@
message = "Too many resources in collection %s" % (self.destinationparent,)
returnValue((result, message,))
-
+
+
def validAddressDataCheck(self):
"""
Check that the calendar data is valid iCalendar.
@@ -309,9 +314,10 @@
except ValueError, e:
result = False
message = "Invalid vcard data: %s" % (e,)
-
+
return result, message
-
+
+
def validCardDAVDataCheck(self):
"""
Check that the vcard data is valid vCard.
@@ -325,9 +331,10 @@
except ValueError, e:
result = False
message = "vCard data does not conform to CardDAV requirements: %s" % (e,)
-
+
return result, message
-
+
+
def validSizeCheck(self):
"""
Make sure that the content-type of the source resource is text/vcard.
@@ -373,7 +380,7 @@
# the other PUT tries to reserve and fails but no index entry exists yet.
if rname is None:
rname = "<<Unknown Resource>>"
-
+
result = False
message = "Address book resource %s already exists with same UID %s" % (rname, uid)
else:
@@ -384,7 +391,7 @@
rname = self.destination.name()
result = False
message = "Cannot overwrite vcard resource %s with different UID %s" % (rname, olduid)
-
+
returnValue((result, message, rname))
@@ -408,9 +415,10 @@
self.destination.newStoreProperties().update(sourceProperties)
else:
response = (yield self.doStorePut())
-
+
returnValue(response)
+
@inlineCallbacks
def doStorePut(self):
@@ -418,6 +426,7 @@
response = (yield self.destination.storeStream(stream))
returnValue(response)
+
@inlineCallbacks
def doSourceDelete(self):
# Delete the source resource
@@ -425,6 +434,7 @@
log.debug("Source removed %s" % (self.source,))
returnValue(None)
+
@inlineCallbacks
def run(self):
"""
@@ -435,12 +445,12 @@
try:
reservation = None
-
+
# Handle all validation operations here.
yield self.fullValidation()
# Reservation and UID conflict checking is next.
- if self.destinationadbk:
+ if self.destinationadbk:
# Reserve UID
self.destination_index = self.destinationparent.index()
reservation = StoreAddressObjectResource.UIDReservation(
@@ -448,7 +458,7 @@
)
if self.indexdestination:
yield reservation.reserve()
-
+
# UID conflict check - note we do this after reserving the UID to avoid a race condition where two requests
# try to write the same vcard data to two different resource URIs.
result, message, rname = yield self.noUIDConflict(self.uid)
@@ -466,18 +476,109 @@
),
"UID already used in another resource",
))
-
+
# Do the actual put or copy
response = (yield self.doStore())
-
+
if reservation:
yield reservation.unreserve()
-
+
returnValue(response)
-
+
except Exception, err:
if reservation:
yield reservation.unreserve()
raise err
+
+
+ @inlineCallbacks
+ def moveValidation(self):
+ """
+ Do full validation of source and destination address book data.
+ """
+
+ # Valid resource name check
+ result, message = self.validResourceName()
+ if not result:
+ log.err(message)
+ raise HTTPError(StatusResponse(responsecode.FORBIDDEN, message))
+
+ # Valid collection size check on the destination parent resource
+ result, message = (yield self.validCollectionSize())
+ if not result:
+ log.err(message)
+ raise HTTPError(ErrorResponse(
+ responsecode.FORBIDDEN,
+ customxml.MaxResources(),
+ message,
+ ))
+
+ returnValue(None)
+
+
+ @inlineCallbacks
+ def doStoreMove(self):
+
+ # Do move
+ response = (yield self.source.storeMove(self.request, self.destinationparent, self.destination._name))
+ returnValue(response)
+
+
+ @inlineCallbacks
+ def move(self):
+ """
+ Function that does common MOVE behavior.
+
+ @return: a Deferred with a status response result.
+ """
+
+ try:
+ reservation = None
+
+ # Handle all validation operations here.
+ yield self.moveValidation()
+
+ # Reservation and UID conflict checking is next.
+
+ # Reserve UID
+ self.destination_index = self.destinationparent.index()
+ reservation = StoreAddressObjectResource.UIDReservation(
+ self.destination_index, self.source.uid(), self.destination_uri
+ )
+ if self.indexdestination:
+ yield reservation.reserve()
+
+ # UID conflict check - note we do this after reserving the UID to avoid a race condition where two requests
+ # try to write the same vcard data to two different resource URIs.
+ result, message, rname = yield self.noUIDConflict(self.source.uid())
+ if not result:
+ log.err(message)
+ raise HTTPError(ErrorResponse(
+ responsecode.FORBIDDEN,
+ NoUIDConflict(
+ davxml.HRef.fromString(
+ joinURL(
+ parentForURL(self.destination_uri),
+ rname.encode("utf-8")
+ )
+ )
+ ),
+ "UID already used in another resource",
+ ))
+
+ # Do the actual move
+ response = (yield self.doStoreMove())
+
+ if reservation:
+ yield reservation.unreserve()
+
+ returnValue(response)
+
+ except Exception, err:
+
+ if reservation:
+ yield reservation.unreserve()
+
+ raise err
Modified: CalendarServer/trunk/twistedcaldav/method/put_common.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/put_common.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/method/put_common.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -42,6 +42,7 @@
from twext.python.log import Logger
from twext.web2.dav.http import ErrorResponse
+from txdav.caldav.icalendarstore import AttachmentStoreValidManagedID
from txdav.common.icommondatastore import ReservationError
from twistedcaldav.config import config
@@ -51,7 +52,7 @@
from twistedcaldav.datafilters.peruserdata import PerUserDataFilter
from twistedcaldav.ical import Component, Property
-from twistedcaldav.instance import TooManyInstancesError,\
+from twistedcaldav.instance import TooManyInstancesError, \
InvalidOverriddenInstanceError
from twistedcaldav.memcachelock import MemcacheLock, MemcacheLockTimeoutError
from twistedcaldav.scheduling.implicit import ImplicitScheduler
@@ -61,7 +62,7 @@
class StoreCalendarObjectResource(object):
class UIDReservation(object):
-
+
def __init__(self, index, uid, uri, internal_request, transaction):
if internal_request:
self.lock = None
@@ -77,10 +78,10 @@
self.uid = uid
self.uri = uri
self.transaction = transaction
-
+
@inlineCallbacks
def reserve(self):
-
+
# Implicit lock
if self.lock:
try:
@@ -99,19 +100,19 @@
except ReservationError:
self.reserved = False
failure_count += 1
-
+
pause = Deferred()
def _timedDeferred():
pause.callback(True)
reactor.callLater(0.5, _timedDeferred)
yield pause
-
+
if self.uri and not self.reserved:
if self.lock:
# Can release immediately as nothing happened
yield self.lock.release()
raise HTTPError(StatusResponse(responsecode.CONFLICT, "Resource: %s currently in use in calendar." % (self.uri,)))
-
+
@inlineCallbacks
def unreserve(self):
if self.reserved:
@@ -122,6 +123,7 @@
self.transaction.postCommit(self.lock.clean)
self.transaction.postAbort(self.lock.clean)
+
def __init__(
self,
request,
@@ -133,10 +135,11 @@
internal_request=False,
processing_organizer=None,
returnData=False,
+ attachmentProcessingDone=False,
):
"""
Function that does common PUT/COPY/MOVE behavior.
-
+
@param request: the L{twext.web2.server.Request} for the current HTTP request.
@param source: the L{CalDAVResource} for the source resource to copy from, or None if source data
is to be read from the request.
@@ -154,8 +157,9 @@
@param internal_request: True if this request originates internally and needs to bypass scheduling authorization checks.
@param processing_organizer: True if implicit processing for an organizer, False if for an attendee, None if not implicit processing.
@param returnData: True if the caller wants the actual data written to the store returned
+ @param attachmentProcessingDone: True if the caller has already processed managed attachment changes
"""
-
+
# Check that all arguments are valid
try:
assert destination is not None and destinationparent is not None and destination_uri is not None
@@ -194,11 +198,13 @@
self.internal_request = internal_request
self.processing_organizer = processing_organizer
self.returnData = returnData
+ self.attachmentProcessingDone = attachmentProcessingDone
self.access = None
self.hasPrivateComments = False
self.isScheduleResource = False
+
@inlineCallbacks
def fullValidation(self):
"""
@@ -217,7 +223,7 @@
result, message = self.validResourceName()
if not result:
log.err(message)
- raise HTTPError(StatusResponse(responsecode.FORBIDDEN, "Resource name not allowed"))
+ raise HTTPError(StatusResponse(responsecode.FORBIDDEN, message))
# Valid collection size check on the destination parent resource
result, message = (yield self.validCollectionSize())
@@ -226,7 +232,7 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
customxml.MaxResources(),
- "Too many resources in collection",
+ message,
))
# Valid data sizes - do before parsing the data
@@ -238,7 +244,7 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(caldav_namespace, "max-resource-size"),
- "Calendar data too large",
+ message,
))
else:
# Valid calendar data size check
@@ -248,7 +254,7 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(caldav_namespace, "max-resource-size"),
- "Calendar data too large",
+ message,
))
if not self.sourcecal:
@@ -260,9 +266,9 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(caldav_namespace, "supported-calendar-data"),
- "Invalid content-type for data",
+ message,
))
-
+
# At this point we need the calendar data to do more tests
try:
self.calendar = (yield self.source.iCalendarForUser(self.request))
@@ -271,7 +277,7 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(caldav_namespace, "valid-calendar-data"),
- description="Can't parse calendar data"
+ "Can't parse calendar data"
))
else:
try:
@@ -282,7 +288,7 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(caldav_namespace, "valid-calendar-data"),
- description="Can't parse calendar data"
+ "Can't parse calendar data"
))
# Possible timezone stripping
@@ -299,7 +305,7 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(caldav_namespace, "valid-calendar-data"),
- description=message
+ message
))
# Valid calendar data for CalDAV check
@@ -309,7 +315,7 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(caldav_namespace, "valid-calendar-object-resource"),
- "Invalid calendar data",
+ message,
))
# Valid calendar component for check
@@ -319,7 +325,7 @@
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(caldav_namespace, "supported-component"),
- "Invalid calendar data",
+ message,
))
# Valid attendee list size check
@@ -330,7 +336,7 @@
ErrorResponse(
responsecode.FORBIDDEN,
MaxAttendeesPerInstance.fromString(str(config.MaxAttendeesPerInstance)),
- "Too many attendees in calendar data",
+ message,
)
)
@@ -400,7 +406,7 @@
msg = "Calendar-to-calendar %s with different homes are not supported" % ("moves" if self.deletesource else "copies",)
log.debug(msg)
raise HTTPError(StatusResponse(responsecode.FORBIDDEN, msg))
-
+
# Calendar to calendar moves where Organizer is present are not OK if the owners are different.
sourceowner = (yield self.sourceparent.ownerPrincipal(self.request))
destowner = (yield self.destinationparent.ownerPrincipal(self.request))
@@ -411,7 +417,7 @@
if organizer is None and self.destination.exists() and self.destinationcal:
oldCal = yield self.destination.iCalendar()
organizer = oldCal.getOrganizer()
-
+
if organizer is not None:
msg = "Calendar-to-calendar %s with an organizer property present and different owners are not supported" % ("moves" if self.deletesource else "copies",)
log.debug(msg)
@@ -448,6 +454,7 @@
# always do smart merge now if If-Match is present.
self.schedule_tag_match = self.request.headers.getHeader("If-Match") is not None
+
def validResourceName(self):
"""
Make sure that the resource name for the new resource is valid.
@@ -460,7 +467,8 @@
message = "File name %s not allowed in calendar collection" % (filename,)
return result, message
-
+
+
def validContentType(self):
"""
Make sure that the content-type of the source resource is text/calendar.
@@ -474,7 +482,8 @@
message = "MIME type %s not allowed in calendar collection" % (content_type,)
return result, message
-
+
+
def validContentLength(self):
"""
Make sure that the length of the source data is within bounds.
@@ -488,7 +497,8 @@
message = "File size %d bytes is larger than allowed limit %d bytes" % (calsize, config.MaxResourceSize)
return result, message
-
+
+
@inlineCallbacks
def validCollectionSize(self):
"""
@@ -503,7 +513,8 @@
message = "Too many resources in collection %s" % (self.destinationparent,)
returnValue((result, message,))
-
+
+
def validCalendarDataCheck(self):
"""
Check that the calendar data is valid iCalendar.
@@ -521,9 +532,10 @@
except ValueError, e:
result = False
message = "Invalid calendar data: %s" % (e,)
-
+
return result, message
-
+
+
def validCalDAVDataCheck(self):
"""
Check that the calendar data is valid as a CalDAV calendar object resource.
@@ -537,22 +549,24 @@
except ValueError, e:
result = False
message = "Calendar data does not conform to CalDAV requirements: %s" % (e,)
-
+
return result, message
-
+
+
def validComponentType(self):
"""
Make sure that any limits on the number of resources in a collection are enforced.
"""
result = True
message = ""
-
+
if not self.destinationparent.isSupportedComponent(self.calendar.mainType()):
result = False
message = "Invalid component type %s for calendar: %s" % (self.calendar.mainType(), self.destinationparent,)
return result, message
-
+
+
def validSizeCheck(self):
"""
Make sure that the content-type of the source resource is text/calendar.
@@ -569,12 +583,13 @@
return result, message
+
@inlineCallbacks
def validAttendeeListSizeCheck(self):
"""
Make sure that the Attendee list length is within bounds. We don't do this check for inbox because we
will assume that the limit has been applied on the PUT causing the iTIP message to be created.
-
+
FIXME: The inbox check might not take into account iSchedule stuff from outside. That needs to have
the max attendees check applied at the time of delivery.
"""
@@ -586,7 +601,7 @@
uniqueAttendees.add(attendee.value())
attendeeListLength = len(uniqueAttendees)
if attendeeListLength > config.MaxAttendeesPerInstance:
-
+
# Check to see whether we are increasing the count on an existing resource
if self.destination.exists() and self.destinationcal:
oldcalendar = (yield self.destination.iCalendarForUser(self.request))
@@ -596,20 +611,21 @@
oldAttendeeListLength = len(uniqueAttendees)
else:
oldAttendeeListLength = 0
-
+
if attendeeListLength > oldAttendeeListLength:
result = False
message = "Attendee list size %d is larger than allowed limit %d" % (attendeeListLength, config.MaxAttendeesPerInstance)
returnValue((result, message,))
+
def validAccess(self):
"""
Make sure that the X-CALENDARSERVER-ACCESS property is properly dealt with.
"""
-
+
if self.calendar.hasProperty(Component.ACCESS_PROPERTY):
-
+
# Must be a value we know about
self.access = self.calendar.accessLevel(default=None)
if self.access is None:
@@ -618,11 +634,11 @@
(calendarserver_namespace, "valid-access-restriction"),
"Private event access level not allowed",
))
-
+
# Only DAV:owner is able to set the property to other than PUBLIC
if not self.internal_request:
def _callback(parent_owner):
-
+
authz = self.destinationparent.currentPrincipal(self.request)
if davxml.Principal(parent_owner) != authz and self.access != Component.ACCESS_PUBLIC:
raise HTTPError(ErrorResponse(
@@ -630,9 +646,9 @@
(calendarserver_namespace, "valid-access-restriction-change"),
"Private event access level change not allowed",
))
-
+
return None
-
+
d = self.destinationparent.owner(self.request)
d.addCallback(_callback)
return d
@@ -641,7 +657,7 @@
if not self.source and self.destination.exists() and self.destination.accessMode:
old_access = self.destination.accessMode
self.calendar.addProperty(Property(name=Component.ACCESS_PROPERTY, value=old_access))
-
+
return succeed(None)
@@ -659,7 +675,7 @@
"X-CALENDARSERVER-PRIVATE-COMMENT",
"X-CALENDARSERVER-ATTENDEE-COMMENT",
))
-
+
if old_has_private_comments and not self.hasPrivateComments:
# Transfer old comments to new calendar
log.debug("Private Comments properties were entirely removed by the client. Restoring existing properties.")
@@ -684,18 +700,18 @@
old_organizer = old_calendar.getOrganizerProperty()
new_attendees = self.calendar.getAttendees()
old_attendees = tuple(old_calendar.getAllAttendeeProperties())
-
+
new_completed = self.calendar.mainComponent().hasProperty("COMPLETED")
old_completed = old_calendar.mainComponent().hasProperty("COMPLETED")
-
+
if old_organizer and not new_organizer and len(old_attendees) > 0 and len(new_attendees) == 0:
# Transfer old organizer and attendees to new calendar
log.debug("Organizer and attendee properties were entirely removed by the client. Restoring existing properties.")
-
+
# Get the originator who is the owner of the calendar resource being modified
originatorPrincipal = (yield self.destination.ownerPrincipal(self.request))
originatorAddresses = originatorPrincipal.calendarUserAddresses()
-
+
for component in self.calendar.subcomponents():
if component.name() != "VTODO":
continue
@@ -706,17 +722,18 @@
if old_component.name() != "VTODO":
continue
if old_component.hasProperty("DTSTART"):
- component.addProperty(old_component.getProperty("DTSTART"))
+ component.addProperty(old_component.getProperty("DTSTART").duplicate())
break
-
+
# Add organizer back in from previous resource
- component.addProperty(old_organizer)
-
+ component.addProperty(old_organizer.duplicate())
+
# Add attendees back in from previous resource
for anAttendee in old_attendees:
+ anAttendee = anAttendee.duplicate()
if component.hasProperty("COMPLETED") and anAttendee.value() in originatorAddresses:
anAttendee.setParameter("PARTSTAT", "COMPLETED")
- component.addProperty(anAttendee)
+ component.addProperty(anAttendee)
elif new_completed ^ old_completed and not self.internal_request:
# COMPLETED changed - sync up attendee state
@@ -727,15 +744,15 @@
# Transfer old organizer and attendees to new calendar
log.debug("Sync COMPLETED property change.")
-
+
# Get the originator who is the owner of the calendar resource being modified
originatorPrincipal = (yield self.destination.ownerPrincipal(self.request))
originatorAddresses = originatorPrincipal.calendarUserAddresses()
-
+
for component in self.calendar.subcomponents():
if component.name() != "VTODO":
continue
-
+
# Change owner partstat
for anAttendee in component.properties("ATTENDEE"):
if anAttendee.value() in originatorAddresses:
@@ -743,18 +760,18 @@
newpartstat = "COMPLETED" if component.hasProperty("COMPLETED") else "IN-PROCESS"
if newpartstat != oldpartstat:
anAttendee.setParameter("PARTSTAT", newpartstat)
-
+
@inlineCallbacks
def dropboxPathNormalization(self):
"""
Make sure sharees only use dropbox paths of the sharer.
"""
-
+
# Only relevant if calendar is sharee collection
changed = False
if self.destinationparent.isShareeCollection():
-
+
# Get all X-APPLE-DROPBOX's and ATTACH's that are http URIs
xdropboxes = self.calendar.getAllPropertiesInAnyComponent(
"X-APPLE-DROPBOX",
@@ -770,7 +787,7 @@
]
if len(xdropboxes) or len(attachments):
-
+
# Determine owner GUID
ownerPrincipal = (yield self.destinationparent.ownerPrincipal(self.request))
owner = ownerPrincipal.principalURL().split("/")[-2]
@@ -802,13 +819,14 @@
if uri:
attachment.setValue(uri)
changed = True
-
+
returnValue(changed)
+
def processAlarms(self):
"""
Remove duplicate alarms. Add a default alarm if required.
-
+
@return: indicate whether a change was made
@rtype: C{bool}
"""
@@ -825,7 +843,7 @@
# Check that we are creating and this is not the inbox
if not self.destinationcal or self.destination.exists() or self.isiTIP:
return changed
-
+
# Never add default alarms to calendar data in shared calendars
if self.destinationparent.isShareeCollection():
return changed
@@ -835,60 +853,61 @@
if self.calendar.mainType().upper() not in ("VEVENT", "VTODO"):
return changed
vevent = mtype == "VEVENT"
-
+
# Check timed or all-day
start, _ignore_end = self.calendar.mainComponent(allow_multiple=True).getEffectiveStartEnd()
if start is None:
# Yes VTODOs might have no DTSTART or DUE - in this case we do not add a default
return changed
timed = not start.isDateOnly()
-
+
# See if default exists and add using appropriate logic
alarm = self.destinationparent.getDefaultAlarm(vevent, timed)
if alarm:
changed = self.calendar.addAlarms(alarm)
return changed
+
@inlineCallbacks
- def noUIDConflict(self, uid):
- """
- Check that the UID of the new calendar object conforms to the requirements of
- CalDAV, i.e. it must be unique in the collection and we must not overwrite a
- different UID.
- @param uid: the UID for the resource being stored.
- @return: tuple: (True/False if the UID is valid, log message string,
- name of conflicted resource).
- """
-
- result = True
- message = ""
- rname = ""
+ def noUIDConflict(self, uid):
+ """
+ Check that the UID of the new calendar object conforms to the requirements of
+ CalDAV, i.e. it must be unique in the collection and we must not overwrite a
+ different UID.
+ @param uid: the UID for the resource being stored.
+ @return: tuple: (True/False if the UID is valid, log message string,
+ name of conflicted resource).
+ """
- # Adjust for a move into same calendar collection
- oldname = None
- if self.sourceparent and (self.sourceparent == self.destinationparent) and self.deletesource:
- oldname = self.source.name()
-
- # UID must be unique
- index = self.destinationparent.index()
- if not (yield index.isAllowedUID(uid, oldname, self.destination.name())):
- rname = yield index.resourceNameForUID(uid)
- # This can happen if two simultaneous PUTs occur with the same UID.
- # i.e. one PUT has reserved the UID but has not yet written the resource,
- # the other PUT tries to reserve and fails but no index entry exists yet.
- if rname is None:
- rname = "<<Unknown Resource>>"
- result = False
- message = "Calendar resource %s already exists with same UID %s" % (rname, uid)
- else:
- # Cannot overwrite a resource with different UID
- if self.destination.exists():
- olduid = yield index.resourceUIDForName(self.destination.name())
- if olduid != uid:
- rname = self.destination.name()
- result = False
- message = "Cannot overwrite calendar resource %s with different UID %s" % (rname, olduid)
-
+ result = True
+ message = ""
+ rname = ""
+
+ # Adjust for a move into same calendar collection
+ oldname = None
+ if self.sourceparent and (self.sourceparent == self.destinationparent) and self.deletesource:
+ oldname = self.source.name()
+
+ # UID must be unique
+ index = self.destinationparent.index()
+ if not (yield index.isAllowedUID(uid, oldname, self.destination.name())):
+ rname = yield index.resourceNameForUID(uid)
+ # This can happen if two simultaneous PUTs occur with the same UID.
+ # i.e. one PUT has reserved the UID but has not yet written the resource,
+ # the other PUT tries to reserve and fails but no index entry exists yet.
+ if rname is None:
+ rname = "<<Unknown Resource>>"
+ result = False
+ message = "Calendar resource %s already exists with same UID %s" % (rname, uid)
+ else:
+ # Cannot overwrite a resource with different UID
+ if self.destination.exists():
+ olduid = yield index.resourceUIDForName(self.destination.name())
+ if olduid != uid:
+ rname = self.destination.name()
+ result = False
+ message = "Cannot overwrite calendar resource %s with different UID %s" % (rname, olduid)
+
returnValue((result, message, rname))
@@ -901,7 +920,7 @@
# Do scheduling
if not self.isiTIP:
scheduler = ImplicitScheduler()
-
+
# Determine type of operation PUT, COPY or DELETE
if not self.source:
# PUT
@@ -938,7 +957,7 @@
self.calendar,
internal_request=self.internal_request,
))
-
+
if do_implicit_action and self.allowImplicitSchedule:
# Cannot do implicit in sharee's shared calendar
@@ -960,7 +979,7 @@
did_implicit_action = True
else:
is_scheduling_resource = False
-
+
returnValue((is_scheduling_resource, data_changed, did_implicit_action,))
@@ -1041,7 +1060,7 @@
# store as the store will "commit" the new value.
if self.access:
self.destination.accessMode = self.access
-
+
# Do not remove the property if access was not specified and we are storing in a calendar.
# This ensure that clients that do not preserve the iCalendar property do not cause access
# restrictions to be lost.
@@ -1076,7 +1095,6 @@
if change_scheduletag or not self.destination.scheduleTag:
self.destination.scheduleTag = str(uuid.uuid4())
-
# Handle weak etag compatibility
if config.Scheduling.CalDAV.ScheduleTagCompatibility:
if change_scheduletag:
@@ -1091,12 +1109,11 @@
etags += (hashlib.md5(data + (self.destination.scheduleTag if self.destination.scheduleTag else "")).hexdigest(),)
self.destination.scheduleEtags = etags
else:
- self.destination.scheduleEtags = ()
+ self.destination.scheduleEtags = ()
else:
self.destination.scheduleTag = ""
- self.destination.scheduleEtags = ()
+ self.destination.scheduleEtags = ()
-
if componentToStore is None:
stream = MemoryStream(data)
response = yield self.destination.storeStream(stream)
@@ -1107,10 +1124,11 @@
if self.isScheduleResource:
# Add a response header
- response.headers.setHeader("Schedule-Tag", self.destination.scheduleTag)
+ response.headers.setHeader("Schedule-Tag", self.destination.scheduleTag)
returnValue(response)
+
@inlineCallbacks
def doSourceDelete(self):
# Delete the source resource
@@ -1118,6 +1136,7 @@
log.debug("Source removed %s" % (self.source,))
returnValue(None)
+
@inlineCallbacks
def run(self):
"""
@@ -1142,11 +1161,11 @@
self.destination._associatedTransaction,
)
yield reservation.reserve()
- # UID conflict check - note we do this after reserving the UID to avoid a race condition where two requests
- # try to write the same calendar data to two different resource URIs.
- if not self.isiTIP:
- result, message, rname = yield self.noUIDConflict(self.uid)
- if not result:
+ # UID conflict check - note we do this after reserving the UID to avoid a race condition where two requests
+ # try to write the same calendar data to two different resource URIs.
+ if not self.isiTIP:
+ result, message, rname = yield self.noUIDConflict(self.uid)
+ if not result:
log.err(message)
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
@@ -1161,16 +1180,19 @@
"UID already exists",
))
-
# Preserve private comments
yield self.preservePrivateComments()
-
+
# Fix broken VTODOs
yield self.replaceMissingToDoProperties()
# Handle sharing dropbox normalization
dropboxChanged = (yield self.dropboxPathNormalization())
+ # Pre-process managed attachments
+ if not self.internal_request and not self.attachmentProcessingDone:
+ managed_copied, managed_removed = (yield self.destination.preProcessManagedAttachments(self.calendar))
+
# Default/duplicate alarms
alarmChanged = self.processAlarms()
@@ -1180,13 +1202,13 @@
if implicit_result == ImplicitScheduler.STATUS_ORPHANED_CANCELLED_EVENT:
if reservation:
yield reservation.unreserve()
-
+
returnValue(StatusResponse(responsecode.CREATED, "Resource created but immediately deleted by the server."))
elif implicit_result == ImplicitScheduler.STATUS_ORPHANED_EVENT:
if reservation:
yield reservation.unreserve()
-
+
# Now forcibly delete the event
if self.destination.exists():
yield self.destination.storeRemove(self.request, False, self.destination_uri)
@@ -1215,6 +1237,10 @@
# Do the actual put or copy
response = (yield self.doStore(data_changed))
+ # Post process managed attachments
+ if not self.internal_request and not self.attachmentProcessingDone:
+ yield self.destination.postProcessManagedAttachments(managed_copied, managed_removed)
+
# Must not set ETag in response if data changed
if did_implicit_action or dropboxChanged or alarmChanged:
def _removeEtag(request, response):
@@ -1226,14 +1252,14 @@
if reservation:
yield reservation.unreserve()
-
+
returnValue(response)
-
+
except Exception, err:
if reservation:
yield reservation.unreserve()
-
+
if isinstance(err, InvalidOverriddenInstanceError):
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
@@ -1246,5 +1272,129 @@
MaxInstances.fromString(str(err.max_allowed)),
"Too many recurrence instances",
))
+ elif isinstance(err, AttachmentStoreValidManagedID):
+ raise HTTPError(ErrorResponse(
+ responsecode.FORBIDDEN,
+ (caldav_namespace, "valid-managed-id"),
+ "Invalid Managed-ID parameter in calendar data",
+ ))
else:
raise err
+
+
+ @inlineCallbacks
+ def moveValidation(self):
+ """
+ Do full validation of source and destination calendar data.
+ """
+
+ # Basic validation
+ self.validIfScheduleMatch()
+
+ # Valid resource name check
+ result, message = self.validResourceName()
+ if not result:
+ log.err(message)
+ raise HTTPError(StatusResponse(responsecode.FORBIDDEN, message))
+
+ # Valid collection size check on the destination parent resource
+ result, message = (yield self.validCollectionSize())
+ if not result:
+ log.err(message)
+ raise HTTPError(ErrorResponse(
+ responsecode.FORBIDDEN,
+ customxml.MaxResources(),
+ message,
+ ))
+
+ # Check that moves to shared calendars are OK
+ yield self.validCopyMoveOperation()
+
+ returnValue(None)
+
+
+ @inlineCallbacks
+ def doStoreMove(self):
+
+ # Do move
+ response = (yield self.source.storeMove(self.request, self.destinationparent, self.destination._name))
+ returnValue(response)
+
+
+ @inlineCallbacks
+ def move(self):
+ """
+ Function that does common MOVE behavior.
+
+ @return: a Deferred with a status response result.
+ """
+
+ try:
+ reservation = None
+
+ # Handle all validation operations here.
+ self.calendar = (yield self.source.iCalendarForUser(self.request))
+ yield self.moveValidation()
+
+ # Reservation and UID conflict checking is next.
+
+ # Reserve UID
+ self.destination_index = self.destinationparent.index()
+ reservation = StoreCalendarObjectResource.UIDReservation(
+ self.destination_index, self.source.uid(), self.destination_uri,
+ self.internal_request or self.isiTIP,
+ self.destination._associatedTransaction,
+ )
+ yield reservation.reserve()
+ # UID conflict check - note we do this after reserving the UID to avoid a race condition where two requests
+ # try to write the same calendar data to two different resource URIs.
+ if not self.isiTIP:
+ result, message, rname = yield self.noUIDConflict(self.source.uid())
+ if not result:
+ log.err(message)
+ raise HTTPError(ErrorResponse(
+ responsecode.FORBIDDEN,
+ NoUIDConflict(
+ davxml.HRef.fromString(
+ joinURL(
+ parentForURL(self.destination_uri),
+ rname.encode("utf-8")
+ )
+ )
+ ),
+ "UID already exists",
+ ))
+
+ # Do the actual put or copy
+ response = (yield self.doStoreMove())
+
+ if reservation:
+ yield reservation.unreserve()
+
+ returnValue(response)
+
+ except Exception, err:
+
+ if reservation:
+ yield reservation.unreserve()
+
+ if isinstance(err, InvalidOverriddenInstanceError):
+ raise HTTPError(ErrorResponse(
+ responsecode.FORBIDDEN,
+ (caldav_namespace, "valid-calendar-data"),
+ description="Invalid overridden instance"
+ ))
+ elif isinstance(err, TooManyInstancesError):
+ raise HTTPError(ErrorResponse(
+ responsecode.FORBIDDEN,
+ MaxInstances.fromString(str(err.max_allowed)),
+ "Too many recurrence instances",
+ ))
+ elif isinstance(err, AttachmentStoreValidManagedID):
+ raise HTTPError(ErrorResponse(
+ responsecode.FORBIDDEN,
+ (caldav_namespace, "valid-managed-id"),
+ "Invalid Managed-ID parameter in calendar data",
+ ))
+ else:
+ raise err
Modified: CalendarServer/trunk/twistedcaldav/resource.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/resource.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/resource.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -2551,6 +2551,12 @@
(customxml.calendarserver_namespace, "xmpp-heartbeat-uri"),
(customxml.calendarserver_namespace, "xmpp-server"),
)
+
+ if config.EnableManagedAttachments:
+ existing += (
+ caldavxml.ManagedAttachmentsServerURL.qname(),
+ )
+
return existing
@@ -2579,6 +2585,14 @@
prop = caldavxml.SupportedCalendarComponentSets()
returnValue(prop)
+ elif qname == caldavxml.ManagedAttachmentsServerURL.qname():
+ if config.EnableManagedAttachments:
+ # The HRef is empty - this will force the client to treat all managed attachment URLs
+ # as relative to this server scheme/host.
+ returnValue(caldavxml.ManagedAttachmentsServerURL(element.HRef.fromString("")))
+ else:
+ returnValue(None)
+
result = (yield super(CalendarHomeResource, self).readProperty(property, request))
returnValue(result)
@@ -2596,6 +2610,10 @@
from twistedcaldav.storebridge import DropboxCollection
self._provisionedChildren["dropbox"] = DropboxCollection
+ if config.EnableManagedAttachments:
+ from twistedcaldav.storebridge import AttachmentsCollection
+ self._provisionedChildren["attachments"] = AttachmentsCollection
+
if config.FreeBusyURL.Enabled:
from twistedcaldav.freebusyurl import FreeBusyURLResource
self._provisionedChildren["freebusy"] = FreeBusyURLResource
Modified: CalendarServer/trunk/twistedcaldav/scheduling/implicit.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/scheduling/implicit.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/scheduling/implicit.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -211,6 +211,32 @@
returnValue((self.action != "none", False,))
+ @inlineCallbacks
+ def testAttendeeEvent(self, request, resource, calendar):
+ """
+ Test the existing resource to see if it is an Attendee scheduling object resource.
+
+ @param request: the request object
+ @type request: L{Request}
+ @param resource: the existing resource to test
+ @type resource: L{Resource}
+ """
+
+ self.request = request
+ self.resource = resource
+ self.calendar = calendar
+ self.internal_request = False
+ self.action = "modify"
+
+ is_scheduling_object = (yield self.checkSchedulingObjectResource(resource))
+ if not is_scheduling_object:
+ returnValue(False)
+
+ yield self.checkImplicitState()
+
+ returnValue(self.state in ("attendee", "attendee-missing",))
+
+
def checkValidOrganizer(self):
"""
Make sure the ORGANIZER is allowed to do certain scheduling operations.
@@ -1051,7 +1077,7 @@
if not doITipReply:
log.debug("Implicit - attendee '%s' is updating UID: '%s' but change is not significant" % (self.attendee, self.uid))
- returnValue(None)
+ returnValue(self.return_calendar)
log.debug("Attendee '%s' is allowed to update UID: '%s' with local organizer '%s'" % (self.attendee, self.uid, self.organizer))
elif isinstance(self.organizerAddress, LocalCalendarUser):
Modified: CalendarServer/trunk/twistedcaldav/stdconfig.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/stdconfig.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/stdconfig.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -525,6 +525,8 @@
"EnableWellKnown" : True, # /.well-known resource
"EnableCalendarQueryExtended" : True, # Extended calendar-query REPORT
+ "EnableManagedAttachments" : True, # Support Managed Attachments
+
#
# Non-standard CalDAV extensions
#
@@ -1530,6 +1532,8 @@
compliance += caldavxml.caldav_query_extended_compliance
if configDict.EnableDefaultAlarms:
compliance += caldavxml.caldav_default_alarms_compliance
+ if configDict.EnableManagedAttachments:
+ compliance += caldavxml.caldav_managed_attachments_compliance
else:
compliance = ()
Modified: CalendarServer/trunk/twistedcaldav/storebridge.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/storebridge.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/storebridge.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -1,5 +1,5 @@
# -*- test-case-name: twistedcaldav.test.test_wrapping -*-
-# #
+##
# Copyright (c) 2005-2012 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +13,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-# #
+##
import time
import hashlib
@@ -30,23 +30,24 @@
from txdav.xml import element as davxml
from txdav.xml.base import dav_namespace, WebDAVUnknownElement, encodeXMLName
from txdav.base.propertystore.base import PropertyName
-from txdav.caldav.icalendarstore import QuotaExceeded
+from txdav.caldav.icalendarstore import QuotaExceeded, AttachmentStoreFailed, \
+ AttachmentStoreValidManagedID, AttachmentRemoveFailed
from txdav.common.icommondatastore import NoSuchObjectResourceError
from txdav.common.datastore.sql_tables import _BIND_MODE_READ, _BIND_MODE_WRITE
from txdav.idav import PropertyChangeNotAllowedError
-from twext.web2 import responsecode
from twext.web2.stream import ProducerStream, readStream, MemoryStream
from twext.web2.http import HTTPError, StatusResponse, Response
-from twext.web2.http_headers import ETag, MimeType
+from twext.web2.http_headers import ETag, MimeType, MimeDisposition
from twext.web2.dav.http import ErrorResponse, ResponseQueue, MultiStatusResponse
from twext.web2.dav.noneprops import NonePropertyStore
-from twext.web2.dav.resource import TwistedACLInheritable, AccessDeniedError
+from twext.web2.dav.resource import TwistedACLInheritable, AccessDeniedError, \
+ davPrivilegeSet
from twext.web2.dav.util import parentForURL, allDataFromStream, joinURL, davXMLFromStream
from twext.web2.responsecode import (
FORBIDDEN, NO_CONTENT, NOT_FOUND, CREATED, CONFLICT, PRECONDITION_FAILED,
BAD_REQUEST, OK, INSUFFICIENT_STORAGE_SPACE, SERVICE_UNAVAILABLE
-)
+, INTERNAL_SERVER_ERROR)
from twistedcaldav import customxml, carddavxml, caldavxml
from twistedcaldav.cache import CacheStoreNotifier, ResponseCacheMixin, \
@@ -67,6 +68,9 @@
from twistedcaldav.scheduling.caldav.resource import ScheduleInboxResource
from twistedcaldav.scheduling.implicit import ImplicitScheduler
from twistedcaldav.vcard import Component as VCard, InvalidVCardDataError
+from pycalendar.datetime import PyCalendarDateTime
+import uuid
+from twext.web2.filter.location import addLocation
"""
Wrappers to translate between the APIs in L{txdav.caldav.icalendarstore} and
@@ -108,13 +112,13 @@
))
- def set(self, property):
+ def set(self, prop):
try:
- self._newPropertyStore[self._convertKey(property.qname())] = property
+ self._newPropertyStore[self._convertKey(prop.qname())] = prop
except PropertyChangeNotAllowedError:
raise HTTPError(StatusResponse(
FORBIDDEN,
- "Property cannot be changed: %s" % (property.sname(),)
+ "Property cannot be changed: %s" % (prop.sname(),)
))
@@ -146,7 +150,7 @@
fromParent = kw.get('fromParent')
# FIXME: direct unit tests
def wrap(thunk):
- def authAndContinue(self, request):
+ def authAndContinue(self, request, *args, **kwargs):
if permissions:
d = self.authorize(request, permissions)
else:
@@ -159,7 +163,7 @@
lambda parent:
parent.authorize(request, fromParent)
)
- d.addCallback(lambda whatever: thunk(self, request))
+ d.addCallback(lambda whatever: thunk(self, request, *args, **kwargs))
return d
return authAndContinue
return wrap
@@ -244,16 +248,16 @@
@inlineCallbacks
- def readProperty(self, property, request):
- if type(property) is tuple:
- qname = property
+ def readProperty(self, prop, request):
+ if type(prop) is tuple:
+ qname = prop
else:
- qname = property.qname()
+ qname = prop.qname()
if qname == customxml.MaxResources.qname() and config.MaxResourcesPerCollection:
returnValue(customxml.MaxResources.fromString(config.MaxResourcesPerCollection))
- returnValue((yield super(_CommonHomeChildCollectionMixin, self).readProperty(property, request)))
+ returnValue((yield super(_CommonHomeChildCollectionMixin, self).readProperty(prop, request)))
def url(self):
@@ -396,7 +400,7 @@
if not self.exists():
log.debug("Resource not found: %s" % (self,))
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
depth = request.headers.getHeader("depth", "infinity")
if depth != "infinity":
@@ -508,7 +512,7 @@
"""
if not self.exists():
log.debug("Resource not found: %s" % (self,))
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
# Can not move outside of home or to existing collection
sourceURI = request.uri
@@ -531,7 +535,7 @@
@inlineCallbacks
- def _readGlobalProperty(self, qname, property, request):
+ def _readGlobalProperty(self, qname, prop, request):
if config.EnableBatchUpload and qname == customxml.BulkRequests.qname():
returnValue(customxml.BulkRequests(
@@ -545,7 +549,7 @@
),
))
else:
- result = (yield super(_CommonHomeChildCollectionMixin, self)._readGlobalProperty(qname, property, request))
+ result = (yield super(_CommonHomeChildCollectionMixin, self)._readGlobalProperty(qname, prop, request))
returnValue(result)
@@ -560,7 +564,7 @@
testctag = testctag.split(">", 1)[0]
ctag = (yield self.getInternalSyncToken())
if testctag != ctag:
- raise HTTPError(StatusResponse(responsecode.PRECONDITION_FAILED, "CTag pre-condition failure"))
+ raise HTTPError(StatusResponse(PRECONDITION_FAILED, "CTag pre-condition failure"))
def checkReturnChanged(self, request):
@@ -586,7 +590,7 @@
components = self.componentsFromData(data)
if components is None:
- raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "Could not parse valid data from request body"))
+ raise HTTPError(StatusResponse(BAD_REQUEST, "Could not parse valid data from request body"))
# Build response
xmlresponses = []
@@ -611,7 +615,7 @@
error = e.response.error
error = (error.namespace, error.name,)
except Exception:
- code = responsecode.BAD_REQUEST
+ code = BAD_REQUEST
if code is None:
@@ -625,7 +629,7 @@
davxml.GETETag.fromString(etag.generate()),
customxml.UID.fromString(component.resourceUID()),
),
- davxml.Status.fromResponseCode(responsecode.OK),
+ davxml.Status.fromResponseCode(OK),
)
)
)
@@ -638,7 +642,7 @@
davxml.GETETag.fromString(etag.generate()),
self.xmlDataElementType().fromTextData(dataChanged),
),
- davxml.Status.fromResponseCode(responsecode.OK),
+ davxml.Status.fromResponseCode(OK),
)
)
)
@@ -692,14 +696,14 @@
# Determine the multiput operation: create, update, delete
href = xmlchild.childOfType(davxml.HRef.qname())
- set = xmlchild.childOfType(davxml.Set.qname())
- prop = set.childOfType(davxml.PropertyContainer.qname()) if set is not None else None
- xmldata_root = prop if prop else set
+ set_items = xmlchild.childOfType(davxml.Set.qname())
+ prop = set_items.childOfType(davxml.PropertyContainer.qname()) if set_items is not None else None
+ xmldata_root = prop if prop else set_items
xmldata = xmldata_root.childOfType(self.xmlDataElementType().qname()) if xmldata_root is not None else None
if href is None:
if xmldata is None:
- raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "Could not parse valid data from request body without a DAV:Href present"))
+ raise HTTPError(StatusResponse(BAD_REQUEST, "Could not parse valid data from request body without a DAV:Href present"))
# Do privilege check on collection once
if checkedBindPrivelege is None:
@@ -718,10 +722,10 @@
if ifmatch:
ifmatch = str(ifmatch.children[0]) if len(ifmatch.children) == 1 else None
if delete is None:
- if set is None:
- raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "Could not parse valid data from request body - no set of delete operation"))
+ if set_items is None:
+ raise HTTPError(StatusResponse(BAD_REQUEST, "Could not parse valid data from request body - no set_items of delete operation"))
if xmldata is None:
- raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "Could not parse valid data from request body for set operation"))
+ raise HTTPError(StatusResponse(BAD_REQUEST, "Could not parse valid data from request body for set_items operation"))
yield self.crudUpdate(request, str(href), xmldata, ifmatch, return_changed, xmlresponses)
updateCount += 1
else:
@@ -786,7 +790,7 @@
error = (error.namespace, error.name,)
except Exception:
- code = responsecode.BAD_REQUEST
+ code = BAD_REQUEST
if code is None:
etag = (yield newchild.etag())
@@ -798,7 +802,7 @@
davxml.GETETag.fromString(etag.generate()),
customxml.UID.fromString(component.resourceUID()),
),
- davxml.Status.fromResponseCode(responsecode.OK),
+ davxml.Status.fromResponseCode(OK),
)
)
)
@@ -825,7 +829,7 @@
updateResource = (yield request.locateResource(href))
if not updateResource.exists():
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
# Check privilege
yield updateResource.authorize(request, (davxml.Write(),))
@@ -833,7 +837,7 @@
# Check if match
etag = (yield updateResource.etag())
if ifmatch and ifmatch != etag.generate():
- raise HTTPError(responsecode.PRECONDITION_FAILED)
+ raise HTTPError(PRECONDITION_FAILED)
yield self.storeResourceData(request, updateResource, href, component, componentdata)
@@ -847,7 +851,7 @@
error = (error.namespace, error.name,)
except Exception:
- code = responsecode.BAD_REQUEST
+ code = BAD_REQUEST
if code is None:
xmlresponses.append(
@@ -857,7 +861,7 @@
davxml.PropertyContainer(
davxml.GETETag.fromString(etag.generate()),
),
- davxml.Status.fromResponseCode(responsecode.OK),
+ davxml.Status.fromResponseCode(OK),
)
)
)
@@ -883,12 +887,12 @@
deleteResource = (yield request.locateResource(href))
if not deleteResource.exists():
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
# Check if match
etag = (yield deleteResource.etag())
if ifmatch and ifmatch != etag.generate():
- raise HTTPError(responsecode.PRECONDITION_FAILED)
+ raise HTTPError(PRECONDITION_FAILED)
yield deleteResource.storeRemove(
request,
@@ -904,13 +908,13 @@
error = (error.namespace, error.name,)
except Exception:
- code = responsecode.BAD_REQUEST
+ code = BAD_REQUEST
if code is None:
xmlresponses.append(
davxml.StatusResponse(
davxml.HRef.fromString(href),
- davxml.Status.fromResponseCode(responsecode.OK),
+ davxml.Status.fromResponseCode(OK),
)
)
else:
@@ -969,7 +973,7 @@
# Validate them first - raise on failure
if not self.validSupportedComponents(components):
- raise HTTPError(StatusResponse(responsecode.FORBIDDEN, "Invalid CALDAV:supported-calendar-component-set"))
+ raise HTTPError(StatusResponse(FORBIDDEN, "Invalid CALDAV:supported-calendar-component-set"))
support_components = ",".join(sorted([comp.upper() for comp in components]))
return maybeDeferred(self._newStoreObject.setSupportedComponents, support_components)
@@ -1062,7 +1066,7 @@
isowner = (yield self.isOwner(request))
accessPrincipal = (yield self.resourceOwnerPrincipal(request))
- for name, uid, type in (yield maybeDeferred(self.index().bruteForceSearch)): # @UnusedVariable
+ for name, _ignore_uid, _ignore_type in (yield maybeDeferred(self.index().bruteForceSearch)):
try:
child = yield request.locateChildResource(self, name)
except TypeError:
@@ -1314,15 +1318,15 @@
return None
- def readProperty(self, property, request):
- if type(property) is tuple:
- qname = property
+ def readProperty(self, prop, request):
+ if type(prop) is tuple:
+ qname = prop
else:
- qname = property.qname()
+ qname = prop.qname()
if qname == (dav_namespace, "resourcetype"):
return succeed(self.resourceType())
- return super(_GetChildHelper, self).readProperty(property, request)
+ return super(_GetChildHelper, self).readProperty(prop, request)
def davComplianceClasses(self):
@@ -1370,7 +1374,7 @@
def resourceType(self,):
- return davxml.ResourceType.dropboxhome # @UndefinedVariable
+ return davxml.ResourceType.dropboxhome # @UndefinedVariable
def listChildren(self):
@@ -1418,7 +1422,7 @@
def resourceType(self):
- return davxml.ResourceType.dropbox # @UndefinedVariable
+ return davxml.ResourceType.dropbox # @UndefinedVariable
@inlineCallbacks
@@ -1428,6 +1432,7 @@
self._newStoreCalendarObject,
attachment,
name,
+ False,
principalCollections=self.principalCollections()
)
self.propagateTransaction(result)
@@ -1619,12 +1624,116 @@
+class AttachmentsCollection(_GetChildHelper):
+ """
+ A collection of all managed attachments, presented as a
+ resource under the user's calendar home.
+ """
+ # FIXME: no direct tests for this class at all.
+
+ def __init__(self, parent, *a, **kw):
+ kw.update(principalCollections=parent.principalCollections())
+ super(AttachmentsCollection, self).__init__(*a, **kw)
+ self.parent = parent
+ self._newStoreHome = self.parent._newStoreHome
+ self.parent.propagateTransaction(self)
+
+
+ def isCollection(self):
+ """
+ It is a collection.
+ """
+ return True
+
+
+ @inlineCallbacks
+ def getChild(self, name):
+ attachmentObject = yield self._newStoreHome.attachmentObjectWithID(name)
+ result = CalendarAttachment(
+ None,
+ attachmentObject,
+ name,
+ True,
+ principalCollections=self.principalCollections()
+ )
+ self.propagateTransaction(result)
+ returnValue(result)
+
+
+ def resourceType(self,):
+ return davxml.ResourceType.collection # @UndefinedVariable
+
+
+ def listChildren(self):
+ return self._newStoreHome.getAllAttachmentNames()
+
+
+ def supportedPrivileges(self, request):
+ # Just DAV standard privileges - no CalDAV ones
+ return succeed(davPrivilegeSet)
+
+
+ def defaultAccessControlList(self):
+ """
+ Only read privileges allowed for managed attachments.
+ """
+ myPrincipal = self.parent.principalForRecord()
+
+ read_privs = (
+ davxml.Privilege(davxml.Read()),
+ davxml.Privilege(davxml.ReadCurrentUserPrivilegeSet()),
+ )
+
+ aces = (
+ # Inheritable access for the resource's associated principal.
+ davxml.ACE(
+ davxml.Principal(davxml.HRef(myPrincipal.principalURL())),
+ davxml.Grant(*read_privs),
+ davxml.Protected(),
+ TwistedACLInheritable(),
+ ),
+ )
+
+ # Give read access to config.ReadPrincipals
+ aces += config.ReadACEs
+
+ # Give all access to config.AdminPrincipals
+ aces += config.AdminACEs
+
+ if config.EnableProxyPrincipals:
+ aces += (
+ # DAV:read/DAV:read-current-user-privilege-set access for this principal's calendar-proxy-read users.
+ davxml.ACE(
+ davxml.Principal(davxml.HRef(joinURL(myPrincipal.principalURL(), "calendar-proxy-read/"))),
+ davxml.Grant(*read_privs),
+ davxml.Protected(),
+ TwistedACLInheritable(),
+ ),
+ # DAV:read/DAV:read-current-user-privilege-set access for this principal's calendar-proxy-write users.
+ davxml.ACE(
+ davxml.Principal(davxml.HRef(joinURL(myPrincipal.principalURL(), "calendar-proxy-write/"))),
+ davxml.Grant(*read_privs),
+ davxml.Protected(),
+ TwistedACLInheritable(),
+ ),
+ )
+
+ return davxml.ACL(*aces)
+
+
+ def accessControlList(self, request, inheritance=True, expanding=False, inherited_aces=None):
+ # Permissions here are fixed, and are not subject to inheritance rules, etc.
+ return succeed(self.defaultAccessControlList())
+
+
+
class CalendarAttachment(_NewStoreFileMetaDataHelper, _GetChildHelper):
- def __init__(self, calendarObject, attachment, attachmentName, **kw):
+ def __init__(self, calendarObject, attachment, attachmentName, managed, **kw):
super(CalendarAttachment, self).__init__(**kw)
- self._newStoreCalendarObject = calendarObject
+ self._newStoreCalendarObject = calendarObject # This can be None for a managed attachment
self._newStoreAttachment = self._newStoreObject = attachment
+ self._managed = managed
self._dead_properties = NonePropertyStore(self)
self.attachmentName = attachmentName
@@ -1633,12 +1742,20 @@
return None
+ def displayName(self):
+ return self.name()
+
+
@requiresPermissions(davxml.WriteContent())
@inlineCallbacks
def http_PUT(self, request):
# FIXME: direct test
# FIXME: CDT test to make sure that permissions are enforced.
+ # Cannot PUT to a managed attachment
+ if self._managed:
+ raise HTTPError(FORBIDDEN)
+
content_type = request.headers.getHeader("content-type")
if content_type is None:
content_type = MimeType("application", "octet-stream")
@@ -1670,7 +1787,7 @@
if not self.exists():
log.debug("Resource not found: %s" % (self,))
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
stream = ProducerStream()
class StreamProtocol(Protocol):
@@ -1684,16 +1801,23 @@
self._newStoreAttachment.retrieve(StreamProtocol())
except IOError, e:
log.error("Unable to read attachment: %s, due to: %s" % (self, e,))
- raise HTTPError(responsecode.NOT_FOUND)
- return Response(OK, {"content-type": self.contentType()}, stream)
+ raise HTTPError(NOT_FOUND)
+ headers = {"content-type": self.contentType()}
+ headers["content-disposition"] = MimeDisposition("attachment", params={"filename": self.displayName()})
+ return Response(OK, headers, stream)
+
@requiresPermissions(fromParent=[davxml.Unbind()])
@inlineCallbacks
def http_DELETE(self, request):
+ # Cannot DELETE a managed attachment
+ if self._managed:
+ raise HTTPError(FORBIDDEN)
+
if not self.exists():
log.debug("Resource not found: %s" % (self,))
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
yield self._newStoreCalendarObject.removeAttachmentWithName(
self._newStoreAttachment.name()
@@ -1716,7 +1840,106 @@
return False
+ def supportedPrivileges(self, request):
+ # Just DAV standard privileges - no CalDAV ones
+ return succeed(davPrivilegeSet)
+
+ @inlineCallbacks
+ def accessControlList(self, request, *a, **kw):
+ """
+ Special case managed attachments, but not dropbox (which is handled by parent collection).
+ All principals identified as ATTENDEEs on the event for this attachment
+ may read it. Also include proxies of ATTENDEEs. Ignore unknown attendees.
+ """
+
+ originalACL = yield super(CalendarAttachment, self).accessControlList(request, *a, **kw)
+ if not self._managed or not self.exists():
+ returnValue(originalACL)
+ originalACEs = list(originalACL.children)
+
+ # Look at attendees
+ if self._newStoreCalendarObject is None:
+ self._newStoreCalendarObject = (yield self._newStoreAttachment.objectResource())
+
+ cuas = (yield self._newStoreCalendarObject.component()).getAttendees()
+ newACEs = []
+ for calendarUserAddress in cuas:
+ principal = self.principalForCalendarUserAddress(
+ calendarUserAddress
+ )
+ if principal is None:
+ continue
+
+ principalURL = principal.principalURL()
+ privileges = (
+ davxml.Privilege(davxml.Read()),
+ davxml.Privilege(davxml.ReadCurrentUserPrivilegeSet()),
+ )
+ newACEs.append(davxml.ACE(
+ davxml.Principal(davxml.HRef(principalURL)),
+ davxml.Grant(*privileges),
+ davxml.Protected(),
+ ))
+ newACEs.append(davxml.ACE(
+ davxml.Principal(davxml.HRef(joinURL(principalURL, "calendar-proxy-write/"))),
+ davxml.Grant(*privileges),
+ davxml.Protected(),
+ ))
+ newACEs.append(davxml.ACE(
+ davxml.Principal(davxml.HRef(joinURL(principalURL, "calendar-proxy-read/"))),
+ davxml.Grant(*privileges),
+ davxml.Protected(),
+ ))
+
+ # Now also need sharees
+ newACEs.extend((yield self.sharedManagedACEs()))
+
+ returnValue(davxml.ACL(*tuple(originalACEs + newACEs)))
+
+
+ @inlineCallbacks
+ def sharedManagedACEs(self):
+
+ aces = ()
+ calendars = yield self._newStoreCalendarObject._parentCollection.asShared()
+ for calendar in calendars:
+
+ read_privs = (
+ davxml.Privilege(davxml.Read()),
+ davxml.Privilege(davxml.ReadCurrentUserPrivilegeSet()),
+ )
+
+ principal = self.principalForUID(calendar._home.uid())
+ aces += (
+ # Specific access for the resource's associated principal.
+ davxml.ACE(
+ davxml.Principal(davxml.HRef(principal.principalURL())),
+ davxml.Grant(*read_privs),
+ davxml.Protected(),
+ ),
+ )
+
+ if config.EnableProxyPrincipals:
+ aces += (
+ # DAV:read/DAV:read-current-user-privilege-set access for this principal's calendar-proxy-read users.
+ davxml.ACE(
+ davxml.Principal(davxml.HRef(joinURL(principal.principalURL(), "calendar-proxy-read/"))),
+ davxml.Grant(*read_privs),
+ davxml.Protected(),
+ ),
+ # DAV:read/DAV:read-current-user-privilege-set/DAV:write access for this principal's calendar-proxy-write users.
+ davxml.ACE(
+ davxml.Principal(davxml.HRef(joinURL(principal.principalURL(), "calendar-proxy-write/"))),
+ davxml.Grant(*read_privs),
+ davxml.Protected(),
+ ),
+ )
+
+ returnValue(aces)
+
+
+
class NoParent(CalDAVResource):
def http_MKCALENDAR(self, request):
@@ -1731,7 +1954,11 @@
return False
+ def exists(self):
+ return False
+
+
class _CommonObjectResource(_NewStoreFileMetaDataHelper, CalDAVResource, FancyEqMixin):
_componentFromStream = None
@@ -1765,6 +1992,10 @@
return succeed(self._newStoreObject.size())
+ def uid(self):
+ return self._newStoreObject.uid()
+
+
def component(self):
return self._newStoreObject.component()
@@ -1773,11 +2004,11 @@
def render(self, request):
if not self.exists():
log.debug("Resource not found: %s" % (self,))
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
output = yield self.component()
- response = Response(200, {}, str(output))
+ response = Response(OK, {}, str(output))
response.headers.setHeader("content-type", self.contentType())
returnValue(response)
@@ -1789,11 +2020,77 @@
"""
if not self.exists():
log.debug("Resource not found: %s" % (self,))
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
return self.storeRemove(request, True, request.uri)
+ @inlineCallbacks
+ def http_MOVE(self, request):
+ """
+ MOVE for object resources.
+ """
+
+ # Do some pre-flight checks - must exist, must be move to another
+ # CommonHomeChild in the same Home, destination resource must not exist
+ if not self.exists():
+ log.debug("Resource not found: %s" % (self,))
+ raise HTTPError(NOT_FOUND)
+
+ parent = (yield request.locateResource(parentForURL(request.uri)))
+
+ #
+ # Find the destination resource
+ #
+ destination_uri = request.headers.getHeader("destination")
+ overwrite = request.headers.getHeader("overwrite", True)
+
+ if not destination_uri:
+ msg = "No destination header in MOVE request."
+ log.err(msg)
+ raise HTTPError(StatusResponse(BAD_REQUEST, msg))
+
+ destination = (yield request.locateResource(destination_uri))
+ if destination is None:
+ msg = "Destination of MOVE does not exist: %s" % (destination_uri,)
+ log.debug(msg)
+ raise HTTPError(StatusResponse(BAD_REQUEST, msg))
+ if destination.exists():
+ if overwrite:
+ msg = "Cannot overwrite existing resource with a MOVE"
+ log.debug(msg)
+ raise HTTPError(StatusResponse(FORBIDDEN, msg))
+ else:
+ msg = "Cannot MOVE to existing resource without overwrite flag enabled"
+ log.debug(msg)
+ raise HTTPError(StatusResponse(PRECONDITION_FAILED, msg))
+
+ # Check for parent calendar collection
+ destination_uri = urlsplit(destination_uri)[2]
+ destinationparent = (yield request.locateResource(parentForURL(destination_uri)))
+ if not isinstance(destinationparent, _CommonHomeChildCollectionMixin):
+ msg = "Destination of MOVE is not valid: %s" % (destination_uri,)
+ log.debug(msg)
+ raise HTTPError(StatusResponse(FORBIDDEN, msg))
+ if parentForURL(parentForURL(destination_uri)) != parentForURL(parentForURL(request.uri)):
+ msg = "Can only MOVE within the same home collection: %s" % (destination_uri,)
+ log.debug(msg)
+ raise HTTPError(StatusResponse(FORBIDDEN, msg))
+
+ #
+ # Check authentication and access controls
+ #
+ yield parent.authorize(request, (davxml.Unbind(),))
+ yield destinationparent.authorize(request, (davxml.Bind(),))
+
+ # May need to add a location header
+ addLocation(request, destination_uri)
+
+ storer = self.storeResource(request, parent, destination, destination_uri, destinationparent, True, None)
+ result = (yield storer.move())
+ returnValue(result)
+
+
def http_PROPPATCH(self, request):
"""
No dead properties allowed on object resources.
@@ -1804,13 +2101,25 @@
return FORBIDDEN
+ def storeResource(self, request, parent, destination, destination_uri, destination_parent, hasSource, component):
+ """
+ Create the appropriate StoreXXX class for storing of data.
+ """
+ raise NotImplementedError
+
+
@inlineCallbacks
def storeStream(self, stream):
# FIXME: direct tests
- component = self._componentFromStream(
- (yield allDataFromStream(stream))
- )
+ component = self._componentFromStream((yield allDataFromStream(stream)))
+ result = (yield self.storeComponent(component))
+ returnValue(result)
+
+
+ @inlineCallbacks
+ def storeComponent(self, component):
+
if self._newStoreObject:
yield self._newStoreObject.setComponent(component)
returnValue(NO_CONTENT)
@@ -1826,20 +2135,25 @@
@inlineCallbacks
- def storeComponent(self, component):
+ def storeMove(self, request, destinationparent, destination_name):
+ """
+ Move this object to a different parent.
- if self._newStoreObject:
- yield self._newStoreObject.setComponent(component)
- returnValue(NO_CONTENT)
- else:
- self._newStoreObject = (yield self._newStoreParent.createObjectResourceWithName(
- self.name(), component, self._metadata
- ))
+ @param request:
+ @type request: L{twext.web2.iweb.IRequest}
+ @param destinationparent: Parent to move to
+ @type destinationparent: L{CommonHomeChild}
+ @param destination_name: name of new resource
+ @type destination_name: C{str}
+ """
- # Re-initialize to get stuff setup again now we have no object
- self._initializeWithObject(self._newStoreObject, self._newStoreParent)
+ try:
+ yield self._newStoreObject.moveTo(destinationparent._newStoreObject, destination_name)
+ except Exception, e:
+ log.err(e)
+ raise HTTPError(INTERNAL_SERVER_ERROR)
- returnValue(CREATED)
+ returnValue(CREATED)
@inlineCallbacks
@@ -1865,7 +2179,7 @@
self._newStoreObject.name()
)
except NoSuchObjectResourceError:
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
# Re-initialize to get stuff setup again now we have no object
self._initializeWithObject(None, self._newStoreParent)
@@ -1873,7 +2187,28 @@
returnValue(NO_CONTENT)
+ @inlineCallbacks
+ def preProcessManagedAttachments(self, calendar):
+ # If store object exists pass through, otherwise use underlying store ManagedAttachments object to determine changes
+ if self._newStoreObject:
+ copied, removed = (yield self._newStoreObject.updatingResourceCheckAttachments(calendar))
+ else:
+ copied = (yield self._newStoreParent.creatingResourceCheckAttachments(calendar))
+ removed = None
+ returnValue((copied, removed,))
+
+
+ @inlineCallbacks
+ def postProcessManagedAttachments(self, copied, removed):
+ # Pass through directly to store object
+ if copied:
+ yield self._newStoreObject.copyResourceAttachments(copied)
+ if removed:
+ yield self._newStoreObject.removeResourceAttachments(removed)
+
+
+
class _MetadataProperty(object):
"""
A python property which can be set either on a _newStoreObject or on some
@@ -1884,7 +2219,7 @@
self.name = name
- def __get__(self, oself, type=None):
+ def __get__(self, oself, ptype=None):
if oself._newStoreObject:
return getattr(oself._newStoreObject, self.name)
else:
@@ -1979,6 +2314,23 @@
raise HTTPError(PRECONDITION_FAILED)
+ def storeResource(self, request, parent, destination, destination_uri, destination_parent, hasSource, component, attachmentProcessingDone=False):
+ return StoreCalendarObjectResource(
+ request=request,
+ source=self if hasSource else None,
+ source_uri=request.uri if hasSource else None,
+ sourceparent=parent if hasSource else None,
+ sourcecal=hasSource,
+ deletesource=hasSource,
+ destination=destination,
+ destination_uri=destination_uri,
+ destinationparent=destination_parent,
+ destinationcal=True,
+ calendar=component,
+ attachmentProcessingDone=attachmentProcessingDone,
+ )
+
+
@inlineCallbacks
def storeRemove(self, request, implicitly, where):
"""
@@ -2062,7 +2414,173 @@
returnValue(NO_CONTENT)
+ @requiresPermissions(davxml.WriteContent())
+ @inlineCallbacks
+ def POST_handler_attachment(self, request, action):
+ """
+ Handle a managed attachments request on the calendar object resource.
+ @param request: HTTP request object
+ @type request: L{Request}
+ @param action: The request-URI 'action' argument
+ @type action: C{str}
+
+ @return: an HTTP response
+ """
+
+ # Resource must exist to allow attachment operations
+ if not self.exists():
+ raise HTTPError(NOT_FOUND)
+
+ def _getRIDs():
+ rids = request.args.get("rid")
+ if rids is not None:
+ rids = rids[0].split(",")
+ try:
+ rids = [PyCalendarDateTime.parseText(rid) if rid != "M" else None for rid in rids]
+ except ValueError:
+ raise HTTPError(ErrorResponse(
+ FORBIDDEN,
+ (caldav_namespace, "valid-rid-parameter",),
+ "The rid parameter in the request-URI contains an invalid value",
+ ))
+ return rids
+
+ def _getMID():
+ mid = request.args.get("managed-id")
+ if mid is None:
+ raise HTTPError(ErrorResponse(
+ FORBIDDEN,
+ (caldav_namespace, "valid-managed-id-parameter",),
+ "The managed-id parameter is missing from the request-URI",
+ ))
+ return mid[0]
+
+ def _getContentInfo():
+ content_type = request.headers.getHeader("content-type")
+ if content_type is None:
+ content_type = MimeType("application", "octet-stream")
+ content_disposition = request.headers.getHeader("content-disposition")
+ if content_disposition is None or "filename" not in content_disposition.params:
+ filename = str(uuid.uuid4())
+ else:
+ filename = content_disposition.params["filename"]
+ return content_type, filename
+
+ valid_preconditions = {
+ "attachment-add": "valid-attachment-add",
+ "attachment-update": "valid-attachment-update",
+ "attachment-remove": "valid-attachment-remove",
+ }
+
+ # Only allow organizers to manipulate managed attachments for now
+ calendar = (yield self.iCalendarForUser(request))
+ scheduler = ImplicitScheduler()
+ is_attendee = (yield scheduler.testAttendeeEvent(request, self, calendar,))
+ if is_attendee and action in valid_preconditions:
+ raise HTTPError(ErrorResponse(
+ FORBIDDEN,
+ (caldav_namespace, valid_preconditions[action],),
+ "Attendees are not allowed to manipulate managed attachments",
+ ))
+
+ # Dispatch to store object
+ if action == "attachment-add":
+
+ # Add an attachment property
+ rids = _getRIDs()
+ content_type, filename = _getContentInfo()
+ try:
+ attachment, location = (yield self._newStoreObject.addAttachment(rids, content_type, filename, request.stream, calendar))
+ except AttachmentStoreFailed:
+ raise HTTPError(ErrorResponse(
+ FORBIDDEN,
+ (caldav_namespace, "valid-attachment-add",),
+ "Could not store the supplied attachment",
+ ))
+ except QuotaExceeded:
+ raise HTTPError(ErrorResponse(
+ INSUFFICIENT_STORAGE_SPACE,
+ (dav_namespace, "quota-not-exceeded"),
+ "Could not store the supplied attachment because user quota would be exceeded",
+ ))
+
+ post_result = Response(CREATED)
+
+ elif action == "attachment-update":
+ mid = _getMID()
+ content_type, filename = _getContentInfo()
+ try:
+ attachment, location = (yield self._newStoreObject.updateAttachment(mid, content_type, filename, request.stream, calendar))
+ except AttachmentStoreValidManagedID:
+ raise HTTPError(ErrorResponse(
+ FORBIDDEN,
+ (caldav_namespace, "valid-managed-id-parameter",),
+ "The managed-id parameter does not refer to an attachment in this calendar object resource",
+ ))
+ except AttachmentStoreFailed:
+ raise HTTPError(ErrorResponse(
+ FORBIDDEN,
+ (caldav_namespace, "valid-attachment-update",),
+ "Could not store the supplied attachment",
+ ))
+ except QuotaExceeded:
+ raise HTTPError(ErrorResponse(
+ INSUFFICIENT_STORAGE_SPACE,
+ (dav_namespace, "quota-not-exceeded"),
+ "Could not store the supplied attachment because user quota would be exceeded",
+ ))
+
+ post_result = Response(NO_CONTENT)
+
+ elif action == "attachment-remove":
+ rids = _getRIDs()
+ mid = _getMID()
+ try:
+ yield self._newStoreObject.removeAttachment(rids, mid, calendar)
+ except AttachmentStoreValidManagedID:
+ raise HTTPError(ErrorResponse(
+ FORBIDDEN,
+ (caldav_namespace, "valid-managed-id-parameter",),
+ "The managed-id parameter does not refer to an attachment in this calendar object resource",
+ ))
+ except AttachmentRemoveFailed:
+ raise HTTPError(ErrorResponse(
+ FORBIDDEN,
+ (caldav_namespace, "valid-attachment-remove",),
+ "Could not remove the specified attachment",
+ ))
+
+ post_result = Response(NO_CONTENT)
+
+ else:
+ raise HTTPError(ErrorResponse(
+ FORBIDDEN,
+ (caldav_namespace, "valid-action-parameter",),
+ "The action parameter in the request-URI is not valid",
+ ))
+
+ # TODO: The storing piece here should go away once we do implicit in the store
+ # Store new resource
+ parent = (yield request.locateResource(parentForURL(request.path)))
+ storer = self.storeResource(request, None, self, request.uri, parent, False, calendar, attachmentProcessingDone=True)
+ result = (yield storer.run())
+
+ # Look for Prefer header
+ if "return-representation" in request.headers.getHeader("prefer", {}) and result.code / 100 == 2:
+ result = (yield self.render(request))
+ result.code = OK
+ result.headers.setHeader("content-location", request.path)
+ else:
+ result = post_result
+ if action == "attachment-add":
+ result.headers.setHeader("location", location)
+ if action in ("attachment-add", "attachment-update",):
+ result.headers.addRawHeader("Cal-Managed-ID", attachment.dropboxID())
+ returnValue(result)
+
+
+
class AddressBookCollectionResource(_CommonHomeChildCollectionMixin, CalDAVResource):
"""
Wrapper around a L{txdav.carddav.iaddressbook.IAddressBook}.
@@ -2234,7 +2752,23 @@
vCard = _CommonObjectResource.component
+ def storeResource(self, request, parent, destination, destination_uri, destination_parent, hasSource, component):
+ return StoreAddressObjectResource(
+ request=request,
+ source=self if hasSource else None,
+ source_uri=request.uri if hasSource else None,
+ sourceparent=parent if hasSource else None,
+ sourceadbk=hasSource,
+ deletesource=hasSource,
+ destination=destination,
+ destination_uri=destination_uri,
+ destinationparent=destination_parent,
+ destinationadbk=True,
+ vcard=component,
+ )
+
+
class _NotificationChildHelper(object):
"""
Methods for things which are like notification objects.
@@ -2398,16 +2932,16 @@
@inlineCallbacks
- def readProperty(self, property, request):
- if type(property) is tuple:
- qname = property
+ def readProperty(self, prop, request):
+ if type(prop) is tuple:
+ qname = prop
else:
- qname = property.qname()
+ qname = prop.qname()
if qname == customxml.NotificationType.qname():
returnValue(self._newStoreObject.xmlType())
- returnValue((yield super(StoreNotificationObjectFile, self).readProperty(property, request)))
+ returnValue((yield super(StoreNotificationObjectFile, self).readProperty(prop, request)))
def isCollection(self):
@@ -2428,7 +2962,7 @@
def http_GET(self, request):
if not self.exists():
log.debug("Resource not found: %s" % (self,))
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
returnValue(
Response(OK, {"content-type": self.contentType()},
@@ -2443,7 +2977,7 @@
"""
if not self.exists():
log.debug("Resource not found: %s" % (self,))
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
return self.storeRemove(request, request.uri)
@@ -2476,6 +3010,6 @@
except MemcacheLockTimeoutError:
raise HTTPError(StatusResponse(CONFLICT, "Resource: %s currently in use on the server." % (where,)))
except NoSuchObjectResourceError:
- raise HTTPError(responsecode.NOT_FOUND)
+ raise HTTPError(NOT_FOUND)
returnValue(NO_CONTENT)
Modified: CalendarServer/trunk/twistedcaldav/test/test_icalendar.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_icalendar.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/twistedcaldav/test/test_icalendar.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -31,6 +31,7 @@
from twistedcaldav.ical import iCalendarProductID
from pycalendar.duration import PyCalendarDuration
from twistedcaldav.dateops import normalizeForExpand
+from pycalendar.value import PyCalendarValue
class iCalendar (twistedcaldav.test.util.TestCase):
"""
@@ -651,7 +652,7 @@
self.assertEqual(end, PyCalendarDateTime(2004, 11, 27))
break
- #test_component_timerange.todo = "recurrence expansion should give us no end date here"
+ # test_component_timerange.todo = "recurrence expansion should give us no end date here"
def test_parse_date(self):
@@ -693,7 +694,7 @@
"""
self.assertEqual(PyCalendarDuration.parseText("P15DT5H0M20S"), PyCalendarDuration(days=15, hours=5, minutes=0, seconds=20))
self.assertEqual(PyCalendarDuration.parseText("+P15DT5H0M20S"), PyCalendarDuration(days=15, hours=5, minutes=0, seconds=20))
- self.assertEqual(PyCalendarDuration.parseText("-P15DT5H0M20S"), PyCalendarDuration(days=-15, hours=-5, minutes=0, seconds=-20))
+ self.assertEqual(PyCalendarDuration.parseText("-P15DT5H0M20S"), PyCalendarDuration(days=15 * -1, hours=5 * -1, minutes=0, seconds=20 * -1))
self.assertEqual(PyCalendarDuration.parseText("P7W"), PyCalendarDuration(weeks=7))
@@ -1152,6 +1153,35 @@
self.assertEqual(result, str(component).replace("\r", ""))
+ def test_add_property_with_valuetype(self):
+ data = """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20071114T000000Z
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+"""
+ result = """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20071114T000000Z
+ATTACH;VALUE=BINARY:foobar
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""".replace("\n", "\r\n")
+
+ component = Component.fromString(data)
+ vevent = component.mainComponent()
+ vevent.addProperty(Property("ATTACH", "foobar", valuetype=PyCalendarValue.VALUETYPE_BINARY))
+ self.assertEqual(str(component), result)
+
+
def test_add_property(self):
data = (
# Simple component
@@ -8170,3 +8200,838 @@
for cuaddr, result in data:
new_cuaddr = normalizeCUAddress(cuaddr, lookupFunction, None, toUUID=True)
self.assertEquals(new_cuaddr, result)
+
+
+ def test_hasPropertyWithParameterMatch(self):
+
+ data = (
+ (
+ "1.1 - nothing to match, with param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ False,
+ ),
+ (
+ "1.2 - nothing to match, without param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", None, False,
+ False,
+ ),
+ (
+ "1.3 - match with param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ True,
+ ),
+ (
+ "1.4 - match without param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", None, False,
+ True,
+ ),
+ (
+ "1.5 - simple not match with param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ False,
+ ),
+ (
+ "1.6 - simple match with default param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", True,
+ True,
+ ),
+ (
+ "2.1 - overrides no match with param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ False,
+ ),
+ (
+ "2.2 - overrides no match without param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", None, False,
+ False,
+ ),
+ (
+ "2.3 - overrides match in all with param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ True,
+ ),
+ (
+ "2.4 - overrides match in all without param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", None, False,
+ True,
+ ),
+ (
+ "2.5 - match in one override with param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ True,
+ ),
+ (
+ "2.6 - match in one override without param value",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", None, False,
+ True,
+ ),
+ )
+
+ for title, calendar, property, param_name, param_value, param_default, result in data:
+ ical = Component.fromString(calendar)
+ has_property = ical.hasPropertyWithParameterMatch(property, param_name, param_value, param_default)
+ self.assertEqual(has_property, result, "Failed has property: %s" % (title,))
+
+
+ def test_replaceAllPropertiesWithParameterMatch(self):
+
+ data = (
+ (
+ "1.1 - nothing to change",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ Property("ATTACH", "http://example.com/attachment", {"MANAGED-ID": "1", "MTAG": "2"}),
+ "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "1.2 - simple change",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ Property("ATTACH", "http://example.com/attachment", {"MANAGED-ID": "1", "MTAG": "2"}),
+ "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=1;MTAG=2:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "1.3 - simple no change",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ Property("ATTACH", "http://example.com/attachment", {"MANAGED-ID": "1", "MTAG": "2"}),
+ "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "1.4 - simple change default",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ Property("ATTACH", "http://example.com/attachment", {"MTAG": "2"}),
+ "MANAGED-ID", "1", True,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+ATTACH;MTAG=2:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "2.1 - overrides nothing to change",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ Property("ATTACH", "http://example.com/attachment", {"MANAGED-ID": "1", "MTAG": "2"}),
+ "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "2.2 - overrides change in all",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ Property("ATTACH", "http://example.com/attachment", {"MANAGED-ID": "1", "MTAG": "2"}),
+ "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=1;MTAG=2:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=1;MTAG=2:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "2.3 - overrides change one",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ Property("ATTACH", "http://example.com/attachment", {"MANAGED-ID": "1", "MTAG": "2"}),
+ "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=1;MTAG=2:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ )
+
+ for title, calendar, property, param_name, param_value, param_default, result in data:
+ ical = Component.fromString(calendar)
+ ical.replaceAllPropertiesWithParameterMatch(property, param_name, param_value, param_default)
+ self.assertEqual(str(ical), result.replace("\n", "\r\n"), "Failed replace property: %s" % (title,))
+
+
+ def test_removeAllPropertiesWithParameterMatch(self):
+
+ data = (
+ (
+ "1.1 - nothing to change",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "1.2 - simple change",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "1.3 - simple no change",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "1.4 - simple change default",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", True,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "2.1 - overrides nothing to change",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "2.2 - overrides change in all",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ (
+ "2.3 - overrides change one",
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=1;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ "ATTACH", "MANAGED-ID", "1", False,
+ """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
+BEGIN:VEVENT
+UID:12345-67890-1
+DTSTART:20090101T080000Z
+DTEND:20090101T090000Z
+ATTACH;MANAGED-ID=3;MTAG=1:http://example.com/attachment
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+RRULE:FREQ=DAILY
+END:VEVENT
+BEGIN:VEVENT
+UID:12345-67890-1
+RECURRENCE-ID:20090102T080000Z
+DTSTART:20090102T080000Z
+DTEND:20090102T090000Z
+ATTACH;MANAGED-ID=2;MTAG=1:http://example.com/attachment
+DTSTAMP:20080601T120000Z
+END:VEVENT
+END:VCALENDAR
+""",
+ ),
+ )
+
+ for title, calendar, property, param_name, param_value, param_default, result in data:
+ ical = Component.fromString(calendar)
+ ical.removeAllPropertiesWithParameterMatch(property, param_name, param_value, param_default)
+ self.assertEqual(str(ical), result.replace("\n", "\r\n"), "Failed remove property: %s" % (title,))
Modified: CalendarServer/trunk/txdav/caldav/datastore/file.py
===================================================================
--- CalendarServer/trunk/txdav/caldav/datastore/file.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/caldav/datastore/file.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -50,7 +50,7 @@
from txdav.caldav.icalendarstore import ICalendar, ICalendarObject
from txdav.caldav.icalendarstore import ICalendarHome
-from txdav.caldav.datastore.index_file import Index as OldIndex,\
+from txdav.caldav.datastore.index_file import Index as OldIndex, \
IndexSchedule as OldInboxIndex
from txdav.caldav.datastore.util import (
validateCalendarComponent, dropboxIDFromCalendarObject, CalendarObjectBase,
@@ -90,7 +90,6 @@
self._childClass = Calendar
-
createCalendarWithName = CommonHome.createChildWithName
removeCalendarWithName = CommonHome.removeChildWithName
@@ -129,7 +128,7 @@
@inlineCallbacks
def hasCalendarResourceUIDSomewhereElse(self, uid, ok_object, type):
-
+
objectResources = (yield self.objectResourcesWithUID(uid, ("inbox",)))
for objectResource in objectResources:
if ok_object and objectResource._path == ok_object._path:
@@ -137,20 +136,22 @@
matched_type = "schedule" if objectResource.isScheduleObject else "calendar"
if type == "schedule" or matched_type == "schedule":
returnValue(True)
-
+
returnValue(False)
+
@inlineCallbacks
def getCalendarResourcesForUID(self, uid, allow_shared=False):
-
+
results = []
objectResources = (yield self.objectResourcesWithUID(uid, ("inbox",)))
for objectResource in objectResources:
if allow_shared or objectResource._parentCollection.owned():
results.append(objectResource)
-
+
returnValue(results)
+
@inlineCallbacks
def calendarObjectWithDropboxID(self, dropboxID):
"""
@@ -186,18 +187,19 @@
defaultCal = self.createCalendarWithName("calendar")
props = defaultCal.properties()
props[PropertyName(*ScheduleCalendarTransp.qname())] = ScheduleCalendarTransp(Opaque())
-
+
# Check whether components type must be separate
if config.RestrictCalendarsToOneComponentType:
defaultCal.setSupportedComponents("VEVENT")
-
+
# Default tasks
defaultTasks = self.createCalendarWithName("tasks")
props = defaultTasks.properties()
defaultTasks.setSupportedComponents("VTODO")
-
+
self.createCalendarWithName("inbox")
+
def ensureDefaultCalendarsExist(self):
"""
Double check that we have calendars supporting at least VEVENT and VTODO,
@@ -222,10 +224,12 @@
newname = str(uuid.uuid4())
newcal = self.createCalendarWithName(newname)
newcal.setSupportedComponents(support_component)
-
+
_requireCalendarWithType("VEVENT", "calendar")
_requireCalendarWithType("VTODO", "tasks")
+
+
class Calendar(CommonHomeChild):
"""
File-based implementation of L{ICalendar}.
@@ -270,7 +274,6 @@
# TODO: implement me.
raise NotImplementedError()
-
ownerCalendarHome = CommonHomeChild.ownerHome
viewerCalendarHome = CommonHomeChild.viewerHome
calendarObjects = CommonHomeChild.objectResources
@@ -292,17 +295,19 @@
Update the private property with the supported components. Technically this should only happen once
on collection creation, but for migration we may need to change after the fact - hence a separate api.
"""
-
+
pname = PropertyName.fromElement(customxml.TwistedCalendarSupportedComponents)
if supported_components:
self.properties()[pname] = customxml.TwistedCalendarSupportedComponents.fromString(supported_components)
elif pname in self.properties():
del self.properties()[pname]
+
def getSupportedComponents(self):
result = str(self.properties().get(PropertyName.fromElement(customxml.TwistedCalendarSupportedComponents), ""))
return result if result else None
+
def isSupportedComponent(self, componentType):
supported = self.getSupportedComponents()
if supported:
@@ -310,6 +315,7 @@
else:
return True
+
def initPropertyStore(self, props):
# Setup peruser special properties
props.setSpecialProperties(
@@ -323,61 +329,79 @@
),
)
+
def contentType(self):
"""
The content type of Calendar objects is text/calendar.
"""
return MimeType.fromString("text/calendar; charset=utf-8")
+
def splitCollectionByComponentTypes(self):
"""
If the calendar contains iCalendar data with different component types, then split it into separate collections
each containing only one component type. When doing this make sure properties and sharing state are preserved
on any new calendars created.
"""
-
+
# TODO: implement this for filestore
pass
+
def _countComponentTypes(self):
"""
Count each component type in this calendar.
-
- @return: a C{tuple} of C{tuple} containing the component type name and count.
+
+ @return: a C{tuple} of C{tuple} containing the component type name and count.
"""
rows = self._index._oldIndex.componentTypeCounts()
result = tuple([(componentType, componentCount) for componentType, componentCount in sorted(rows, key=lambda x:x[0])])
return result
+
def _splitComponentType(self, component):
"""
Create a new calendar and move all components of the specified component type into the new one.
Make sure properties and sharing state is preserved on the new calendar.
-
+
@param component: Component type to split out
@type component: C{str}
"""
-
+
# TODO: implement this for filestore
pass
+
def _transferSharingDetails(self, newcalendar, component):
"""
If the current calendar is shared, make the new calendar shared in the same way, but tweak the name.
"""
-
+
# TODO: implement this for filestore
pass
-
+
+
def _transferCalendarObjects(self, newcalendar, component):
"""
Move all calendar components of the specified type to the specified calendar.
"""
-
+
# TODO: implement this for filestore
pass
+
+ def creatingResourceCheckAttachments(self, component):
+ """
+ When component data is created or changed we need to look for changes related to managed attachments.
+
+ @param component: the new calendar data
+ @type component: L{Component}
+ """
+ return succeed(None)
+
+
+
class CalendarObject(CommonObjectResource, CalendarObjectBase):
"""
@ivar _path: The path of the .ics file on disk
@@ -389,7 +413,7 @@
def __init__(self, name, calendar, metadata=None):
super(CalendarObject, self).__init__(name, calendar)
self._attachments = {}
-
+
if metadata is not None:
self.accessMode = metadata.get("accessMode", "")
self.isScheduleObject = metadata.get("isScheduleObject", False)
@@ -428,7 +452,7 @@
if self._path.exists():
backup = hidden(self._path.temporarySibling())
self._path.moveTo(backup)
-
+
fh = self._path.open("w")
try:
# FIXME: concurrency problem; if this write is interrupted
@@ -476,7 +500,7 @@
if unfixed:
self.log_error("Calendar data at %s had unfixable problems:\n %s" % (self._path.path, "\n ".join(unfixed),))
-
+
if fixed:
self.log_error("Calendar data at %s had fixable problems:\n %s" % (self._path.path, "\n ".join(fixed),))
@@ -516,35 +540,41 @@
"File corruption detected (improper start) in file: %s"
% (self._path.path,)
)
-
+
self._objectText = text
return text
+
def uid(self):
if not hasattr(self, "_uid"):
self._uid = self.component().resourceUID()
return self._uid
+
def componentType(self):
if not hasattr(self, "_componentType"):
self._componentType = self.component().mainType()
return self._componentType
+
def organizer(self):
return self.component().getOrganizer()
+
def getMetadata(self):
metadata = {}
- metadata["accessMode"] = self.accessMode
+ metadata["accessMode"] = self.accessMode
metadata["isScheduleObject"] = self.isScheduleObject
metadata["scheduleTag"] = self.scheduleTag
metadata["scheduleEtags"] = self.scheduleEtags
metadata["hasPrivateComment"] = self.hasPrivateComment
return metadata
+
def _get_accessMode(self):
return str(self.properties().get(PropertyName.fromElement(customxml.TwistedCalendarAccessProperty), ""))
+
def _set_accessMode(self, value):
pname = PropertyName.fromElement(customxml.TwistedCalendarAccessProperty)
if value:
@@ -564,6 +594,7 @@
prop = str(prop) == "true"
return prop
+
def _set_isScheduleObject(self, value):
pname = PropertyName.fromElement(customxml.TwistedSchedulingObjectResource)
if value is not None:
@@ -576,6 +607,7 @@
def _get_scheduleTag(self):
return str(self.properties().get(PropertyName.fromElement(caldavxml.ScheduleTag), ""))
+
def _set_scheduleTag(self, value):
pname = PropertyName.fromElement(caldavxml.ScheduleTag)
if value:
@@ -588,6 +620,7 @@
def _get_scheduleEtags(self):
return tuple([str(etag) for etag in self.properties().get(PropertyName.fromElement(customxml.TwistedScheduleMatchETags), customxml.TwistedScheduleMatchETags()).children])
+
def _set_scheduleEtags(self, value):
if value:
etags = [davxml.GETETag.fromString(etag) for etag in value]
@@ -603,6 +636,7 @@
def _get_hasPrivateComment(self):
return PropertyName.fromElement(customxml.TwistedCalendarHasPrivateCommentsProperty) in self.properties()
+
def _set_hasPrivateComment(self, value):
pname = PropertyName.fromElement(customxml.TwistedCalendarHasPrivateCommentsProperty)
if value:
@@ -612,6 +646,19 @@
hasPrivateComment = property(_get_hasPrivateComment, _set_hasPrivateComment)
+
+ def addAttachment(self, pathpattern, rids, content_type, filename, stream):
+ raise NotImplementedError
+
+
+ def updateAttachment(self, pathpattern, managed_id, content_type, filename, stream):
+ raise NotImplementedError
+
+
+ def removeAttachment(self, rids, managed_id):
+ raise NotImplementedError
+
+
@inlineCallbacks
def createAttachmentWithName(self, name):
"""
@@ -707,6 +754,7 @@
),
)
+
# IDataStoreObject
def contentType(self):
"""
@@ -718,7 +766,7 @@
class AttachmentStorageTransport(StorageTransportBase):
- def __init__(self, attachment, contentType):
+ def __init__(self, attachment, contentType, dispositionName):
"""
Initialize this L{AttachmentStorageTransport} and open its file for
writing.
@@ -727,7 +775,7 @@
@type attachment: L{Attachment}
"""
super(AttachmentStorageTransport, self).__init__(
- attachment, contentType)
+ attachment, contentType, dispositionName)
self._path = self._attachment._path.temporarySibling()
self._file = self._path.open("w")
@@ -795,8 +843,8 @@
return propStoreClass(uid, lambda: self._path)
- def store(self, contentType):
- return AttachmentStorageTransport(self, contentType)
+ def store(self, contentType, dispositionName=None):
+ return AttachmentStorageTransport(self, contentType, dispositionName)
def retrieve(self, protocol):
@@ -861,6 +909,7 @@
yield calendarObject
+
class Invites(object):
#
# OK, here's where we get ugly.
Modified: CalendarServer/trunk/txdav/caldav/datastore/sql.py
===================================================================
--- CalendarServer/trunk/txdav/caldav/datastore/sql.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/caldav/datastore/sql.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -25,11 +25,18 @@
"CalendarObject",
]
+from twext.enterprise.dal.syntax import Delete
+from twext.enterprise.dal.syntax import Insert
+from twext.enterprise.dal.syntax import Len
+from twext.enterprise.dal.syntax import Parameter
+from twext.enterprise.dal.syntax import Select, Count, ColumnSyntax
+from twext.enterprise.dal.syntax import Update
+from twext.enterprise.dal.syntax import utcNowSQL
from twext.python.clsprop import classproperty
+from twext.python.filepath import CachingFilePath
from twext.python.vcomponent import VComponent
-from txdav.xml.rfc2518 import ResourceType
from twext.web2.http_headers import MimeType, generateContentType
-from twext.python.filepath import CachingFilePath
+from twext.web2.stream import readStream
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.python import hashlib
@@ -37,51 +44,44 @@
from twistedcaldav import caldavxml, customxml
from twistedcaldav.caldavxml import ScheduleCalendarTransp, Opaque
from twistedcaldav.config import config
-from twistedcaldav.dateops import normalizeForIndex, datetimeMktime,\
+from twistedcaldav.dateops import normalizeForIndex, datetimeMktime, \
parseSQLTimestamp, pyCalendarTodatetime, parseSQLDateToPyCalendar
-from twistedcaldav.ical import Component, InvalidICalendarDataError
+from twistedcaldav.ical import Component, InvalidICalendarDataError, Property
from twistedcaldav.instance import InvalidOverriddenInstanceError
from twistedcaldav.memcacher import Memcacher
from txdav.base.propertystore.base import PropertyName
-from txdav.caldav.datastore.util import validateCalendarComponent,\
+from txdav.caldav.datastore.util import AttachmentRetrievalTransport
+from txdav.caldav.datastore.util import CalendarObjectBase
+from txdav.caldav.datastore.util import StorageTransportBase
+from txdav.caldav.datastore.util import validateCalendarComponent, \
dropboxIDFromCalendarObject
-from txdav.caldav.icalendarstore import ICalendarHome, ICalendar, ICalendarObject,\
- IAttachment
-from txdav.common.datastore.sql import CommonHome, CommonHomeChild,\
+from txdav.caldav.icalendarstore import ICalendarHome, ICalendar, ICalendarObject, \
+ IAttachment, AttachmentStoreFailed, AttachmentStoreValidManagedID
+from txdav.caldav.icalendarstore import QuotaExceeded
+from txdav.common.datastore.sql import CommonHome, CommonHomeChild, \
CommonObjectResource, ECALENDARTYPE
-from txdav.common.datastore.sql_legacy import PostgresLegacyIndexEmulator,\
+from txdav.common.datastore.sql_legacy import PostgresLegacyIndexEmulator, \
PostgresLegacyInboxIndexEmulator
-from txdav.common.datastore.sql_tables import CALENDAR_TABLE,\
- CALENDAR_BIND_TABLE, CALENDAR_OBJECT_REVISIONS_TABLE, CALENDAR_OBJECT_TABLE,\
- _ATTACHMENTS_MODE_NONE, _ATTACHMENTS_MODE_READ, _ATTACHMENTS_MODE_WRITE,\
- CALENDAR_HOME_TABLE, CALENDAR_HOME_METADATA_TABLE,\
- CALENDAR_AND_CALENDAR_BIND, CALENDAR_OBJECT_REVISIONS_AND_BIND_TABLE,\
+from txdav.common.datastore.sql_tables import CALENDAR_TABLE, \
+ CALENDAR_BIND_TABLE, CALENDAR_OBJECT_REVISIONS_TABLE, CALENDAR_OBJECT_TABLE, \
+ _ATTACHMENTS_MODE_NONE, _ATTACHMENTS_MODE_READ, _ATTACHMENTS_MODE_WRITE, \
+ CALENDAR_HOME_TABLE, CALENDAR_HOME_METADATA_TABLE, \
+ CALENDAR_AND_CALENDAR_BIND, CALENDAR_OBJECT_REVISIONS_AND_BIND_TABLE, \
CALENDAR_OBJECT_AND_BIND_TABLE, _BIND_STATUS_INVITED, schema
-from twext.enterprise.dal.syntax import Select, Count, ColumnSyntax
-from twext.enterprise.dal.syntax import Insert
-from twext.enterprise.dal.syntax import Update
-from twext.enterprise.dal.syntax import Delete
-from twext.enterprise.dal.syntax import Parameter
-from twext.enterprise.dal.syntax import utcNowSQL
-from twext.enterprise.dal.syntax import Len
-
-from txdav.caldav.datastore.util import CalendarObjectBase
-from txdav.caldav.icalendarstore import QuotaExceeded
-
-from txdav.caldav.datastore.util import StorageTransportBase
-from txdav.common.icommondatastore import IndexedSearchException,\
- InternalDataStoreError, HomeChildNameAlreadyExistsError,\
+from txdav.common.icommondatastore import IndexedSearchException, \
+ InternalDataStoreError, HomeChildNameAlreadyExistsError, \
HomeChildNameNotAllowedError
+from txdav.xml.rfc2518 import ResourceType
from pycalendar.datetime import PyCalendarDateTime
from pycalendar.duration import PyCalendarDuration
from pycalendar.timezone import PyCalendarTimezone
+from pycalendar.value import PyCalendarValue
-from txdav.caldav.datastore.util import AttachmentRetrievalTransport
-
from zope.interface.declarations import implements
+import collections
import os
import tempfile
import uuid
@@ -115,7 +115,6 @@
self._childClass = Calendar
super(CalendarHome, self).__init__(transaction, ownerUID, notifiers)
-
createCalendarWithName = CommonHome.createChildWithName
removeCalendarWithName = CommonHome.removeChildWithName
calendarWithName = CommonHome.childWithName
@@ -128,30 +127,12 @@
ch = schema.CALENDAR_HOME
cb = schema.CALENDAR_BIND
cor = schema.CALENDAR_OBJECT_REVISIONS
- at = schema.ATTACHMENT
rp = schema.RESOURCE_PROPERTY
# delete attachments corresponding to this home, also removing from disk
- rows = (yield Select(
- [at.DROPBOX_ID, at.PATH, ],
- From=at,
- Where=(
- at.CALENDAR_HOME_RESOURCE_ID == self._resourceID
- ),
- ).on(self._txn))
- for dropboxID, path in rows:
- attachment = Attachment._attachmentPathRoot(self._txn, dropboxID).child(path)
- if attachment.exists():
- yield attachment.remove()
+ yield Attachment.removedHome(self._txn, self._resourceID)
yield Delete(
- From=at,
- Where=(
- at.CALENDAR_HOME_RESOURCE_ID == self._resourceID
- ),
- ).on(self._txn)
-
- yield Delete(
From=cb,
Where=cb.CALENDAR_HOME_RESOURCE_ID == self._resourceID
).on(self._txn)
@@ -175,7 +156,7 @@
@inlineCallbacks
- def hasCalendarResourceUIDSomewhereElse(self, uid, ok_object, type):
+ def hasCalendarResourceUIDSomewhereElse(self, uid, ok_object, mode):
"""
Determine if this calendar home contains any calendar objects which
would potentially conflict with the given UID for scheduling purposes.
@@ -188,7 +169,7 @@
being updated). May be C{None} if all objects potentially count.
@type ok_object: L{CalendarObject} or C{NoneType}
- @param type: a string, indicating the mode to check for conflicts. If
+ @param mode: a string, indicating the mode to check for conflicts. If
this is the string "schedule", then we are checking for potential
conflicts with a new scheduled calendar object, which will conflict
with any calendar object matching the given C{uid} in the home.
@@ -210,9 +191,9 @@
for objectResource in objectResources:
if ok_object and objectResource._resourceID == ok_object._resourceID:
continue
- matched_type = ("schedule" if objectResource.isScheduleObject
+ matched_mode = ("schedule" if objectResource.isScheduleObject
else "calendar")
- if type == "schedule" or matched_type == "schedule":
+ if mode == "schedule" or matched_mode == "schedule":
returnValue(True)
returnValue(False)
@@ -270,42 +251,62 @@
@inlineCallbacks
+ def getAllAttachmentNames(self):
+ att = schema.ATTACHMENT
+ rows = (yield Select(
+ [att.DROPBOX_ID],
+ From=att,
+ Where=(att.CALENDAR_HOME_RESOURCE_ID == self._resourceID),
+ OrderBy=att.DROPBOX_ID
+ ).on(self._txn))
+ returnValue([row[0] for row in rows])
+
+
+ @inlineCallbacks
+ def attachmentObjectWithID(self, managedID):
+ attach = (yield ManagedAttachment.load(self._txn, managedID))
+ returnValue(attach)
+
+
+ @inlineCallbacks
def createdHome(self):
-
+
# Default calendar
defaultCal = yield self.createCalendarWithName("calendar")
props = defaultCal.properties()
props[PropertyName(*ScheduleCalendarTransp.qname())] = ScheduleCalendarTransp(Opaque())
-
+
# Check whether components type must be separate
if config.RestrictCalendarsToOneComponentType:
yield defaultCal.setSupportedComponents("VEVENT")
-
+
# Default tasks
defaultTasks = yield self.createCalendarWithName("tasks")
yield defaultTasks.setSupportedComponents("VTODO")
-
+
yield self.createCalendarWithName("inbox")
+
@inlineCallbacks
def splitCalendars(self):
"""
Split all regular calendars by component type
"""
-
+
# Make sure the loop does not operate on any new calendars created during the loop
self.log_warn("Splitting calendars for user %s" % (self._ownerUID,))
calendars = yield self.calendars()
for calendar in calendars:
-
- # Ignore inbox - also shared calendars are not part of .calendars()
+
+ # Ignore inbox - also shared calendars are not part of .calendars()
if calendar.name() == "inbox":
continue
split_count = yield calendar.splitCollectionByComponentTypes()
- self.log_warn(" Calendar: '%s', split into %d" % (calendar.name(), split_count+1,))
+ self.log_warn(" Calendar: '%s', split into %d" % (calendar.name(), split_count + 1,))
yield self.ensureDefaultCalendarsExist()
+
@inlineCallbacks
def ensureDefaultCalendarsExist(self):
"""
@@ -339,7 +340,7 @@
@classproperty
- def _unacceptedSharesQuery(cls): #@NoSelf
+ def _unacceptedSharesQuery(cls): # @NoSelf
cb = schema.CALENDAR_BIND
return Select([cb.CALENDAR_RESOURCE_NAME],
From=cb,
@@ -356,7 +357,7 @@
cb = schema.CALENDAR_BIND
rows = yield self._unacceptedSharesQuery.on(self._txn, homeResourceID=self._resourceID)
for (resourceName,) in rows:
- kwds = { "ResourceName" : resourceName }
+ kwds = {"ResourceName" : resourceName}
yield Delete(
From=inv,
Where=(
@@ -378,7 +379,7 @@
Remove all remaining invite entries for this home.
"""
inv = schema.INVITE
- kwds = { "HomeResourceID" : self._resourceID }
+ kwds = {"HomeResourceID" : self._resourceID}
yield Delete(
From=inv,
Where=(inv.HOME_RESOURCE_ID == Parameter("HomeResourceID"))
@@ -431,15 +432,16 @@
different child classes to have their own type specific data, but still make use of the
common base logic.
"""
-
+
# Common behavior is to have created and modified
-
+
return (
cls._homeChildMetaDataSchema.CREATED,
cls._homeChildMetaDataSchema.MODIFIED,
cls._homeChildMetaDataSchema.SUPPORTED_COMPONENTS,
)
-
+
+
@classmethod
def metadataAttributes(cls):
"""
@@ -447,15 +449,16 @@
different child classes to have their own type specific data, but still make use of the
common base logic.
"""
-
+
# Common behavior is to have created and modified
-
+
return (
"_created",
"_modified",
"_supportedComponents",
)
-
+
+
@property
def _calendarHome(self):
return self._home
@@ -463,9 +466,8 @@
# FIXME: resource type is DAV. This doesn't belong in the data store. -wsv
def resourceType(self):
- return ResourceType.calendar #@UndefinedVariable
+ return ResourceType.calendar # @UndefinedVariable
-
ownerCalendarHome = CommonHomeChild.ownerHome
viewerCalendarHome = CommonHomeChild.viewerHome
calendarObjects = CommonHomeChild.objectResources
@@ -511,15 +513,18 @@
cacheKey = queryCacher.keyForHomeChildMetaData(self._resourceID)
yield queryCacher.invalidateAfterCommit(self._txn, cacheKey)
+
def getSupportedComponents(self):
return self._supportedComponents
+
def isSupportedComponent(self, componentType):
if self._supportedComponents:
return componentType.upper() in self._supportedComponents.split(",")
else:
return True
+
def initPropertyStore(self, props):
# Setup peruser special properties
props.setSpecialProperties(
@@ -533,6 +538,7 @@
),
)
+
# FIXME: this is DAV-ish. Data store calendar objects don't have
# mime types. -wsv
def contentType(self):
@@ -541,6 +547,7 @@
"""
return MimeType.fromString("text/calendar; charset=utf-8")
+
@inlineCallbacks
def splitCollectionByComponentTypes(self):
"""
@@ -549,7 +556,7 @@
on any new calendars created. Also restrict the new calendars to only the one appropriate component type. Return
the number of splits done.
"""
-
+
# First see how many different component types there are
split_count = 0
components = yield self._countComponentTypes()
@@ -560,11 +567,11 @@
yield self.setSupportedComponents(component.upper())
returnValue(split_count)
-
+
# We will leave the component type with the highest count in the current calendar and create new calendars
# for the others which will be moved over
- maxComponent = max(components, key=lambda x:x[1])[0]
-
+ maxComponent = max(components, key=lambda x: x[1])[0]
+
for component, _ignore_count in components:
if component == maxComponent:
continue
@@ -576,12 +583,13 @@
returnValue(split_count)
+
@inlineCallbacks
def _countComponentTypes(self):
"""
Count each component type in this calendar.
-
- @return: a C{tuple} of C{tuple} containing the component type name and count.
+
+ @return: a C{tuple} of C{tuple} containing the component type name and count.
"""
ob = self._objectSchema
@@ -595,17 +603,18 @@
rows = yield _componentsQuery.on(self._txn, calID=self._resourceID)
result = tuple([(componentType, componentCount) for componentType, componentCount in sorted(rows, key=lambda x:x[0])])
returnValue(result)
-
+
+
@inlineCallbacks
def _splitComponentType(self, component):
"""
Create a new calendar and move all components of the specified component type into the new one.
Make sure properties and sharing state is preserved on the new calendar.
-
+
@param component: Component type to split out
@type component: C{str}
"""
-
+
# Create the new calendar
try:
newcalendar = yield self._home.createCalendarWithName("%s-%s" % (self._name, component.lower(),))
@@ -613,25 +622,26 @@
# If the name we want exists, try repeating with up to ten more
for ctr in range(10):
try:
- newcalendar = yield self._home.createCalendarWithName("%s-%s-%d" % (self._name, component.lower(), ctr+1,))
+ newcalendar = yield self._home.createCalendarWithName("%s-%s-%d" % (self._name, component.lower(), ctr + 1,))
except HomeChildNameAlreadyExistsError:
continue
else:
# At this point we are stuck
raise HomeChildNameNotAllowedError
-
+
# Restrict calendar to single component type
yield newcalendar.setSupportedComponents(component.upper())
-
+
# Transfer properties over
yield newcalendar._properties.copyAllProperties(self._properties)
-
+
# Transfer sharing
yield self._transferSharingDetails(newcalendar, component)
-
+
# Now move calendar data over
yield self._transferCalendarObjects(newcalendar, component)
-
+
+
@inlineCallbacks
def _transferSharingDetails(self, newcalendar, component):
"""
@@ -646,22 +656,23 @@
Where=(cb.CALENDAR_RESOURCE_ID == Parameter('calID')).And(
cb.CALENDAR_HOME_RESOURCE_ID != Parameter('homeID'))
)
-
+
rows = yield _bindQuery.on(
self._txn,
calID=self._resourceID,
homeID=self._home._resourceID,
)
-
+
if len(rows) == 0:
returnValue(None)
-
+
for row in rows:
columnMap = dict(zip(columns, row))
columnMap[cb.CALENDAR_RESOURCE_ID] = newcalendar._resourceID
columnMap[cb.CALENDAR_RESOURCE_NAME] = "%s-%s" % (columnMap[cb.CALENDAR_RESOURCE_NAME], component.lower(),)
- yield Insert(columnMap).on(self._txn)
+ yield Insert(columnMap).on(self._txn)
+
@inlineCallbacks
def _transferCalendarObjects(self, newcalendar, component):
"""
@@ -682,17 +693,18 @@
calID=self._resourceID,
componentType=component,
)
-
+
if len(rows) == 0:
returnValue(None)
-
+
for row in rows:
resourceID = row[0]
child = yield self.objectResourceWithID(resourceID)
yield self.moveObjectResource(child, newcalendar)
+
@classproperty
- def _moveTimeRangeUpdateQuery(cls): #@NoSelf
+ def _moveTimeRangeUpdateQuery(cls): # @NoSelf
"""
DAL query to update a child to be in a new parent.
"""
@@ -702,6 +714,7 @@
Where=tr.CALENDAR_OBJECT_RESOURCE_ID == Parameter("resourceID")
)
+
@inlineCallbacks
def _movedObjectResource(self, child, newparent):
"""
@@ -713,6 +726,7 @@
resourceID=child._resourceID
)
+
def unshare(self):
"""
Unshares a collection, regardless of which "direction" it was shared.
@@ -720,6 +734,16 @@
return super(Calendar, self).unshare(ECALENDARTYPE)
+ def creatingResourceCheckAttachments(self, component):
+ """
+ When component data is created or changed we need to look for changes related to managed attachments.
+
+ @param component: the new calendar data
+ @type component: L{Component}
+ """
+ return CalendarObject.creatingResourceCheckAttachments(self._txn, self, component)
+
+
icalfbtype_to_indexfbtype = {
"UNKNOWN" : 0,
"FREE" : 1,
@@ -743,7 +767,7 @@
Component.ACCESS_CONFIDENTIAL: 3,
Component.ACCESS_RESTRICTED : 4,
}
-accesstype_to_accessMode = dict([(v, k) for k,v in accessMode_to_type.items()])
+accesstype_to_accessMode = dict([(v, k) for k, v in accessMode_to_type.items()])
def _pathToName(path):
return path.rsplit(".", 1)[0]
@@ -768,7 +792,6 @@
self.scheduleEtags = metadata.get("scheduleEtags", "")
self.hasPrivateComment = metadata.get("hasPrivateComment", False)
-
_allColumns = [
_objectSchema.RESOURCE_ID,
_objectSchema.RESOURCE_NAME,
@@ -823,6 +846,7 @@
validateCalendarComponent(self, self._calendar, component, inserting, self._txn._migrating)
yield self.updateDatabase(component, inserting=inserting)
+
if inserting:
yield self._calendar._insertRevision(self._name)
else:
@@ -854,7 +878,7 @@
# freebusy related properties have changed (e.g. an attendee reply and refresh). In those cases
# the component will have a special attribute present to let us know to suppress the instance indexing.
instanceIndexingRequired = not hasattr(component, "noInstanceIndexing") or inserting or reCreate
-
+
if instanceIndexingRequired:
# Decide how far to expand based on the component. doInstanceIndexing will indicate whether we
@@ -862,28 +886,28 @@
# operation.
doInstanceIndexing = False
master = component.masterComponent()
- if ( master is None or not component.isRecurring() ):
+ if (master is None or not component.isRecurring()):
# When there is no master we have a set of overridden components -
# index them all.
# When there is one instance - index it.
expand = PyCalendarDateTime(2100, 1, 1, 0, 0, 0, tzid=PyCalendarTimezone(utc=True))
doInstanceIndexing = True
else:
-
+
# If migrating or re-creating or config option for delayed indexing is off, always index
if reCreate or txn._migrating or (not config.FreeBusyIndexDelayedExpand and not isInboxItem):
doInstanceIndexing = True
-
+
# Duration into the future through which recurrences are expanded in the index
# by default. This is a caching parameter which affects the size of the index;
# it does not affect search results beyond this period, but it may affect
# performance of such a search.
expand = (PyCalendarDateTime.getToday() +
PyCalendarDuration(days=config.FreeBusyIndexExpandAheadDays))
-
+
if expand_until and expand_until > expand:
expand = expand_until
-
+
# Maximum duration into the future through which recurrences are expanded in the
# index. This is a caching parameter which affects the size of the index; it
# does not affect search results beyond this period, but it may affect
@@ -899,7 +923,7 @@
if expand > (PyCalendarDateTime.getToday() +
PyCalendarDuration(days=config.FreeBusyIndexExpandMaxDays)):
raise IndexedSearchException
-
+
if config.FreeBusyIndexLowerLimitDays:
truncateLowerLimit = PyCalendarDateTime.getToday()
truncateLowerLimit.offsetDay(-config.FreeBusyIndexLowerLimitDays)
@@ -915,7 +939,7 @@
except InvalidOverriddenInstanceError, e:
self.log_error("Invalid instance %s when indexing %s in %s" %
(e.rid, self._name, self._calendar,))
-
+
if txn._migrating:
# TODO: fix the data here by re-writing component then re-index
instances = component.expandTimeRanges(expand, lowerLimit=truncateLowerLimit, ignoreInvalidInstances=True)
@@ -923,7 +947,7 @@
recurrenceLowerLimit = instances.lowerLimit
else:
raise
-
+
# Now coerce indexing to off if needed
if not doInstanceIndexing:
instances = None
@@ -1003,7 +1027,7 @@
Where=co.RESOURCE_ID == self._resourceID
).on(txn)
)[0][0]
-
+
# Need to wipe the existing time-range for this and rebuild if required
if instanceIndexingRequired:
yield Delete(
@@ -1011,9 +1035,11 @@
Where=tr.CALENDAR_OBJECT_RESOURCE_ID == self._resourceID
).on(txn)
else:
+ # Keep MODIFIED the same when doing an index-only update
values = {
co.RECURRANCE_MIN : pyCalendarTodatetime(normalizeForIndex(recurrenceLowerLimit)) if recurrenceLowerLimit else None,
co.RECURRANCE_MAX : pyCalendarTodatetime(normalizeForIndex(recurrenceLimit)) if recurrenceLimit else None,
+ co.MODIFIED : self._modified,
}
yield Update(
@@ -1029,8 +1055,8 @@
if instanceIndexingRequired and doInstanceIndexing:
yield self._addInstances(component, instances, truncateLowerLimit, txn)
-
-
+
+
@inlineCallbacks
def _addInstances(self, component, instances, truncateLowerLimit, txn):
"""
@@ -1052,18 +1078,18 @@
instance = instances[key]
start = instance.start
end = instance.end
- float = instance.start.floating()
+ floating = instance.start.floating()
transp = instance.component.propertyValue("TRANSP") == "TRANSPARENT"
fbtype = instance.component.getFBType()
start.setTimezoneUTC(True)
end.setTimezoneUTC(True)
- # Ignore if below the lower limit
+ # Ignore if below the lower limit
if truncateLowerLimit and end < truncateLowerLimit:
lowerLimitApplied = True
continue
- yield self._addInstanceDetails(component, instance.rid, start, end, float, transp, fbtype, txn)
+ yield self._addInstanceDetails(component, instance.rid, start, end, floating, transp, fbtype, txn)
# For truncated items we insert a tomb stone lower bound so that a time-range
# query with just an end bound will match
@@ -1075,7 +1101,7 @@
# Special - for unbounded recurrence we insert a value for "infinity"
# that will allow an open-ended time-range to always match it.
# We also need to add the "infinity" value if the event was bounded but
- # starts after the future expansion cut-off limit.
+ # starts after the future expansion cut-off limit.
if component.isRecurringUnbounded() or instances.limit and len(instances.instances) == 0:
start = PyCalendarDateTime(2100, 1, 1, 0, 0, 0, tzid=PyCalendarTimezone(utc=True))
end = PyCalendarDateTime(2100, 1, 1, 1, 0, 0, tzid=PyCalendarTimezone(utc=True))
@@ -1083,7 +1109,7 @@
@inlineCallbacks
- def _addInstanceDetails(self, component, rid, start, end, float, transp, fbtype, txn):
+ def _addInstanceDetails(self, component, rid, start, end, floating, transp, fbtype, txn):
tr = schema.TIME_RANGE
tpy = schema.TRANSPARENCY
@@ -1091,7 +1117,7 @@
instanceid = (yield Insert({
tr.CALENDAR_RESOURCE_ID : self._calendar._resourceID,
tr.CALENDAR_OBJECT_RESOURCE_ID : self._resourceID,
- tr.FLOATING : float,
+ tr.FLOATING : floating,
tr.START_DATE : pyCalendarTodatetime(start),
tr.END_DATE : pyCalendarTodatetime(end),
tr.FBTYPE : icalfbtype_to_indexfbtype.get(fbtype, icalfbtype_to_indexfbtype["FREE"]),
@@ -1116,6 +1142,7 @@
ideal but in theory we should have checked everything on the way in and
only allowed in good data.
"""
+
text = yield self._text()
try:
@@ -1141,14 +1168,21 @@
returnValue(component)
+ @inlineCallbacks
+ def remove(self):
+ # Need to also remove attachments
+ yield ManagedAttachment.resourceRemoved(self._txn, self._resourceID)
+ yield super(CalendarObject, self).remove()
+
+
@classproperty
- def _recurrenceMinMaxByIDQuery(cls): #@NoSelf
+ def _recurrenceMinMaxByIDQuery(cls): # @NoSelf
"""
DAL query to load RECURRANCE_MIN, RECURRANCE_MAX via an object's resource ID.
"""
co = schema.CALENDAR_OBJECT
return Select(
- [co.RECURRANCE_MIN, co.RECURRANCE_MAX,],
+ [co.RECURRANCE_MIN, co.RECURRANCE_MAX, ],
From=co,
Where=co.RESOURCE_ID == Parameter("resourceID"),
)
@@ -1159,7 +1193,7 @@
"""
Get the RECURRANCE_MIN, RECURRANCE_MAX value from the database. Occasionally we might need to do an
update to time-range data via a separate transaction, so we allow that to be passed in.
-
+
@return: L{PyCalendarDateTime} result
"""
# Setup appropriate txn
@@ -1176,7 +1210,7 @@
@classproperty
- def _instanceQuery(cls): #@NoSelf
+ def _instanceQuery(cls): # @NoSelf
"""
DAL query to load TIME_RANGE data via an object's resource ID.
"""
@@ -1196,7 +1230,7 @@
def instances(self, txn=None):
"""
Get the set of instances from the database.
-
+
@return: C{list} result
"""
# Setup appropriate txn
@@ -1223,9 +1257,11 @@
metadata["hasPrivateComment"] = self.hasPrivateComment
return metadata
+
def _get_accessMode(self):
return accesstype_to_accessMode[self._access]
+
def _set_accessMode(self, value):
self._access = accessMode_to_type[value]
@@ -1234,6 +1270,7 @@
def _get_isScheduleObject(self):
return self._schedule_object
+
def _set_isScheduleObject(self, value):
self._schedule_object = value
@@ -1242,6 +1279,7 @@
def _get_scheduleTag(self):
return self._schedule_tag
+
def _set_scheduleTag(self, value):
self._schedule_tag = value
@@ -1250,6 +1288,7 @@
def _get_scheduleEtags(self):
return tuple(self._schedule_etags.split(",")) if self._schedule_etags else ()
+
def _set_scheduleEtags(self, value):
self._schedule_etags = ",".join(value) if value else ""
@@ -1258,38 +1297,399 @@
def _get_hasPrivateComment(self):
return self._private_comments
+
def _set_hasPrivateComment(self, value):
self._private_comments = value
hasPrivateComment = property(_get_hasPrivateComment, _set_hasPrivateComment)
@inlineCallbacks
+ def _preProcessAttachmentsOnResourceChange(self, component, inserting):
+ """
+ When component data is created or changed we need to look for changes related to managed attachments.
+
+ @param component: the new calendar data
+ @type component: L{Component}
+ @param inserting: C{True} if resource is being created
+ @type inserting: C{bool}
+ """
+ if inserting:
+ self._copyAttachments = (yield self.creatingResourceCheckAttachments(component))
+ self._removeAttachments = None
+ else:
+ self._copyAttachments, self._removeAttachments = (yield self.updatingResourceCheckAttachments(component))
+
+
+ @classmethod
+ @inlineCallbacks
+ def creatingResourceCheckAttachments(cls, txn, parent, component):
+ """
+ A new component is going to be stored. Check any ATTACH properties that may be present
+ to verify they are owned by the organizer/owner of the resource and re-write the managed-ids.
+
+ @param component: calendar component about to be stored
+ @type component: L{Component}
+ """
+
+ # Retrieve all ATTACH properties with a MANAGED-ID
+ attached = collections.defaultdict(list)
+ attachments = component.getAllPropertiesInAnyComponent("ATTACH", depth=1,)
+ for attachment in attachments:
+ managed_id = attachment.parameterValue("MANAGED-ID")
+ if managed_id is not None:
+ attached[managed_id].append(attachment)
+
+ # Punt if no managed attachments
+ if len(attached) == 0:
+ returnValue(None)
+
+ changes = yield cls._addingManagedIDs(txn, parent, attached, component.resourceUID())
+ returnValue(changes)
+
+
+ @inlineCallbacks
+ def updatingResourceCheckAttachments(self, component):
+ """
+ A component is being changed. Check any ATTACH properties that may be present
+ to verify they are owned by the organizer/owner of the resource and re-write the managed-ids.
+
+ @param component: calendar component about to be stored
+ @type component: L{Component}
+ """
+
+ # Retrieve all ATTACH properties with a MANAGED-ID in new data
+ newattached = collections.defaultdict(list)
+ newattachments = component.getAllPropertiesInAnyComponent("ATTACH", depth=1,)
+ for attachment in newattachments:
+ managed_id = attachment.parameterValue("MANAGED-ID")
+ if managed_id is not None:
+ newattached[managed_id].append(attachment)
+
+ # Retrieve all ATTACH properties with a MANAGED-ID in old data
+ oldcomponent = (yield self.component())
+ oldattached = collections.defaultdict(list)
+ oldattachments = oldcomponent.getAllPropertiesInAnyComponent("ATTACH", depth=1,)
+ for attachment in oldattachments:
+ managed_id = attachment.parameterValue("MANAGED-ID")
+ if managed_id is not None:
+ oldattached[managed_id].append(attachment)
+
+ # Punt if no managed attachments
+ if len(newattached) + len(oldattached) == 0:
+ returnValue((None, None,))
+
+ newattached_keys = set(newattached.keys())
+ oldattached_keys = set(oldattached.keys())
+
+ # Determine what was removed
+ removed = set(oldattached_keys) - set(newattached_keys)
+
+ # Determine what was added
+ added = set(newattached_keys) - set(oldattached_keys)
+ changed = {}
+ for managed_id in added:
+ changed[managed_id] = newattached[managed_id]
+
+ changes = yield self._addingManagedIDs(self._txn, self._parentCollection, changed, component.resourceUID())
+
+ # Make sure existing data is not changed
+ same = oldattached_keys & newattached_keys
+ for managed_id in same:
+ newattachment = newattached[managed_id]
+ oldattachment = oldattached[managed_id][0]
+ for newattachment in newattached[managed_id]:
+ if newattachment != oldattachment:
+ newattachment.setParameter("MTAG", oldattachment.parameterValue("MTAG"))
+ newattachment.setParameter("FMTTYPE", oldattachment.parameterValue("FMTTYPE"))
+ newattachment.setParameter("FILENAME", oldattachment.parameterValue("FILENAME"))
+ newattachment.setParameter("SIZE", oldattachment.parameterValue("SIZE"))
+ newattachment.setValue(oldattachment.value())
+
+ returnValue((changes, removed,))
+
+
+ @classmethod
+ @inlineCallbacks
+ def _addingManagedIDs(cls, txn, parent, attached, newuid):
+ # Now check each managed-id
+ changes = []
+ for managed_id, attachments in attached.items():
+
+ # Must be in the same home as this resource
+ details = (yield ManagedAttachment.usedManagedID(txn, managed_id))
+ if len(details) == 0:
+ raise AttachmentStoreValidManagedID
+ if len(details) != 1:
+ # This is a bad store error - there should be only one home associated with a managed-id
+ raise InternalDataStoreError
+ home_id, _ignore_resource_id, uid = details[0]
+
+ # Policy:
+ #
+ # 1. If Managed-ID is re-used in a resource with the same UID - it is fine - just rewrite the details
+ # 2. If Managed-ID is re-used in a different resource but owned by the same user - change managed-id to new one
+ # 3. Otherwise, strip off the managed-id property and treat as unmanaged.
+
+ # 1. UID check
+ if uid == newuid:
+ yield cls._syncAttachmentProperty(txn, managed_id, attachments)
+
+ # 2. Same home
+ elif home_id == parent.ownerHome()._resourceID:
+
+ # Need to rewrite the managed-id, value in the properties
+ new_id = str(uuid.uuid4())
+ yield cls._syncAttachmentProperty(txn, managed_id, attachments, new_id)
+ changes.append((managed_id, new_id,))
+
+ else:
+ cls._stripAttachmentProperty(attachments)
+
+ returnValue(changes)
+
+
+ @classmethod
+ @inlineCallbacks
+ def _syncAttachmentProperty(cls, txn, managed_id, attachments, new_id=None):
+ """
+ Make sure the supplied set of attach properties are all sync'd with the current value of the
+ matching managed-id attachment.
+
+ @param managed_id: Managed-Id to sync with
+ @type managed_id: C{str}
+ @param attachments: list of attachment properties
+ @type attachments: C{list} of L{twistedcaldav.ical.Property}
+ @param new_id: Value of new Managed-ID to use
+ @type new_id: C{str}
+ """
+ original_attachment = (yield ManagedAttachment.load(txn, managed_id))
+ for attachment in attachments:
+ attachment.setParameter("MANAGED-ID", managed_id if new_id is None else new_id)
+ attachment.setParameter("MTAG", original_attachment.md5())
+ attachment.setParameter("FMTTYPE", "%s/%s" % (original_attachment.contentType().mediaType, original_attachment.contentType().mediaSubtype))
+ attachment.setParameter("FILENAME", original_attachment.name())
+ attachment.setParameter("SIZE", str(original_attachment.size()))
+ attachment.setValue((yield original_attachment.location(new_id)))
+
+
+ @classmethod
+ def _stripAttachmentProperty(cls, attachments):
+ """
+ Strip off managed-id related properties from an attachment.
+ """
+ for attachment in attachments:
+ attachment.removeParameter("MANAGED-ID")
+ attachment.removeParameter("MTAG")
+
+
+ @inlineCallbacks
+ def copyResourceAttachments(self, attached):
+ """
+ Copy an attachment reference for some other resource and link it to this resource.
+
+ @param attached: tuple of old, new managed ids for the attachments to copy
+ @type attached: C{tuple}
+ """
+ for old_id, new_id in attached:
+ yield ManagedAttachment.copyManagedID(self._txn, old_id, new_id, self._resourceID)
+
+
+ @inlineCallbacks
+ def removeResourceAttachments(self, attached):
+ """
+ Remove an attachment reference for this resource.
+
+ @param attached: managed-ids to remove
+ @type attached: C{tuple}
+ """
+ for managed_id in attached:
+ yield self.removeManagedAttachmentWithID(managed_id)
+
+
+ @inlineCallbacks
+ def addAttachment(self, rids, content_type, filename, stream, calendar):
+
+ # First write the data stream
+
+ # We need to know the resource_ID of the home collection of the owner
+ # (not sharee) of this event
+ try:
+ attachment = (yield self.createManagedAttachment())
+ t = attachment.store(content_type, filename)
+ yield readStream(stream, t.write)
+ except Exception, e:
+ self.log_error("Unable to store attachment: %s" % (e,))
+ raise AttachmentStoreFailed
+ yield t.loseConnection()
+
+ # Now try and adjust the actual calendar data
+ #calendar = (yield self.component())
+
+ location = (yield attachment.location())
+ attach = Property("ATTACH", location, params={
+ "MANAGED-ID": attachment.managedID(),
+ "MTAG": attachment.md5(),
+ "FMTTYPE": "%s/%s" % (attachment.contentType().mediaType, attachment.contentType().mediaSubtype),
+ "FILENAME": attachment.name(),
+ "SIZE": str(attachment.size()),
+ }, valuetype=PyCalendarValue.VALUETYPE_URI)
+ if rids is None:
+ calendar.addPropertyToAllComponents(attach)
+ else:
+ # TODO - per-recurrence attachments
+ pass
+
+ # TODO: Here is where we want to store data implicitly - for now we have to let app layer deal with it
+ #yield self.setComponent(calendar)
+
+ returnValue((attachment, location,))
+
+
+ @inlineCallbacks
+ def updateAttachment(self, managed_id, content_type, filename, stream, calendar):
+
+ # First check the supplied managed-id is associated with this resource
+ cobjs = (yield ManagedAttachment.referencesTo(self._txn, managed_id))
+ if self._resourceID not in cobjs:
+ raise AttachmentStoreValidManagedID
+
+ # Next write the data stream to existing attachment
+
+ # We need to know the resource_ID of the home collection of the owner
+ # (not sharee) of this event
+ try:
+ # Check that this is a proper update
+ oldattachment = (yield self.attachmentWithManagedID(managed_id))
+ if oldattachment is None:
+ self.log_error("Missing managed attachment even though ATTACHMENT_CALENDAR_OBJECT indicates it is present: %s" % (managed_id,))
+ raise AttachmentStoreFailed
+
+ # We actually create a brand new attachment object for the update, but with the same managed-id. That way, other resources
+ # referencing the old attachment data will still see that.
+ attachment = (yield self.updateManagedAttachment(managed_id, oldattachment))
+ t = attachment.store(content_type, filename)
+ yield readStream(stream, t.write)
+ except Exception, e:
+ self.log_error("Unable to store attachment: %s" % (e,))
+ raise AttachmentStoreFailed
+ yield t.loseConnection()
+
+ # Now try and adjust the actual calendar data
+ #calendar = (yield self.component())
+
+ location = self._txn._store.attachmentsURIPattern % {
+ "home": self._parentCollection.ownerHome().name(),
+ "name": attachment.managedID(),
+ }
+ attach = Property("ATTACH", location, params={
+ "MANAGED-ID": attachment.managedID(),
+ "MTAG": attachment.md5(),
+ "FMTTYPE": "%s/%s" % (attachment.contentType().mediaType, attachment.contentType().mediaSubtype),
+ "FILENAME": attachment.name(),
+ "SIZE": str(attachment.size()),
+ }, valuetype=PyCalendarValue.VALUETYPE_URI)
+ calendar.replaceAllPropertiesWithParameterMatch(attach, "MANAGED-ID", managed_id)
+
+ # TODO: Here is where we want to store data implicitly - for now we have to let app layer deal with it
+ #yield self.setComponent(calendar)
+
+ returnValue((attachment, location,))
+
+
+ @inlineCallbacks
+ def removeAttachment(self, rids, managed_id, calendar):
+
+ # First check the supplied managed-id is associated with this resource
+ cobjs = (yield ManagedAttachment.referencesTo(self._txn, managed_id))
+ if self._resourceID not in cobjs:
+ raise AttachmentStoreValidManagedID
+
+ # Now try and adjust the actual calendar data
+ all_removed = False
+ #calendar = (yield self.component())
+ if rids is None:
+ calendar.removeAllPropertiesWithParameterMatch("ATTACH", "MANAGED-ID", managed_id)
+ all_removed = True
+ else:
+ # TODO: per-recurrence removal
+ pass
+
+ # TODO: Here is where we want to store data implicitly - for now we have to let app layer deal with it
+ #yield self.setComponent(calendar)
+
+ # Remove it - this will take care of actually removing it from the store if there are
+ # no more references to the attachment
+ if all_removed:
+ yield self.removeManagedAttachmentWithID(managed_id)
+
+
+ @inlineCallbacks
+ def createManagedAttachment(self):
+
+ # We need to know the resource_ID of the home collection of the owner
+ # (not sharee) of this event
+ sharerHomeID = (yield self._parentCollection.sharerHomeID())
+ managedID = str(uuid.uuid4())
+ returnValue((
+ yield ManagedAttachment.create(
+ self._txn, managedID, sharerHomeID, self._resourceID,
+ )
+ ))
+
+
+ @inlineCallbacks
+ def updateManagedAttachment(self, managedID, oldattachment):
+
+ # We need to know the resource_ID of the home collection of the owner
+ # (not sharee) of this event
+ sharerHomeID = (yield self._parentCollection.sharerHomeID())
+ returnValue((
+ yield ManagedAttachment.update(
+ self._txn, managedID, sharerHomeID, self._resourceID, oldattachment._attachmentID,
+ )
+ ))
+
+
+ def attachmentWithManagedID(self, managed_id):
+ return ManagedAttachment.load(self._txn, managed_id)
+
+
+ @inlineCallbacks
+ def removeManagedAttachmentWithID(self, managed_id):
+ attachment = (yield self.attachmentWithManagedID(managed_id))
+ if attachment._objectResourceID == self._resourceID:
+ yield attachment.removeFromResource(self._resourceID)
+
+
+ @inlineCallbacks
def createAttachmentWithName(self, name):
# We need to know the resource_ID of the home collection of the owner
# (not sharee) of this event
sharerHomeID = (yield self._parentCollection.sharerHomeID())
+ dropboxID = (yield self.dropboxID())
returnValue((
- yield Attachment.create(
- self._txn, (yield self.dropboxID()), name, sharerHomeID
+ yield DropBoxAttachment.create(
+ self._txn, dropboxID, name, sharerHomeID,
)
))
+
@inlineCallbacks
def removeAttachmentWithName(self, name):
attachment = (yield self.attachmentWithName(name))
yield attachment.remove()
+
def attachmentWithName(self, name):
- return Attachment.loadWithName(self._txn, self._dropboxID, name)
+ return DropBoxAttachment.load(self._txn, self._dropboxID, name)
+
def attendeesCanManageAttachments(self):
return self._attachment == _ATTACHMENTS_MODE_WRITE
-
dropboxID = dropboxIDFromCalendarObject
-
_attachmentsQuery = Select(
[schema.ATTACHMENT.PATH],
From=schema.ATTACHMENT,
@@ -1322,6 +1722,7 @@
),
)
+
# IDataStoreObject
def contentType(self):
"""
@@ -1335,9 +1736,9 @@
_TEMPORARY_UPLOADS_DIRECTORY = "Temporary"
- def __init__(self, attachment, contentType, creating=False):
+ def __init__(self, attachment, contentType, dispositionName, creating=False):
super(AttachmentStorageTransport, self).__init__(
- attachment, contentType)
+ attachment, contentType, dispositionName)
fileDescriptor, fileName = self._temporaryFile()
# Wrap the file descriptor in a file object we can write to
@@ -1399,23 +1800,12 @@
raise QuotaExceeded()
self._path.moveTo(self._attachment._path)
- self._attachment._contentType = self._contentType
- self._attachment._md5 = self._hash.hexdigest()
- self._attachment._size = newSize
- att = schema.ATTACHMENT
- self._attachment._created, self._attachment._modified = map(
- sqltime,
- (yield Update(
- {
- att.CONTENT_TYPE : generateContentType(self._contentType),
- att.SIZE : self._attachment._size,
- att.MD5 : self._attachment._md5,
- att.MODIFIED : utcNowSQL
- },
- Where=(att.PATH == self._attachment.name()).And(
- att.DROPBOX_ID == self._attachment._dropboxID
- ),
- Return=(att.CREATED, att.MODIFIED)).on(self._txn))[0]
+
+ yield self._attachment.changed(
+ self._contentType,
+ self._dispositionName,
+ self._hash.hexdigest(),
+ newSize
)
if home:
@@ -1430,62 +1820,31 @@
def sqltime(value):
return datetimeMktime(parseSQLTimestamp(value))
+
+
class Attachment(object):
implements(IAttachment)
- def __init__(self, txn, dropboxID, name, ownerHomeID=None, justCreated=False):
+ def __init__(self, txn, a_id, dropboxID, name, ownerHomeID=None, justCreated=False):
self._txn = txn
+ self._attachmentID = a_id
+ self._ownerHomeID = ownerHomeID
self._dropboxID = dropboxID
+ self._contentType = None
+ self._size = 0
+ self._md5 = None
+ self._created = None
+ self._modified = None
self._name = name
- self._ownerHomeID = ownerHomeID
- self._size = 0
self._justCreated = justCreated
- @classmethod
- def _attachmentPathRoot(cls, txn, dropboxID):
- attachmentRoot = txn._store.attachmentsPath
+ def _attachmentPathRoot(self):
+ return self._txn._store.attachmentsPath
- # Use directory hashing scheme based on MD5 of dropboxID
- hasheduid = hashlib.md5(dropboxID).hexdigest()
- return attachmentRoot.child(hasheduid[0:2]).child(
- hasheduid[2:4]).child(hasheduid)
-
- @classmethod
@inlineCallbacks
- def create(cls, txn, dropboxID, name, ownerHomeID):
-
- # File system paths need to exist
- try:
- cls._attachmentPathRoot(txn, dropboxID).makedirs()
- except:
- pass
-
- # Now create the DB entry
- attachment = cls(txn, dropboxID, name, ownerHomeID, True)
- att = schema.ATTACHMENT
- yield Insert({
- att.CALENDAR_HOME_RESOURCE_ID : ownerHomeID,
- att.DROPBOX_ID : dropboxID,
- att.CONTENT_TYPE : "",
- att.SIZE : 0,
- att.MD5 : "",
- att.PATH : name
- }).on(txn)
- returnValue(attachment)
-
-
- @classmethod
- @inlineCallbacks
- def loadWithName(cls, txn, dropboxID, name):
- attachment = cls(txn, dropboxID, name)
- attachment = (yield attachment.initFromStore())
- returnValue(attachment)
-
-
- @inlineCallbacks
def initFromStore(self):
"""
Execute necessary SQL queries to retrieve attributes.
@@ -1493,58 +1852,81 @@
@return: C{True} if this attachment exists, C{False} otherwise.
"""
att = schema.ATTACHMENT
- rows = (yield Select([att.CALENDAR_HOME_RESOURCE_ID, att.CONTENT_TYPE,
- att.SIZE, att.MD5, att.CREATED, att.MODIFIED],
- From=att,
- Where=(att.DROPBOX_ID == self._dropboxID).And(
- att.PATH == self._name)).on(self._txn))
+ if self._dropboxID is not None:
+ where = (att.DROPBOX_ID == self._dropboxID).And(
+ att.PATH == self._name)
+ else:
+ where = (att.ATTACHMENT_ID == self._attachmentID)
+ rows = (yield Select(
+ [
+ att.ATTACHMENT_ID,
+ att.DROPBOX_ID,
+ att.CALENDAR_HOME_RESOURCE_ID,
+ att.CONTENT_TYPE,
+ att.SIZE,
+ att.MD5,
+ att.CREATED,
+ att.MODIFIED,
+ att.PATH,
+ ],
+ From=att,
+ Where=where
+ ).on(self._txn))
+
if not rows:
returnValue(None)
- self._ownerHomeID = rows[0][0]
- self._contentType = MimeType.fromString(rows[0][1])
- self._size = rows[0][2]
- self._md5 = rows[0][3]
- self._created = sqltime(rows[0][4])
- self._modified = sqltime(rows[0][5])
+
+ row_iter = iter(rows[0])
+ self._attachmentID = row_iter.next()
+ self._dropboxID = row_iter.next()
+ self._ownerHomeID = row_iter.next()
+ self._contentType = MimeType.fromString(row_iter.next())
+ self._size = row_iter.next()
+ self._md5 = row_iter.next()
+ self._created = sqltime(row_iter.next())
+ self._modified = sqltime(row_iter.next())
+ self._name = row_iter.next()
+
returnValue(self)
+ def dropboxID(self):
+ return self._dropboxID
+
+
+ def isManaged(self):
+ return not self._dropboxID
+
+
def name(self):
return self._name
- @property
- def _path(self):
- attachmentRoot = self._txn._store.attachmentsPath
- # Use directory hashing scheme based on MD5 of dropboxID
- hasheduid = hashlib.md5(self._dropboxID).hexdigest()
- return attachmentRoot.child(hasheduid[0:2]).child(
- hasheduid[2:4]).child(hasheduid).child(self.name())
-
-
def properties(self):
pass # stub
- def store(self, contentType):
- return AttachmentStorageTransport(self, contentType, self._justCreated)
+ def store(self, contentType, dispositionName=None):
+ return AttachmentStorageTransport(self, contentType, dispositionName, self._justCreated)
def retrieve(self, protocol):
return AttachmentRetrievalTransport(self._path).start(protocol)
+ def changed(self, contentType, dispositionName, md5, size):
+ raise NotImplementedError
+
_removeStatement = Delete(
From=schema.ATTACHMENT,
- Where=(schema.ATTACHMENT.DROPBOX_ID == Parameter("dropboxID")).And(
- schema.ATTACHMENT.PATH == Parameter("path")
- ))
+ Where=(schema.ATTACHMENT.ATTACHMENT_ID == Parameter("attachmentID"))
+ )
@inlineCallbacks
def remove(self):
oldSize = self._size
- self._txn.postCommit(self._path.remove)
+ self._txn.postCommit(self.removePaths)
yield self._internalRemove()
# Adjust quota
home = (yield self._txn.calendarHomeWithResourceID(self._ownerHomeID))
@@ -1555,16 +1937,80 @@
yield home.notifyChanged()
+ def removePaths(self):
+ """
+ Remove the actual file and up to attachment parent directory if empty.
+ """
+ self._path.remove()
+ parent = self._path.parent()
+ toppath = self._attachmentPathRoot().path
+ while parent.path != toppath:
+ if len(parent.listdir()) == 0:
+ parent.remove()
+ parent = parent.parent()
+ else:
+ break
+
+
def _internalRemove(self):
"""
Just delete the row; don't do any accounting / bookkeeping. (This is
for attachments that have failed to be created due to errors during
storage.)
"""
- return self._removeStatement.on(self._txn, dropboxID=self._dropboxID,
- path=self._name)
+ return self._removeStatement.on(self._txn, attachmentID=self._attachmentID)
+ @classmethod
+ @inlineCallbacks
+ def removedHome(cls, txn, homeID):
+ """
+ A calendar home is being removed so all of its attachments must go too. When removing,
+ we don't care about quota adjustment as there will be no quota once the home is removed.
+
+ TODO: this needs to be transactional wrt the actual file deletes.
+ """
+ att = schema.ATTACHMENT
+ attco = schema.ATTACHMENT_CALENDAR_OBJECT
+
+ rows = (yield Select(
+ [att.ATTACHMENT_ID, att.DROPBOX_ID, ],
+ From=att,
+ Where=(
+ att.CALENDAR_HOME_RESOURCE_ID == homeID
+ ),
+ ).on(txn))
+
+ for attachmentID, dropboxID in rows:
+ if dropboxID:
+ attachment = DropBoxAttachment(txn, attachmentID, None, None)
+ else:
+ attachment = ManagedAttachment(txn, attachmentID, None, None)
+ attachment = (yield attachment.initFromStore())
+ if attachment._path.exists():
+ attachment.removePaths()
+
+ yield Delete(
+ From=attco,
+ Where=(
+ attco.ATTACHMENT_ID.In(Select(
+ [att.ATTACHMENT_ID, ],
+ From=att,
+ Where=(
+ att.CALENDAR_HOME_RESOURCE_ID == homeID
+ ),
+ ))
+ ),
+ ).on(txn)
+
+ yield Delete(
+ From=att,
+ Where=(
+ att.CALENDAR_HOME_RESOURCE_ID == homeID
+ ),
+ ).on(txn)
+
+
# IDataStoreObject
def contentType(self):
return self._contentType
@@ -1586,4 +2032,405 @@
return self._modified
+
+class DropBoxAttachment(Attachment):
+
+ @classmethod
+ @inlineCallbacks
+ def create(cls, txn, dropboxID, name, ownerHomeID):
+ """
+ Create a new Attachment object.
+
+ @param txn: The transaction to use
+ @type txn: L{CommonStoreTransaction}
+ @param dropboxID: the identifier for the attachment (dropbox id or managed id)
+ @type dropboxID: C{str}
+ @param name: the name of the attachment
+ @type name: C{str}
+ @param ownerHomeID: the resource-id of the home collection of the attachment owner
+ @type ownerHomeID: C{int}
+ """
+
+ # Now create the DB entry
+ att = schema.ATTACHMENT
+ rows = (yield Insert({
+ att.CALENDAR_HOME_RESOURCE_ID : ownerHomeID,
+ att.DROPBOX_ID : dropboxID,
+ att.CONTENT_TYPE : "",
+ att.SIZE : 0,
+ att.MD5 : "",
+ att.PATH : name,
+ }, Return=(att.ATTACHMENT_ID, att.CREATED, att.MODIFIED)).on(txn))
+
+ row_iter = iter(rows[0])
+ a_id = row_iter.next()
+ created = sqltime(row_iter.next())
+ modified = sqltime(row_iter.next())
+
+ attachment = cls(txn, a_id, dropboxID, name, ownerHomeID, True)
+ attachment._created = created
+ attachment._modified = modified
+
+ # File system paths need to exist
+ try:
+ attachment._path.parent().makedirs()
+ except:
+ pass
+
+ returnValue(attachment)
+
+
+ @classmethod
+ @inlineCallbacks
+ def load(cls, txn, dropboxID, name):
+ attachment = cls(txn, None, dropboxID, name)
+ attachment = (yield attachment.initFromStore())
+ returnValue(attachment)
+
+
+ @property
+ def _path(self):
+ # Use directory hashing scheme based on MD5 of dropboxID
+ hasheduid = hashlib.md5(self._dropboxID).hexdigest()
+ attachmentRoot = self._attachmentPathRoot().child(hasheduid[0:2]).child(hasheduid[2:4]).child(hasheduid)
+ return attachmentRoot.child(self.name())
+
+
+ @inlineCallbacks
+ def changed(self, contentType, dispositionName, md5, size):
+ """
+ Dropbox attachments never change their path - ignore dispositionName.
+ """
+
+ self._contentType = contentType
+ self._md5 = md5
+ self._size = size
+
+ att = schema.ATTACHMENT
+ self._created, self._modified = map(
+ sqltime,
+ (yield Update(
+ {
+ att.CONTENT_TYPE : generateContentType(self._contentType),
+ att.SIZE : self._size,
+ att.MD5 : self._md5,
+ att.MODIFIED : utcNowSQL,
+ },
+ Where=(att.ATTACHMENT_ID == self._attachmentID),
+ Return=(att.CREATED, att.MODIFIED)).on(self._txn))[0]
+ )
+
+
+
+class ManagedAttachment(Attachment):
+
+ @classmethod
+ @inlineCallbacks
+ def _create(cls, txn, managedID, ownerHomeID):
+ """
+ Create a new Attachment object.
+
+ @param txn: The transaction to use
+ @type txn: L{CommonStoreTransaction}
+ @param managedID: the identifier for the attachment
+ @type managedID: C{str}
+ @param ownerHomeID: the resource-id of the home collection of the attachment owner
+ @type ownerHomeID: C{int}
+ """
+
+ # Now create the DB entry
+ att = schema.ATTACHMENT
+ rows = (yield Insert({
+ att.CALENDAR_HOME_RESOURCE_ID : ownerHomeID,
+ att.DROPBOX_ID : None,
+ att.CONTENT_TYPE : "",
+ att.SIZE : 0,
+ att.MD5 : "",
+ att.PATH : "",
+ }, Return=(att.ATTACHMENT_ID, att.CREATED, att.MODIFIED)).on(txn))
+
+ row_iter = iter(rows[0])
+ a_id = row_iter.next()
+ created = sqltime(row_iter.next())
+ modified = sqltime(row_iter.next())
+
+ attachment = cls(txn, a_id, managedID, None, ownerHomeID, True)
+ attachment._managedID = managedID
+ attachment._created = created
+ attachment._modified = modified
+
+ # File system paths need to exist
+ try:
+ attachment._path.parent().makedirs()
+ except:
+ pass
+
+ returnValue(attachment)
+
+
+ @classmethod
+ @inlineCallbacks
+ def create(cls, txn, managedID, ownerHomeID, referencedBy):
+ """
+ Create a new Attachment object.
+
+ @param txn: The transaction to use
+ @type txn: L{CommonStoreTransaction}
+ @param managedID: the identifier for the attachment
+ @type managedID: C{str}
+ @param ownerHomeID: the resource-id of the home collection of the attachment owner
+ @type ownerHomeID: C{int}
+ @param referencedBy: the resource-id of the calendar object referencing the attachment
+ @type referencedBy: C{int}
+ """
+
+ # Now create the DB entry
+ attachment = (yield cls._create(txn, managedID, ownerHomeID))
+
+ # Create the attachment<->calendar object relationship for managed attachments
+ attco = schema.ATTACHMENT_CALENDAR_OBJECT
+ yield Insert({
+ attco.ATTACHMENT_ID : attachment._attachmentID,
+ attco.MANAGED_ID : managedID,
+ attco.CALENDAR_OBJECT_RESOURCE_ID : referencedBy,
+ }).on(txn)
+
+ returnValue(attachment)
+
+
+ @classmethod
+ @inlineCallbacks
+ def update(cls, txn, managedID, ownerHomeID, referencedBy, oldAttachmentID):
+ """
+        Update an existing managed attachment: create a new attachment entry, repoint the
+        managed-id reference to it, and remove the old attachment if it is no longer referenced.
+
+ @param txn: The transaction to use
+ @type txn: L{CommonStoreTransaction}
+ @param managedID: the identifier for the attachment
+ @type managedID: C{str}
+ @param ownerHomeID: the resource-id of the home collection of the attachment owner
+ @type ownerHomeID: C{int}
+ @param referencedBy: the resource-id of the calendar object referencing the attachment
+ @type referencedBy: C{int}
+ @param oldAttachmentID: the attachment-id of the existing attachment being updated
+ @type oldAttachmentID: C{int}
+ """
+
+ # Now create the DB entry
+ attachment = (yield cls._create(txn, managedID, ownerHomeID))
+
+ # Update the attachment<->calendar object relationship for managed attachments
+ attco = schema.ATTACHMENT_CALENDAR_OBJECT
+ yield Update(
+ {
+ attco.ATTACHMENT_ID : attachment._attachmentID,
+ },
+ Where=(attco.MANAGED_ID == managedID).And(
+ attco.CALENDAR_OBJECT_RESOURCE_ID == referencedBy
+ ),
+ ).on(txn)
+
+ # Now check whether old attachmentID is still referenced - if not delete it
+ rows = (yield Select(
+ [attco.ATTACHMENT_ID, ],
+ From=attco,
+ Where=(attco.ATTACHMENT_ID == oldAttachmentID),
+ ).on(txn))
+ aids = [row[0] for row in rows] if rows is not None else ()
+ if len(aids) == 0:
+ oldattachment = ManagedAttachment(txn, oldAttachmentID, None, None)
+ oldattachment = (yield oldattachment.initFromStore())
+ yield oldattachment.remove()
+
+ returnValue(attachment)
+
+
+ @classmethod
+ @inlineCallbacks
+ def load(cls, txn, managedID):
+ attco = schema.ATTACHMENT_CALENDAR_OBJECT
+ rows = (yield Select(
+ [attco.ATTACHMENT_ID, attco.CALENDAR_OBJECT_RESOURCE_ID, ],
+ From=attco,
+ Where=(attco.MANAGED_ID == managedID),
+ ).on(txn))
+ if len(rows) == 0:
+ returnValue(None)
+ elif len(rows) != 1:
+ raise AttachmentStoreValidManagedID
+
+ attachment = cls(txn, rows[0][0], None, None)
+ attachment = (yield attachment.initFromStore())
+ attachment._managedID = managedID
+ attachment._objectResourceID = rows[0][1]
+ returnValue(attachment)
+
+
+ @classmethod
+ @inlineCallbacks
+ def referencesTo(cls, txn, managedID):
+ """
+ Find all the calendar object resourceIds referenced by this supplied managed-id.
+ """
+ attco = schema.ATTACHMENT_CALENDAR_OBJECT
+ rows = (yield Select(
+ [attco.CALENDAR_OBJECT_RESOURCE_ID, ],
+ From=attco,
+ Where=(attco.MANAGED_ID == managedID),
+ ).on(txn))
+ cobjs = set([row[0] for row in rows]) if rows is not None else set()
+ returnValue(cobjs)
+
+
+ @classmethod
+ @inlineCallbacks
+ def usedManagedID(cls, txn, managedID):
+ """
+ Return the "owner" home and referencing resource is, and UID for a managed-id.
+ """
+ att = schema.ATTACHMENT
+ attco = schema.ATTACHMENT_CALENDAR_OBJECT
+ co = schema.CALENDAR_OBJECT
+ rows = (yield Select(
+ [
+ att.CALENDAR_HOME_RESOURCE_ID,
+ attco.CALENDAR_OBJECT_RESOURCE_ID,
+ co.ICALENDAR_UID,
+ ],
+ From=att.join(
+ attco, att.ATTACHMENT_ID == attco.ATTACHMENT_ID, "left outer"
+ ).join(co, co.RESOURCE_ID == attco.CALENDAR_OBJECT_RESOURCE_ID),
+ Where=(attco.MANAGED_ID == managedID),
+ ).on(txn))
+ returnValue(rows)
+
+
+ @classmethod
+ @inlineCallbacks
+ def resourceRemoved(cls, txn, resourceID):
+ """
+ Remove all attachments referencing the specified resource.
+ """
+
+ # Find all reference attachment-ids and dereference
+ attco = schema.ATTACHMENT_CALENDAR_OBJECT
+ rows = (yield Select(
+ [attco.MANAGED_ID, ],
+ From=attco,
+ Where=(attco.CALENDAR_OBJECT_RESOURCE_ID == resourceID),
+ ).on(txn))
+ mids = set([row[0] for row in rows]) if rows is not None else set()
+ for managedID in mids:
+ attachment = (yield ManagedAttachment.load(txn, managedID))
+ (yield attachment.removeFromResource(resourceID))
+
+
+ @classmethod
+ @inlineCallbacks
+ def copyManagedID(cls, txn, oldManagedID, newManagedID, referencedBy):
+ """
+ Copy a managed-ID to a new ID and associate the original attachment with the
+ new resource.
+ """
+
+ # Find all reference attachment-ids and dereference
+ attco = schema.ATTACHMENT_CALENDAR_OBJECT
+ aid = (yield Select(
+ [attco.ATTACHMENT_ID, ],
+ From=attco,
+ Where=(attco.MANAGED_ID == oldManagedID),
+ ).on(txn))[0][0]
+
+ yield Insert({
+ attco.ATTACHMENT_ID : aid,
+ attco.MANAGED_ID : newManagedID,
+ attco.CALENDAR_OBJECT_RESOURCE_ID : referencedBy,
+ }).on(txn)
+
+
+ def managedID(self):
+ return self._managedID
+
+
+ @inlineCallbacks
+ def objectResource(self):
+ """
+ Return the calendar object resource associated with this attachment.
+ """
+
+ home = (yield self._txn.calendarHomeWithResourceID(self._ownerHomeID))
+ obj = (yield home.objectResourceWithID(self._objectResourceID))
+ returnValue(obj)
+
+
+ @property
+ def _path(self):
+ # Use directory hashing scheme based on MD5 of attachmentID
+ hasheduid = hashlib.md5(str(self._attachmentID)).hexdigest()
+ return self._attachmentPathRoot().child(hasheduid[0:2]).child(hasheduid[2:4]).child(hasheduid)
+
+
+ @inlineCallbacks
+ def location(self, new_id=None):
+ """
+ Return the URI location of the attachment. Use a different managed-id if one is passed in. That is used
+ when creating a reference to an existing attachment via a new Managed-ID.
+ """
+ if not hasattr(self, "_ownerName"):
+ home = (yield self._txn.calendarHomeWithResourceID(self._ownerHomeID))
+ self._ownerName = home.name()
+ location = self._txn._store.attachmentsURIPattern % {
+ "home": self._ownerName,
+ "name": self._managedID if new_id is None else new_id,
+ }
+ returnValue(location)
+
+
+ @inlineCallbacks
+ def changed(self, contentType, dispositionName, md5, size):
+ """
+ Always update name to current disposition name.
+ """
+
+ self._contentType = contentType
+ self._name = dispositionName
+ self._md5 = md5
+ self._size = size
+ att = schema.ATTACHMENT
+ self._created, self._modified = map(
+ sqltime,
+ (yield Update(
+ {
+ att.CONTENT_TYPE : generateContentType(self._contentType),
+ att.SIZE : self._size,
+ att.MD5 : self._md5,
+ att.MODIFIED : utcNowSQL,
+ att.PATH : self._name,
+ },
+ Where=(att.ATTACHMENT_ID == self._attachmentID),
+ Return=(att.CREATED, att.MODIFIED)).on(self._txn))[0]
+ )
+
+
+ @inlineCallbacks
+ def removeFromResource(self, resourceID):
+
+ # Delete the reference
+ attco = schema.ATTACHMENT_CALENDAR_OBJECT
+ yield Delete(
+ From=attco,
+ Where=(attco.ATTACHMENT_ID == self._attachmentID).And(
+ attco.CALENDAR_OBJECT_RESOURCE_ID == resourceID),
+ ).on(self._txn)
+
+        # Check whether any references still exist - if not, remove the actual attachment
+ rows = (yield Select(
+ [attco.CALENDAR_OBJECT_RESOURCE_ID, ],
+ From=attco,
+ Where=(attco.ATTACHMENT_ID == self._attachmentID),
+ ).on(self._txn))
+ if len(rows) == 0:
+ yield self.remove()
+
+
Calendar._objectResourceClass = CalendarObject
Modified: CalendarServer/trunk/txdav/caldav/datastore/test/common.py
===================================================================
--- CalendarServer/trunk/txdav/caldav/datastore/test/common.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/caldav/datastore/test/common.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -1929,7 +1929,7 @@
attachment = yield obj.createAttachmentWithName(
"new.attachment",
)
- t = attachment.store(MimeType("text", "x-fixture"))
+ t = attachment.store(MimeType("text", "x-fixture"), "")
self.assertProvides(IAttachmentStorageTransport, t)
t.write("new attachment")
t.write(" text")
@@ -1990,7 +1990,7 @@
created, once all the bytes have been stored.
"""
att = yield obj.createAttachmentWithName(name)
- t = att.store(mimeType)
+ t = att.store(mimeType, "")
t.write(contents)
yield t.loseConnection()
returnValue(att)
@@ -2068,7 +2068,7 @@
obj = yield self.calendarObjectUnderTest()
name = 'a-fun-attachment'
attachment = yield obj.createAttachmentWithName(name)
- transport = attachment.store(MimeType("test", "x-something"))
+ transport = attachment.store(MimeType("test", "x-something"), "")
peer = transport.getPeer()
host = transport.getHost()
self.assertIdentical(peer.attachment, attachment)
@@ -2087,7 +2087,7 @@
"""
home = yield self.homeUnderTest()
attachment = yield getit()
- t = attachment.store(MimeType("text", "x-fixture"))
+ t = attachment.store(MimeType("text", "x-fixture"), "")
sample = "all work and no play makes jack a dull boy"
chunk = (sample * (home.quotaAllowedBytes() / len(sample)))
@@ -2124,7 +2124,7 @@
create = lambda: obj.createAttachmentWithName("exists.attachment")
get = lambda: obj.attachmentWithName("exists.attachment")
attachment = yield create()
- t = attachment.store(MimeType("text", "x-fixture"))
+ t = attachment.store(MimeType("text", "x-fixture"), "")
sampleData = "a reasonably sized attachment"
t.write(sampleData)
yield t.loseConnection()
@@ -2187,7 +2187,7 @@
attachment = yield obj.createAttachmentWithName(
"new.attachment",
)
- t = attachment.store(MimeType("text", "plain"))
+ t = attachment.store(MimeType("text", "plain"), "")
t.write("new attachment text")
yield t.loseConnection()
yield self.commit()
Modified: CalendarServer/trunk/txdav/caldav/datastore/test/test_sql.py
===================================================================
--- CalendarServer/trunk/txdav/caldav/datastore/test/test_sql.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/caldav/datastore/test/test_sql.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -30,13 +30,13 @@
from txdav.xml.rfc2518 import GETContentLanguage, ResourceType
from txdav.base.propertystore.base import PropertyName
-from txdav.caldav.datastore.test.common import CommonTests as CalendarCommonTests,\
+from txdav.caldav.datastore.test.common import CommonTests as CalendarCommonTests, \
test_event_text
from txdav.caldav.datastore.test.test_file import setUpCalendarStore
from txdav.caldav.datastore.util import _migrateCalendar, migrateHome
from txdav.common.datastore.sql import ECALENDARTYPE, CommonObjectResource
from txdav.common.datastore.sql_legacy import PostgresLegacyIndexEmulator
-from txdav.common.datastore.sql_tables import schema, _BIND_MODE_DIRECT,\
+from txdav.common.datastore.sql_tables import schema, _BIND_MODE_DIRECT, \
_BIND_STATUS_ACCEPTED
from txdav.common.datastore.test.util import buildStore, populateCalendarsFrom
from txdav.common.icommondatastore import NoSuchObjectResourceError
@@ -63,8 +63,9 @@
self._sqlCalendarStore = yield buildStore(self, self.notifierFactory)
yield self.populate()
- self.nowYear = {"now":PyCalendarDateTime.getToday().getYear()}
+ self.nowYear = {"now": PyCalendarDateTime.getToday().getYear()}
+
@inlineCallbacks
def populate(self):
yield populateCalendarsFrom(self.requirements, self.storeUnderTest())
@@ -185,7 +186,7 @@
backed calendar. We need to test what happens when there is "bad" calendar data
present in the file-backed calendar with a broken recurrence-id that we can fix.
"""
-
+
self.storeUnderTest().setMigrating(True)
fromCalendar = yield (yield self.fileTransaction().calendarHomeWithUID(
"home_bad")).calendarWithName("calendar_fix_recurrence")
@@ -306,7 +307,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n") % self.nowYear)
-
+
toResource = yield toCalendar.calendarObjectWithName("3.ics")
caldata = yield toResource.component()
self.assertEqual(str(caldata), """BEGIN:VCALENDAR
@@ -353,7 +354,8 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n") % self.nowYear)
-
+
+
@inlineCallbacks
def test_migrateDuplicateAttachmentsCalendarFromFile(self):
"""
@@ -371,6 +373,7 @@
self.assertEqual(ok, 3)
self.assertEqual(bad, 0)
+
@inlineCallbacks
def test_migrateCalendarFromFile_Transparency(self):
"""
@@ -385,10 +388,10 @@
yield _migrateCalendar(fromCalendar, toCalendar,
lambda x: x.component())
- filter = caldavxml.Filter(
+ filter = caldavxml.Filter(
caldavxml.ComponentFilter(
caldavxml.ComponentFilter(
- caldavxml.TimeRange(start="%(now)s0201T000000Z" % self.nowYear, end="%(now)s0202T000000Z" % self.nowYear),
+ caldavxml.TimeRange(start="%(now)s0201T000000Z" % self.nowYear, end="%(now)s0202T000000Z" % self.nowYear),
name=("VEVENT", "VFREEBUSY", "VAVAILABILITY"),
),
name="VCALENDAR",
@@ -403,6 +406,7 @@
self.assertEquals(uid, "uid4")
self.assertEquals(transp, 'T')
+
@inlineCallbacks
def test_migrateHomeFromFile(self):
"""
@@ -410,7 +414,7 @@
backend to another; in this specific case, from the file-based backend
to the SQL-based backend.
"""
-
+
# Need to turn of split calendar behavior just for this test
self.patch(config, "RestrictCalendarsToOneComponentType", False)
@@ -467,9 +471,10 @@
continue
result = yield calendar.getSupportedComponents()
supported_components.add(result)
-
+
self.assertEqual(supported_components, set(("VEVENT", "VTODO",)))
+
@inlineCallbacks
def test_migrateHomeNoSplits(self):
"""
@@ -494,24 +499,25 @@
continue
result = yield calendar.getSupportedComponents()
supported_components.add(result)
-
+
self.assertEqual(supported_components, set(("VEVENT", "VTODO",)))
+
def test_calendarHomeVersion(self):
"""
The DATAVERSION column for new calendar homes must match the
CALENDAR-DATAVERSION value.
"""
-
+
home = yield self.transactionUnderTest().calendarHomeWithUID("home_version")
self.assertTrue(home is not None)
yield self.transactionUnderTest().commit
-
+
txn = yield self.transactionUnderTest()
version = yield txn.calendarserverValue("CALENDAR-DATAVERSION")[0][0]
ch = schema.CALENDAR_HOME
homeVersion = yield Select(
- [ch.DATAVERSION,],
+ [ch.DATAVERSION, ],
From=ch,
Where=ch.OWNER_UID == "home_version",
).on(txn)[0][0]
@@ -695,6 +701,7 @@
yield d1
yield d2
+
@inlineCallbacks
def test_datetimes(self):
calendarStore = self._sqlCalendarStore
@@ -714,6 +721,7 @@
self.assertEqual(obj.created(), datetimeMktime(datetime.datetime(2011, 2, 7, 11, 22, 47)))
self.assertEqual(obj.modified(), datetimeMktime(datetime.datetime(2011, 2, 8, 11, 22, 47)))
+
@inlineCallbacks
def test_notificationsProvisioningConcurrency(self):
"""
@@ -753,6 +761,7 @@
self.assertNotEqual(notification_uid1_1, None)
self.assertNotEqual(notification_uid1_2, None)
+
@inlineCallbacks
def test_removeCalendarPropertiesOnDelete(self):
"""
@@ -766,7 +775,7 @@
calendar = yield home.createCalendarWithName(name)
resourceID = calendar._resourceID
calendarProperties = calendar.properties()
-
+
prop = caldavxml.CalendarDescription.fromString("Calendar to be removed")
calendarProperties[PropertyName.fromElement(prop)] = prop
yield self.commit()
@@ -794,6 +803,7 @@
self.assertEqual(len(tuple(rows)), 0)
yield self.commit()
+
@inlineCallbacks
def test_removeCalendarObjectPropertiesOnDelete(self):
"""
@@ -839,6 +849,7 @@
self.assertEqual(len(tuple(rows)), 0)
yield self.commit()
+
@inlineCallbacks
def test_removeInboxObjectPropertiesOnDelete(self):
"""
@@ -849,7 +860,7 @@
# Create calendar object and add a property
home = yield self.homeUnderTest()
inbox = yield home.createCalendarWithName("inbox")
-
+
name = "test.ics"
component = VComponent.fromString(test_event_text)
metadata = {
@@ -891,6 +902,7 @@
self.assertEqual(len(tuple(rows)), 0)
yield self.commit()
+
@inlineCallbacks
def test_directShareCreateConcurrency(self):
"""
@@ -942,22 +954,23 @@
yield d1
yield d2
+
@inlineCallbacks
def test_transferSharingDetails(self):
"""
Test Calendar._transferSharingDetails to make sure sharing details are transferred.
"""
-
+
shareeHome = yield self.transactionUnderTest().calendarHomeWithUID("home_splits_shared")
calendar = yield (yield self.transactionUnderTest().calendarHomeWithUID(
"home_splits")).calendarWithName("calendar_1")
-
+
# Fake a shared binding on the original calendar
bind = calendar._bindSchema
_bindCreate = Insert({
bind.HOME_RESOURCE_ID: shareeHome._resourceID,
- bind.RESOURCE_ID: calendar._resourceID,
+ bind.RESOURCE_ID: calendar._resourceID,
bind.RESOURCE_NAME: "shared_1",
bind.MESSAGE: "Shared to you",
bind.BIND_MODE: _BIND_MODE_DIRECT,
@@ -984,34 +997,36 @@
self.assertTrue(sharedCalendar is not None)
self.assertEqual(sharedCalendar._resourceID, newcalendar._resourceID)
+
@inlineCallbacks
def test_moveCalendarObjectResource(self):
"""
Test Calendar._transferSharingDetails to make sure sharing details are transferred.
"""
-
+
calendar1 = yield (yield self.transactionUnderTest().calendarHomeWithUID(
"home_splits")).calendarWithName("calendar_1")
calendar2 = yield (yield self.transactionUnderTest().calendarHomeWithUID(
"home_splits")).calendarWithName("calendar_2")
-
+
child = yield calendar2.calendarObjectWithName("5.ics")
-
+
yield calendar2.moveObjectResource(child, calendar1)
-
+
child = yield calendar2.calendarObjectWithName("5.ics")
self.assertTrue(child is None)
-
+
child = yield calendar1.calendarObjectWithName("5.ics")
self.assertTrue(child is not None)
+
@inlineCallbacks
def test_splitCalendars(self):
"""
Test Calendar.splitCollectionByComponentTypes to make sure components are split out,
sync information is updated.
"""
-
+
# calendar_2 add a dead property to make sure it gets copied over
home = yield self.transactionUnderTest().calendarHomeWithUID("home_splits")
calendar2 = yield home.calendarWithName("calendar_2")
@@ -1031,7 +1046,7 @@
child = yield home.calendarWithName("calendar_1-vtodo")
self.assertTrue(child is None)
- calendar1 = yield home.calendarWithName("calendar_1")
+ calendar1 = yield home.calendarWithName("calendar_1")
children = yield calendar1.listCalendarObjects()
self.assertEqual(len(children), 3)
new_sync_token1 = yield calendar1.syncToken()
@@ -1043,7 +1058,7 @@
# calendar_2 does split
home = yield self.transactionUnderTest().calendarHomeWithUID("home_splits")
- calendar2 = yield home.calendarWithName("calendar_2")
+ calendar2 = yield home.calendarWithName("calendar_2")
original_sync_token2 = yield calendar2.syncToken()
yield calendar2.splitCollectionByComponentTypes()
yield self.commit()
@@ -1062,7 +1077,7 @@
self.assertTrue(pkey in calendar2_vtodo.properties())
self.assertEqual(str(calendar2_vtodo.properties()[pkey]), "A birthday calendar")
- calendar2 = yield home.calendarWithName("calendar_2")
+ calendar2 = yield home.calendarWithName("calendar_2")
children = yield calendar2.listCalendarObjects()
self.assertEqual(len(children), 3)
new_sync_token2 = yield calendar2.syncToken()
@@ -1075,13 +1090,14 @@
self.assertTrue(pkey in calendar2.properties())
self.assertEqual(str(calendar2.properties()[pkey]), "A birthday calendar")
+
@inlineCallbacks
def test_noSplitCalendars(self):
"""
Test CalendarHome.splitCalendars to make sure we end up with at least two collections
with different supported components.
"""
-
+
# Do split
home = yield self.transactionUnderTest().calendarHomeWithUID("home_no_splits")
calendars = yield home.calendars()
@@ -1098,23 +1114,24 @@
continue
result = yield calendar.getSupportedComponents()
supported_components.add(result)
-
+
self.assertEqual(supported_components, set(("VEVENT", "VTODO",)))
+
@inlineCallbacks
def test_resourceLock(self):
"""
Test CommonObjectResource.lock to make sure it locks, raises on missing resource,
and raises when locked and wait=False used.
"""
-
+
# Valid object
resource = yield self.calendarObjectUnderTest()
-
+
# Valid lock
yield resource.lock()
self.assertTrue(resource._locked)
-
+
# Setup a new transaction to verify the lock and also verify wait behavior
newTxn = self._sqlCalendarStore.newTransaction()
newResource = yield self.calendarObjectUnderTest(txn=newTxn)
@@ -1129,19 +1146,19 @@
# Commit existing transaction and verify we can get the lock using
yield self.commit()
-
+
resource = yield self.calendarObjectUnderTest()
yield resource.lock()
self.assertTrue(resource._locked)
-
+
# Setup a new transaction to verify the lock but pass in an alternative txn directly
newTxn = self._sqlCalendarStore.newTransaction()
-
+
# FIXME: not sure why, but without this statement here, this portion of the test fails in a funny way.
# Basically the query in the try block seems to execute twice, failing each time, one of which is caught,
# and the other not - causing the test to fail. Seems like some state on newTxn is not being initialized?
yield self.calendarObjectUnderTest("2.ics", txn=newTxn)
-
+
try:
yield resource.lock(wait=False, useTxn=newTxn)
except:
@@ -1169,29 +1186,30 @@
"""
Test CalendarObjectResource.recurrenceMinMax to make sure it handles a None value.
"""
-
+
# Valid object
resource = yield self.calendarObjectUnderTest()
-
+
# Valid lock
rMin, rMax = yield resource.recurrenceMinMax()
self.assertEqual(rMin, None)
self.assertEqual(rMax, None)
+
@inlineCallbacks
def test_notExpandedWithin(self):
"""
Test PostgresLegacyIndexEmulator.notExpandedWithin to make sure it returns the correct
result based on the ranges passed in.
"""
-
+
self.patch(config, "FreeBusyIndexDelayedExpand", False)
# Create the index on a new calendar
home = yield self.homeUnderTest()
newcalendar = yield home.createCalendarWithName("index_testing")
index = PostgresLegacyIndexEmulator(newcalendar)
-
+
# Create the calendar object to use for testing
nowYear = self.nowYear["now"]
caldata = """BEGIN:VCALENDAR
@@ -1287,15 +1305,15 @@
instances = yield calendarObject.instances()
self.assertNotEqual(len(instances), 0)
yield self.commit()
-
+
# Re-add event with re-indexing
calendar = yield self.calendarUnderTest()
calendarObject = yield self.calendarObjectUnderTest("indexing.ics")
yield calendarObject.setComponent(component)
instances2 = yield calendarObject.instances()
self.assertNotEqual(
- sorted(instances, key=lambda x:x[0])[0],
- sorted(instances2, key=lambda x:x[0])[0],
+ sorted(instances, key=lambda x: x[0])[0],
+ sorted(instances2, key=lambda x: x[0])[0],
)
yield self.commit()
@@ -1306,13 +1324,14 @@
yield calendarObject.setComponent(component)
instances3 = yield calendarObject.instances()
self.assertEqual(
- sorted(instances2, key=lambda x:x[0])[0],
- sorted(instances3, key=lambda x:x[0])[0],
+ sorted(instances2, key=lambda x: x[0])[0],
+ sorted(instances3, key=lambda x: x[0])[0],
)
-
+
yield calendar.removeCalendarObjectWithName("indexing.ics")
yield self.commit()
+
@inlineCallbacks
def test_loadObjectResourcesWithName(self):
"""
@@ -1324,19 +1343,19 @@
def _tests(cal):
resources = yield cal.objectResourcesWithNames(("1.ics",))
self.assertEqual(set([resource.name() for resource in resources]), set(("1.ics",)))
-
+
resources = yield cal.objectResourcesWithNames(("1.ics", "2.ics",))
self.assertEqual(set([resource.name() for resource in resources]), set(("1.ics", "2.ics",)))
-
+
resources = yield cal.objectResourcesWithNames(("1.ics", "2.ics", "3.ics",))
self.assertEqual(set([resource.name() for resource in resources]), set(("1.ics", "2.ics", "3.ics",)))
-
+
resources = yield cal.objectResourcesWithNames(("1.ics", "2.ics", "3.ics", "4.ics",))
self.assertEqual(set([resource.name() for resource in resources]), set(("1.ics", "2.ics", "3.ics", "4.ics",)))
-
+
resources = yield cal.objectResourcesWithNames(("bogus1.ics",))
self.assertEqual(set([resource.name() for resource in resources]), set())
-
+
resources = yield cal.objectResourcesWithNames(("bogus1.ics", "2.ics",))
self.assertEqual(set([resource.name() for resource in resources]), set(("2.ics",)))
@@ -1347,7 +1366,7 @@
# Adjust batch size and try again
self.patch(CommonObjectResource, "BATCH_LOAD_SIZE", 2)
yield _tests(cal)
-
+
yield self.commit()
# Tests on inbox - resources with properties
@@ -1392,13 +1411,27 @@
self.assertEqual(resources[0].properties()[PropertyName.fromElement(prop)], prop)
resources = yield inbox.objectResourcesWithNames(("1.ics", "2.ics",))
- resources.sort(key=lambda x:x._name)
+ resources.sort(key=lambda x: x._name)
prop = caldavxml.CalendarDescription.fromString("p1")
self.assertEqual(resources[0].properties()[PropertyName.fromElement(prop)], prop)
prop = caldavxml.CalendarDescription.fromString("p2")
self.assertEqual(resources[1].properties()[PropertyName.fromElement(prop)], prop)
resources = yield inbox.objectResourcesWithNames(("bogus1.ics", "2.ics",))
- resources.sort(key=lambda x:x._name)
+ resources.sort(key=lambda x: x._name)
prop = caldavxml.CalendarDescription.fromString("p2")
self.assertEqual(resources[0].properties()[PropertyName.fromElement(prop)], prop)
+
+
+ @inlineCallbacks
+ def test_objectResourceWithID(self):
+ """
+        L{ICalendarHome.objectResourceWithID} will return the calendar object.
+ """
+ home = yield self.homeUnderTest()
+ calendarObject = (yield home.objectResourceWithID(9999))
+ self.assertEquals(calendarObject, None)
+
+ obj = (yield self.calendarObjectUnderTest())
+ calendarObject = (yield home.objectResourceWithID(obj._resourceID))
+ self.assertNotEquals(calendarObject, None)
Modified: CalendarServer/trunk/txdav/caldav/datastore/test/test_util.py
===================================================================
--- CalendarServer/trunk/txdav/caldav/datastore/test/test_util.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/caldav/datastore/test/test_util.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -110,7 +110,7 @@
END:VCALENDAR
""")
- self.assertEquals( (yield dropboxIDFromCalendarObject(resource)), "12345-67890.dropbox")
+ self.assertEquals((yield dropboxIDFromCalendarObject(resource)), "12345-67890.dropbox")
@inlineCallbacks
@@ -305,10 +305,12 @@
return self._name
for filename, result in test_files:
- item = StorageTransportBase(FakeAttachment(filename), None)
+ item = StorageTransportBase(FakeAttachment(filename), None, None)
self.assertEquals(item._contentType, result)
- item = StorageTransportBase(FakeAttachment(filename), result)
+ self.assertEquals(item._dispositionName, None)
+ item = StorageTransportBase(FakeAttachment(filename), result, filename)
self.assertEquals(item._contentType, result)
+ self.assertEquals(item._dispositionName, filename)
Modified: CalendarServer/trunk/txdav/caldav/datastore/util.py
===================================================================
--- CalendarServer/trunk/txdav/caldav/datastore/util.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/caldav/datastore/util.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -468,7 +468,7 @@
contentTypes = loadMimeTypes()
- def __init__(self, attachment, contentType):
+ def __init__(self, attachment, contentType, dispositionName):
"""
Create a storage transport with a reference to an L{IAttachment} and a
L{twext.web2.http_headers.MimeType}.
@@ -477,9 +477,10 @@
self._clock = reactor
self._attachment = attachment
self._contentType = contentType
+ self._dispositionName = dispositionName
self._producer = None
- # Make sure we have some kind of contrent-type
+ # Make sure we have some kind of content-type
if self._contentType is None:
self._contentType = http_headers.MimeType.fromString(getType(self._attachment.name(), self.contentTypes))
Modified: CalendarServer/trunk/txdav/caldav/icalendarstore.py
===================================================================
--- CalendarServer/trunk/txdav/caldav/icalendarstore.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/caldav/icalendarstore.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -56,6 +56,27 @@
+class AttachmentStoreFailed(Exception):
+ """
+ Unable to store an attachment.
+ """
+
+
+
+class AttachmentStoreValidManagedID(Exception):
+ """
+ Specified attachment managed-id is not valid.
+ """
+
+
+
+class AttachmentRemoveFailed(Exception):
+ """
+ Unable to remove an attachment.
+ """
+
+
+
class QuotaExceeded(Exception):
"""
The quota for a particular user has been exceeded.
@@ -277,7 +298,19 @@
"""
+ def objectResourceWithID(rid):
+ """
+ Return the calendar object resource with the specified ID, assumed to be a child of
+ a calendar collection within this home.
+ @param rid: resource id of object to find
+ @type rid: C{int}
+
+        @return: L{ICalendarObject} or C{None} if not found
+ """
+
+
+
class ICalendar(INotifier, IShareableCollection, IDataStoreObject):
"""
Calendar
@@ -546,6 +579,67 @@
"""
+ #
+ # New managed attachment APIs that supersede dropbox
+ #
+
+ def addAttachment(pathpattern, rids, content_type, filename, stream):
+ """
+ Add a managed attachment to the calendar data.
+
+ @param pathpattern: URI template for the attachment property value.
+ @type pathpattern: C{str}
+ @param rids: set of RECURRENCE-ID values (not adjusted for UTC or TZID offset) to add the
+ new attachment to. The server must create necessary overrides if none already exist.
+ @type rids: C{iterable}
+ @param content_type: content-type information for the attachment data.
+ @type content_type: L{MimeType}
+ @param filename: display file name to use for the attachment.
+ @type filename: C{str}
+ @param stream: stream from which attachment data can be retrieved.
+ @type stream: L{IStream}
+
+ @raise: L{AttachmentStoreFailed} if the attachment data could not be stored.
+ """
+
+
+ def updateAttachment(pathpattern, managed_id, content_type, filename, stream):
+ """
+ Update an existing managed attachment in the calendar data.
+
+ @param pathpattern: URI template for the attachment property value.
+ @type pathpattern: C{str}
+ @param managed_id: the identifier of the attachment to update.
+ @type managed_id: C{str}
+ @param content_type: content-type information for the attachment data.
+ @type content_type: L{MimeType}
+ @param filename: display file name to use for the attachment.
+ @type filename: C{str}
+ @param stream: stream from which attachment data can be retrieved.
+ @type stream: L{IStream}
+
+ @raise: L{AttachmentStoreValidManagedID} if the managed-id does not identify an
+ existing attachment, or L{AttachmentStoreFailed} if the attachment data could not be stored.
+ """
+
+
+ def removeAttachment(rids, managed_id):
+ """
+ Remove an existing managed attachment from the calendar data.
+
+ @param rids: set of RECURRENCE-ID values (not adjusted for UTC or TZID offset) to remove the
+ attachment from. The server must create necessary overrides if none already exist.
+ @type rids: C{iterable}
+ @param managed_id: the identifier of the attachment to remove.
+ @type managed_id: C{str}
+
+ @raise: L{AttachmentStoreValidManagedID} if the managed-id does not identify an
+ existing attachment, or L{AttachmentRemoveFailed} if the attachment could not be removed.
+ """
+
+ #
+ # The following APIs are for the older Dropbox protocol, which is now deprecated in favor of
+ # managed attachments
+ #
+
def dropboxID():
"""
An identifier, unique to the calendar home, that specifies a location
Modified: CalendarServer/trunk/txdav/common/datastore/file.py
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/file.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/common/datastore/file.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -291,7 +291,7 @@
returnValue([kv[1] for kv in sorted(self._determineMemo(storeType, None).items(), key=lambda x: x[0])])
- @memoizedKey("uid", _determineMemo)
+ @memoizedKey("uid", _determineMemo, deferredResult=False)
def homeWithUID(self, storeType, uid, create=False):
if uid.startswith("."):
return None
@@ -302,7 +302,7 @@
return self._homeClass[storeType].homeWithUID(self, uid, create, storeType == ECALENDARTYPE)
- @memoizedKey("uid", "_notificationHomes")
+ @memoizedKey("uid", "_notificationHomes", deferredResult=False)
def notificationsWithUID(self, uid, home=None):
if home is None:
@@ -678,6 +678,15 @@
return results
+ def objectResourceWithID(self, rid):
+ """
+ Return the child object resource with the specified resource-ID. Not supported
+ by the file store, which has no resource ids.
+ """
+
+ # File store does not have resource ids.
+ raise NotImplementedError
+
+
def quotaUsedBytes(self):
try:
Modified: CalendarServer/trunk/txdav/common/datastore/sql.py
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/sql.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/common/datastore/sql.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -147,7 +147,8 @@
implements(ICalendarStore)
- def __init__(self, sqlTxnFactory, notifierFactory, attachmentsPath,
+ def __init__(self, sqlTxnFactory, notifierFactory,
+ attachmentsPath, attachmentsURIPattern,
enableCalendars=True, enableAddressBooks=True,
label="unlabeled", quota=(2 ** 20),
logLabels=False, logStats=False, logStatsLogFile=None, logSQL=False,
@@ -159,6 +160,7 @@
self.sqlTxnFactory = sqlTxnFactory
self.notifierFactory = notifierFactory
self.attachmentsPath = attachmentsPath
+ self.attachmentsURIPattern = attachmentsURIPattern
self.enableCalendars = enableCalendars
self.enableAddressBooks = enableAddressBooks
self.label = label
@@ -836,6 +838,7 @@
a = ("-- Label: %s\n" % (self._label.replace("%", "%%"),) + a[0],) + a[1:]
if self._store.logSQL:
log.error("SQL: %r %r" % (a, kw,))
+ results = None
try:
results = (yield self._sqlTxn.execSQL(*a, **kw))
finally:
@@ -1006,13 +1009,13 @@
"""
# TODO: see if there is a better way to import Attachment
- from txdav.caldav.datastore.sql import Attachment
+ from txdav.caldav.datastore.sql import DropBoxAttachment
results = (yield self.orphanedAttachments(batchSize=batchSize))
count = 0
for dropboxID, path in results:
- attachment = Attachment(self, dropboxID, path)
- (yield attachment.remove())
+ attachment = (yield DropBoxAttachment.load(self, dropboxID, path))
+ yield attachment.remove()
count += 1
returnValue(count)
@@ -1587,6 +1590,34 @@
returnValue(results)
+ @classmethod
+ def _objectResourceIDQuery(cls):
+ obj = cls._objectSchema
+ return Select(
+ [obj.PARENT_RESOURCE_ID],
+ From=obj,
+ Where=(obj.RESOURCE_ID == Parameter("resourceID")),
+ )
+
+
+ @inlineCallbacks
+ def objectResourceWithID(self, rid):
+ """
+ Return the child object resource with the specified resource-ID, or C{None} if not found.
+ """
+ rows = (yield self._objectResourceIDQuery().on(
+ self._txn, resourceID=rid
+ ))
+ if rows and len(rows) == 1:
+ child = (yield self.childWithID(rows[0][0]))
+ objectResource = (
+ yield child.objectResourceWithID(rid)
+ )
+ returnValue(objectResource)
+
+ returnValue(None)
+
+
@classproperty
def _quotaQuery(cls): #@NoSelf
meta = cls._homeMetaDataSchema
@@ -3288,13 +3319,18 @@
@classproperty
- def _moveParentUpdateQuery(cls): #@NoSelf
+ def _moveParentUpdateQuery(cls, adjustName=False): #@NoSelf
"""
DAL query to update a child to be in a new parent.
"""
obj = cls._objectSchema
+ cols = {
+ obj.PARENT_RESOURCE_ID: Parameter("newParentID")
+ }
+ if adjustName:
+ cols[obj.RESOURCE_NAME] = Parameter("newName")
return Update(
- {obj.PARENT_RESOURCE_ID: Parameter("newParentID")},
+ cols,
Where=obj.RESOURCE_ID == Parameter("resourceID")
)
@@ -3308,7 +3344,7 @@
@inlineCallbacks
- def moveObjectResource(self, child, newparent):
+ def moveObjectResource(self, child, newparent, newname=None):
"""
Move a child of this collection into another collection without actually removing/re-inserting the data.
Make sure sync and cache details for both collections are updated.
@@ -3320,11 +3356,19 @@
@type child: L{CommonObjectResource}
@param newparent: the parent to move to
@type newparent: L{CommonHomeChild}
+ @param newname: new name to use in new parent
+ @type newname: C{str} or C{None} for existing name
"""
+ if newname and newname.startswith("."):
+ raise ObjectResourceNameNotAllowedError(newname)
+
name = child.name()
uid = child.uid()
+ if newname is None:
+ newname = name
+
# Clean this collections cache and signal sync change
self._objects.pop(name, None)
self._objects.pop(uid, None)
@@ -3332,17 +3376,32 @@
yield self._deleteRevision(name)
yield self.notifyChanged()
- # Adjust the child to be a child of the new parent and update ancillary tables
- yield self._moveParentUpdateQuery.on(
+ # Handle cases where move is within the same collection or to a different collection
+ # with/without a name change
+ obj = self._objectSchema
+ cols = {}
+ if newparent._resourceID != self._resourceID:
+ cols[obj.PARENT_RESOURCE_ID] = Parameter("newParentID")
+ if newname != name:
+ cols[obj.RESOURCE_NAME] = Parameter("newName")
+ yield Update(
+ cols,
+ Where=obj.RESOURCE_ID == Parameter("resourceID")
+ ).on(
self._txn,
+ resourceID=child._resourceID,
newParentID=newparent._resourceID,
- resourceID=child._resourceID
+ newName=newname,
)
- yield self._movedObjectResource(child, newparent)
+
+ # Only signal a move when parent is different
+ if newparent._resourceID != self._resourceID:
+ yield self._movedObjectResource(child, newparent)
+
child._parentCollection = newparent
# Signal sync change on new collection
- yield newparent._insertRevision(name)
+ yield newparent._insertRevision(newname)
yield newparent.notifyChanged()
@@ -3856,6 +3915,21 @@
return Delete(cls._objectSchema, Where=cls._objectSchema.RESOURCE_ID == Parameter("resourceID"))
+ def moveTo(self, destination, name):
+ """
+ Move object to another collection.
+
+ @param destination: parent collection to move to
+ @type destination: L{CommonHomeChild}
+ @param name: new name in destination
+ @type name: C{str} or C{None} to use existing name
+ """
+
+ if name and name.startswith("."):
+ raise ObjectResourceNameNotAllowedError(name)
+ return self._parentCollection.moveObjectResource(self, destination, name)
+
+
@inlineCallbacks
def remove(self):
yield self._deleteQuery.on(self._txn, NoSuchObjectResourceError,
Modified: CalendarServer/trunk/txdav/common/datastore/sql_schema/current-oracle-dialect.sql
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/sql_schema/current-oracle-dialect.sql 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/common/datastore/sql_schema/current-oracle-dialect.sql 2012-12-10 20:22:58 UTC (rev 10147)
@@ -1,5 +1,6 @@
create sequence RESOURCE_ID_SEQ;
create sequence INSTANCE_ID_SEQ;
+create sequence ATTACHMENT_ID_SEQ;
create sequence REVISION_SEQ;
create table NODE_INFO (
"HOSTNAME" nvarchar2(255),
@@ -159,6 +160,7 @@
);
create table ATTACHMENT (
+ "ATTACHMENT_ID" integer primary key,
"CALENDAR_HOME_RESOURCE_ID" integer not null references CALENDAR_HOME,
"DROPBOX_ID" nvarchar2(255),
"CONTENT_TYPE" nvarchar2(255),
@@ -166,10 +168,17 @@
"MD5" nchar(32),
"CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
"MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
- "PATH" nvarchar2(1024),
- primary key("DROPBOX_ID", "PATH")
+ "PATH" nvarchar2(1024)
);
+create table ATTACHMENT_CALENDAR_OBJECT (
+ "ATTACHMENT_ID" integer not null references ATTACHMENT on delete cascade,
+ "MANAGED_ID" nvarchar2(255),
+ "CALENDAR_OBJECT_RESOURCE_ID" integer not null references CALENDAR_OBJECT on delete cascade,
+ primary key("ATTACHMENT_ID", "CALENDAR_OBJECT_RESOURCE_ID"),
+ unique("MANAGED_ID", "CALENDAR_OBJECT_RESOURCE_ID")
+);
+
create table RESOURCE_PROPERTY (
"RESOURCE_ID" integer not null,
"NAME" nvarchar2(255),
@@ -268,7 +277,7 @@
"VALUE" nvarchar2(255)
);
-insert into CALENDARSERVER (NAME, VALUE) values ('VERSION', '12');
+insert into CALENDARSERVER (NAME, VALUE) values ('VERSION', '13');
insert into CALENDARSERVER (NAME, VALUE) values ('CALENDAR-DATAVERSION', '3');
insert into CALENDARSERVER (NAME, VALUE) values ('ADDRESSBOOK-DATAVERSION', '1');
create index INVITE_INVITE_UID_9b0902ff on INVITE (
Modified: CalendarServer/trunk/txdav/common/datastore/sql_schema/current.sql
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/sql_schema/current.sql 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/common/datastore/sql_schema/current.sql 2012-12-10 20:22:58 UTC (rev 10147)
@@ -46,7 +46,7 @@
create table CALENDAR_HOME (
RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
OWNER_UID varchar(255) not null unique, -- implicit index
- DATAVERSION integer default 0 not null
+ DATAVERSION integer default 0 not null
);
----------------------------
@@ -302,26 +302,39 @@
create index TRANSPARENCY_TIME_RANGE_INSTANCE_ID on
TRANSPARENCY(TIME_RANGE_INSTANCE_ID);
+
----------------
-- Attachment --
----------------
+create sequence ATTACHMENT_ID_SEQ;
+
create table ATTACHMENT (
- CALENDAR_HOME_RESOURCE_ID integer not null references CALENDAR_HOME,
- DROPBOX_ID varchar(255) not null,
- CONTENT_TYPE varchar(255) not null,
- SIZE integer not null,
- MD5 char(32) not null,
+ ATTACHMENT_ID integer primary key default nextval('ATTACHMENT_ID_SEQ'), -- implicit index
+ CALENDAR_HOME_RESOURCE_ID integer not null references CALENDAR_HOME,
+ DROPBOX_ID varchar(255),
+ CONTENT_TYPE varchar(255) not null,
+ SIZE integer not null,
+ MD5 char(32) not null,
CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
- PATH varchar(1024) not null,
-
- primary key (DROPBOX_ID, PATH) --implicit index
+ PATH varchar(1024) not null
);
create index ATTACHMENT_CALENDAR_HOME_RESOURCE_ID on
ATTACHMENT(CALENDAR_HOME_RESOURCE_ID);
+-- Many-to-many relationship between attachments and calendar objects
+create table ATTACHMENT_CALENDAR_OBJECT (
+ ATTACHMENT_ID integer not null references ATTACHMENT on delete cascade,
+ MANAGED_ID varchar(255) not null,
+ CALENDAR_OBJECT_RESOURCE_ID integer not null references CALENDAR_OBJECT on delete cascade,
+
+ primary key (ATTACHMENT_ID, CALENDAR_OBJECT_RESOURCE_ID), -- implicit index
+ unique (MANAGED_ID, CALENDAR_OBJECT_RESOURCE_ID) -- implicit index
+);
+
+
-----------------------
-- Resource Property --
-----------------------
@@ -343,7 +356,7 @@
create table ADDRESSBOOK_HOME (
RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
OWNER_UID varchar(255) not null unique, -- implicit index
- DATAVERSION integer default 0 not null
+ DATAVERSION integer default 0 not null
);
-------------------------------
@@ -513,6 +526,6 @@
VALUE varchar(255)
);
-insert into CALENDARSERVER values ('VERSION', '12');
+insert into CALENDARSERVER values ('VERSION', '13');
insert into CALENDARSERVER values ('CALENDAR-DATAVERSION', '3');
insert into CALENDARSERVER values ('ADDRESSBOOK-DATAVERSION', '1');
Copied: CalendarServer/trunk/txdav/common/datastore/sql_schema/old/oracle-dialect/v12.sql (from rev 10145, CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/oracle-dialect/v12.sql)
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/sql_schema/old/oracle-dialect/v12.sql (rev 0)
+++ CalendarServer/trunk/txdav/common/datastore/sql_schema/old/oracle-dialect/v12.sql 2012-12-10 20:22:58 UTC (rev 10147)
@@ -0,0 +1,370 @@
+create sequence RESOURCE_ID_SEQ;
+create sequence INSTANCE_ID_SEQ;
+create sequence REVISION_SEQ;
+create table NODE_INFO (
+ "HOSTNAME" nvarchar2(255),
+ "PID" integer not null,
+ "PORT" integer not null,
+ "TIME" timestamp default CURRENT_TIMESTAMP at time zone 'UTC' not null,
+ primary key("HOSTNAME", "PORT")
+);
+
+create table CALENDAR_HOME (
+ "RESOURCE_ID" integer primary key,
+ "OWNER_UID" nvarchar2(255) unique,
+ "DATAVERSION" integer default 0 not null
+);
+
+create table CALENDAR_HOME_METADATA (
+ "RESOURCE_ID" integer primary key references CALENDAR_HOME on delete cascade,
+ "QUOTA_USED_BYTES" integer default 0 not null,
+ "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC'
+);
+
+create table CALENDAR (
+ "RESOURCE_ID" integer primary key
+);
+
+create table CALENDAR_METADATA (
+ "RESOURCE_ID" integer primary key references CALENDAR on delete cascade,
+ "SUPPORTED_COMPONENTS" nvarchar2(255) default null,
+ "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC'
+);
+
+create table INVITE (
+ "INVITE_UID" nvarchar2(255),
+ "NAME" nvarchar2(255),
+ "RECIPIENT_ADDRESS" nvarchar2(255),
+ "HOME_RESOURCE_ID" integer not null,
+ "RESOURCE_ID" integer not null
+);
+
+create table NOTIFICATION_HOME (
+ "RESOURCE_ID" integer primary key,
+ "OWNER_UID" nvarchar2(255) unique
+);
+
+create table NOTIFICATION (
+ "RESOURCE_ID" integer primary key,
+ "NOTIFICATION_HOME_RESOURCE_ID" integer not null references NOTIFICATION_HOME,
+ "NOTIFICATION_UID" nvarchar2(255),
+ "XML_TYPE" nvarchar2(255),
+ "XML_DATA" nclob,
+ "MD5" nchar(32),
+ "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ unique("NOTIFICATION_UID", "NOTIFICATION_HOME_RESOURCE_ID")
+);
+
+create table CALENDAR_BIND (
+ "CALENDAR_HOME_RESOURCE_ID" integer not null references CALENDAR_HOME,
+ "CALENDAR_RESOURCE_ID" integer not null references CALENDAR on delete cascade,
+ "CALENDAR_RESOURCE_NAME" nvarchar2(255),
+ "BIND_MODE" integer not null,
+ "BIND_STATUS" integer not null,
+ "SEEN_BY_OWNER" integer not null,
+ "SEEN_BY_SHAREE" integer not null,
+ "MESSAGE" nclob,
+ primary key("CALENDAR_HOME_RESOURCE_ID", "CALENDAR_RESOURCE_ID"),
+ unique("CALENDAR_HOME_RESOURCE_ID", "CALENDAR_RESOURCE_NAME")
+);
+
+create table CALENDAR_BIND_MODE (
+ "ID" integer primary key,
+ "DESCRIPTION" nvarchar2(16) unique
+);
+
+insert into CALENDAR_BIND_MODE (DESCRIPTION, ID) values ('own', 0);
+insert into CALENDAR_BIND_MODE (DESCRIPTION, ID) values ('read', 1);
+insert into CALENDAR_BIND_MODE (DESCRIPTION, ID) values ('write', 2);
+insert into CALENDAR_BIND_MODE (DESCRIPTION, ID) values ('direct', 3);
+create table CALENDAR_BIND_STATUS (
+ "ID" integer primary key,
+ "DESCRIPTION" nvarchar2(16) unique
+);
+
+insert into CALENDAR_BIND_STATUS (DESCRIPTION, ID) values ('invited', 0);
+insert into CALENDAR_BIND_STATUS (DESCRIPTION, ID) values ('accepted', 1);
+insert into CALENDAR_BIND_STATUS (DESCRIPTION, ID) values ('declined', 2);
+insert into CALENDAR_BIND_STATUS (DESCRIPTION, ID) values ('invalid', 3);
+create table CALENDAR_OBJECT (
+ "RESOURCE_ID" integer primary key,
+ "CALENDAR_RESOURCE_ID" integer not null references CALENDAR on delete cascade,
+ "RESOURCE_NAME" nvarchar2(255),
+ "ICALENDAR_TEXT" nclob,
+ "ICALENDAR_UID" nvarchar2(255),
+ "ICALENDAR_TYPE" nvarchar2(255),
+ "ATTACHMENTS_MODE" integer default 0 not null,
+ "DROPBOX_ID" nvarchar2(255),
+ "ORGANIZER" nvarchar2(255),
+ "ORGANIZER_OBJECT" integer references CALENDAR_OBJECT,
+ "RECURRANCE_MIN" date,
+ "RECURRANCE_MAX" date,
+ "ACCESS" integer default 0 not null,
+ "SCHEDULE_OBJECT" integer default 0,
+ "SCHEDULE_TAG" nvarchar2(36) default null,
+ "SCHEDULE_ETAGS" nclob default null,
+ "PRIVATE_COMMENTS" integer default 0 not null,
+ "MD5" nchar(32),
+ "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ unique("CALENDAR_RESOURCE_ID", "RESOURCE_NAME")
+);
+
+create table CALENDAR_OBJECT_ATTACHMENTS_MO (
+ "ID" integer primary key,
+ "DESCRIPTION" nvarchar2(16) unique
+);
+
+insert into CALENDAR_OBJECT_ATTACHMENTS_MO (DESCRIPTION, ID) values ('none', 0);
+insert into CALENDAR_OBJECT_ATTACHMENTS_MO (DESCRIPTION, ID) values ('read', 1);
+insert into CALENDAR_OBJECT_ATTACHMENTS_MO (DESCRIPTION, ID) values ('write', 2);
+create table CALENDAR_ACCESS_TYPE (
+ "ID" integer primary key,
+ "DESCRIPTION" nvarchar2(32) unique
+);
+
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('', 0);
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('public', 1);
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('private', 2);
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('confidential', 3);
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('restricted', 4);
+create table TIME_RANGE (
+ "INSTANCE_ID" integer primary key,
+ "CALENDAR_RESOURCE_ID" integer not null references CALENDAR on delete cascade,
+ "CALENDAR_OBJECT_RESOURCE_ID" integer not null references CALENDAR_OBJECT on delete cascade,
+ "FLOATING" integer not null,
+ "START_DATE" timestamp not null,
+ "END_DATE" timestamp not null,
+ "FBTYPE" integer not null,
+ "TRANSPARENT" integer not null
+);
+
+create table FREE_BUSY_TYPE (
+ "ID" integer primary key,
+ "DESCRIPTION" nvarchar2(16) unique
+);
+
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('unknown', 0);
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('free', 1);
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('busy', 2);
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('busy-unavailable', 3);
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('busy-tentative', 4);
+create table TRANSPARENCY (
+ "TIME_RANGE_INSTANCE_ID" integer not null references TIME_RANGE on delete cascade,
+ "USER_ID" nvarchar2(255),
+ "TRANSPARENT" integer not null
+);
+
+create table ATTACHMENT (
+ "CALENDAR_HOME_RESOURCE_ID" integer not null references CALENDAR_HOME,
+ "DROPBOX_ID" nvarchar2(255),
+ "CONTENT_TYPE" nvarchar2(255),
+ "SIZE" integer not null,
+ "MD5" nchar(32),
+ "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ "PATH" nvarchar2(1024),
+ primary key("DROPBOX_ID", "PATH")
+);
+
+create table RESOURCE_PROPERTY (
+ "RESOURCE_ID" integer not null,
+ "NAME" nvarchar2(255),
+ "VALUE" nclob,
+ "VIEWER_UID" nvarchar2(255),
+ primary key("RESOURCE_ID", "NAME", "VIEWER_UID")
+);
+
+create table ADDRESSBOOK_HOME (
+ "RESOURCE_ID" integer primary key,
+ "OWNER_UID" nvarchar2(255) unique,
+ "DATAVERSION" integer default 0 not null
+);
+
+create table ADDRESSBOOK_HOME_METADATA (
+ "RESOURCE_ID" integer primary key references ADDRESSBOOK_HOME on delete cascade,
+ "QUOTA_USED_BYTES" integer default 0 not null,
+ "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC'
+);
+
+create table ADDRESSBOOK (
+ "RESOURCE_ID" integer primary key
+);
+
+create table ADDRESSBOOK_METADATA (
+ "RESOURCE_ID" integer primary key references ADDRESSBOOK on delete cascade,
+ "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC'
+);
+
+create table ADDRESSBOOK_BIND (
+ "ADDRESSBOOK_HOME_RESOURCE_ID" integer not null references ADDRESSBOOK_HOME,
+ "ADDRESSBOOK_RESOURCE_ID" integer not null references ADDRESSBOOK on delete cascade,
+ "ADDRESSBOOK_RESOURCE_NAME" nvarchar2(255),
+ "BIND_MODE" integer not null,
+ "BIND_STATUS" integer not null,
+ "SEEN_BY_OWNER" integer not null,
+ "SEEN_BY_SHAREE" integer not null,
+ "MESSAGE" nclob,
+ primary key("ADDRESSBOOK_HOME_RESOURCE_ID", "ADDRESSBOOK_RESOURCE_ID"),
+ unique("ADDRESSBOOK_HOME_RESOURCE_ID", "ADDRESSBOOK_RESOURCE_NAME")
+);
+
+create table ADDRESSBOOK_OBJECT (
+ "RESOURCE_ID" integer primary key,
+ "ADDRESSBOOK_RESOURCE_ID" integer not null references ADDRESSBOOK on delete cascade,
+ "RESOURCE_NAME" nvarchar2(255),
+ "VCARD_TEXT" nclob,
+ "VCARD_UID" nvarchar2(255),
+ "MD5" nchar(32),
+ "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+ unique("ADDRESSBOOK_RESOURCE_ID", "RESOURCE_NAME"),
+ unique("ADDRESSBOOK_RESOURCE_ID", "VCARD_UID")
+);
+
+create table CALENDAR_OBJECT_REVISIONS (
+ "CALENDAR_HOME_RESOURCE_ID" integer not null references CALENDAR_HOME,
+ "CALENDAR_RESOURCE_ID" integer references CALENDAR,
+ "CALENDAR_NAME" nvarchar2(255) default null,
+ "RESOURCE_NAME" nvarchar2(255),
+ "REVISION" integer not null,
+ "DELETED" integer not null
+);
+
+create table ADDRESSBOOK_OBJECT_REVISIONS (
+ "ADDRESSBOOK_HOME_RESOURCE_ID" integer not null references ADDRESSBOOK_HOME,
+ "ADDRESSBOOK_RESOURCE_ID" integer references ADDRESSBOOK,
+ "ADDRESSBOOK_NAME" nvarchar2(255) default null,
+ "RESOURCE_NAME" nvarchar2(255),
+ "REVISION" integer not null,
+ "DELETED" integer not null
+);
+
+create table NOTIFICATION_OBJECT_REVISIONS (
+ "NOTIFICATION_HOME_RESOURCE_ID" integer not null references NOTIFICATION_HOME on delete cascade,
+ "RESOURCE_NAME" nvarchar2(255),
+ "REVISION" integer not null,
+ "DELETED" integer not null,
+ unique("NOTIFICATION_HOME_RESOURCE_ID", "RESOURCE_NAME")
+);
+
+create table APN_SUBSCRIPTIONS (
+ "TOKEN" nvarchar2(255),
+ "RESOURCE_KEY" nvarchar2(255),
+ "MODIFIED" integer not null,
+ "SUBSCRIBER_GUID" nvarchar2(255),
+ "USER_AGENT" nvarchar2(255) default null,
+ "IP_ADDR" nvarchar2(255) default null,
+ primary key("TOKEN", "RESOURCE_KEY")
+);
+
+create table CALENDARSERVER (
+ "NAME" nvarchar2(255) primary key,
+ "VALUE" nvarchar2(255)
+);
+
+insert into CALENDARSERVER (NAME, VALUE) values ('VERSION', '12');
+insert into CALENDARSERVER (NAME, VALUE) values ('CALENDAR-DATAVERSION', '3');
+insert into CALENDARSERVER (NAME, VALUE) values ('ADDRESSBOOK-DATAVERSION', '1');
+create index INVITE_INVITE_UID_9b0902ff on INVITE (
+ INVITE_UID
+);
+
+create index INVITE_RESOURCE_ID_b36ddc23 on INVITE (
+ RESOURCE_ID
+);
+
+create index INVITE_HOME_RESOURCE__e9bdf77e on INVITE (
+ HOME_RESOURCE_ID
+);
+
+create index NOTIFICATION_NOTIFICA_f891f5f9 on NOTIFICATION (
+ NOTIFICATION_HOME_RESOURCE_ID
+);
+
+create index CALENDAR_BIND_RESOURC_e57964d4 on CALENDAR_BIND (
+ CALENDAR_RESOURCE_ID
+);
+
+create index CALENDAR_OBJECT_CALEN_a9a453a9 on CALENDAR_OBJECT (
+ CALENDAR_RESOURCE_ID,
+ ICALENDAR_UID
+);
+
+create index CALENDAR_OBJECT_CALEN_96e83b73 on CALENDAR_OBJECT (
+ CALENDAR_RESOURCE_ID,
+ RECURRANCE_MAX
+);
+
+create index CALENDAR_OBJECT_ORGAN_7ce24750 on CALENDAR_OBJECT (
+ ORGANIZER_OBJECT
+);
+
+create index CALENDAR_OBJECT_DROPB_de041d80 on CALENDAR_OBJECT (
+ DROPBOX_ID
+);
+
+create index TIME_RANGE_CALENDAR_R_beb6e7eb on TIME_RANGE (
+ CALENDAR_RESOURCE_ID
+);
+
+create index TIME_RANGE_CALENDAR_O_acf37bd1 on TIME_RANGE (
+ CALENDAR_OBJECT_RESOURCE_ID
+);
+
+create index TRANSPARENCY_TIME_RAN_5f34467f on TRANSPARENCY (
+ TIME_RANGE_INSTANCE_ID
+);
+
+create index ATTACHMENT_CALENDAR_H_0078845c on ATTACHMENT (
+ CALENDAR_HOME_RESOURCE_ID
+);
+
+create index ADDRESSBOOK_BIND_RESO_205aa75c on ADDRESSBOOK_BIND (
+ ADDRESSBOOK_RESOURCE_ID
+);
+
+create index CALENDAR_OBJECT_REVIS_3a3956c4 on CALENDAR_OBJECT_REVISIONS (
+ CALENDAR_HOME_RESOURCE_ID,
+ CALENDAR_RESOURCE_ID
+);
+
+create index CALENDAR_OBJECT_REVIS_2643d556 on CALENDAR_OBJECT_REVISIONS (
+ CALENDAR_RESOURCE_ID,
+ RESOURCE_NAME
+);
+
+create index CALENDAR_OBJECT_REVIS_265c8acf on CALENDAR_OBJECT_REVISIONS (
+ CALENDAR_RESOURCE_ID,
+ REVISION
+);
+
+create index ADDRESSBOOK_OBJECT_RE_f460d62d on ADDRESSBOOK_OBJECT_REVISIONS (
+ ADDRESSBOOK_HOME_RESOURCE_ID,
+ ADDRESSBOOK_RESOURCE_ID
+);
+
+create index ADDRESSBOOK_OBJECT_RE_9a848f39 on ADDRESSBOOK_OBJECT_REVISIONS (
+ ADDRESSBOOK_RESOURCE_ID,
+ RESOURCE_NAME
+);
+
+create index ADDRESSBOOK_OBJECT_RE_cb101e6b on ADDRESSBOOK_OBJECT_REVISIONS (
+ ADDRESSBOOK_RESOURCE_ID,
+ REVISION
+);
+
+create index NOTIFICATION_OBJECT_R_036a9cee on NOTIFICATION_OBJECT_REVISIONS (
+ NOTIFICATION_HOME_RESOURCE_ID,
+ REVISION
+);
+
+create index APN_SUBSCRIPTIONS_RES_9610d78e on APN_SUBSCRIPTIONS (
+ RESOURCE_KEY
+);
+
Copied: CalendarServer/trunk/txdav/common/datastore/sql_schema/old/postgres-dialect/v12.sql (from rev 10145, CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/postgres-dialect/v12.sql)
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/sql_schema/old/postgres-dialect/v12.sql (rev 0)
+++ CalendarServer/trunk/txdav/common/datastore/sql_schema/old/postgres-dialect/v12.sql 2012-12-10 20:22:58 UTC (rev 10147)
@@ -0,0 +1,518 @@
+-- -*- test-case-name: txdav.caldav.datastore.test.test_sql,txdav.carddav.datastore.test.test_sql -*-
+
+----
+-- Copyright (c) 2010-2012 Apple Inc. All rights reserved.
+--
+-- Licensed under the Apache License, Version 2.0 (the "License");
+-- you may not use this file except in compliance with the License.
+-- You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+----
+
+-----------------
+-- Resource ID --
+-----------------
+
+create sequence RESOURCE_ID_SEQ;
+
+-------------------------
+-- Cluster Bookkeeping --
+-------------------------
+
+-- Information about a process connected to this database.
+
+-- Note that this must match the node info schema in twext.enterprise.queue.
+create table NODE_INFO (
+ HOSTNAME varchar(255) not null,
+ PID integer not null,
+ PORT integer not null,
+ TIME timestamp not null default timezone('UTC', CURRENT_TIMESTAMP),
+
+ primary key(HOSTNAME, PORT)
+);
+
+
+-------------------
+-- Calendar Home --
+-------------------
+
+create table CALENDAR_HOME (
+ RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+ OWNER_UID varchar(255) not null unique, -- implicit index
+ DATAVERSION integer default 0 not null
+);
+
+----------------------------
+-- Calendar Home Metadata --
+----------------------------
+
+create table CALENDAR_HOME_METADATA (
+ RESOURCE_ID integer primary key references CALENDAR_HOME on delete cascade, -- implicit index
+ QUOTA_USED_BYTES integer default 0 not null,
+ CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+ MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP)
+);
+
+--------------
+-- Calendar --
+--------------
+
+create table CALENDAR (
+ RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ') -- implicit index
+);
+
+
+-----------------------
+-- Calendar Metadata --
+-----------------------
+
+create table CALENDAR_METADATA (
+ RESOURCE_ID integer primary key references CALENDAR on delete cascade, -- implicit index
+ SUPPORTED_COMPONENTS varchar(255) default null,
+ CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+ MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP)
+);
+
+
+------------------------
+-- Sharing Invitation --
+------------------------
+
+create table INVITE (
+ INVITE_UID varchar(255) not null,
+ NAME varchar(255) not null,
+ RECIPIENT_ADDRESS varchar(255) not null,
+ HOME_RESOURCE_ID integer not null,
+ RESOURCE_ID integer not null
+
+ -- Need primary key on (INVITE_UID, NAME, RECIPIENT_ADDRESS)?
+);
+
+create index INVITE_INVITE_UID on INVITE(INVITE_UID);
+create index INVITE_RESOURCE_ID on INVITE(RESOURCE_ID);
+create index INVITE_HOME_RESOURCE_ID on INVITE(HOME_RESOURCE_ID);
+
+---------------------------
+-- Sharing Notifications --
+---------------------------
+
+create table NOTIFICATION_HOME (
+ RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+ OWNER_UID varchar(255) not null unique -- implicit index
+);
+
+create table NOTIFICATION (
+ RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+ NOTIFICATION_HOME_RESOURCE_ID integer not null references NOTIFICATION_HOME,
+ NOTIFICATION_UID varchar(255) not null,
+ XML_TYPE varchar(255) not null,
+ XML_DATA text not null,
+ MD5 char(32) not null,
+ CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+ MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+
+ unique(NOTIFICATION_UID, NOTIFICATION_HOME_RESOURCE_ID) -- implicit index
+);
+
+create index NOTIFICATION_NOTIFICATION_HOME_RESOURCE_ID on
+ NOTIFICATION(NOTIFICATION_HOME_RESOURCE_ID);
+
+-------------------
+-- Calendar Bind --
+-------------------
+
+-- Joins CALENDAR_HOME and CALENDAR
+
+create table CALENDAR_BIND (
+ CALENDAR_HOME_RESOURCE_ID integer not null references CALENDAR_HOME,
+ CALENDAR_RESOURCE_ID integer not null references CALENDAR on delete cascade,
+
+ -- An invitation which hasn't been accepted yet will not yet have a resource
+ -- name, so this field may be null.
+
+ CALENDAR_RESOURCE_NAME varchar(255),
+ BIND_MODE integer not null, -- enum CALENDAR_BIND_MODE
+ BIND_STATUS integer not null, -- enum CALENDAR_BIND_STATUS
+ SEEN_BY_OWNER boolean not null,
+ SEEN_BY_SHAREE boolean not null,
+ MESSAGE text,
+
+ primary key(CALENDAR_HOME_RESOURCE_ID, CALENDAR_RESOURCE_ID), -- implicit index
+ unique(CALENDAR_HOME_RESOURCE_ID, CALENDAR_RESOURCE_NAME) -- implicit index
+);
+
+create index CALENDAR_BIND_RESOURCE_ID on CALENDAR_BIND(CALENDAR_RESOURCE_ID);
+
+-- Enumeration of calendar bind modes
+
+create table CALENDAR_BIND_MODE (
+ ID integer primary key,
+ DESCRIPTION varchar(16) not null unique
+);
+
+insert into CALENDAR_BIND_MODE values (0, 'own' );
+insert into CALENDAR_BIND_MODE values (1, 'read' );
+insert into CALENDAR_BIND_MODE values (2, 'write');
+insert into CALENDAR_BIND_MODE values (3, 'direct');
+
+-- Enumeration of statuses
+
+create table CALENDAR_BIND_STATUS (
+ ID integer primary key,
+ DESCRIPTION varchar(16) not null unique
+);
+
+insert into CALENDAR_BIND_STATUS values (0, 'invited' );
+insert into CALENDAR_BIND_STATUS values (1, 'accepted');
+insert into CALENDAR_BIND_STATUS values (2, 'declined');
+insert into CALENDAR_BIND_STATUS values (3, 'invalid');
+
+
+---------------------
+-- Calendar Object --
+---------------------
+
+create table CALENDAR_OBJECT (
+ RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+ CALENDAR_RESOURCE_ID integer not null references CALENDAR on delete cascade,
+ RESOURCE_NAME varchar(255) not null,
+ ICALENDAR_TEXT text not null,
+ ICALENDAR_UID varchar(255) not null,
+ ICALENDAR_TYPE varchar(255) not null,
+ ATTACHMENTS_MODE integer default 0 not null, -- enum CALENDAR_OBJECT_ATTACHMENTS_MODE
+ DROPBOX_ID varchar(255),
+ ORGANIZER varchar(255),
+ ORGANIZER_OBJECT integer references CALENDAR_OBJECT,
+ RECURRANCE_MIN date, -- minimum date that recurrences have been expanded to.
+ RECURRANCE_MAX date, -- maximum date that recurrences have been expanded to.
+ ACCESS integer default 0 not null,
+ SCHEDULE_OBJECT boolean default false,
+ SCHEDULE_TAG varchar(36) default null,
+ SCHEDULE_ETAGS text default null,
+ PRIVATE_COMMENTS boolean default false not null,
+ MD5 char(32) not null,
+ CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+ MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+
+ unique(CALENDAR_RESOURCE_ID, RESOURCE_NAME) -- implicit index
+
+ -- since the 'inbox' is a 'calendar resource' for the purpose of storing
+ -- calendar objects, this constraint has to be selectively enforced by the
+ -- application layer.
+
+ -- unique(CALENDAR_RESOURCE_ID, ICALENDAR_UID)
+);
+
+create index CALENDAR_OBJECT_CALENDAR_RESOURCE_ID_AND_ICALENDAR_UID on
+ CALENDAR_OBJECT(CALENDAR_RESOURCE_ID, ICALENDAR_UID);
+
+create index CALENDAR_OBJECT_CALENDAR_RESOURCE_ID_RECURRANCE_MAX on
+ CALENDAR_OBJECT(CALENDAR_RESOURCE_ID, RECURRANCE_MAX);
+
+create index CALENDAR_OBJECT_ORGANIZER_OBJECT on
+ CALENDAR_OBJECT(ORGANIZER_OBJECT);
+
+create index CALENDAR_OBJECT_DROPBOX_ID on
+ CALENDAR_OBJECT(DROPBOX_ID);
+
+-- Enumeration of attachment modes
+
+create table CALENDAR_OBJECT_ATTACHMENTS_MODE (
+ ID integer primary key,
+ DESCRIPTION varchar(16) not null unique
+);
+
+insert into CALENDAR_OBJECT_ATTACHMENTS_MODE values (0, 'none' );
+insert into CALENDAR_OBJECT_ATTACHMENTS_MODE values (1, 'read' );
+insert into CALENDAR_OBJECT_ATTACHMENTS_MODE values (2, 'write');
+
+
+-- Enumeration of calendar access types
+
+create table CALENDAR_ACCESS_TYPE (
+ ID integer primary key,
+ DESCRIPTION varchar(32) not null unique
+);
+
+insert into CALENDAR_ACCESS_TYPE values (0, '' );
+insert into CALENDAR_ACCESS_TYPE values (1, 'public' );
+insert into CALENDAR_ACCESS_TYPE values (2, 'private' );
+insert into CALENDAR_ACCESS_TYPE values (3, 'confidential' );
+insert into CALENDAR_ACCESS_TYPE values (4, 'restricted' );
+
+-----------------
+-- Instance ID --
+-----------------
+
+create sequence INSTANCE_ID_SEQ;
+
+
+----------------
+-- Time Range --
+----------------
+
+create table TIME_RANGE (
+ INSTANCE_ID integer primary key default nextval('INSTANCE_ID_SEQ'), -- implicit index
+ CALENDAR_RESOURCE_ID integer not null references CALENDAR on delete cascade,
+ CALENDAR_OBJECT_RESOURCE_ID integer not null references CALENDAR_OBJECT on delete cascade,
+ FLOATING boolean not null,
+ START_DATE timestamp not null,
+ END_DATE timestamp not null,
+ FBTYPE integer not null,
+ TRANSPARENT boolean not null
+);
+
+create index TIME_RANGE_CALENDAR_RESOURCE_ID on
+ TIME_RANGE(CALENDAR_RESOURCE_ID);
+create index TIME_RANGE_CALENDAR_OBJECT_RESOURCE_ID on
+ TIME_RANGE(CALENDAR_OBJECT_RESOURCE_ID);
+
+
+-- Enumeration of free/busy types
+
+create table FREE_BUSY_TYPE (
+ ID integer primary key,
+ DESCRIPTION varchar(16) not null unique
+);
+
+insert into FREE_BUSY_TYPE values (0, 'unknown' );
+insert into FREE_BUSY_TYPE values (1, 'free' );
+insert into FREE_BUSY_TYPE values (2, 'busy' );
+insert into FREE_BUSY_TYPE values (3, 'busy-unavailable');
+insert into FREE_BUSY_TYPE values (4, 'busy-tentative' );
+
+
+------------------
+-- Transparency --
+------------------
+
+create table TRANSPARENCY (
+ TIME_RANGE_INSTANCE_ID integer not null references TIME_RANGE on delete cascade,
+ USER_ID varchar(255) not null,
+ TRANSPARENT boolean not null
+);
+
+create index TRANSPARENCY_TIME_RANGE_INSTANCE_ID on
+ TRANSPARENCY(TIME_RANGE_INSTANCE_ID);
+
+----------------
+-- Attachment --
+----------------
+
+create table ATTACHMENT (
+ CALENDAR_HOME_RESOURCE_ID integer not null references CALENDAR_HOME,
+ DROPBOX_ID varchar(255) not null,
+ CONTENT_TYPE varchar(255) not null,
+ SIZE integer not null,
+ MD5 char(32) not null,
+ CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+ MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+ PATH varchar(1024) not null,
+
+ primary key(DROPBOX_ID, PATH) --implicit index
+);
+
+create index ATTACHMENT_CALENDAR_HOME_RESOURCE_ID on
+ ATTACHMENT(CALENDAR_HOME_RESOURCE_ID);
+
+-----------------------
+-- Resource Property --
+-----------------------
+
+create table RESOURCE_PROPERTY (
+ RESOURCE_ID integer not null, -- foreign key: *.RESOURCE_ID
+ NAME varchar(255) not null,
+ VALUE text not null, -- FIXME: xml?
+ VIEWER_UID varchar(255),
+
+ primary key(RESOURCE_ID, NAME, VIEWER_UID) -- implicit index
+);
+
+
+----------------------
+-- AddressBook Home --
+----------------------
+
+create table ADDRESSBOOK_HOME (
+ RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+ OWNER_UID varchar(255) not null unique, -- implicit index
+ DATAVERSION integer default 0 not null
+);
+
+-------------------------------
+-- AddressBook Home Metadata --
+-------------------------------
+
+create table ADDRESSBOOK_HOME_METADATA (
+ RESOURCE_ID integer primary key references ADDRESSBOOK_HOME on delete cascade, -- implicit index
+ QUOTA_USED_BYTES integer default 0 not null,
+ CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+ MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP)
+);
+
+-----------------
+-- AddressBook --
+-----------------
+
+create table ADDRESSBOOK (
+ RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ') -- implicit index
+);
+
+
+--------------------------
+-- AddressBook Metadata --
+--------------------------
+
+create table ADDRESSBOOK_METADATA (
+ RESOURCE_ID integer primary key references ADDRESSBOOK on delete cascade, -- implicit index
+ CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+ MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP)
+);
+
+
+----------------------
+-- AddressBook Bind --
+----------------------
+
+-- Joins ADDRESSBOOK_HOME and ADDRESSBOOK
+
+create table ADDRESSBOOK_BIND (
+ ADDRESSBOOK_HOME_RESOURCE_ID integer not null references ADDRESSBOOK_HOME,
+ ADDRESSBOOK_RESOURCE_ID integer not null references ADDRESSBOOK on delete cascade,
+
+ -- An invitation which hasn't been accepted yet will not yet have a resource
+ -- name, so this field may be null.
+
+ ADDRESSBOOK_RESOURCE_NAME varchar(255),
+ BIND_MODE integer not null, -- enum CALENDAR_BIND_MODE
+ BIND_STATUS integer not null, -- enum CALENDAR_BIND_STATUS
+ SEEN_BY_OWNER boolean not null,
+ SEEN_BY_SHAREE boolean not null,
+ MESSAGE text, -- FIXME: xml?
+
+ primary key(ADDRESSBOOK_HOME_RESOURCE_ID, ADDRESSBOOK_RESOURCE_ID), -- implicit index
+ unique(ADDRESSBOOK_HOME_RESOURCE_ID, ADDRESSBOOK_RESOURCE_NAME) -- implicit index
+);
+
+create index ADDRESSBOOK_BIND_RESOURCE_ID on
+ ADDRESSBOOK_BIND(ADDRESSBOOK_RESOURCE_ID);
+
+create table ADDRESSBOOK_OBJECT (
+ RESOURCE_ID integer primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+ ADDRESSBOOK_RESOURCE_ID integer not null references ADDRESSBOOK on delete cascade,
+ RESOURCE_NAME varchar(255) not null,
+ VCARD_TEXT text not null,
+ VCARD_UID varchar(255) not null,
+ MD5 char(32) not null,
+ CREATED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+ MODIFIED timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+
+ unique(ADDRESSBOOK_RESOURCE_ID, RESOURCE_NAME), -- implicit index
+ unique(ADDRESSBOOK_RESOURCE_ID, VCARD_UID) -- implicit index
+);
+
+---------------
+-- Revisions --
+---------------
+
+create sequence REVISION_SEQ;
+
+
+---------------
+-- Revisions --
+---------------
+
+create table CALENDAR_OBJECT_REVISIONS (
+ CALENDAR_HOME_RESOURCE_ID integer not null references CALENDAR_HOME,
+ CALENDAR_RESOURCE_ID integer references CALENDAR,
+ CALENDAR_NAME varchar(255) default null,
+ RESOURCE_NAME varchar(255),
+ REVISION integer default nextval('REVISION_SEQ') not null,
+ DELETED boolean not null
+);
+
+create index CALENDAR_OBJECT_REVISIONS_HOME_RESOURCE_ID_CALENDAR_RESOURCE_ID
+ on CALENDAR_OBJECT_REVISIONS(CALENDAR_HOME_RESOURCE_ID, CALENDAR_RESOURCE_ID);
+
+create index CALENDAR_OBJECT_REVISIONS_RESOURCE_ID_RESOURCE_NAME
+ on CALENDAR_OBJECT_REVISIONS(CALENDAR_RESOURCE_ID, RESOURCE_NAME);
+
+create index CALENDAR_OBJECT_REVISIONS_RESOURCE_ID_REVISION
+ on CALENDAR_OBJECT_REVISIONS(CALENDAR_RESOURCE_ID, REVISION);
+
+-------------------------------
+-- AddressBook Object Revisions --
+-------------------------------
+
+create table ADDRESSBOOK_OBJECT_REVISIONS (
+ ADDRESSBOOK_HOME_RESOURCE_ID integer not null references ADDRESSBOOK_HOME,
+ ADDRESSBOOK_RESOURCE_ID integer references ADDRESSBOOK,
+ ADDRESSBOOK_NAME varchar(255) default null,
+ RESOURCE_NAME varchar(255),
+ REVISION integer default nextval('REVISION_SEQ') not null,
+ DELETED boolean not null
+);
+
+create index ADDRESSBOOK_OBJECT_REVISIONS_HOME_RESOURCE_ID_ADDRESSBOOK_RESOURCE_ID
+ on ADDRESSBOOK_OBJECT_REVISIONS(ADDRESSBOOK_HOME_RESOURCE_ID, ADDRESSBOOK_RESOURCE_ID);
+
+create index ADDRESSBOOK_OBJECT_REVISIONS_RESOURCE_ID_RESOURCE_NAME
+ on ADDRESSBOOK_OBJECT_REVISIONS(ADDRESSBOOK_RESOURCE_ID, RESOURCE_NAME);
+
+create index ADDRESSBOOK_OBJECT_REVISIONS_RESOURCE_ID_REVISION
+ on ADDRESSBOOK_OBJECT_REVISIONS(ADDRESSBOOK_RESOURCE_ID, REVISION);
+
+-----------------------------------
+-- Notification Object Revisions --
+-----------------------------------
+
+create table NOTIFICATION_OBJECT_REVISIONS (
+ NOTIFICATION_HOME_RESOURCE_ID integer not null references NOTIFICATION_HOME on delete cascade,
+ RESOURCE_NAME varchar(255),
+ REVISION integer default nextval('REVISION_SEQ') not null,
+ DELETED boolean not null,
+
+ unique(NOTIFICATION_HOME_RESOURCE_ID, RESOURCE_NAME) -- implicit index
+);
+
+create index NOTIFICATION_OBJECT_REVISIONS_RESOURCE_ID_REVISION
+ on NOTIFICATION_OBJECT_REVISIONS(NOTIFICATION_HOME_RESOURCE_ID, REVISION);
+
+-------------------------------------------
+-- Apple Push Notification Subscriptions --
+-------------------------------------------
+
+create table APN_SUBSCRIPTIONS (
+ TOKEN varchar(255) not null,
+ RESOURCE_KEY varchar(255) not null,
+ MODIFIED integer not null,
+ SUBSCRIBER_GUID varchar(255) not null,
+ USER_AGENT varchar(255) default null,
+ IP_ADDR varchar(255) default null,
+
+ primary key(TOKEN, RESOURCE_KEY) -- implicit index
+);
+
+create index APN_SUBSCRIPTIONS_RESOURCE_KEY
+ on APN_SUBSCRIPTIONS(RESOURCE_KEY);
+
+
+--------------------
+-- Schema Version --
+--------------------
+
+create table CALENDARSERVER (
+ NAME varchar(255) primary key, -- implicit index
+ VALUE varchar(255)
+);
+
+insert into CALENDARSERVER values ('VERSION', '12');
+insert into CALENDARSERVER values ('CALENDAR-DATAVERSION', '3');
+insert into CALENDARSERVER values ('ADDRESSBOOK-DATAVERSION', '1');
Copied: CalendarServer/trunk/txdav/common/datastore/sql_schema/upgrades/oracle-dialect/upgrade_from_12_to_13.sql (from rev 10145, CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/upgrades/oracle-dialect/upgrade_from_12_to_13.sql)
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/sql_schema/upgrades/oracle-dialect/upgrade_from_12_to_13.sql (rev 0)
+++ CalendarServer/trunk/txdav/common/datastore/sql_schema/upgrades/oracle-dialect/upgrade_from_12_to_13.sql 2012-12-10 20:22:58 UTC (rev 10147)
@@ -0,0 +1,44 @@
+----
+-- Copyright (c) 2012 Apple Inc. All rights reserved.
+--
+-- Licensed under the Apache License, Version 2.0 (the "License");
+-- you may not use this file except in compliance with the License.
+-- You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+----
+
+---------------------------------------------------
+-- Upgrade database schema from VERSION 12 to 13 --
+---------------------------------------------------
+
+-- Attachment related updates
+
+create sequence ATTACHMENT_ID_SEQ;
+
+
+alter table ATTACHMENT
+ drop primary key ("DROPBOX_ID", "PATH");
+alter table ATTACHMENT
+ modify (DROPBOX_ID null);
+alter table ATTACHMENT
+ add ("ATTACHMENT_ID" integer primary key);
+
+create table ATTACHMENT_CALENDAR_OBJECT (
+ "ATTACHMENT_ID" integer not null references ATTACHMENT on delete cascade,
+ "MANAGED_ID" nvarchar2(255),
+ "CALENDAR_OBJECT_RESOURCE_ID" integer not null references CALENDAR_OBJECT on delete cascade,
+ primary key ("ATTACHMENT_ID", "CALENDAR_OBJECT_RESOURCE_ID"),
+ unique ("MANAGED_ID", "CALENDAR_OBJECT_RESOURCE_ID")
+);
+
+
+-- Now update the version
+-- No data upgrades
+update CALENDARSERVER set VALUE = '13' where NAME = 'VERSION';
Copied: CalendarServer/trunk/txdav/common/datastore/sql_schema/upgrades/postgres-dialect/upgrade_from_12_to_13.sql (from rev 10145, CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/upgrades/postgres-dialect/upgrade_from_12_to_13.sql)
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/sql_schema/upgrades/postgres-dialect/upgrade_from_12_to_13.sql (rev 0)
+++ CalendarServer/trunk/txdav/common/datastore/sql_schema/upgrades/postgres-dialect/upgrade_from_12_to_13.sql 2012-12-10 20:22:58 UTC (rev 10147)
@@ -0,0 +1,44 @@
+----
+-- Copyright (c) 2012 Apple Inc. All rights reserved.
+--
+-- Licensed under the Apache License, Version 2.0 (the "License");
+-- you may not use this file except in compliance with the License.
+-- You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+----
+
+---------------------------------------------------
+-- Upgrade database schema from VERSION 12 to 13 --
+---------------------------------------------------
+
+
+-- Attachment related updates
+
+create sequence ATTACHMENT_ID_SEQ;
+
+
+alter table ATTACHMENT
+ drop constraint ATTACHMENT_PKEY,
+ alter column DROPBOX_ID drop not null,
+ add column ATTACHMENT_ID integer primary key default nextval('ATTACHMENT_ID_SEQ');
+
+create table ATTACHMENT_CALENDAR_OBJECT (
+ ATTACHMENT_ID integer not null references ATTACHMENT on delete cascade,
+ MANAGED_ID varchar(255) not null,
+ CALENDAR_OBJECT_RESOURCE_ID integer not null references CALENDAR_OBJECT on delete cascade,
+
+ primary key (ATTACHMENT_ID, CALENDAR_OBJECT_RESOURCE_ID), -- implicit index
+ unique (MANAGED_ID, CALENDAR_OBJECT_RESOURCE_ID) --implicit index
+);
+
+
+-- Now update the version
+-- No data upgrades
+update CALENDARSERVER set VALUE = '13' where NAME = 'VERSION';
Modified: CalendarServer/trunk/txdav/common/datastore/sql_tables.py
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/sql_tables.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/common/datastore/sql_tables.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -36,7 +36,7 @@
"""
Generate the global L{SchemaSyntax}.
"""
-
+
if version is None:
pathObj = getModule(__name__).filePath.sibling("sql_schema").child("current.sql")
else:
@@ -102,7 +102,7 @@
result = {}
for tableRole, tableDictionary in kw.items():
result.update([("%s:%s" % (tableRole, k), v)
- for k,v in tableDictionary.items()])
+ for k, v in tableDictionary.items()])
return result
@@ -114,8 +114,8 @@
"""
result = {}
result['name'] = tableSyntax.model.name
- #pkey = tableSyntax.model.primaryKey
- #if pkey is not None:
+ # pkey = tableSyntax.model.primaryKey
+ # if pkey is not None:
# default = pkey.default
# if isinstance(default, Sequence):
# result['sequence'] = default.name
@@ -147,18 +147,18 @@
schema.CALENDAR_BIND_STATUS.ID
)
-_BIND_STATUS_INVITED = _bindStatus('invited')
+_BIND_STATUS_INVITED = _bindStatus('invited')
_BIND_STATUS_ACCEPTED = _bindStatus('accepted')
_BIND_STATUS_DECLINED = _bindStatus('declined')
-_BIND_STATUS_INVALID = _bindStatus('invalid')
+_BIND_STATUS_INVALID = _bindStatus('invalid')
_attachmentsMode = _schemaConstants(
schema.CALENDAR_OBJECT_ATTACHMENTS_MODE.DESCRIPTION,
schema.CALENDAR_OBJECT_ATTACHMENTS_MODE.ID
)
-_ATTACHMENTS_MODE_NONE = _attachmentsMode('none')
-_ATTACHMENTS_MODE_READ = _attachmentsMode('read')
+_ATTACHMENTS_MODE_NONE = _attachmentsMode('none')
+_ATTACHMENTS_MODE_READ = _attachmentsMode('read')
_ATTACHMENTS_MODE_WRITE = _attachmentsMode('write')
@@ -175,20 +175,20 @@
# Compatibility tables for string formatting:
-CALENDAR_HOME_TABLE = _S(schema.CALENDAR_HOME)
-CALENDAR_HOME_METADATA_TABLE = _S(schema.CALENDAR_HOME_METADATA)
-ADDRESSBOOK_HOME_TABLE = _S(schema.ADDRESSBOOK_HOME)
-ADDRESSBOOK_HOME_METADATA_TABLE = _S(schema.ADDRESSBOOK_HOME_METADATA)
-NOTIFICATION_HOME_TABLE = _S(schema.NOTIFICATION_HOME)
-CALENDAR_TABLE = _S(schema.CALENDAR)
-ADDRESSBOOK_TABLE = _S(schema.ADDRESSBOOK)
-CALENDAR_BIND_TABLE = _S(schema.CALENDAR_BIND)
-ADDRESSBOOK_BIND_TABLE = _S(schema.ADDRESSBOOK_BIND)
-CALENDAR_OBJECT_REVISIONS_TABLE = _S(schema.CALENDAR_OBJECT_REVISIONS)
-ADDRESSBOOK_OBJECT_REVISIONS_TABLE = _S(schema.ADDRESSBOOK_OBJECT_REVISIONS)
+CALENDAR_HOME_TABLE = _S(schema.CALENDAR_HOME)
+CALENDAR_HOME_METADATA_TABLE = _S(schema.CALENDAR_HOME_METADATA)
+ADDRESSBOOK_HOME_TABLE = _S(schema.ADDRESSBOOK_HOME)
+ADDRESSBOOK_HOME_METADATA_TABLE = _S(schema.ADDRESSBOOK_HOME_METADATA)
+NOTIFICATION_HOME_TABLE = _S(schema.NOTIFICATION_HOME)
+CALENDAR_TABLE = _S(schema.CALENDAR)
+ADDRESSBOOK_TABLE = _S(schema.ADDRESSBOOK)
+CALENDAR_BIND_TABLE = _S(schema.CALENDAR_BIND)
+ADDRESSBOOK_BIND_TABLE = _S(schema.ADDRESSBOOK_BIND)
+CALENDAR_OBJECT_REVISIONS_TABLE = _S(schema.CALENDAR_OBJECT_REVISIONS)
+ADDRESSBOOK_OBJECT_REVISIONS_TABLE = _S(schema.ADDRESSBOOK_OBJECT_REVISIONS)
NOTIFICATION_OBJECT_REVISIONS_TABLE = _S(schema.NOTIFICATION_OBJECT_REVISIONS)
-CALENDAR_OBJECT_TABLE = _S(schema.CALENDAR_OBJECT)
-ADDRESSBOOK_OBJECT_TABLE = _S(schema.ADDRESSBOOK_OBJECT)
+CALENDAR_OBJECT_TABLE = _S(schema.CALENDAR_OBJECT)
+ADDRESSBOOK_OBJECT_TABLE = _S(schema.ADDRESSBOOK_OBJECT)
# Some combined tables used in join-string-formatting.
CALENDAR_AND_CALENDAR_BIND = _combine(CHILD=CALENDAR_TABLE,
@@ -322,12 +322,12 @@
elif default is False:
default = 0
out.write(" " + repr(default))
- if ( (not column.model.canBeNull())
+ if ((not column.model.canBeNull())
# Oracle treats empty strings as NULLs, so we have to accept
# NULL values in columns of a string type. Other types should
# be okay though.
and typeName not in ('varchar', 'nclob', 'char', 'nchar',
- 'nvarchar', 'nvarchar2') ):
+ 'nvarchar', 'nvarchar2')):
out.write(' not null')
if [column.model] in list(table.model.uniques()):
out.write(' unique')
@@ -387,11 +387,8 @@
if __name__ == '__main__':
import sys
if len(sys.argv) == 2:
- # Argument is the name of a old/postgres-dialect file (without the .sql suffix), e.g. "v4"
+ # Argument is the name of a old/postgres-dialect file (without the .sql suffix), e.g. "v4"
schema = _populateSchema(sys.argv[1])
else:
schema = _populateSchema()
_translateSchema(sys.stdout, schema=schema)
-
-
-
Modified: CalendarServer/trunk/txdav/common/datastore/test/util.py
===================================================================
--- CalendarServer/trunk/txdav/common/datastore/test/util.py 2012-12-10 20:22:03 UTC (rev 10146)
+++ CalendarServer/trunk/txdav/common/datastore/test/util.py 2012-12-10 20:22:58 UTC (rev 10147)
@@ -126,7 +126,8 @@
reactor.addSystemEventTrigger("before", "shutdown", cp.stopService)
cds = CommonDataStore(
cp.connection, StubNotifierFactory(),
- attachmentRoot, quota=staticQuota
+ attachmentRoot, "",
+ quota=staticQuota
)
return cds
@@ -188,7 +189,11 @@
maxConnections=5)
quota = deriveQuota(testCase)
store = CommonDataStore(
- cp.connection, notifierFactory, attachmentRoot, quota=quota
+ cp.connection,
+ notifierFactory,
+ attachmentRoot,
+ "https://example.com/calendars/__uids__/%(home)s/attachments/%(name)s",
+ quota=quota
)
store.label = currentTestID
cp.startService()
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.macosforge.org/pipermail/calendarserver-changes/attachments/20121210/80918c2c/attachment-0001.html>
More information about the calendarserver-changes
mailing list