[CalendarServer-changes] [9998] CalendarServer/branches/users/cdaboo/managed-attachments

source_changes at macosforge.org source_changes at macosforge.org
Wed Oct 31 13:12:46 PDT 2012


Revision: 9998
          http://trac.calendarserver.org//changeset/9998
Author:   cdaboo at apple.com
Date:     2012-10-31 13:12:45 -0700 (Wed, 31 Oct 2012)
Log Message:
-----------
Initial dump of managed attachments support - attachment-add only.

Modified Paths:
--------------
    CalendarServer/branches/users/cdaboo/managed-attachments/twext/web2/http_headers.py
    CalendarServer/branches/users/cdaboo/managed-attachments/twext/web2/test/test_http_headers.py
    CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/caldavxml.py
    CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/ical.py
    CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/method/post.py
    CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/resource.py
    CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/stdconfig.py
    CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/storebridge.py
    CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/datastore/sql.py
    CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/datastore/util.py
    CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/icalendarstore.py
    CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/current.sql
    CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_tables.py

Added Paths:
-----------
    CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/oracle-dialect/v12.sql
    CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/postgres-dialect/v12.sql
    CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/upgrades/postgres-dialect/upgrade_from_12_to_13.sql

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/twext/web2/http_headers.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/twext/web2/http_headers.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/twext/web2/http_headers.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -1,5 +1,5 @@
 # -*- test-case-name: twext.web2.test.test_http_headers -*-
-##
+# #
 # Copyright (c) 2008 Twisted Matrix Laboratories.
 # Copyright (c) 2010-2012 Apple Computer, Inc. All rights reserved.
 #
@@ -21,7 +21,7 @@
 # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 # SOFTWARE.
 #
-##
+# #
 
 """
 HTTP header representation, parsing, and serialization.
@@ -33,8 +33,8 @@
 import re
 
 def dashCapitalize(s):
-    ''' Capitalize a string, making sure to treat - as a word seperator '''
-    return '-'.join([ x.capitalize() for x in s.split('-')])
+    ''' Capitalize a string, making sure to treat - as a word separator '''
+    return '-'.join([x.capitalize() for x in s.split('-')])
 
 # datetime parsing and formatting
 weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
@@ -50,13 +50,16 @@
 
 def casemappingify(d):
     global header_case_mapping
-    newd = dict([(key.lower(),key) for key in d.keys()])
+    newd = dict([(key.lower(), key) for key in d.keys()])
     header_case_mapping.update(newd)
 
+
+
 def lowerify(d):
-    return dict([(key.lower(),value) for key,value in d.items()])
+    return dict([(key.lower(), value) for key, value in d.items()])
 
 
+
 class HeaderHandler(object):
     """HeaderHandler manages header generating and parsing functions.
     """
@@ -77,6 +80,7 @@
         if generators:
             self.HTTPGenerators.update(generators)
 
+
     def parse(self, name, header):
         """
         Parse the given header based on its given name.
@@ -101,10 +105,11 @@
                 # if isinstance(h, types.GeneratorType):
                 #     h=list(h)
         except ValueError:
-            header=None
+            header = None
 
         return header
 
+
     def generate(self, name, header):
         """
         Generate the given header based on its given name.
@@ -126,9 +131,10 @@
         for g in generator:
             header = g(header)
 
-        #self._raw_headers[name] = h
+        # self._raw_headers[name] = h
         return header
 
+
     def updateParsers(self, parsers):
         """Update en masse the parser maps.
 
@@ -138,6 +144,7 @@
         casemappingify(parsers)
         self.HTTPParsers.update(lowerify(parsers))
 
+
     def addParser(self, name, value):
         """Add an individual parser chain for the given header.
 
@@ -149,6 +156,7 @@
         """
         self.updateParsers({name: value})
 
+
     def updateGenerators(self, generators):
         """Update en masse the generator maps.
 
@@ -158,6 +166,7 @@
         casemappingify(generators)
         self.HTTPGenerators.update(lowerify(generators))
 
+
     def addGenerators(self, name, value):
         """Add an individual generator chain for the given header.
 
@@ -169,6 +178,7 @@
         """
         self.updateGenerators({name: value})
 
+
     def update(self, parsers, generators):
         """Conveniently update parsers and generators all at once.
         """
@@ -179,7 +189,7 @@
 DefaultHTTPHandler = HeaderHandler()
 
 
-## HTTP DateTime parser
+# # HTTP DateTime parser
 def parseDateTime(dateString):
     """Convert an HTTP date string (one of three formats) to seconds since epoch."""
     parts = dateString.split()
@@ -187,7 +197,7 @@
     if not parts[0][0:3].lower() in weekdayname_lower:
         # Weekday is stupid. Might have been omitted.
         try:
-            return parseDateTime("Sun, "+dateString)
+            return parseDateTime("Sun, " + dateString)
         except ValueError:
             # Guess not.
             pass
@@ -209,7 +219,7 @@
         # Two digit year, yucko.
         day, month, year = parts[1].split('-')
         time = parts[2]
-        year=int(year)
+        year = int(year)
         if year < 69:
             year = year + 2000
         elif year < 100:
@@ -231,9 +241,10 @@
     return int(timegm((year, month, day, hour, min, sec)))
 
 
+
 ##### HTTP tokenizer
 class Token(str):
-    __slots__=[]
+    __slots__ = []
     tokens = {}
     def __new__(self, char):
         token = Token.tokens.get(char)
@@ -241,6 +252,7 @@
             Token.tokens[char] = token = str.__new__(self, char)
         return token
 
+
     def __repr__(self):
         return "Token(%s)" % str.__repr__(self)
 
@@ -265,8 +277,8 @@
     Takes a raw header value (list of strings), and
     Returns a generator of strings and Token class instances.
     """
-    tokens=http_tokens
-    ctls=http_ctls
+    tokens = http_tokens
+    ctls = http_ctls
 
     string = ",".join(header)
     start = 0
@@ -280,15 +292,15 @@
         if quoted:
             if qpair:
                 qpair = False
-                qstring = qstring+string[start:cur-1]+x
-                start = cur+1
+                qstring = qstring + string[start:cur - 1] + x
+                start = cur + 1
             elif x == '\\':
                 qpair = True
             elif x == '"':
                 quoted = False
-                yield qstring+string[start:cur]
-                qstring=None
-                start = cur+1
+                yield qstring + string[start:cur]
+                qstring = None
+                start = cur + 1
         elif x in tokens:
             if start != cur:
                 if foldCase:
@@ -296,7 +308,7 @@
                 else:
                     yield string[start:cur]
 
-            start = cur+1
+            start = cur + 1
             if x == '"':
                 quoted = True
                 qstring = ""
@@ -315,12 +327,12 @@
                 inSpaces = False
 
             inSpaces = False
-        cur = cur+1
+        cur = cur + 1
 
     if qpair:
-        raise ValueError, "Missing character after '\\'"
+        raise ValueError("Missing character after '\\'")
     if quoted:
-        raise ValueError, "Missing end quote"
+        raise ValueError("Missing end quote")
 
     if start != cur:
         if foldCase:
@@ -328,6 +340,8 @@
         else:
             yield string[start:cur]
 
+
+
 def split(seq, delim):
     """The same as str.split but works on arbitrary sequences.
     Too bad it's not builtin to python!"""
@@ -350,6 +364,7 @@
 #         return -1
 
 
+
 def filterTokens(seq):
     """Filter out instances of Token, leaving only a list of strings.
 
@@ -361,31 +376,39 @@
     hurt anything, in any case.
     """
 
-    l=[]
+    l = []
     for x in seq:
         if not isinstance(x, Token):
             l.append(x)
     return l
 
+
+
 ##### parser utilities:
 def checkSingleToken(tokens):
     if len(tokens) != 1:
-        raise ValueError, "Expected single token, not %s." % (tokens,)
+        raise ValueError("Expected single token, not %s." % (tokens,))
     return tokens[0]
 
+
+
 def parseKeyValue(val):
     if len(val) == 1:
-        return val[0],None
+        return val[0], None
     elif len(val) == 3 and val[1] == Token('='):
-        return val[0],val[2]
-    raise ValueError, "Expected key or key=value, but got %s." % (val,)
+        return val[0], val[2]
+    raise ValueError("Expected key or key=value, but got %s." % (val,))
 
+
+
 def parseArgs(field):
-    args=split(field, Token(';'))
+    args = split(field, Token(';'))
     val = args.next()
     args = [parseKeyValue(arg) for arg in args]
-    return val,args
+    return val, args
 
+
+
 def listParser(fun):
     """Return a function which applies 'fun' to every element in the
     comma-separated list"""
@@ -397,11 +420,15 @@
 
     return listParserHelper
 
+
+
 def last(seq):
     """Return seq[-1]"""
 
     return seq[-1]
 
+
+
 ##### Generation utilities
 def quoteString(s):
     """
@@ -413,6 +440,8 @@
     """
     return '"%s"' % s.replace('\\', '\\\\').replace('"', '\\"')
 
+
+
 def listGenerator(fun):
     """Return a function which applies 'fun' to every element in
     the given list, then joins the result with generateList"""
@@ -421,9 +450,13 @@
 
     return listGeneratorHelper
 
+
+
 def generateList(seq):
     return ", ".join(seq)
 
+
+
 def singleHeader(item):
     return [item]
 
@@ -456,8 +489,10 @@
     return ";".join(l)
 
 
+
 class MimeType(object):
-    def fromString(klass, mimeTypeString):
+
+    def fromString(cls, mimeTypeString):
         """Generate a MimeType object from the given string.
 
         @param mimeTypeString: The mimetype to parse
@@ -483,27 +518,82 @@
         if kwargs:
             self.params.update(kwargs)
 
+
     def __eq__(self, other):
-        if not isinstance(other, MimeType): return NotImplemented
+        if not isinstance(other, MimeType):
+            return NotImplemented
         return (self.mediaType == other.mediaType and
                 self.mediaSubtype == other.mediaSubtype and
                 self.params == other.params)
 
+
     def __ne__(self, other):
         return not self.__eq__(other)
 
+
     def __repr__(self):
         return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params)
 
+
     def __hash__(self):
-        return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.iteritems()))
+        return hash(self.mediaType) ^ hash(self.mediaSubtype) ^ hash(tuple(self.params.iteritems()))
 
+
+
+class MimeDisposition(object):
+
+    def fromString(cls, dispositionString):
+        """Generate a MimeDisposition object from the given string.
+
+        @param dispositionString: The disposition to parse
+
+        @return: L{MimeDisposition}
+        """
+        return DefaultHTTPHandler.parse('content-disposition', [dispositionString])
+
+    fromString = classmethod(fromString)
+
+    def __init__(self, dispositionType, params={}, **kwargs):
+        """
+        @param dispositionType: the disposition type value
+        @type dispositionType: C{str}
+
+        @param params: optional disposition parameters
+        @type params: C{dict}
+        """
+        self.dispositionType = dispositionType
+        self.params = dict(params)
+
+        if kwargs:
+            self.params.update(kwargs)
+
+
+    def __eq__(self, other):
+        if not isinstance(other, MimeDisposition):
+            return NotImplemented
+        return (self.dispositionType == other.dispositionType and
+                self.params == other.params)
+
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+    def __repr__(self):
+        return "MimeDisposition(%r, %r)" % (self.dispositionType, self.params)
+
+
+    def __hash__(self):
+        return hash(self.dispositionType) ^ hash(tuple(self.params.iteritems()))
+
+
+
 ##### Specific header parsers.
 def parseAccept(field):
-    type,args = parseArgs(field)
+    atype, args = parseArgs(field)
 
-    if len(type) != 3 or type[1] != Token('/'):
-        raise ValueError, "MIME Type "+str(type)+" invalid."
+    if len(atype) != 3 or atype[1] != Token('/'):
+        raise ValueError("MIME Type " + str(atype) + " invalid.")
 
     # okay, this spec is screwy. A 'q' parameter is used as the separator
     # between MIME parameters and (as yet undefined) additional HTTP
@@ -512,44 +602,50 @@
     num = 0
     for arg in args:
         if arg[0] == 'q':
-            mimeparams=tuple(args[0:num])
-            params=args[num:]
+            mimeparams = tuple(args[0:num])
+            params = args[num:]
             break
         num = num + 1
     else:
-        mimeparams=tuple(args)
-        params=[]
+        mimeparams = tuple(args)
+        params = []
 
     # Default values for parameters:
     qval = 1.0
 
     # Parse accept parameters:
     for param in params:
-        if param[0] =='q':
+        if param[0] == 'q':
             qval = float(param[1])
         else:
             # Warn? ignored parameter.
             pass
 
-    ret = MimeType(type[0],type[2],mimeparams),qval
+    ret = MimeType(atype[0], atype[2], mimeparams), qval
     return ret
 
+
+
 def parseAcceptQvalue(field):
-    type,args=parseArgs(field)
+    atype, args = parseArgs(field)
 
-    type = checkSingleToken(type)
+    atype = checkSingleToken(atype)
 
     qvalue = 1.0 # Default qvalue is 1
     for arg in args:
         if arg[0] == 'q':
             qvalue = float(arg[1])
-    return type,qvalue
+    return atype, qvalue
 
+
+
 def addDefaultCharset(charsets):
     if charsets.get('*') is None and charsets.get('iso-8859-1') is None:
         charsets['iso-8859-1'] = 1.0
     return charsets
 
+
+
 def addDefaultEncoding(encodings):
     if encodings.get('*') is None and encodings.get('identity') is None:
         # RFC doesn't specify a default value for identity, only that it
@@ -558,26 +654,46 @@
     return encodings
 
 
+
 def parseContentType(header):
     # Case folding is disabled for this header, because of use of
     # Content-Type: multipart/form-data; boundary=CaSeFuLsTuFf
-    # So, we need to explicitly .lower() the type/subtype and arg keys.
+    # So, we need to explicitly .lower() the ctype and arg keys.
 
-    type,args = parseArgs(header)
+    ctype, args = parseArgs(header)
 
-    if len(type) != 3 or type[1] != Token('/'):
-        raise ValueError, "MIME Type "+str(type)+" invalid."
+    if len(ctype) != 3 or ctype[1] != Token('/'):
+        raise ValueError("MIME Type " + str(ctype) + " invalid.")
 
     args = [(kv[0].lower(), kv[1]) for kv in args]
 
-    return MimeType(type[0].lower(), type[2].lower(), tuple(args))
+    return MimeType(ctype[0].lower(), ctype[2].lower(), tuple(args))
 
+
+
+def parseContentDisposition(header):
+    # Case folding is disabled for this header, because parameter values may
+    # be case-sensitive, so we need to explicitly .lower() the dtype and arg keys.
+
+    dtype, args = parseArgs(header)
+
+    if len(dtype) != 1:
+        raise ValueError("Content-Disposition " + str(dtype) + " invalid.")
+
+    args = [(kv[0].lower(), kv[1]) for kv in args]
+
+    return MimeDisposition(dtype[0].lower(), tuple(args))
+
+
+
 def parseContentMD5(header):
     try:
         return base64.decodestring(header)
-    except Exception,e:
+    except Exception, e:
         raise ValueError(e)
 
+
+
 def parseContentRange(header):
     """Parse a content-range header into (kind, start, end, realLength).
 
@@ -589,7 +705,7 @@
         raise ValueError("a range of type %r is not supported")
     startend, realLength = other.split("/")
     if startend.strip() == '*':
-        start,end=None,None
+        start, end = None, None
     else:
         start, end = map(int, startend.split("-"))
     if realLength == "*":
@@ -598,12 +714,16 @@
         realLength = int(realLength)
     return (kind, start, end, realLength)
 
+
+
 def parseExpect(field):
-    type,args=parseArgs(field)
+    etype, args = parseArgs(field)
 
-    type=parseKeyValue(type)
-    return (type[0], (lambda *args:args)(type[1], *args))
+    etype = parseKeyValue(etype)
+    return (etype[0], (lambda *args: args)(etype[1], *args))
 
+
+
 def parseExpires(header):
     # """HTTP/1.1 clients and caches MUST treat other invalid date formats,
     #    especially including the value 0, as in the past (i.e., "already expired")."""
@@ -613,6 +733,8 @@
     except ValueError:
         return 0
 
+
+
 def parseIfModifiedSince(header):
     # Ancient versions of netscape and *current* versions of MSIE send
     #   If-Modified-Since: Thu, 05 Aug 2004 12:57:27 GMT; length=123
@@ -622,30 +744,34 @@
     # So, we'll just strip off everything after a ';'.
     return parseDateTime(header.split(';', 1)[0])
 
+
+
 def parseIfRange(headers):
     try:
         return ETag.parse(tokenize(headers))
     except ValueError:
         return parseDateTime(last(headers))
 
-def parseRange(range):
-    range = list(range)
-    if len(range) < 3 or range[1] != Token('='):
-        raise ValueError("Invalid range header format: %s" %(range,))
 
-    type=range[0]
-    if type != 'bytes':
-        raise ValueError("Unknown range unit: %s." % (type,))
-    rangeset=split(range[2:], Token(','))
+
+def parseRange(crange):
+    crange = list(crange)
+    if len(crange) < 3 or crange[1] != Token('='):
+        raise ValueError("Invalid range header format: %s" % (crange,))
+
+    rtype = crange[0]
+    if rtype != 'bytes':
+        raise ValueError("Unknown range unit: %s." % (rtype,))
+    rangeset = split(crange[2:], Token(','))
     ranges = []
 
     for byterangespec in rangeset:
         if len(byterangespec) != 1:
-            raise ValueError("Invalid range header format: %s" % (range,))
-        start,end=byterangespec[0].split('-')
+            raise ValueError("Invalid range header format: %s" % (crange,))
+        start, end = byterangespec[0].split('-')
 
         if not start and not end:
-            raise ValueError("Invalid range header format: %s" % (range,))
+            raise ValueError("Invalid range header format: %s" % (crange,))
 
         if start:
             start = int(start)
@@ -658,10 +784,12 @@
             end = None
 
         if start and end and start > end:
-            raise ValueError("Invalid range header, start > end: %s" % (range,))
-        ranges.append((start,end))
-    return type,ranges
+            raise ValueError("Invalid range header, start > end: %s" % (crange,))
+        ranges.append((start, end))
+    return rtype, ranges
 
+
+
 def parseRetryAfter(header):
     try:
         # delta seconds
@@ -672,6 +800,8 @@
 
 # WWW-Authenticate and Authorization
 
+
+
 def parseWWWAuthenticate(tokenized):
     headers = []
 
@@ -715,36 +845,46 @@
 
     return headers
 
+
+
 def parseAuthorization(header):
     scheme, rest = header.split(' ', 1)
     # this header isn't tokenized because it may eat characters
     # in the unquoted base64 encoded credentials
     return scheme.lower(), rest
 
+
+
 #### Header generators
 def generateAccept(accept):
-    mimeType,q = accept
+    mimeType, q = accept
 
-    out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
+    out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype)
     if mimeType.params:
-        out+=';'+generateKeyValues(mimeType.params.iteritems())
+        out += ';' + generateKeyValues(mimeType.params.iteritems())
 
     if q != 1.0:
-        out+=(';q=%.3f' % (q,)).rstrip('0').rstrip('.')
+        out += (';q=%.3f' % (q,)).rstrip('0').rstrip('.')
 
     return out
 
+
+
 def removeDefaultEncoding(seq):
     for item in seq:
         if item[0] != 'identity' or item[1] != .0001:
             yield item
 
+
+
 def generateAcceptQvalue(keyvalue):
     if keyvalue[1] == 1.0:
         return "%s" % keyvalue[0:1]
     else:
         return ("%s;q=%.3f" % keyvalue).rstrip('0').rstrip('.')
 
+
+
 def parseCacheControl(kv):
     k, v = parseKeyValue(kv)
     if k == 'max-age' or k == 'min-fresh' or k == 's-maxage':
@@ -763,6 +903,8 @@
             v = [field.strip().lower() for field in v.split(',')]
     return k, v
 
+
+
 def generateCacheControl((k, v)):
     if v is None:
         return str(k)
@@ -771,67 +913,89 @@
             # quoted list of values
             v = quoteString(generateList(
                 [header_case_mapping.get(name) or dashCapitalize(name) for name in v]))
-        return '%s=%s' % (k,v)
+        return '%s=%s' % (k, v)
 
+
+
 def generateContentRange(tup):
-    """tup is (type, start, end, len)
-    len can be None.
+    """tup is (rtype, start, end, rlen)
+    rlen can be None.
     """
-    type, start, end, len = tup
-    if len == None:
-        len = '*'
+    rtype, start, end, rlen = tup
+    if rlen == None:
+        rlen = '*'
     else:
-        len = int(len)
+        rlen = int(rlen)
     if start == None and end == None:
         startend = '*'
     else:
         startend = '%d-%d' % (start, end)
 
-    return '%s %s/%s' % (type, startend, len)
+    return '%s %s/%s' % (rtype, startend, rlen)
 
+
+
 def generateDateTime(secSinceEpoch):
     """Convert seconds since epoch to HTTP datetime string."""
-    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(secSinceEpoch)
+    year, month, day, hh, mm, ss, wd, _ignore_y, _ignore_z = time.gmtime(secSinceEpoch)
     s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
         weekdayname[wd],
         day, monthname[month], year,
         hh, mm, ss)
     return s
 
+
+
 def generateExpect(item):
     if item[1][0] is None:
         out = '%s' % (item[0],)
     else:
         out = '%s=%s' % (item[0], item[1][0])
     if len(item[1]) > 1:
-        out += ';'+generateKeyValues(item[1][1:])
+        out += ';' + generateKeyValues(item[1][1:])
     return out
 
-def generateRange(range):
+
+
+def generateRange(crange):
     def noneOr(s):
         if s is None:
             return ''
         return s
 
-    type,ranges=range
+    rtype, ranges = crange
 
-    if type != 'bytes':
-        raise ValueError("Unknown range unit: "+type+".")
+    if rtype != 'bytes':
+        raise ValueError("Unknown range unit: " + rtype + ".")
 
-    return (type+'='+
+    return (rtype + '=' +
             ','.join(['%s-%s' % (noneOr(startend[0]), noneOr(startend[1]))
                       for startend in ranges]))
 
+
+
 def generateRetryAfter(when):
     # always generate delta seconds format
     return str(int(when - time.time()))
 
+
+
 def generateContentType(mimeType):
-    out="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
+    out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype)
     if mimeType.params:
-        out+=';'+generateKeyValues(mimeType.params.iteritems())
+        out += ';' + generateKeyValues(mimeType.params.iteritems())
     return out
 
+
+
+def generateContentDisposition(disposition):
+    out = disposition.dispositionType
+    if disposition.params:
+        out += ';' + generateKeyValues(disposition.params.iteritems())
+    return out
+
+
+
 def generateIfRange(dateOrETag):
     if isinstance(dateOrETag, ETag):
         return dateOrETag.generate()
@@ -840,6 +1004,8 @@
 
 # WWW-Authenticate and Authorization
 
+
+
 def generateWWWAuthenticate(headers):
     _generated = []
     for seq in headers:
@@ -850,7 +1016,7 @@
 
         try:
             l = []
-            for k,v in dict(challenge).iteritems():
+            for k, v in dict(challenge).iteritems():
                 l.append("%s=%s" % (k, quoteString(v)))
 
             _generated.append("%s %s" % (scheme, ", ".join(l)))
@@ -859,16 +1025,20 @@
 
     return _generated
 
+
+
 def generateAuthorization(seq):
     return [' '.join(seq)]
 
 
+
 ####
 class ETag(object):
     def __init__(self, tag, weak=False):
         self.tag = str(tag)
         self.weak = weak
 
+
     def match(self, other, strongCompare):
         # Sec 13.3.
         # The strong comparison function: in order to be considered equal, both
@@ -885,17 +1055,21 @@
             return False
         return True
 
+
     def __eq__(self, other):
         return isinstance(other, ETag) and other.tag == self.tag and other.weak == self.weak
 
+
     def __ne__(self, other):
         return not self.__eq__(other)
 
+
     def __repr__(self):
         return "Etag(%r, weak=%r)" % (self.tag, self.weak)
 
+
     def parse(tokens):
-        tokens=tuple(tokens)
+        tokens = tuple(tokens)
         if len(tokens) == 1 and not isinstance(tokens[0], Token):
             return ETag(tokens[0])
 
@@ -905,57 +1079,73 @@
 
         raise ValueError("Invalid ETag.")
 
-    parse=staticmethod(parse)
+    parse = staticmethod(parse)
 
     def generate(self):
         if self.weak:
-            return 'W/'+quoteString(self.tag)
+            return 'W/' + quoteString(self.tag)
         else:
             return quoteString(self.tag)
 
+
+
 def parseStarOrETag(tokens):
-    tokens=tuple(tokens)
+    tokens = tuple(tokens)
     if tokens == ('*',):
         return '*'
     else:
         return ETag.parse(tokens)
 
+
+
 def generateStarOrETag(etag):
-    if etag=='*':
+    if etag == '*':
         return etag
     else:
         return etag.generate()
 
+
+
 #### Cookies. Blech!
 class Cookie(object):
     # __slots__ = ['name', 'value', 'path', 'domain', 'ports', 'expires', 'discard', 'secure', 'comment', 'commenturl', 'version']
 
     def __init__(self, name, value, path=None, domain=None, ports=None, expires=None, discard=False, secure=False, comment=None, commenturl=None, version=0):
-        self.name=name
-        self.value=value
-        self.path=path
-        self.domain=domain
-        self.ports=ports
-        self.expires=expires
-        self.discard=discard
-        self.secure=secure
-        self.comment=comment
-        self.commenturl=commenturl
-        self.version=version
+        self.name = name
+        self.value = value
+        self.path = path
+        self.domain = domain
+        self.ports = ports
+        self.expires = expires
+        self.discard = discard
+        self.secure = secure
+        self.comment = comment
+        self.commenturl = commenturl
+        self.version = version
 
+
     def __repr__(self):
-        s="Cookie(%r=%r" % (self.name, self.value)
-        if self.path is not None: s+=", path=%r" % (self.path,)
-        if self.domain is not None: s+=", domain=%r" % (self.domain,)
-        if self.ports is not None: s+=", ports=%r" % (self.ports,)
-        if self.expires is not None: s+=", expires=%r" % (self.expires,)
-        if self.secure is not False: s+=", secure=%r" % (self.secure,)
-        if self.comment is not None: s+=", comment=%r" % (self.comment,)
-        if self.commenturl is not None: s+=", commenturl=%r" % (self.commenturl,)
-        if self.version != 0: s+=", version=%r" % (self.version,)
-        s+=")"
+        s = "Cookie(%r=%r" % (self.name, self.value)
+        if self.path is not None:
+            s += ", path=%r" % (self.path,)
+        if self.domain is not None:
+            s += ", domain=%r" % (self.domain,)
+        if self.ports is not None:
+            s += ", ports=%r" % (self.ports,)
+        if self.expires is not None:
+            s += ", expires=%r" % (self.expires,)
+        if self.secure is not False:
+            s += ", secure=%r" % (self.secure,)
+        if self.comment is not None:
+            s += ", comment=%r" % (self.comment,)
+        if self.commenturl is not None:
+            s += ", commenturl=%r" % (self.commenturl,)
+        if self.version != 0:
+            s += ", version=%r" % (self.version,)
+        s += ")"
         return s
 
+
     def __eq__(self, other):
         return (isinstance(other, Cookie) and
                 other.path == self.path and
@@ -967,10 +1157,12 @@
                 other.commenturl == self.commenturl and
                 other.version == self.version)
 
+
     def __ne__(self, other):
         return not self.__eq__(other)
 
 
+
 def parseCookie(headers):
     """Bleargh, the cookie spec sucks.
     This surely needs interoperability testing.
@@ -987,7 +1179,7 @@
     header = ';'.join(headers)
     if header[0:8].lower() == "$version":
         # RFC2965 cookie
-        h=tokenize([header], foldCase=False)
+        h = tokenize([header], foldCase=False)
         r_cookies = split(h, Token(','))
         for r_cookie in r_cookies:
             last_cookie = None
@@ -1000,20 +1192,20 @@
                     (name,), = nameval
                     value = None
 
-                name=name.lower()
+                name = name.lower()
                 if name == '$version':
                     continue
                 if name[0] == '$':
                     if last_cookie is not None:
                         if name == '$path':
-                            last_cookie.path=value
+                            last_cookie.path = value
                         elif name == '$domain':
-                            last_cookie.domain=value
+                            last_cookie.domain = value
                         elif name == '$port':
                             if value is None:
                                 last_cookie.ports = ()
                             else:
-                                last_cookie.ports=tuple([int(s) for s in value.split(',')])
+                                last_cookie.ports = tuple([int(s) for s in value.split(',')])
                 else:
                     last_cookie = Cookie(name, value, version=1)
                     cookies.append(last_cookie)
@@ -1024,19 +1216,21 @@
         # however.
         r_cookies = header.split(';')
         for r_cookie in r_cookies:
-            name,value = r_cookie.split('=', 1)
-            name=name.strip(' \t')
-            value=value.strip(' \t')
+            name, value = r_cookie.split('=', 1)
+            name = name.strip(' \t')
+            value = value.strip(' \t')
 
             cookies.append(Cookie(name, value))
 
     return cookies
 
-cookie_validname = "[^"+re.escape(http_tokens+http_ctls)+"]*$"
+cookie_validname = "[^" + re.escape(http_tokens + http_ctls) + "]*$"
 cookie_validname_re = re.compile(cookie_validname)
-cookie_validvalue = cookie_validname+'|"([^"]|\\\\")*"$'
+cookie_validvalue = cookie_validname + '|"([^"]|\\\\")*"$'
 cookie_validvalue_re = re.compile(cookie_validvalue)
 
+
+
 def generateCookie(cookies):
     # There's a fundamental problem with the two cookie specifications.
     # They both use the "Cookie" header, and the RFC Cookie header only allows
@@ -1094,7 +1288,7 @@
                 if cookie_validname_re.match(cookie.name) is None:
                     continue
 
-                value=cookie.value
+                value = cookie.value
                 if cookie_validvalue_re.match(cookie.value) is None:
                     value = quoteString(value)
 
@@ -1114,6 +1308,8 @@
                     str_cookies.append("$Port=%s" % quoteString(",".join([str(x) for x in cookie.ports])))
         return ';'.join(str_cookies)
 
+
+
 def parseSetCookie(headers):
     setCookies = []
     for header in headers:
@@ -1122,15 +1318,15 @@
             l = []
 
             for part in parts:
-                namevalue = part.split('=',1)
+                namevalue = part.split('=', 1)
                 if len(namevalue) == 1:
-                    name=namevalue[0]
-                    value=None
+                    name = namevalue[0]
+                    value = None
                 else:
-                    name,value=namevalue
-                    value=value.strip(' \t')
+                    name, value = namevalue
+                    value = value.strip(' \t')
 
-                name=name.strip(' \t')
+                name = name.strip(' \t')
 
                 l.append((name, value))
 
@@ -1141,6 +1337,8 @@
             pass
     return setCookies
 
+
+
 def parseSetCookie2(toks):
     outCookies = []
     for cookie in [[parseKeyValue(x) for x in split(y, Token(';'))]
@@ -1152,6 +1350,8 @@
             pass
     return outCookies
 
+
+
 def makeCookieFromList(tup, netscapeFormat):
     name, value = tup[0]
     if name is None or value is None:
@@ -1161,7 +1361,7 @@
     cookie = Cookie(name, value)
     hadMaxAge = False
 
-    for name,value in tup[1:]:
+    for name, value in tup[1:]:
         name = name.lower()
 
         if value is None:
@@ -1195,6 +1395,7 @@
     return cookie
 
 
+
 def generateSetCookie(cookies):
     setCookies = []
     for cookie in cookies:
@@ -1211,6 +1412,8 @@
         setCookies.append('; '.join(out))
     return setCookies
 
+
+
 def generateSetCookie2(cookies):
     setCookies = []
     for cookie in cookies:
@@ -1238,11 +1441,15 @@
         setCookies.append('; '.join(out))
     return setCookies
 
+
+
 def parseDepth(depth):
     if depth not in ("0", "1", "infinity"):
         raise ValueError("Invalid depth header value: %s" % (depth,))
     return depth
 
+
+
 def parseOverWrite(overwrite):
     if overwrite == "F":
         return False
@@ -1250,12 +1457,16 @@
         return True
     raise ValueError("Invalid overwrite header value: %s" % (overwrite,))
 
+
+
 def generateOverWrite(overwrite):
     if overwrite:
         return "T"
     else:
         return "F"
 
+
+
 def parseBrief(brief):
     # We accept upper or lower case
     if brief.upper() == "F":
@@ -1264,6 +1475,8 @@
         return True
     raise ValueError("Invalid brief header value: %s" % (brief,))
 
+
+
 def generateBrief(brief):
     # MS definition uses lower case
     return "t" if brief else "f"
@@ -1308,6 +1521,8 @@
 
 #     return accepts.get('*')
 
+
+
 # Headers object
 class __RecalcNeeded(object):
     def __repr__(self):
@@ -1315,6 +1530,7 @@
 
 _RecalcNeeded = __RecalcNeeded()
 
+
 class Headers(object):
     """
     This class stores the HTTP headers as both a parsed representation
@@ -1333,10 +1549,12 @@
             for key, value in rawHeaders.iteritems():
                 self.setRawHeaders(key, value)
 
+
     def _setRawHeaders(self, headers):
         self._raw_headers = headers
         self._headers = {}
 
+
     def _toParsed(self, name):
         r = self._raw_headers.get(name, None)
         h = self.handler.parse(name, r)
@@ -1344,6 +1562,7 @@
             self._headers[name] = h
         return h
 
+
     def _toRaw(self, name):
         h = self._headers.get(name, None)
         r = self.handler.generate(name, h)
@@ -1351,21 +1570,24 @@
             self._raw_headers[name] = r
         return r
 
+
     def hasHeader(self, name):
         """Does a header with the given name exist?"""
-        name=name.lower()
-        return self._raw_headers.has_key(name)
+        name = name.lower()
+        return name in self._raw_headers
 
+
     def getRawHeaders(self, name, default=None):
         """Returns a list of headers matching the given name as the raw string given."""
 
-        name=name.lower()
+        name = name.lower()
         raw_header = self._raw_headers.get(name, default)
         if raw_header is not _RecalcNeeded:
             return raw_header
 
         return self._toRaw(name)
 
+
     def getHeader(self, name, default=None):
         """Ret9urns the parsed representation of the given header.
         The exact form of the return value depends on the header in question.
@@ -1374,37 +1596,40 @@
 
         If the header doesn't exist, return default (or None if not specified)
         """
-        name=name.lower()
+        name = name.lower()
         parsed = self._headers.get(name, default)
         if parsed is not _RecalcNeeded:
             return parsed
         return self._toParsed(name)
 
+
     def setRawHeaders(self, name, value):
         """Sets the raw representation of the given header.
         Value should be a list of strings, each being one header of the
         given name.
         """
-        name=name.lower()
+        name = name.lower()
         self._raw_headers[name] = value
         self._headers[name] = _RecalcNeeded
 
+
     def setHeader(self, name, value):
         """Sets the parsed representation of the given header.
         Value should be a list of objects whose exact form depends
         on the header in question.
         """
-        name=name.lower()
+        name = name.lower()
         self._raw_headers[name] = _RecalcNeeded
         self._headers[name] = value
 
+
     def addRawHeader(self, name, value):
         """
         Add a raw value to a header that may or may not already exist.
         If it exists, add it as a separate header to output; do not
         replace anything.
         """
-        name=name.lower()
+        name = name.lower()
         raw_header = self._raw_headers.get(name)
         if raw_header is None:
             # No header yet
@@ -1416,36 +1641,42 @@
         raw_header.append(value)
         self._headers[name] = _RecalcNeeded
 
+
     def removeHeader(self, name):
         """Removes the header named."""
 
-        name=name.lower()
-        if self._raw_headers.has_key(name):
+        name = name.lower()
+        if name in self._raw_headers:
             del self._raw_headers[name]
             del self._headers[name]
 
+
     def __repr__(self):
-        return '<Headers: Raw: %s Parsed: %s>'% (self._raw_headers, self._headers)
+        return '<Headers: Raw: %s Parsed: %s>' % (self._raw_headers, self._headers)
 
+
     def canonicalNameCaps(self, name):
         """Return the name with the canonical capitalization, if known,
         otherwise, Caps-After-Dashes"""
         return header_case_mapping.get(name) or dashCapitalize(name)
 
+
     def getAllRawHeaders(self):
         """Return an iterator of key,value pairs of all headers
         contained in this object, as strings. The keys are capitalized
         in canonical capitalization."""
-        for k,v in self._raw_headers.iteritems():
+        for k, v in self._raw_headers.iteritems():
             if v is _RecalcNeeded:
                 v = self._toRaw(k)
             yield self.canonicalNameCaps(k), v
 
+
     def makeImmutable(self):
         """Make this header set immutable. All mutating operations will
         raise an exception."""
         self.setHeader = self.setRawHeaders = self.removeHeader = self._mutateRaise
 
+
     def _mutateRaise(self, *args):
         raise AttributeError("This header object is immutable as the headers have already been sent.")
 
@@ -1462,24 +1693,24 @@
 
 
 parser_general_headers = {
-    'Cache-Control':(tokenize, listParser(parseCacheControl), dict),
-    'Connection':(tokenize,filterTokens),
-    'Date':(last,parseDateTime),
-#    'Pragma':tokenize
-#    'Trailer':tokenize
-    'Transfer-Encoding':(tokenize,filterTokens),
-#    'Upgrade':tokenize
-#    'Via':tokenize,stripComment
-#    'Warning':tokenize
+    'Cache-Control': (tokenize, listParser(parseCacheControl), dict),
+    'Connection': (tokenize, filterTokens),
+    'Date': (last, parseDateTime),
+#    'Pragma': tokenize
+#    'Trailer': tokenize
+    'Transfer-Encoding': (tokenize, filterTokens),
+#    'Upgrade': tokenize
+#    'Via': tokenize,stripComment
+#    'Warning': tokenize
 }
 
 generator_general_headers = {
-    'Cache-Control':(iteritems, listGenerator(generateCacheControl), singleHeader),
-    'Connection':(generateList,singleHeader),
-    'Date':(generateDateTime,singleHeader),
+    'Cache-Control': (iteritems, listGenerator(generateCacheControl), singleHeader),
+    'Connection': (generateList, singleHeader),
+    'Date': (generateDateTime, singleHeader),
 #    'Pragma':
 #    'Trailer':
-    'Transfer-Encoding':(generateList,singleHeader),
+    'Transfer-Encoding': (generateList, singleHeader),
 #    'Upgrade':
 #    'Via':
 #    'Warning':
@@ -1488,104 +1719,106 @@
 parser_request_headers = {
     'Accept': (tokenize, listParser(parseAccept), dict),
     'Accept-Charset': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultCharset),
-    'Accept-Encoding':(tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding),
-    'Accept-Language':(tokenize, listParser(parseAcceptQvalue), dict),
+    'Accept-Encoding': (tokenize, listParser(parseAcceptQvalue), dict, addDefaultEncoding),
+    'Accept-Language': (tokenize, listParser(parseAcceptQvalue), dict),
     'Authorization': (last, parseAuthorization),
-    'Cookie':(parseCookie,),
-    'Expect':(tokenize, listParser(parseExpect), dict),
-    'From':(last,),
-    'Host':(last,),
-    'If-Match':(tokenize, listParser(parseStarOrETag), list),
-    'If-Modified-Since':(last, parseIfModifiedSince),
-    'If-None-Match':(tokenize, listParser(parseStarOrETag), list),
-    'If-Range':(parseIfRange,),
-    'If-Unmodified-Since':(last,parseDateTime),
-    'Max-Forwards':(last,int),
-    'Prefer':(tokenize, listParser(parseExpect), dict),     # Prefer like Expect
-#    'Proxy-Authorization':str, # what is "credentials"
-    'Range':(tokenize, parseRange),
-    'Referer':(last,str), # TODO: URI object?
-    'TE':(tokenize, listParser(parseAcceptQvalue), dict),
-    'User-Agent':(last,str),
+    'Cookie': (parseCookie,),
+    'Expect': (tokenize, listParser(parseExpect), dict),
+    'From': (last,),
+    'Host': (last,),
+    'If-Match': (tokenize, listParser(parseStarOrETag), list),
+    'If-Modified-Since': (last, parseIfModifiedSince),
+    'If-None-Match': (tokenize, listParser(parseStarOrETag), list),
+    'If-Range': (parseIfRange,),
+    'If-Unmodified-Since': (last, parseDateTime),
+    'Max-Forwards': (last, int),
+    'Prefer': (tokenize, listParser(parseExpect), dict), # Prefer like Expect
+#    'Proxy-Authorization': str, # what is "credentials"
+    'Range': (tokenize, parseRange),
+    'Referer': (last, str), # TODO: URI object?
+    'TE': (tokenize, listParser(parseAcceptQvalue), dict),
+    'User-Agent': (last, str),
 }
 
 generator_request_headers = {
-    'Accept': (iteritems,listGenerator(generateAccept),singleHeader),
-    'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
-    'Accept-Encoding': (iteritems, removeDefaultEncoding, listGenerator(generateAcceptQvalue),singleHeader),
-    'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
+    'Accept': (iteritems, listGenerator(generateAccept), singleHeader),
+    'Accept-Charset': (iteritems, listGenerator(generateAcceptQvalue), singleHeader),
+    'Accept-Encoding': (iteritems, removeDefaultEncoding, listGenerator(generateAcceptQvalue), singleHeader),
+    'Accept-Language': (iteritems, listGenerator(generateAcceptQvalue), singleHeader),
     'Authorization': (generateAuthorization,), # what is "credentials"
-    'Cookie':(generateCookie,singleHeader),
-    'Expect':(iteritems, listGenerator(generateExpect), singleHeader),
-    'From':(str,singleHeader),
-    'Host':(str,singleHeader),
-    'If-Match':(listGenerator(generateStarOrETag), singleHeader),
-    'If-Modified-Since':(generateDateTime,singleHeader),
-    'If-None-Match':(listGenerator(generateStarOrETag), singleHeader),
-    'If-Range':(generateIfRange, singleHeader),
-    'If-Unmodified-Since':(generateDateTime,singleHeader),
-    'Max-Forwards':(str, singleHeader),
-    'Prefer':(iteritems, listGenerator(generateExpect), singleHeader),      # Prefer like Expect
-#    'Proxy-Authorization':str, # what is "credentials"
-    'Range':(generateRange,singleHeader),
-    'Referer':(str,singleHeader),
-    'TE': (iteritems, listGenerator(generateAcceptQvalue),singleHeader),
-    'User-Agent':(str,singleHeader),
+    'Cookie': (generateCookie, singleHeader),
+    'Expect': (iteritems, listGenerator(generateExpect), singleHeader),
+    'From': (str, singleHeader),
+    'Host': (str, singleHeader),
+    'If-Match': (listGenerator(generateStarOrETag), singleHeader),
+    'If-Modified-Since': (generateDateTime, singleHeader),
+    'If-None-Match': (listGenerator(generateStarOrETag), singleHeader),
+    'If-Range': (generateIfRange, singleHeader),
+    'If-Unmodified-Since': (generateDateTime, singleHeader),
+    'Max-Forwards': (str, singleHeader),
+    'Prefer': (iteritems, listGenerator(generateExpect), singleHeader), # Prefer like Expect
+#    'Proxy-Authorization': str, # what is "credentials"
+    'Range': (generateRange, singleHeader),
+    'Referer': (str, singleHeader),
+    'TE': (iteritems, listGenerator(generateAcceptQvalue), singleHeader),
+    'User-Agent': (str, singleHeader),
 }
 
 parser_response_headers = {
-    'Accept-Ranges':(tokenize, filterTokens),
-    'Age':(last,int),
-    'ETag':(tokenize, ETag.parse),
-    'Location':(last,), # TODO: URI object?
+    'Accept-Ranges': (tokenize, filterTokens),
+    'Age': (last, int),
+    'ETag': (tokenize, ETag.parse),
+    'Location': (last,), # TODO: URI object?
 #    'Proxy-Authenticate'
-    'Retry-After':(last, parseRetryAfter),
-    'Server':(last,),
-    'Set-Cookie':(parseSetCookie,),
-    'Set-Cookie2':(tokenize, parseSetCookie2),
-    'Vary':(tokenize, filterTokens),
+    'Retry-After': (last, parseRetryAfter),
+    'Server': (last,),
+    'Set-Cookie': (parseSetCookie,),
+    'Set-Cookie2': (tokenize, parseSetCookie2),
+    'Vary': (tokenize, filterTokens),
     'WWW-Authenticate': (lambda h: tokenize(h, foldCase=False),
                          parseWWWAuthenticate,)
 }
 
 generator_response_headers = {
-    'Accept-Ranges':(generateList, singleHeader),
-    'Age':(str, singleHeader),
-    'ETag':(ETag.generate, singleHeader),
-    'Location':(str, singleHeader),
+    'Accept-Ranges': (generateList, singleHeader),
+    'Age': (str, singleHeader),
+    'ETag': (ETag.generate, singleHeader),
+    'Location': (str, singleHeader),
 #    'Proxy-Authenticate'
-    'Retry-After':(generateRetryAfter, singleHeader),
-    'Server':(str, singleHeader),
-    'Set-Cookie':(generateSetCookie,),
-    'Set-Cookie2':(generateSetCookie2,),
-    'Vary':(generateList, singleHeader),
-    'WWW-Authenticate':(generateWWWAuthenticate,)
+    'Retry-After': (generateRetryAfter, singleHeader),
+    'Server': (str, singleHeader),
+    'Set-Cookie': (generateSetCookie,),
+    'Set-Cookie2': (generateSetCookie2,),
+    'Vary': (generateList, singleHeader),
+    'WWW-Authenticate': (generateWWWAuthenticate,)
 }
 
 parser_entity_headers = {
-    'Allow':(lambda str:tokenize(str, foldCase=False), filterTokens),
-    'Content-Encoding':(tokenize, filterTokens),
-    'Content-Language':(tokenize, filterTokens),
-    'Content-Length':(last, int),
-    'Content-Location':(last,), # TODO: URI object?
-    'Content-MD5':(last, parseContentMD5),
-    'Content-Range':(last, parseContentRange),
-    'Content-Type':(lambda str:tokenize(str, foldCase=False), parseContentType),
-    'Expires':(last, parseExpires),
-    'Last-Modified':(last, parseDateTime),
+    'Allow': (lambda hdr: tokenize(hdr, foldCase=False), filterTokens),
+    'Content-Disposition': (lambda hdr: tokenize(hdr, foldCase=False), parseContentDisposition),
+    'Content-Encoding': (tokenize, filterTokens),
+    'Content-Language': (tokenize, filterTokens),
+    'Content-Length': (last, int),
+    'Content-Location': (last,), # TODO: URI object?
+    'Content-MD5': (last, parseContentMD5),
+    'Content-Range': (last, parseContentRange),
+    'Content-Type': (lambda hdr: tokenize(hdr, foldCase=False), parseContentType),
+    'Expires': (last, parseExpires),
+    'Last-Modified': (last, parseDateTime),
     }
 
 generator_entity_headers = {
-    'Allow':(generateList, singleHeader),
-    'Content-Encoding':(generateList, singleHeader),
-    'Content-Language':(generateList, singleHeader),
-    'Content-Length':(str, singleHeader),
-    'Content-Location':(str, singleHeader),
-    'Content-MD5':(base64.encodestring, lambda x: x.strip("\n"), singleHeader),
-    'Content-Range':(generateContentRange, singleHeader),
-    'Content-Type':(generateContentType, singleHeader),
-    'Expires':(generateDateTime, singleHeader),
-    'Last-Modified':(generateDateTime, singleHeader),
+    'Allow': (generateList, singleHeader),
+    'Content-Disposition': (generateContentDisposition, singleHeader),
+    'Content-Encoding': (generateList, singleHeader),
+    'Content-Language': (generateList, singleHeader),
+    'Content-Length': (str, singleHeader),
+    'Content-Location': (str, singleHeader),
+    'Content-MD5': (base64.encodestring, lambda x: x.strip("\n"), singleHeader),
+    'Content-Range': (generateContentRange, singleHeader),
+    'Content-Type': (generateContentType, singleHeader),
+    'Expires': (generateDateTime, singleHeader),
+    'Last-Modified': (generateDateTime, singleHeader),
     }
 
 parser_dav_headers = {
@@ -1593,11 +1826,11 @@
     'DAV'         : (tokenize, list),
     'Depth'       : (last, parseDepth),
     'Destination' : (last,), # TODO: URI object?
-   #'If'          : (),
-   #'Lock-Token'  : (),
+   # 'If'          : (),
+   # 'Lock-Token'  : (),
     'Overwrite'   : (last, parseOverWrite),
-   #'Status-URI'  : (),
-   #'Timeout'     : (),
+   # 'Status-URI'  : (),
+   # 'Timeout'     : (),
 }
 
 generator_dav_headers = {
@@ -1605,11 +1838,11 @@
     'DAV'         : (generateList, singleHeader),
     'Depth'       : (singleHeader),
     'Destination' : (singleHeader),
-   #'If'          : (),
-   #'Lock-Token'  : (),
+   # 'If'          : (),
+   # 'Lock-Token'  : (),
     'Overwrite'   : (),
-   #'Status-URI'  : (),
-   #'Timeout'     : (),
+   # 'Status-URI'  : (),
+   # 'Timeout'     : (),
 }
 
 DefaultHTTPHandler.updateParsers(parser_general_headers)

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/twext/web2/test/test_http_headers.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/twext/web2/test/test_http_headers.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/twext/web2/test/test_http_headers.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -6,7 +6,8 @@
 """
 
 from twisted.trial import unittest
-import random, time
+import random
+import time
 
 from twext.web2 import http_headers
 from twext.web2.http_headers import Cookie, HeaderHandler, quoteString, generateKeyValues
@@ -17,9 +18,12 @@
     def __init__(self, raw):
         self.raw = raw
 
+
     def __eq__(self, other):
         return isinstance(other, parsedvalue) and other.raw == self.raw
 
+
+
 class HeadersAPITest(unittest.TestCase):
     """Make sure the public API exists and works."""
     def testRaw(self):
@@ -34,6 +38,7 @@
         h.removeHeader("test")
         self.assertEquals(h.getRawHeaders("test"), None)
 
+
     def testParsed(self):
         parsed = parsedvalue(("value1", "value2"))
         h = http_headers.Headers(handler=HeaderHandler(parsers={}, generators={}))
@@ -46,17 +51,19 @@
         h.removeHeader("test")
         self.assertEquals(h.getHeader("test"), None)
 
+
     def testParsedAndRaw(self):
         def parse(raw):
             return parsedvalue(raw)
 
+
         def generate(parsed):
             return parsed.raw
 
         rawvalue = ("value1", "value2")
         rawvalue2 = ("value3", "value4")
-        handler = HeaderHandler(parsers={'test':(parse,)},
-                                generators={'test':(generate,)})
+        handler = HeaderHandler(parsers={'test': (parse,)},
+                                generators={'test': (generate,)})
 
         h = http_headers.Headers(handler=handler)
         h.setRawHeaders("test", rawvalue)
@@ -74,6 +81,7 @@
                                  handler=handler)
         self.assertEquals(h.getRawHeaders("test"), rawvalue2)
 
+
     def testImmutable(self):
         h = http_headers.Headers(handler=HeaderHandler(parsers={}, generators={}))
 
@@ -82,11 +90,13 @@
         self.assertRaises(AttributeError, h.setHeader, "test", 1)
         self.assertRaises(AttributeError, h.removeHeader, "test")
 
+
+
 class TokenizerTest(unittest.TestCase):
     """Test header list parsing functions."""
 
     def testParse(self):
-        parser = lambda val: list(http_headers.tokenize([val,]))
+        parser = lambda val: list(http_headers.tokenize([val, ]))
         Token = http_headers.Token
         tests = (('foo,bar', ['foo', Token(','), 'bar']),
                  ('FOO,BAR', ['foo', Token(','), 'bar']),
@@ -97,17 +107,21 @@
 
         raiseTests = ('"open quote', '"ending \\', "control character: \x127", "\x00", "\x1f")
 
-        for test,result in tests:
+        for test, result in tests:
             self.assertEquals(parser(test), result)
         for test in raiseTests:
             self.assertRaises(ValueError, parser, test)
 
+
     def testGenerate(self):
         pass
 
+
     def testRoundtrip(self):
         pass
 
+
+
 def atSpecifiedTime(when, func):
     def inner(*a, **kw):
         orig = time.time
@@ -118,12 +132,16 @@
             time.time = orig
     return util.mergeFunctionMetadata(func, inner)
 
+
+
 def parseHeader(name, val):
     head = http_headers.Headers(handler=http_headers.DefaultHTTPHandler)
-    head.setRawHeaders(name,val)
+    head.setRawHeaders(name, val)
     return head.getHeader(name)
 parseHeader = atSpecifiedTime(999999990, parseHeader) # Sun, 09 Sep 2001 01:46:30 GMT
 
+
+
 def generateHeader(name, val):
     head = http_headers.Headers(handler=http_headers.DefaultHTTPHandler)
     head.setHeader(name, val)
@@ -131,6 +149,7 @@
 generateHeader = atSpecifiedTime(999999990, generateHeader) # Sun, 09 Sep 2001 01:46:30 GMT
 
 
+
 class HeaderParsingTestBase(unittest.TestCase):
     def runRoundtripTest(self, headername, table):
         """
@@ -163,11 +182,10 @@
             elif len(row) == 3:
                 rawHeaderInput, parsedHeaderData, requiredGeneratedElements = row
 
-
             assert isinstance(requiredGeneratedElements, list)
 
             # parser
-            parsed = parseHeader(headername, [rawHeaderInput,])
+            parsed = parseHeader(headername, [rawHeaderInput, ])
             self.assertEquals(parsed, parsedHeaderData)
 
             regeneratedHeaderValue = generateHeader(headername, parsed)
@@ -193,33 +211,35 @@
             parsed = parseHeader(headername, val)
             self.assertEquals(parsed, None)
 
+
+
 class GeneralHeaderParsingTests(HeaderParsingTestBase):
     def testCacheControl(self):
         table = (
             ("no-cache",
-             {'no-cache':None}),
+             {'no-cache': None}),
             ("no-cache, no-store, max-age=5, max-stale=3, min-fresh=5, no-transform, only-if-cached, blahblah-extension-thingy",
              {'no-cache': None,
               'no-store': None,
-              'max-age':5,
-              'max-stale':3,
-              'min-fresh':5,
-              'no-transform':None,
-              'only-if-cached':None,
-              'blahblah-extension-thingy':None}),
+              'max-age': 5,
+              'max-stale': 3,
+              'min-fresh': 5,
+              'no-transform': None,
+              'only-if-cached': None,
+              'blahblah-extension-thingy': None}),
             ("max-stale",
-             {'max-stale':None}),
+             {'max-stale': None}),
             ("public, private, no-cache, no-store, no-transform, must-revalidate, proxy-revalidate, max-age=5, s-maxage=10, blahblah-extension-thingy",
-             {'public':None,
-              'private':None,
-              'no-cache':None,
-              'no-store':None,
-              'no-transform':None,
-              'must-revalidate':None,
-              'proxy-revalidate':None,
-              'max-age':5,
-              's-maxage':10,
-              'blahblah-extension-thingy':None}),
+             {'public': None,
+              'private': None,
+              'no-cache': None,
+              'no-store': None,
+              'no-transform': None,
+              'must-revalidate': None,
+              'proxy-revalidate': None,
+              'max-age': 5,
+              's-maxage': 10,
+              'blahblah-extension-thingy': None}),
             ('private="Set-Cookie, Set-Cookie2", no-cache="PROXY-AUTHENTICATE"',
              {'private': ['set-cookie', 'set-cookie2'],
               'no-cache': ['proxy-authenticate']},
@@ -227,13 +247,15 @@
             )
         self.runRoundtripTest("Cache-Control", table)
 
+
     def testConnection(self):
         table = (
-            ("close", ['close',]),
+            ("close", ['close', ]),
             ("close, foo-bar", ['close', 'foo-bar'])
             )
         self.runRoundtripTest("Connection", table)
 
+
     def testDate(self):
         # Don't need major tests since the datetime parser has its own tests
         self.runRoundtripTest("Date", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
@@ -244,6 +266,7 @@
 #     def testTrailer(self):
 #         fail
 
+
     def testTransferEncoding(self):
         table = (
             ('chunked', ['chunked']),
@@ -260,8 +283,10 @@
 #     def testWarning(self):
 #         fail
 
+
+
 class RequestHeaderParsingTests(HeaderParsingTestBase):
-    #FIXME test ordering too.
+    # FIXME test ordering too.
     def testAccept(self):
         table = (
             ("audio/*;q=0.2, audio/basic",
@@ -307,6 +332,7 @@
             )
         self.runRoundtripTest("Accept-Charset", table)
 
+
     def testAcceptEncoding(self):
         table = (
             ("compress, gzip",
@@ -319,11 +345,12 @@
              {'compress': 0.5, 'gzip': 1.0, 'identity': 0.0001},
              ["compress;q=0.5", "gzip"]),
             ("gzip;q=1.0, identity;q=0.5, *;q=0",
-             {'gzip': 1.0, 'identity': 0.5, '*':0},
+             {'gzip': 1.0, 'identity': 0.5, '*': 0},
              ["gzip", "identity;q=0.5", "*;q=0"]),
             )
         self.runRoundtripTest("Accept-Encoding", table)
 
+
     def testAcceptLanguage(self):
         table = (
             ("da, en-gb;q=0.8, en;q=0.7",
@@ -333,6 +360,7 @@
             )
         self.runRoundtripTest("Accept-Language", table)
 
+
     def testAuthorization(self):
         table = (
             ("Basic dXNlcm5hbWU6cGFzc3dvcmQ=",
@@ -345,6 +373,7 @@
 
         self.runRoundtripTest("Authorization", table)
 
+
     def testCookie(self):
         table = (
             ('name=value', [Cookie('name', 'value')]),
@@ -358,12 +387,12 @@
             )
         self.runRoundtripTest("Cookie", table)
 
-        #newstyle RFC2965 Cookie
+        # newstyle RFC2965 Cookie
         table2 = (
             ('$Version="1";'
              'name="value";$Path="/foo";$Domain="www.local";$Port="80,8000";'
              'name2="value"',
-             [Cookie('name', 'value', path='/foo', domain='www.local', ports=(80,8000), version=1), Cookie('name2', 'value', version=1)]),
+             [Cookie('name', 'value', path='/foo', domain='www.local', ports=(80, 8000), version=1), Cookie('name2', 'value', version=1)]),
             ('$Version="1";'
              'name="value";$Port',
              [Cookie('name', 'value', ports=(), version=1)]),
@@ -386,10 +415,9 @@
              '$Version="1";name="qq\\"qq";name2="value2"'),
             )
         for row in table3:
-            self.assertEquals(generateHeader("Cookie", row[0]), [row[1],])
+            self.assertEquals(generateHeader("Cookie", row[0]), [row[1], ])
 
 
-
     def testSetCookie(self):
         table = (
             ('name,"blah=value,; expires=Sun, 09 Sep 2001 01:46:40 GMT; path=/foo; domain=bar.baz; secure',
@@ -400,30 +428,35 @@
             )
         self.runRoundtripTest("Set-Cookie", table)
 
+
     def testSetCookie2(self):
         table = (
             ('name="value"; Comment="YadaYada"; CommentURL="http://frobnotz/"; Discard; Domain="blah.blah"; Max-Age=10; Path="/foo"; Port="80,8080"; Secure; Version="1"',
-             [Cookie("name", "value", comment="YadaYada", commenturl="http://frobnotz/", discard=True, domain="blah.blah", expires=1000000000, path="/foo", ports=(80,8080), secure=True, version=1)]),
+             [Cookie("name", "value", comment="YadaYada", commenturl="http://frobnotz/", discard=True, domain="blah.blah", expires=1000000000, path="/foo", ports=(80, 8080), secure=True, version=1)]),
             )
         self.runRoundtripTest("Set-Cookie2", table)
 
+
     def testExpect(self):
         table = (
             ("100-continue",
-             {"100-continue":(None,)}),
+             {"100-continue": (None,)}),
             ('foobar=twiddle',
-             {'foobar':('twiddle',)}),
+             {'foobar': ('twiddle',)}),
             ("foo=bar;a=b;c",
-             {'foo':('bar',('a', 'b'), ('c', None))})
+             {'foo': ('bar', ('a', 'b'), ('c', None))})
             )
         self.runRoundtripTest("Expect", table)
 
+
     def testFrom(self):
         self.runRoundtripTest("From", (("webmaster at w3.org", "webmaster at w3.org"),))
 
+
     def testHost(self):
         self.runRoundtripTest("Host", (("www.w3.org", "www.w3.org"),))
 
+
     def testIfMatch(self):
         table = (
             ('"xyzzy"', [http_headers.ETag('xyzzy')]),
@@ -433,6 +466,8 @@
             ('*', ['*']),
             )
         self.runRoundtripTest("If-Match", table)
+
+
     def testIfModifiedSince(self):
         # Don't need major tests since the datetime parser has its own test
         # Just test stupid ; length= brokenness.
@@ -443,6 +478,7 @@
 
         self.runRoundtripTest("If-Modified-Since", table)
 
+
     def testIfNoneMatch(self):
         table = (
             ('"xyzzy"', [http_headers.ETag('xyzzy')]),
@@ -456,6 +492,7 @@
             )
         self.runRoundtripTest("If-None-Match", table)
 
+
     def testIfRange(self):
         table = (
             ('"xyzzy"', http_headers.ETag('xyzzy')),
@@ -465,9 +502,11 @@
             )
         self.runRoundtripTest("If-Range", table)
 
+
     def testIfUnmodifiedSince(self):
         self.runRoundtripTest("If-Unmodified-Since", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
 
+
     def testMaxForwards(self):
         self.runRoundtripTest("Max-Forwards", (("15", 15),))
 
@@ -475,13 +514,14 @@
 #     def testProxyAuthorize(self):
 #         fail
 
+
     def testRange(self):
         table = (
-            ("bytes=0-499", ('bytes', [(0,499),])),
-            ("bytes=500-999", ('bytes', [(500,999),])),
-            ("bytes=-500",('bytes', [(None,500),])),
-            ("bytes=9500-",('bytes', [(9500, None),])),
-            ("bytes=0-0,-1", ('bytes', [(0,0),(None,1)])),
+            ("bytes=0-499", ('bytes', [(0, 499), ])),
+            ("bytes=500-999", ('bytes', [(500, 999), ])),
+            ("bytes=-500", ('bytes', [(None, 500), ])),
+            ("bytes=9500-", ('bytes', [(9500, None), ])),
+            ("bytes=0-0,-1", ('bytes', [(0, 0), (None, 1)])),
             )
         self.runRoundtripTest("Range", table)
 
@@ -493,23 +533,27 @@
 
     def testTE(self):
         table = (
-            ("deflate", {'deflate':1}),
+            ("deflate", {'deflate': 1}),
             ("", {}),
-            ("trailers, deflate;q=0.5", {'trailers':1, 'deflate':0.5}),
+            ("trailers, deflate;q=0.5", {'trailers': 1, 'deflate': 0.5}),
             )
         self.runRoundtripTest("TE", table)
 
+
     def testUserAgent(self):
         self.runRoundtripTest("User-Agent", (("CERN-LineMode/2.15 libwww/2.17b3", "CERN-LineMode/2.15 libwww/2.17b3"),))
 
 
+
 class ResponseHeaderParsingTests(HeaderParsingTestBase):
     def testAcceptRanges(self):
         self.runRoundtripTest("Accept-Ranges", (("bytes", ["bytes"]), ("none", ["none"])))
 
+
     def testAge(self):
         self.runRoundtripTest("Age", (("15", 15),))
 
+
     def testETag(self):
         table = (
             ('"xyzzy"', http_headers.ETag('xyzzy')),
@@ -518,6 +562,7 @@
             )
         self.runRoundtripTest("ETag", table)
 
+
     def testLocation(self):
         self.runRoundtripTest("Location", (("http://www.w3.org/pub/WWW/People.htm",
                                            "http://www.w3.org/pub/WWW/People.htm"),))
@@ -526,17 +571,20 @@
 #     def testProxyAuthenticate(self):
 #         fail
 
+
     def testRetryAfter(self):
         # time() is always 999999990 when being tested.
         table = (
             ("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000, ["10"]),
-            ("120", 999999990+120),
+            ("120", 999999990 + 120),
             )
         self.runRoundtripTest("Retry-After", table)
 
+
     def testServer(self):
         self.runRoundtripTest("Server", (("CERN/3.0 libwww/2.17", "CERN/3.0 libwww/2.17"),))
 
+
     def testVary(self):
         table = (
             ("*", ["*"]),
@@ -544,11 +592,12 @@
             )
         self.runRoundtripTest("Vary", table)
 
+
     def testWWWAuthenticate(self):
         digest = ('Digest realm="digest realm", nonce="bAr", qop="auth"',
-                  [('Digest', {'realm': 'digest realm', 'nonce': 'bAr', 
+                  [('Digest', {'realm': 'digest realm', 'nonce': 'bAr',
                                'qop': 'auth'})],
-                  ['Digest', 'realm="digest realm"', 
+                  ['Digest', 'realm="digest realm"',
                    'nonce="bAr"', 'qop="auth"'])
 
         basic = ('Basic realm="foo"',
@@ -558,29 +607,29 @@
                 [('NTLM', {})], ['NTLM', ''])
 
         negotiate = ('Negotiate SomeGssAPIData',
-                     [('Negotiate', 'SomeGssAPIData')], 
+                     [('Negotiate', 'SomeGssAPIData')],
                      ['Negotiate', 'SomeGssAPIData'])
 
         table = (digest,
                  basic,
-                 (digest[0]+', '+basic[0],
+                 (digest[0] + ', ' + basic[0],
                   digest[1] + basic[1],
                   [digest[2], basic[2]]),
                  ntlm,
                  negotiate,
-                 (ntlm[0]+', '+basic[0],
+                 (ntlm[0] + ', ' + basic[0],
                   ntlm[1] + basic[1],
                   [ntlm[2], basic[2]]),
-                 (digest[0]+', '+negotiate[0],
+                 (digest[0] + ', ' + negotiate[0],
                   digest[1] + negotiate[1],
                   [digest[2], negotiate[2]]),
-                 (negotiate[0]+', '+negotiate[0],
+                 (negotiate[0] + ', ' + negotiate[0],
                   negotiate[1] + negotiate[1],
                   [negotiate[2] + negotiate[2]]),
-                 (ntlm[0]+', '+ntlm[0],
+                 (ntlm[0] + ', ' + ntlm[0],
                   ntlm[1] + ntlm[1],
                   [ntlm[2], ntlm[2]]),
-                 (basic[0]+', '+ntlm[0],
+                 (basic[0] + ', ' + ntlm[0],
                   basic[1] + ntlm[1],
                   [basic[2], ntlm[2]]),
                  )
@@ -593,7 +642,7 @@
         for row in table:
             rawHeaderInput, parsedHeaderData, requiredGeneratedElements = row
 
-            parsed = parseHeader(headername, [rawHeaderInput,])
+            parsed = parseHeader(headername, [rawHeaderInput, ])
             self.assertEquals(parsed, parsedHeaderData)
 
             regeneratedHeaderValue = generateHeader(headername, parsed)
@@ -617,6 +666,7 @@
         self.assertEquals(parsed, reparsed)
 
 
+
 class EntityHeaderParsingTests(HeaderParsingTestBase):
     def testAllow(self):
         # Allow is a silly case-sensitive header unlike all the rest
@@ -626,32 +676,38 @@
             )
         self.runRoundtripTest("Allow", table)
 
+
     def testContentEncoding(self):
         table = (
-            ("gzip", ['gzip',]),
+            ("gzip", ['gzip', ]),
             )
         self.runRoundtripTest("Content-Encoding", table)
 
+
     def testContentLanguage(self):
         table = (
-            ("da", ['da',]),
+            ("da", ['da', ]),
             ("mi, en", ['mi', 'en']),
             )
         self.runRoundtripTest("Content-Language", table)
 
+
     def testContentLength(self):
         self.runRoundtripTest("Content-Length", (("15", 15),))
         self.invalidParseTest("Content-Length", ("asdf",))
 
+
     def testContentLocation(self):
         self.runRoundtripTest("Content-Location",
                               (("http://www.w3.org/pub/WWW/People.htm",
                                 "http://www.w3.org/pub/WWW/People.htm"),))
 
+
     def testContentMD5(self):
         self.runRoundtripTest("Content-MD5", (("Q2hlY2sgSW50ZWdyaXR5IQ==", "Check Integrity!"),))
         self.invalidParseTest("Content-MD5", ("sdlaksjdfhlkaj",))
 
+
     def testContentRange(self):
         table = (
             ("bytes 0-499/1234", ("bytes", 0, 499, 1234)),
@@ -664,13 +720,23 @@
             )
         self.runRoundtripTest("Content-Range", table)
 
+
     def testContentType(self):
         table = (
-            ("text/html;charset=iso-8859-4", http_headers.MimeType('text', 'html', (('charset','iso-8859-4'),))),
+            ("text/html;charset=iso-8859-4", http_headers.MimeType('text', 'html', (('charset', 'iso-8859-4'),))),
             ("text/html", http_headers.MimeType('text', 'html')),
             )
         self.runRoundtripTest("Content-Type", table)
 
+
+    def testContentDisposition(self):
+        table = (
+            ("attachment;filename=foo.txt", http_headers.MimeDisposition('attachment', (('filename', 'foo.txt'),))),
+            ("inline", http_headers.MimeDisposition('inline')),
+            )
+        self.runRoundtripTest("Content-Disposition", table)
+
+
     def testExpires(self):
         self.runRoundtripTest("Expires", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
         # Invalid expires MUST return date in the past.
@@ -682,6 +748,8 @@
         # Don't need major tests since the datetime parser has its own test
         self.runRoundtripTest("Last-Modified", (("Sun, 09 Sep 2001 01:46:40 GMT", 1000000000),))
 
+
+
 class DateTimeTest(unittest.TestCase):
     """Test date parsing functions."""
 
@@ -716,15 +784,18 @@
     def testGenerate(self):
         self.assertEquals(http_headers.generateDateTime(784111777), 'Sun, 06 Nov 1994 08:49:37 GMT')
 
+
     def testRoundtrip(self):
-        for i in range(2000):
+        for _ignore in range(2000):
             time = random.randint(0, 2000000000)
             timestr = http_headers.generateDateTime(time)
             time2 = http_headers.parseDateTime(timestr)
             self.assertEquals(time, time2)
 
 
+
 class TestMimeType(unittest.TestCase):
+
     def testEquality(self):
         """Test that various uses of the constructer are equal
         """
@@ -748,6 +819,28 @@
 
 
 
+class TestMimeDisposition(unittest.TestCase):
+
+    def testEquality(self):
+        """Test that various uses of the constructer are equal
+        """
+
+        kwargMime = http_headers.MimeDisposition('attachment',
+                                          key='value')
+        dictMime = http_headers.MimeDisposition('attachment',
+                                         {'key': 'value'})
+        tupleMime = http_headers.MimeDisposition('attachment',
+                                          (('key', 'value'),))
+
+        stringMime = http_headers.MimeDisposition.fromString('attachment;key=value')
+
+        self.assertEquals(kwargMime, dictMime)
+        self.assertEquals(dictMime, tupleMime)
+        self.assertEquals(kwargMime, tupleMime)
+        self.assertEquals(kwargMime, stringMime)
+
+
+
 class FormattingUtilityTests(unittest.TestCase):
     """
     Tests for various string formatting functionality required to generate

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/caldavxml.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/caldavxml.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/caldavxml.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -1,4 +1,4 @@
-##
+# #
 # Copyright (c) 2005-2012 Apple Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-##
+# #
 
 """
 CalDAV XML Support.
@@ -40,9 +40,9 @@
 
 log = Logger()
 
-##
+#
 # CalDAV objects
-##
+#
 
 caldav_namespace = "urn:ietf:params:xml:ns:caldav"
 
@@ -73,6 +73,7 @@
     namespace = caldav_namespace
 
 
+
 class CalDAVEmptyElement (WebDAVEmptyElement):
     """
     CalDAV element with no contents.
@@ -80,6 +81,7 @@
     namespace = caldav_namespace
 
 
+
 class CalDAVTextElement (WebDAVTextElement):
     """
     CalDAV element containing PCDATA.
@@ -87,6 +89,7 @@
     namespace = caldav_namespace
 
 
+
 class CalDAVTimeRangeElement (CalDAVEmptyElement):
     """
     CalDAV element containing a time range.
@@ -102,17 +105,18 @@
         # One of start or end must be present
         if "start" not in attributes and "end" not in attributes:
             raise ValueError("One of 'start' or 'end' must be present in CALDAV:time-range")
-        
+
         self.start = PyCalendarDateTime.parseText(attributes["start"]) if "start" in attributes else None
         self.end = PyCalendarDateTime.parseText(attributes["end"]) if "end" in attributes else None
 
+
     def valid(self, level=0):
         """
         Indicate whether the time-range is valid (must be date-time in UTC).
-        
+
         @return:      True if valid, False otherwise
         """
-        
+
         if self.start is not None and self.start.isDateOnly():
             log.msg("start attribute in <time-range> is not a date-time: %s" % (self.start,))
             return False
@@ -130,6 +134,7 @@
         return True
 
 
+
 class CalDAVTimeZoneElement (CalDAVTextElement):
     """
     CalDAV element containing iCalendar data with a single VTIMEZONE component.
@@ -141,6 +146,7 @@
         """
         return iComponent.fromString(str(self))
 
+
     def gettimezone(self):
         """
         Get the timezone to use. If none, return UTC timezone.
@@ -156,13 +162,14 @@
         # Default to using utc tzinfo
         return PyCalendarTimezone(utc=True)
 
+
     def valid(self):
         """
         Determine whether the content of this element is a valid single VTIMEZONE component.
-        
+
         @return: True if valid, False if not.
         """
-        
+
         try:
             calendar = self.calendar()
             if calendar is None:
@@ -182,8 +189,9 @@
                 return False
 
         return found
-        
 
+
+
 @registerElement
 class CalendarHomeSet (CalDAVElement):
     """
@@ -193,9 +201,10 @@
     name = "calendar-home-set"
     hidden = True
 
-    allowed_children = { (dav_namespace, "href"): (0, None) }
+    allowed_children = {(dav_namespace, "href"): (0, None)}
 
 
+
 @registerElement
 class CalendarDescription (CalDAVTextElement):
     """
@@ -208,6 +217,7 @@
     # May be protected; but we'll let the client set this if they like.
 
 
+
 @registerElement
 class CalendarTimeZone (CalDAVTimeZoneElement):
     """
@@ -218,6 +228,7 @@
     hidden = True
 
 
+
 @registerElement
 class SupportedCalendarComponentSets (CalDAVElement):
     """
@@ -229,9 +240,10 @@
     hidden = True
     protected = True
 
-    allowed_children = { (caldav_namespace, "supported-calendar-component-set"): (0, None) }
+    allowed_children = {(caldav_namespace, "supported-calendar-component-set"): (0, None)}
 
 
+
 @registerElement
 class SupportedCalendarComponentSet (CalDAVElement):
     """
@@ -242,9 +254,10 @@
     hidden = True
     protected = True
 
-    allowed_children = { (caldav_namespace, "comp"): (0, None) }
+    allowed_children = {(caldav_namespace, "comp"): (0, None)}
 
 
+
 @registerElement
 class SupportedCalendarData (CalDAVElement):
     """
@@ -255,9 +268,10 @@
     hidden = True
     protected = True
 
-    allowed_children = { (caldav_namespace, "calendar-data"): (0, None) }
+    allowed_children = {(caldav_namespace, "calendar-data"): (0, None)}
 
 
+
 @registerElement
 class MaxResourceSize (CalDAVTextElement):
     """
@@ -269,6 +283,7 @@
     protected = True
 
 
+
 @registerElement
 class MinDateTime (CalDAVTextElement):
     """
@@ -280,6 +295,7 @@
     protected = True
 
 
+
 @registerElement
 class MaxDateTime (CalDAVTextElement):
     """
@@ -291,6 +307,7 @@
     protected = True
 
 
+
 @registerElement
 class MaxInstances (CalDAVTextElement):
     """
@@ -302,6 +319,7 @@
     protected = True
 
 
+
 @registerElement
 class MaxAttendeesPerInstance (CalDAVTextElement):
     """
@@ -313,6 +331,7 @@
     protected = True
 
 
+
 @registerElement
 class Calendar (CalDAVEmptyElement):
     """
@@ -322,6 +341,7 @@
     name = "calendar"
 
 
+
 @registerElement
 class MakeCalendar (CalDAVElement):
     """
@@ -330,11 +350,12 @@
     """
     name = "mkcalendar"
 
-    allowed_children = { (dav_namespace, "set"): (0, 1) }
+    allowed_children = {(dav_namespace, "set"): (0, 1)}
 
-    child_types = { "WebDAVUnknownElement": (0, None) }
+    child_types = {"WebDAVUnknownElement": (0, None)}
 
 
+
 @registerElement
 class MakeCalendarResponse (CalDAVElement):
     """
@@ -343,9 +364,10 @@
     """
     name = "mkcalendar-response"
 
-    allowed_children = { WebDAVElement: (0, None) }
+    allowed_children = {WebDAVElement: (0, None)}
 
 
+
 @registerElement
 class CalendarQuery (CalDAVElement):
     """
@@ -355,13 +377,14 @@
     name = "calendar-query"
 
     allowed_children = {
-        (dav_namespace,    "allprop" ): (0, None),
-        (dav_namespace,    "propname"): (0, None),
-        (dav_namespace,    "prop"    ): (0, None),
+        (dav_namespace, "allprop"): (0, None),
+        (dav_namespace, "propname"): (0, None),
+        (dav_namespace, "prop"): (0, None),
         (caldav_namespace, "timezone"): (0, 1),
-        (caldav_namespace, "filter"  ): (0, 1), # Actually (1, 1) unless element is empty
+        (caldav_namespace, "filter"): (0, 1), # Actually (1, 1) unless element is empty
     }
 
+
     def __init__(self, *children, **attributes):
         super(CalendarQuery, self).__init__(*children, **attributes)
 
@@ -373,9 +396,9 @@
             qname = child.qname()
 
             if qname in (
-                (dav_namespace, "allprop" ),
+                (dav_namespace, "allprop"),
                 (dav_namespace, "propname"),
-                (dav_namespace, "prop"    ),
+                (dav_namespace, "prop"),
             ):
                 if props is not None:
                     raise ValueError("Only one of CalDAV:allprop, CalDAV:propname, CalDAV:prop allowed")
@@ -384,7 +407,7 @@
             elif qname == (caldav_namespace, "filter"):
                 filter = child
 
-            elif qname ==(caldav_namespace, "timezone"):
+            elif qname == (caldav_namespace, "timezone"):
                 timezone = child
 
             else:
@@ -394,11 +417,12 @@
             if filter is None:
                 raise ValueError("CALDAV:filter required")
 
-        self.props  = props
+        self.props = props
         self.filter = filter
         self.timezone = timezone
 
 
+
 @registerElement
 class CalendarData (CalDAVElement):
     """
@@ -409,10 +433,10 @@
     name = "calendar-data"
 
     allowed_children = {
-        (caldav_namespace, "comp"                 ): (0, None),
-        (caldav_namespace, "expand"               ): (0, 1),
-        (caldav_namespace, "limit-recurrence-set" ): (0, 1),
-        (caldav_namespace, "limit-freebusy-set"   ): (0, 1),
+        (caldav_namespace, "comp"): (0, None),
+        (caldav_namespace, "expand"): (0, 1),
+        (caldav_namespace, "limit-recurrence-set"): (0, 1),
+        (caldav_namespace, "limit-freebusy-set"): (0, 1),
         PCDATAElement: (0, None),
     }
     allowed_attributes = {
@@ -434,13 +458,14 @@
 
     fromTextData = fromCalendar
 
+
     def __init__(self, *children, **attributes):
         super(CalendarData, self).__init__(*children, **attributes)
 
-        component      = None
+        component = None
         recurrence_set = None
-        freebusy_set   = None
-        data           = None
+        freebusy_set = None
+        data = None
 
         for child in self.children:
             qname = child.qname()
@@ -450,7 +475,7 @@
 
             elif qname in (
                 (caldav_namespace, "expand"),
-                (caldav_namespace, "limit-recurrence-set" ),
+                (caldav_namespace, "limit-recurrence-set"),
             ):
                 if recurrence_set is not None:
                     raise ValueError("Only one of CalDAV:expand, CalDAV:limit-recurrence-set allowed")
@@ -465,20 +490,22 @@
                 else:
                     data += child
 
-            else: raise AssertionError("We shouldn't be here")
+            else:
+                raise AssertionError("We shouldn't be here")
 
-        self.component      = component
+        self.component = component
         self.recurrence_set = recurrence_set
-        self.freebusy_set   = freebusy_set
+        self.freebusy_set = freebusy_set
 
         if data is not None:
             try:
                 if component is not None:
-                    raise ValueError("Only one of CalDAV:comp (%r) or PCDATA (%r) allowed"% (component, str(data)))
+                    raise ValueError("Only one of CalDAV:comp (%r) or PCDATA (%r) allowed" % (component, str(data)))
                 if recurrence_set is not None:
-                    raise ValueError("%s not allowed with PCDATA (%r)"% (recurrence_set, str(data)))
+                    raise ValueError("%s not allowed with PCDATA (%r)" % (recurrence_set, str(data)))
             except ValueError:
-                if not data.isWhitespace(): raise
+                if not data.isWhitespace():
+                    raise
             else:
                 # Since we've already combined PCDATA elements, we'd may as well
                 # optimize them originals away
@@ -494,19 +521,21 @@
         else:
             self.version = "2.0"
 
+
     def verifyTypeVersion(self, types_and_versions):
         """
         Make sure any content-type and version matches at least one of the supplied set.
-        
+
         @param types_and_versions: a list of (content-type, version) tuples to test against.
         @return:                   True if there is at least one match, False otherwise.
         """
         for item in types_and_versions:
             if (item[0] == self.content_type) and (item[1] == self.version):
                 return True
-        
+
         return False
 
+
     def calendar(self):
         """
         Returns a calendar component derived from this element.
@@ -519,6 +548,7 @@
 
     generateComponent = calendar
 
+
     def calendarData(self):
         """
         Returns the calendar data derived from this element.
@@ -535,6 +565,7 @@
     textData = calendarData
 
 
+
 @registerElement
 class CalendarComponent (CalDAVElement):
     """
@@ -545,12 +576,13 @@
 
     allowed_children = {
         (caldav_namespace, "allcomp"): (0, 1),
-        (caldav_namespace, "comp"   ): (0, None),
+        (caldav_namespace, "comp"): (0, None),
         (caldav_namespace, "allprop"): (0, 1),
-        (caldav_namespace, "prop"   ): (0, None),
+        (caldav_namespace, "prop"): (0, None),
     }
-    allowed_attributes = { "name": True }
+    allowed_attributes = {"name": True}
 
+
     def __init__(self, *children, **attributes):
         super(CalendarComponent, self).__init__(*children, **attributes)
 
@@ -597,6 +629,7 @@
         self.properties = properties
         self.type = self.attributes["name"]
 
+
     def getFromICalendar(self, component):
         """
         Returns a calendar component object containing the data in the given
@@ -639,6 +672,7 @@
         return result
 
 
+
 @registerElement
 class AllComponents (CalDAVEmptyElement):
     """
@@ -648,6 +682,7 @@
     name = "allcomp"
 
 
+
 @registerElement
 class AllProperties (CalDAVEmptyElement):
     """
@@ -657,6 +692,7 @@
     name = "allprop"
 
 
+
 @registerElement
 class Property (CalDAVEmptyElement):
     """
@@ -670,6 +706,7 @@
         "novalue": False,
     }
 
+
     def __init__(self, *children, **attributes):
         super(Property, self).__init__(*children, **attributes)
 
@@ -687,6 +724,7 @@
             self.novalue = False
 
 
+
 @registerElement
 class Expand (CalDAVTimeRangeElement):
     """
@@ -697,6 +735,7 @@
     name = "expand"
 
 
+
 @registerElement
 class LimitRecurrenceSet (CalDAVTimeRangeElement):
     """
@@ -707,6 +746,7 @@
     name = "limit-recurrence-set"
 
 
+
 @registerElement
 class LimitFreeBusySet (CalDAVTimeRangeElement):
     """
@@ -717,6 +757,7 @@
     name = "limit-freebusy-set"
 
 
+
 @registerElement
 class Filter (CalDAVElement):
     """
@@ -725,9 +766,10 @@
     """
     name = "filter"
 
-    allowed_children = { (caldav_namespace, "comp-filter"): (1, 1) }
+    allowed_children = {(caldav_namespace, "comp-filter"): (1, 1)}
 
 
+
 @registerElement
 class ComponentFilter (CalDAVElement):
     """
@@ -737,10 +779,10 @@
     name = "comp-filter"
 
     allowed_children = {
-        (caldav_namespace, "is-not-defined" ): (0, 1),
-        (caldav_namespace, "time-range"     ): (0, 1),
-        (caldav_namespace, "comp-filter"    ): (0, None),
-        (caldav_namespace, "prop-filter"    ): (0, None),
+        (caldav_namespace, "is-not-defined"): (0, 1),
+        (caldav_namespace, "time-range"): (0, 1),
+        (caldav_namespace, "comp-filter"): (0, None),
+        (caldav_namespace, "prop-filter"): (0, None),
     }
     allowed_attributes = {
         "name": True,
@@ -748,6 +790,7 @@
     }
 
 
+
 @registerElement
 class PropertyFilter (CalDAVElement):
     """
@@ -757,10 +800,10 @@
     name = "prop-filter"
 
     allowed_children = {
-        (caldav_namespace, "is-not-defined" ): (0, 1),
-        (caldav_namespace, "time-range"     ): (0, 1),
-        (caldav_namespace, "text-match"     ): (0, 1),
-        (caldav_namespace, "param-filter"   ): (0, None),
+        (caldav_namespace, "is-not-defined"): (0, 1),
+        (caldav_namespace, "time-range"): (0, 1),
+        (caldav_namespace, "text-match"): (0, 1),
+        (caldav_namespace, "param-filter"): (0, None),
     }
     allowed_attributes = {
         "name": True,
@@ -768,6 +811,7 @@
     }
 
 
+
 @registerElement
 class ParameterFilter (CalDAVElement):
     """
@@ -777,12 +821,13 @@
     name = "param-filter"
 
     allowed_children = {
-        (caldav_namespace, "is-not-defined" ): (0, 1),
-        (caldav_namespace, "text-match"     ): (0, 1),
+        (caldav_namespace, "is-not-defined"): (0, 1),
+        (caldav_namespace, "text-match"): (0, 1),
     }
-    allowed_attributes = { "name": True }
+    allowed_attributes = {"name": True}
 
 
+
 @registerElement
 class IsNotDefined (CalDAVEmptyElement):
     """
@@ -792,6 +837,7 @@
     name = "is-not-defined"
 
 
+
 @registerElement
 class TextMatch (CalDAVTextElement):
     """
@@ -800,7 +846,8 @@
     """
     name = "text-match"
 
-    def fromString(clazz, string, caseless=False): #@NoSelf
+
+    def fromString(clazz, string, caseless=False): # @NoSelf
         if caseless:
             caseless = "yes"
         else:
@@ -822,6 +869,7 @@
     }
 
 
+
 @registerElement
 class TimeZone (CalDAVTimeZoneElement):
     """
@@ -831,6 +879,7 @@
     name = "timezone"
 
 
+
 @registerElement
 class TimeRange (CalDAVTimeRangeElement):
     """
@@ -840,6 +889,7 @@
     name = "time-range"
 
 
+
 @registerElement
 class CalendarMultiGet (CalDAVElement):
     """
@@ -852,12 +902,13 @@
     # To allow for an empty element in a supported-report-set property we need
     # to relax the child restrictions
     allowed_children = {
-        (dav_namespace, "allprop" ): (0, 1),
+        (dav_namespace, "allprop"): (0, 1),
         (dav_namespace, "propname"): (0, 1),
-        (dav_namespace, "prop"    ): (0, 1),
-        (dav_namespace, "href"    ): (0, None),    # Actually ought to be (1, None)
+        (dav_namespace, "prop"): (0, 1),
+        (dav_namespace, "href"): (0, None), # Actually ought to be (1, None)
     }
 
+
     def __init__(self, *children, **attributes):
         super(CalendarMultiGet, self).__init__(*children, **attributes)
 
@@ -868,9 +919,9 @@
             qname = child.qname()
 
             if qname in (
-                (dav_namespace, "allprop" ),
+                (dav_namespace, "allprop"),
                 (dav_namespace, "propname"),
-                (dav_namespace, "prop"    ),
+                (dav_namespace, "prop"),
             ):
                 if property is not None:
                     raise ValueError("Only one of DAV:allprop, DAV:propname, DAV:prop allowed")
@@ -879,10 +930,11 @@
             elif qname == (dav_namespace, "href"):
                 resources.append(child)
 
-        self.property  = property
+        self.property = property
         self.resources = resources
 
 
+
 @registerElement
 class FreeBusyQuery (CalDAVElement):
     """
@@ -894,8 +946,9 @@
 
     # To allow for an empty element in a supported-report-set property we need
     # to relax the child restrictions
-    allowed_children = { (caldav_namespace, "time-range" ): (0, 1) } # Actually ought to be (1, 1)
+    allowed_children = {(caldav_namespace, "time-range"): (0, 1)} # Actually ought to be (1, 1)
 
+
     def __init__(self, *children, **attributes):
         super(FreeBusyQuery, self).__init__(*children, **attributes)
 
@@ -909,11 +962,12 @@
                     raise ValueError("Only one time-range element allowed in free-busy-query: %r" % (self,))
                 timerange = child
             else:
-                raise ValueError("Unknown element %r in free-busy-query: %r" % (child,self))
+                raise ValueError("Unknown element %r in free-busy-query: %r" % (child, self))
 
-        self.timerange  = timerange
+        self.timerange = timerange
 
 
+
 @registerElement
 class ReadFreeBusy(CalDAVEmptyElement):
     """
@@ -921,8 +975,9 @@
     (CalDAV-access, RFC 4791 section 6.1.1)
     """
     name = "read-free-busy"
-    
 
+
+
 @registerElement
 class NoUIDConflict(CalDAVElement):
     """
@@ -931,9 +986,10 @@
     """
     name = "no-uid-conflict"
 
-    allowed_children = { (dav_namespace, "href"): (1, 1) }
-    
+    allowed_children = {(dav_namespace, "href"): (1, 1)}
 
+
+
 @registerElement
 class SupportedFilter(CalDAVElement):
     """
@@ -944,15 +1000,17 @@
     name = "supported-filter"
 
     allowed_children = {
-        (caldav_namespace, "comp-filter" ): (0, None),
-        (caldav_namespace, "prop-filter" ): (0, None),
+        (caldav_namespace, "comp-filter"): (0, None),
+        (caldav_namespace, "prop-filter"): (0, None),
         (caldav_namespace, "param-filter"): (0, None)
     }
-    
-##
+
+#
 # CalDAV Schedule objects
-##
+#
 
+
+
 @registerElement
 class CalendarUserAddressSet (CalDAVElement):
     """
@@ -962,9 +1020,10 @@
     name = "calendar-user-address-set"
     hidden = True
 
-    allowed_children = { (dav_namespace, "href"): (0, None) }
+    allowed_children = {(dav_namespace, "href"): (0, None)}
 
 
+
 @registerElement
 class CalendarFreeBusySet (CalDAVElement):
     """
@@ -975,9 +1034,10 @@
     name = "calendar-free-busy-set"
     hidden = True
 
-    allowed_children = { (dav_namespace, "href"): (0, None) }
+    allowed_children = {(dav_namespace, "href"): (0, None)}
 
 
+
 @registerElement
 class ScheduleCalendarTransp (CalDAVElement):
     """
@@ -986,11 +1046,12 @@
     name = "schedule-calendar-transp"
 
     allowed_children = {
-        (caldav_namespace,     "opaque"      ): (0, 1),
-        (caldav_namespace,     "transparent" ): (0, 1),
+        (caldav_namespace, "opaque"): (0, 1),
+        (caldav_namespace, "transparent"): (0, 1),
     }
 
 
+
 @registerElement
 class Opaque (CalDAVEmptyElement):
     """
@@ -999,6 +1060,7 @@
     name = "opaque"
 
 
+
 @registerElement
 class Transparent (CalDAVEmptyElement):
     """
@@ -1007,6 +1069,7 @@
     name = "transparent"
 
 
+
 @registerElement
 class ScheduleDefaultCalendarURL (CalDAVElement):
     """
@@ -1014,9 +1077,10 @@
     """
     name = "schedule-default-calendar-URL"
 
-    allowed_children = { (dav_namespace, "href"): (0, 1) }
+    allowed_children = {(dav_namespace, "href"): (0, 1)}
 
 
+
 @registerElement
 class ScheduleInboxURL (CalDAVElement):
     """
@@ -1027,9 +1091,10 @@
     hidden = True
     protected = True
 
-    allowed_children = { (dav_namespace, "href"): (0, 1) }
+    allowed_children = {(dav_namespace, "href"): (0, 1)}
 
 
+
 @registerElement
 class ScheduleOutboxURL (CalDAVElement):
     """
@@ -1040,9 +1105,10 @@
     hidden = True
     protected = True
 
-    allowed_children = { (dav_namespace, "href"): (0, 1) }
+    allowed_children = {(dav_namespace, "href"): (0, 1)}
 
 
+
 @registerElement
 class Originator (CalDAVElement):
     """
@@ -1054,16 +1120,17 @@
     hidden = True
     protected = True
 
-    allowed_children = { (dav_namespace, "href"): (0, 1) } # NB Minimum is zero because this is a property name
+    allowed_children = {(dav_namespace, "href"): (0, 1)} # NB Minimum is zero because this is a property name
 
 
+
 @registerElement
 class Recipient (CalDAVElement):
     """
     A property on resources in schedule Inbox indicating the Recipients targeted
     by the SCHEDULE operation.
     (CalDAV-schedule, section x.x.x)
-    
+
     The recipient for whom this response is for.
     (CalDAV-schedule, section x.x.x)
     """
@@ -1071,9 +1138,10 @@
     hidden = True
     protected = True
 
-    allowed_children = { (dav_namespace, "href"): (0, None) } # NB Minimum is zero because this is a property name
+    allowed_children = {(dav_namespace, "href"): (0, None)} # NB Minimum is zero because this is a property name
 
 
+
 @registerElement
 class ScheduleTag (CalDAVTextElement):
     """
@@ -1085,6 +1153,7 @@
     protected = True
 
 
+
 @registerElement
 class ScheduleInbox (CalDAVEmptyElement):
     """
@@ -1094,6 +1163,7 @@
     name = "schedule-inbox"
 
 
+
 @registerElement
 class ScheduleOutbox (CalDAVEmptyElement):
     """
@@ -1103,6 +1173,7 @@
     name = "schedule-outbox"
 
 
+
 @registerElement
 class ScheduleResponse (CalDAVElement):
     """
@@ -1111,9 +1182,10 @@
     """
     name = "schedule-response"
 
-    allowed_children = { (caldav_namespace, "response"): (0, None) }
+    allowed_children = {(caldav_namespace, "response"): (0, None)}
 
 
+
 @registerElement
 class Response (CalDAVElement):
     """
@@ -1123,14 +1195,15 @@
     name = "response"
 
     allowed_children = {
-        (caldav_namespace, "recipient"          ): (1, 1),
-        (caldav_namespace, "request-status"     ): (1, 1),
-        (caldav_namespace, "calendar-data"      ): (0, 1),
-        (dav_namespace,    "error"              ): (0, 1),  # 2518bis
-        (dav_namespace,    "responsedescription"): (0, 1)
+        (caldav_namespace, "recipient"): (1, 1),
+        (caldav_namespace, "request-status"): (1, 1),
+        (caldav_namespace, "calendar-data"): (0, 1),
+        (dav_namespace, "error"): (0, 1), # 2518bis
+        (dav_namespace, "responsedescription"): (0, 1)
     }
 
 
+
 @registerElement
 class RequestStatus (CalDAVTextElement):
     """
@@ -1140,6 +1213,7 @@
     name = "request-status"
 
 
+
 @registerElement
 class Schedule (CalDAVEmptyElement):
     """
@@ -1147,8 +1221,9 @@
     (CalDAV-schedule, section x.x.x)
     """
     name = "schedule"
-    
 
+
+
 @registerElement
 class ScheduleDeliver (CalDAVEmptyElement):
     """
@@ -1156,8 +1231,9 @@
     (CalDAV-schedule, section x.x.x)
     """
     name = "schedule-deliver"
-    
 
+
+
 @registerElement
 class ScheduleSend (CalDAVEmptyElement):
     """
@@ -1165,8 +1241,9 @@
     (CalDAV-schedule, section x.x.x)
     """
     name = "schedule-send"
-    
 
+
+
 @registerElement
 class CalendarUserType (CalDAVTextElement):
     """
@@ -1176,9 +1253,9 @@
     protected = True
 
 
-##
+#
 # draft-daboo-valarm-extensions
-##
+#
 
 caldav_default_alarms_compliance = (
     "calendar-default-alarms",
@@ -1192,6 +1269,7 @@
 
     calendartxt = None
 
+
     def calendar(self):
         """
         Returns a calendar component derived from this element, which contains
@@ -1200,13 +1278,14 @@
         valarm = str(self)
         return iComponent.fromString(self.calendartxt % str(self)) if valarm else None
 
+
     def valid(self):
         """
         Determine whether the content of this element is a valid single VALARM component or empty.
-        
+
         @return: True if valid, False if not.
         """
-        
+
         if str(self):
             try:
                 calendar = self.calendar()
@@ -1214,7 +1293,7 @@
                     return False
             except ValueError:
                 return False
-        
+
             # Make sure there is one alarm component
             try:
                 valarm = tuple(tuple(calendar.subcomponents())[0].subcomponents())[0]
@@ -1222,10 +1301,11 @@
                 return False
             if valarm.name().upper() != "VALARM":
                 return False
-        
+
         return True
 
 
+
 @registerElement
 class DefaultAlarmVEventDateTime (DefaultAlarmBase):
     name = "default-alarm-vevent-datetime"
@@ -1242,8 +1322,9 @@
 %sEND:VEVENT
 END:VCALENDAR
 """
-    
 
+
+
 @registerElement
 class DefaultAlarmVEventDate (DefaultAlarmBase):
     name = "default-alarm-vevent-date"
@@ -1260,8 +1341,9 @@
 %sEND:VEVENT
 END:VCALENDAR
 """
-    
 
+
+
 @registerElement
 class DefaultAlarmVToDoDateTime (DefaultAlarmBase):
     name = "default-alarm-vtodo-datetime"
@@ -1279,6 +1361,7 @@
 """
 
 
+
 @registerElement
 class DefaultAlarmVToDoDate (DefaultAlarmBase):
     name = "default-alarm-vtodo-date"
@@ -1296,13 +1379,37 @@
 """
 
 
-##
+
+#
+# draft-daboo-caldav-attachments
+#
+
+caldav_managed_attachments_compliance = (
+    "calendar-managed-attachments",
+)
+
+
+
+ at registerElement
+class ManagedAttachmentsServerURL (CalDAVElement):
+    """
+    Zero or one href elements defining the base scheme/host for attachments.
+    """
+    name = "managed-attachments-server-URL"
+
+    allowed_children = {(dav_namespace, "href"): (0, 1)}
+
+
+
+#
 # Extensions to ResourceType
-##
+#
 
-def _isCalendar(self): return bool(self.childrenOfType(Calendar))
+def _isCalendar(self):
+    return bool(self.childrenOfType(Calendar))
+
 ResourceType.isCalendar = _isCalendar
 
-ResourceType.calendar       = ResourceType(Collection(), Calendar())
-ResourceType.scheduleInbox  = ResourceType(Collection(), ScheduleInbox())
+ResourceType.calendar = ResourceType(Collection(), Calendar())
+ResourceType.scheduleInbox = ResourceType(Collection(), ScheduleInbox())
 ResourceType.scheduleOutbox = ResourceType(Collection(), ScheduleOutbox())

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/ical.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/ical.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/ical.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -172,7 +172,8 @@
             self._pycalendar = pyobj
         else:
             # Convert params dictionary to list of lists format used by pycalendar
-            self._pycalendar = PyCalendarProperty(name, value)
+            valuetype = kwargs.get("valuetype")
+            self._pycalendar = PyCalendarProperty(name, value, valuetype=valuetype)
             for attrname, attrvalue in params.items():
                 self._pycalendar.addAttribute(PyCalendarAttribute(attrname, attrvalue))
 

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/method/post.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/method/post.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/method/post.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -1,4 +1,4 @@
-##
+# #
 # Copyright (c) 2005-2012 Apple Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-##
+# #
 
 from hashlib import md5
 
@@ -44,7 +44,7 @@
 def http_POST(self, request):
 
     # POST can support many different APIs
-    
+
     # First look at query params
     if request.params:
         if request.params == "add-member":
@@ -52,22 +52,33 @@
                 result = (yield POST_handler_add_member(self, request))
                 returnValue(result)
 
-    else:
-        # Content-type handlers
-        contentType = request.headers.getHeader("content-type")
-        if contentType:
-            if hasattr(self, "POST_handler_content_type"):
-                result = (yield self.POST_handler_content_type(request, (contentType.mediaType, contentType.mediaSubtype)))
+    # Look for query arguments
+    if request.args:
+        action = request.args.get("action", ("",))
+        if len(action) == 1:
+            action = action[0]
+            if action in ("attachment-add", "attachment-update", "attachment-remove") and \
+                hasattr(self, "POST_handler_attachment"):
+                result = (yield self.POST_handler_attachment(request, action))
                 returnValue(result)
 
+    # Content-type handlers
+    contentType = request.headers.getHeader("content-type")
+    if contentType:
+        if hasattr(self, "POST_handler_content_type"):
+            result = (yield self.POST_handler_content_type(request, (contentType.mediaType, contentType.mediaSubtype)))
+            returnValue(result)
+
     returnValue(responsecode.FORBIDDEN)
 
+
+
 @inlineCallbacks
 def POST_handler_add_member(self, request):
 
     # Handle ;add-member
     if self.isCalendarCollection():
-        
+
         parentURL = request.path
         parent = self
 
@@ -80,7 +91,7 @@
                 (caldav_namespace, "supported-calendar-data"),
                 "Wrong MIME type for calendar collection",
             ))
-            
+
         # Read the calendar component from the stream
         try:
             calendardata = (yield allDataFromStream(request.stream))
@@ -98,19 +109,19 @@
                 ))
 
             # Create a new name if one was not provided
-            name =  md5(str(calendardata) + str(time.time()) + request.path).hexdigest() + ".ics"
-        
+            name = md5(str(calendardata) + str(time.time()) + request.path).hexdigest() + ".ics"
+
             # Get a resource for the new item
             newchildURL = joinURL(parentURL, name)
             newchild = (yield request.locateResource(newchildURL))
 
             storer = StoreCalendarObjectResource(
-                request = request,
-                destination = newchild,
-                destination_uri = newchildURL,
-                destinationcal = True,
-                destinationparent = parent,
-                calendar = calendardata,
+                request=request,
+                destination=newchild,
+                destination_uri=newchildURL,
+                destinationcal=True,
+                destinationparent=parent,
+                calendar=calendardata,
             )
             result = (yield storer.run())
 
@@ -134,7 +145,7 @@
             raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, str(e)))
 
     elif self.isAddressBookCollection():
-        
+
         parentURL = request.path
         parent = self
 
@@ -147,7 +158,7 @@
                 (carddav_namespace, "supported-address-data"),
                 "Wrong MIME type for address book collection",
             ))
-            
+
         # Read the calendar component from the stream
         try:
             vcarddata = (yield allDataFromStream(request.stream))
@@ -165,20 +176,20 @@
                 ))
 
             # Create a new name if one was not provided
-            name =  md5(str(vcarddata) + str(time.time()) + request.path).hexdigest() + ".vcf"
-        
+            name = md5(str(vcarddata) + str(time.time()) + request.path).hexdigest() + ".vcf"
+
             # Get a resource for the new item
             newchildURL = joinURL(parentURL, name)
             newchild = (yield request.locateResource(newchildURL))
 
             storer = StoreAddressObjectResource(
-                request = request,
-                sourceadbk = False,
-                destination = newchild,
-                destination_uri = newchildURL,
-                destinationadbk = True,
-                destinationparent = parent,
-                vcard = vcarddata,
+                request=request,
+                sourceadbk=False,
+                destination=newchild,
+                destination_uri=newchildURL,
+                destinationadbk=True,
+                destinationparent=parent,
+                vcard=vcarddata,
             )
             result = (yield storer.run())
 
@@ -203,4 +214,3 @@
 
     # Default behavior
     returnValue(responsecode.FORBIDDEN)
-

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/resource.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/resource.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/resource.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -2551,6 +2551,12 @@
             (customxml.calendarserver_namespace, "xmpp-heartbeat-uri"),
             (customxml.calendarserver_namespace, "xmpp-server"),
         )
+
+        if config.EnableManagedAttachments:
+            existing += (
+                caldavxml.ManagedAttachmentsServerURL.qname(),
+            )
+
         return existing
 
 
@@ -2579,6 +2585,14 @@
                 prop = caldavxml.SupportedCalendarComponentSets()
             returnValue(prop)
 
+        elif qname == caldavxml.ManagedAttachmentsServerURL.qname():
+            if config.EnableManagedAttachments:
+                # The HRef is empty - this will force the client to treat all managed attachment URLs
+                # as relative to this server scheme/host.
+                returnValue(caldavxml.ManagedAttachmentsServerURL(element.HRef.fromString("")))
+            else:
+                returnValue(None)
+
         result = (yield super(CalendarHomeResource, self).readProperty(property, request))
         returnValue(result)
 
@@ -2596,6 +2610,10 @@
             from twistedcaldav.storebridge import DropboxCollection
             self._provisionedChildren["dropbox"] = DropboxCollection
 
+        if config.EnableManagedAttachments:
+            from twistedcaldav.storebridge import AttachmentsCollection
+            self._provisionedChildren["attachments"] = AttachmentsCollection
+
         if config.FreeBusyURL.Enabled:
             from twistedcaldav.freebusyurl import FreeBusyURLResource
             self._provisionedChildren["freebusy"] = FreeBusyURLResource

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/stdconfig.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/stdconfig.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/stdconfig.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -283,6 +283,8 @@
                            # configuration key.  Will support more values in
                            # the future.
 
+    "SpawnedDBUser" : "caldav", # The username to use when DBType is empty
+
     "DSN"          : "", # Data Source Name.  Used to connect to an external
                            # database if DBType is non-empty.  Format varies
                            # depending on database type.
@@ -516,6 +518,8 @@
     "EnableWellKnown"             : True, # /.well-known resource
     "EnableCalendarQueryExtended" : True, # Extended calendar-query REPORT
 
+    "EnableManagedAttachments"    : True, # Support Managed Attachments
+
     #
     # Non-standard CalDAV extensions
     #
@@ -1516,6 +1520,8 @@
             compliance += caldavxml.caldav_query_extended_compliance
         if configDict.EnableDefaultAlarms:
             compliance += caldavxml.caldav_default_alarms_compliance
+        if configDict.EnableManagedAttachments:
+            compliance += caldavxml.caldav_managed_attachments_compliance
     else:
         compliance = ()
 

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/storebridge.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/storebridge.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/twistedcaldav/storebridge.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -30,7 +30,7 @@
 from txdav.xml import element as davxml
 from txdav.xml.base import dav_namespace, WebDAVUnknownElement, encodeXMLName
 from txdav.base.propertystore.base import PropertyName
-from txdav.caldav.icalendarstore import QuotaExceeded
+from txdav.caldav.icalendarstore import QuotaExceeded, AttachmentStoreFailed
 from txdav.common.icommondatastore import NoSuchObjectResourceError
 from txdav.common.datastore.sql_tables import _BIND_MODE_READ, _BIND_MODE_WRITE
 from txdav.idav import PropertyChangeNotAllowedError
@@ -38,7 +38,7 @@
 from twext.web2 import responsecode
 from twext.web2.stream import ProducerStream, readStream, MemoryStream
 from twext.web2.http import HTTPError, StatusResponse, Response
-from twext.web2.http_headers import ETag, MimeType
+from twext.web2.http_headers import ETag, MimeType, MimeDisposition
 from twext.web2.dav.http import ErrorResponse, ResponseQueue, MultiStatusResponse
 from twext.web2.dav.noneprops import NonePropertyStore
 from twext.web2.dav.resource import TwistedACLInheritable, AccessDeniedError
@@ -67,6 +67,8 @@
 from twistedcaldav.scheduling.caldav.resource import ScheduleInboxResource
 from twistedcaldav.scheduling.implicit import ImplicitScheduler
 from twistedcaldav.vcard import Component as VCard, InvalidVCardDataError
+from pycalendar.datetime import PyCalendarDateTime
+import uuid
 
 """
 Wrappers to translate between the APIs in L{txdav.caldav.icalendarstore} and
@@ -108,13 +110,13 @@
             ))
 
 
-    def set(self, property):
+    def set(self, prop):
         try:
-            self._newPropertyStore[self._convertKey(property.qname())] = property
+            self._newPropertyStore[self._convertKey(prop.qname())] = prop
         except PropertyChangeNotAllowedError:
             raise HTTPError(StatusResponse(
                 FORBIDDEN,
-                "Property cannot be changed: %s" % (property.sname(),)
+                "Property cannot be changed: %s" % (prop.sname(),)
             ))
 
 
@@ -244,16 +246,16 @@
 
 
     @inlineCallbacks
-    def readProperty(self, property, request):
-        if type(property) is tuple:
-            qname = property
+    def readProperty(self, prop, request):
+        if type(prop) is tuple:
+            qname = prop
         else:
-            qname = property.qname()
+            qname = prop.qname()
 
         if qname == customxml.MaxResources.qname() and config.MaxResourcesPerCollection:
             returnValue(customxml.MaxResources.fromString(config.MaxResourcesPerCollection))
 
-        returnValue((yield super(_CommonHomeChildCollectionMixin, self).readProperty(property, request)))
+        returnValue((yield super(_CommonHomeChildCollectionMixin, self).readProperty(prop, request)))
 
 
     def url(self):
@@ -531,7 +533,7 @@
 
 
     @inlineCallbacks
-    def _readGlobalProperty(self, qname, property, request):
+    def _readGlobalProperty(self, qname, prop, request):
 
         if config.EnableBatchUpload and qname == customxml.BulkRequests.qname():
             returnValue(customxml.BulkRequests(
@@ -545,7 +547,7 @@
                 ),
             ))
         else:
-            result = (yield super(_CommonHomeChildCollectionMixin, self)._readGlobalProperty(qname, property, request))
+            result = (yield super(_CommonHomeChildCollectionMixin, self)._readGlobalProperty(qname, prop, request))
             returnValue(result)
 
 
@@ -692,9 +694,9 @@
 
             # Determine the multiput operation: create, update, delete
             href = xmlchild.childOfType(davxml.HRef.qname())
-            set = xmlchild.childOfType(davxml.Set.qname())
-            prop = set.childOfType(davxml.PropertyContainer.qname()) if set is not None else None
-            xmldata_root = prop if prop else set
+            set_items = xmlchild.childOfType(davxml.Set.qname())
+            prop = set_items.childOfType(davxml.PropertyContainer.qname()) if set_items is not None else None
+            xmldata_root = prop if prop else set_items
             xmldata = xmldata_root.childOfType(self.xmlDataElementType().qname()) if xmldata_root is not None else None
             if href is None:
 
@@ -718,10 +720,10 @@
                 if ifmatch:
                     ifmatch = str(ifmatch.children[0]) if len(ifmatch.children) == 1 else None
                 if delete is None:
-                    if set is None:
-                        raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "Could not parse valid data from request body - no set of delete operation"))
+                    if set_items is None:
+                        raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "Could not parse valid data from request body - no set_items of delete operation"))
                     if xmldata is None:
-                        raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "Could not parse valid data from request body for set operation"))
+                        raise HTTPError(StatusResponse(responsecode.BAD_REQUEST, "Could not parse valid data from request body for set_items operation"))
                     yield self.crudUpdate(request, str(href), xmldata, ifmatch, return_changed, xmlresponses)
                     updateCount += 1
                 else:
@@ -1062,7 +1064,7 @@
         isowner = (yield self.isOwner(request))
         accessPrincipal = (yield self.resourceOwnerPrincipal(request))
 
-        for name, uid, type in (yield maybeDeferred(self.index().bruteForceSearch)):  # @UnusedVariable
+        for name, _ignore_uid, _ignore_type in (yield maybeDeferred(self.index().bruteForceSearch)):
             try:
                 child = yield request.locateChildResource(self, name)
             except TypeError:
@@ -1314,15 +1316,15 @@
         return None
 
 
-    def readProperty(self, property, request):
-        if type(property) is tuple:
-            qname = property
+    def readProperty(self, prop, request):
+        if type(prop) is tuple:
+            qname = prop
         else:
-            qname = property.qname()
+            qname = prop.qname()
 
         if qname == (dav_namespace, "resourcetype"):
             return succeed(self.resourceType())
-        return super(_GetChildHelper, self).readProperty(property, request)
+        return super(_GetChildHelper, self).readProperty(prop, request)
 
 
     def davComplianceClasses(self):
@@ -1370,7 +1372,7 @@
 
 
     def resourceType(self,):
-        return davxml.ResourceType.dropboxhome  # @UndefinedVariable
+        return davxml.ResourceType.dropboxhome # @UndefinedVariable
 
 
     def listChildren(self):
@@ -1418,7 +1420,7 @@
 
 
     def resourceType(self):
-        return davxml.ResourceType.dropbox  # @UndefinedVariable
+        return davxml.ResourceType.dropbox # @UndefinedVariable
 
 
     @inlineCallbacks
@@ -1619,11 +1621,54 @@
 
 
 
+class AttachmentsCollection(_GetChildHelper):
+    """
+    A collection of all managed attachments, presented as a
+    resource under the user's calendar home.
+    """
+    # FIXME: no direct tests for this class at all.
+
+    def __init__(self, parent, *a, **kw):
+        kw.update(principalCollections=parent.principalCollections())
+        super(AttachmentsCollection, self).__init__(*a, **kw)
+        self._newStoreHome = parent._newStoreHome
+        parent.propagateTransaction(self)
+
+
+    def isCollection(self):
+        """
+        It is a collection.
+        """
+        return True
+
+
+    @inlineCallbacks
+    def getChild(self, name):
+        attachmentObject = yield self._newStoreHome.attachmentObjectWithName(name)
+        result = CalendarAttachment(
+            None,
+            attachmentObject,
+            name,
+            principalCollections=self.principalCollections()
+        )
+        self.propagateTransaction(result)
+        returnValue(result)
+
+
+    def resourceType(self,):
+        return davxml.ResourceType.collection # @UndefinedVariable
+
+
+    def listChildren(self):
+        return self._newStoreHome.getAllAttachmentNames()
+
+
+
 class CalendarAttachment(_NewStoreFileMetaDataHelper, _GetChildHelper):
 
     def __init__(self, calendarObject, attachment, attachmentName, **kw):
         super(CalendarAttachment, self).__init__(**kw)
-        self._newStoreCalendarObject = calendarObject
+        self._newStoreCalendarObject = calendarObject # This can be None for a managed attachment
         self._newStoreAttachment = self._newStoreObject = attachment
         self._dead_properties = NonePropertyStore(self)
         self.attachmentName = attachmentName
@@ -1633,12 +1678,24 @@
         return None
 
 
+    def displayName(self):
+        if self._newStoreObject is not None:
+            dispositionName = self._newStoreObject.dispositionName()
+            return dispositionName if dispositionName else self.name()
+        else:
+            return self._name
+
+
     @requiresPermissions(davxml.WriteContent())
     @inlineCallbacks
     def http_PUT(self, request):
         # FIXME: direct test
         # FIXME: CDT test to make sure that permissions are enforced.
 
+        # Cannot PUT to a managed attachment
+        if self._newStoreAttachment.isManaged():
+            raise HTTPError(FORBIDDEN)
+
         content_type = request.headers.getHeader("content-type")
         if content_type is None:
             content_type = MimeType("application", "octet-stream")
@@ -1685,12 +1742,19 @@
         except IOError, e:
             log.error("Unable to read attachment: %s, due to: %s" % (self, e,))
             raise HTTPError(responsecode.NOT_FOUND)
-        return Response(OK, {"content-type": self.contentType()}, stream)
 
+        headers = {"content-type": self.contentType()}
+        headers["content-disposition"] = MimeDisposition("attachment", params={"filename": self.displayName()})
+        return Response(OK, headers, stream)
 
+
     @requiresPermissions(fromParent=[davxml.Unbind()])
     @inlineCallbacks
     def http_DELETE(self, request):
+        # Cannot DELETE a managed attachment
+        if self._newStoreAttachment.isManaged():
+            raise HTTPError(FORBIDDEN)
+
         if not self.exists():
             log.debug("Resource not found: %s" % (self,))
             raise HTTPError(responsecode.NOT_FOUND)
@@ -1777,7 +1841,7 @@
 
         output = yield self.component()
 
-        response = Response(200, {}, str(output))
+        response = Response(responsecode.OK, {}, str(output))
         response.headers.setHeader("content-type", self.contentType())
         returnValue(response)
 
@@ -1884,7 +1948,7 @@
         self.name = name
 
 
-    def __get__(self, oself, type=None):
+    def __get__(self, oself, ptype=None):
         if oself._newStoreObject:
             return getattr(oself._newStoreObject, self.name)
         else:
@@ -2062,7 +2126,94 @@
         returnValue(NO_CONTENT)
 
 
+    @inlineCallbacks
+    def POST_handler_attachment(self, request, action):
+        """
+        Handle a managed attachments request on the calendar object resource.
 
+        @param request: HTTP request object
+        @type request: L{Request}
+        @param action: The request-URI 'action' argument
+        @type action: C{str}
+
+        @return: an HTTP response
+        """
+
+        # Resource must exist to allow attachment operations
+        if not self.exists():
+            raise HTTPError(responsecode.NOT_FOUND)
+
+        def _getRIDs():
+            rids = request.args.get("rid")
+            if rids is not None:
+                rids = rids.split(",")
+                try:
+                    rids = [PyCalendarDateTime.parseText(rid) if rid != "M" else None for rid in rids]
+                except ValueError:
+                    raise HTTPError(ErrorResponse(
+                        FORBIDDEN,
+                        (caldav_namespace, "valid-rid-parameter",),
+                        "The rid parameter in the request-URI contains an invalid value",
+                    ))
+            return rids
+
+        def _getContentInfo():
+            content_type = request.headers.getHeader("content-type")
+            if content_type is None:
+                content_type = MimeType("application", "octet-stream")
+            content_disposition = request.headers.getHeader("content-disposition")
+            if content_disposition is None or "filename" not in content_disposition.params:
+                filename = str(uuid.uuid4())
+            else:
+                filename = content_disposition.params["filename"]
+            return content_type, filename
+
+        # Dispatch to store object
+        if action == "attachment-add":
+            rids = _getRIDs()
+            content_type, filename = _getContentInfo()
+            uri = "https://caldav.corp.apple.com:8443/calendars/__uids__/%s/attachments/%s"
+            try:
+                attachment, location = (yield self._newStoreObject.addAttachment(uri, rids, content_type, filename, request.stream))
+            except AttachmentStoreFailed:
+                raise HTTPError(ErrorResponse(
+                    FORBIDDEN,
+                    (caldav_namespace, "valid-attachment-add",),
+                    "Could not store the supplied attachment",
+                ))
+            except QuotaExceeded:
+                raise HTTPError(ErrorResponse(
+                    INSUFFICIENT_STORAGE_SPACE,
+                    (dav_namespace, "quota-not-exceeded"),
+                    "Could not store the supplied attachment because user quota would be exceeded",
+                ))
+
+            # Look for Prefer header
+            if "return-representation" in request.headers.getHeader("prefer", {}):
+                result = (yield self.render(request))
+                result.code = responsecode.OK
+                result.headers.setHeader("content-location", request.path)
+            else:
+                result = Response(CREATED)
+                result.headers.setHeader("location", location)
+            result.headers.addRawHeader("Cal-Managed-ID", attachment.dropboxID())
+            returnValue(result)
+
+        elif action == "attachment-update":
+            pass
+
+        elif action == "attachment-remove":
+            pass
+
+        else:
+            raise HTTPError(ErrorResponse(
+                FORBIDDEN,
+                (caldav_namespace, "valid-action-parameter",),
+                "The action parameter in the request-URI is not valid",
+            ))
+
+
+
 class AddressBookCollectionResource(_CommonHomeChildCollectionMixin, CalDAVResource):
     """
     Wrapper around a L{txdav.carddav.iaddressbook.IAddressBook}.
@@ -2398,16 +2549,16 @@
 
 
     @inlineCallbacks
-    def readProperty(self, property, request):
-        if type(property) is tuple:
-            qname = property
+    def readProperty(self, prop, request):
+        if type(prop) is tuple:
+            qname = prop
         else:
-            qname = property.qname()
+            qname = prop.qname()
 
         if qname == customxml.NotificationType.qname():
             returnValue(self._newStoreObject.xmlType())
 
-        returnValue((yield super(StoreNotificationObjectFile, self).readProperty(property, request)))
+        returnValue((yield super(StoreNotificationObjectFile, self).readProperty(prop, request)))
 
 
     def isCollection(self):

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/datastore/sql.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/datastore/sql.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/datastore/sql.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -1,5 +1,5 @@
 # -*- test-case-name: txdav.caldav.datastore.test.test_sql -*-
-##
+# #
 # Copyright (c) 2010-2012 Apple Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +13,9 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-##
+# #
+from twext.web2.stream import readStream
+from pycalendar.value import PyCalendarValue
 
 """
 SQL backend for CalDAV storage.
@@ -37,27 +39,28 @@
 from twistedcaldav import caldavxml, customxml
 from twistedcaldav.caldavxml import ScheduleCalendarTransp, Opaque
 from twistedcaldav.config import config
-from twistedcaldav.dateops import normalizeForIndex, datetimeMktime,\
+from twistedcaldav.dateops import normalizeForIndex, datetimeMktime, \
     parseSQLTimestamp, pyCalendarTodatetime, parseSQLDateToPyCalendar
-from twistedcaldav.ical import Component, InvalidICalendarDataError
+from twistedcaldav.ical import Component, InvalidICalendarDataError, Property
 from twistedcaldav.instance import InvalidOverriddenInstanceError
 from twistedcaldav.memcacher import Memcacher
 
 from txdav.base.propertystore.base import PropertyName
-from txdav.caldav.datastore.util import validateCalendarComponent,\
+from txdav.caldav.datastore.util import validateCalendarComponent, \
     dropboxIDFromCalendarObject
-from txdav.caldav.icalendarstore import ICalendarHome, ICalendar, ICalendarObject,\
-    IAttachment
-from txdav.common.datastore.sql import CommonHome, CommonHomeChild,\
+from txdav.caldav.icalendarstore import ICalendarHome, ICalendar, ICalendarObject, \
+    IAttachment, AttachmentStoreFailed
+from txdav.common.datastore.sql import CommonHome, CommonHomeChild, \
     CommonObjectResource, ECALENDARTYPE
-from txdav.common.datastore.sql_legacy import PostgresLegacyIndexEmulator,\
+from txdav.common.datastore.sql_legacy import PostgresLegacyIndexEmulator, \
     PostgresLegacyInboxIndexEmulator
-from txdav.common.datastore.sql_tables import CALENDAR_TABLE,\
-    CALENDAR_BIND_TABLE, CALENDAR_OBJECT_REVISIONS_TABLE, CALENDAR_OBJECT_TABLE,\
-    _ATTACHMENTS_MODE_NONE, _ATTACHMENTS_MODE_READ, _ATTACHMENTS_MODE_WRITE,\
-    CALENDAR_HOME_TABLE, CALENDAR_HOME_METADATA_TABLE,\
-    CALENDAR_AND_CALENDAR_BIND, CALENDAR_OBJECT_REVISIONS_AND_BIND_TABLE,\
-    CALENDAR_OBJECT_AND_BIND_TABLE, _BIND_STATUS_INVITED, schema
+from txdav.common.datastore.sql_tables import CALENDAR_TABLE, \
+    CALENDAR_BIND_TABLE, CALENDAR_OBJECT_REVISIONS_TABLE, CALENDAR_OBJECT_TABLE, \
+    _ATTACHMENTS_MODE_NONE, _ATTACHMENTS_MODE_READ, _ATTACHMENTS_MODE_WRITE, \
+    CALENDAR_HOME_TABLE, CALENDAR_HOME_METADATA_TABLE, \
+    CALENDAR_AND_CALENDAR_BIND, CALENDAR_OBJECT_REVISIONS_AND_BIND_TABLE, \
+    CALENDAR_OBJECT_AND_BIND_TABLE, _BIND_STATUS_INVITED, schema, \
+    _ATTACHMENT_STATUS_DROPBOX, _ATTACHMENT_STATUS_MANAGED
 from twext.enterprise.dal.syntax import Select, Count, ColumnSyntax
 from twext.enterprise.dal.syntax import Insert
 from twext.enterprise.dal.syntax import Update
@@ -70,8 +73,8 @@
 from txdav.caldav.icalendarstore import QuotaExceeded
 
 from txdav.caldav.datastore.util import StorageTransportBase
-from txdav.common.icommondatastore import IndexedSearchException,\
-    InternalDataStoreError, HomeChildNameAlreadyExistsError,\
+from txdav.common.icommondatastore import IndexedSearchException, \
+    InternalDataStoreError, HomeChildNameAlreadyExistsError, \
     HomeChildNameNotAllowedError
 
 from pycalendar.datetime import PyCalendarDateTime
@@ -115,7 +118,6 @@
         self._childClass = Calendar
         super(CalendarHome, self).__init__(transaction, ownerUID, notifiers)
 
-
     createCalendarWithName = CommonHome.createChildWithName
     removeCalendarWithName = CommonHome.removeChildWithName
     calendarWithName = CommonHome.childWithName
@@ -133,14 +135,14 @@
 
         # delete attachments corresponding to this home, also removing from disk
         rows = (yield Select(
-            [at.DROPBOX_ID, at.PATH, ],
+            [at.STATUS, at.DROPBOX_ID, at.PATH, ],
             From=at,
             Where=(
                 at.CALENDAR_HOME_RESOURCE_ID == self._resourceID
             ),
         ).on(self._txn))
-        for dropboxID, path in rows:
-            attachment = Attachment._attachmentPathRoot(self._txn, dropboxID).child(path)
+        for status, dropboxID, path in rows:
+            attachment = Attachment._attachmentPathRoot(self._txn, status, dropboxID).child(path)
             if attachment.exists():
                 yield attachment.remove()
 
@@ -175,7 +177,7 @@
 
 
     @inlineCallbacks
-    def hasCalendarResourceUIDSomewhereElse(self, uid, ok_object, type):
+    def hasCalendarResourceUIDSomewhereElse(self, uid, ok_object, mode):
         """
         Determine if this calendar home contains any calendar objects which
         would potentially conflict with the given UID for scheduling purposes.
@@ -188,7 +190,7 @@
             being updated).  May be C{None} if all objects potentially count.
         @type ok_object: L{CalendarObject} or C{NoneType}
 
-        @param type: a string, indicating the mode to check for conflicts.  If
+        @param mode: a string, indicating the mode to check for conflicts.  If
             this is the string "schedule", then we are checking for potential
             conflicts with a new scheduled calendar object, which will conflict
             with any calendar object matching the given C{uid} in the home.
@@ -210,9 +212,9 @@
         for objectResource in objectResources:
             if ok_object and objectResource._resourceID == ok_object._resourceID:
                 continue
-            matched_type = ("schedule" if objectResource.isScheduleObject
+            matched_mode = ("schedule" if objectResource.isScheduleObject
                             else "calendar")
-            if type == "schedule" or matched_type == "schedule":
+            if mode == "schedule" or matched_mode == "schedule":
                 returnValue(True)
 
         returnValue(False)
@@ -270,42 +272,62 @@
 
 
     @inlineCallbacks
+    def getAllAttachmentNames(self):
+        att = schema.ATTACHMENT
+        rows = (yield Select(
+            [att.DROPBOX_ID],
+            From=att,
+            Where=(att.CALENDAR_HOME_RESOURCE_ID == self._resourceID),
+            OrderBy=att.DROPBOX_ID
+        ).on(self._txn))
+        returnValue([row[0] for row in rows])
+
+
+    @inlineCallbacks
+    def attachmentObjectWithName(self, name):
+        attach = (yield Attachment.loadWithName(self._txn, name))
+        returnValue(attach)
+
+
+    @inlineCallbacks
     def createdHome(self):
-        
+
         # Default calendar
         defaultCal = yield self.createCalendarWithName("calendar")
         props = defaultCal.properties()
         props[PropertyName(*ScheduleCalendarTransp.qname())] = ScheduleCalendarTransp(Opaque())
-        
+
         # Check whether components type must be separate
         if config.RestrictCalendarsToOneComponentType:
             yield defaultCal.setSupportedComponents("VEVENT")
-            
+
             # Default tasks
             defaultTasks = yield self.createCalendarWithName("tasks")
             yield defaultTasks.setSupportedComponents("VTODO")
-            
+
         yield self.createCalendarWithName("inbox")
 
+
     @inlineCallbacks
     def splitCalendars(self):
         """
         Split all regular calendars by component type
         """
-        
+
         # Make sure the loop does not operate on any new calendars created during the loop
         self.log_warn("Splitting calendars for user %s" % (self._ownerUID,))
         calendars = yield self.calendars()
         for calendar in calendars:
-            
-            # Ignore inbox - also shared calendars are not part of .calendars() 
+
+            # Ignore inbox - also shared calendars are not part of .calendars()
             if calendar.name() == "inbox":
                 continue
             split_count = yield calendar.splitCollectionByComponentTypes()
-            self.log_warn("  Calendar: '%s', split into %d" % (calendar.name(), split_count+1,))
+            self.log_warn("  Calendar: '%s', split into %d" % (calendar.name(), split_count + 1,))
 
         yield self.ensureDefaultCalendarsExist()
 
+
     @inlineCallbacks
     def ensureDefaultCalendarsExist(self):
         """
@@ -339,7 +361,7 @@
 
 
     @classproperty
-    def _unacceptedSharesQuery(cls): #@NoSelf
+    def _unacceptedSharesQuery(cls): # @NoSelf
         cb = schema.CALENDAR_BIND
         return Select([cb.CALENDAR_RESOURCE_NAME],
             From=cb,
@@ -356,7 +378,7 @@
         cb = schema.CALENDAR_BIND
         rows = yield self._unacceptedSharesQuery.on(self._txn, homeResourceID=self._resourceID)
         for (resourceName,) in rows:
-            kwds = { "ResourceName" : resourceName }
+            kwds = {"ResourceName" : resourceName}
             yield Delete(
                 From=inv,
                 Where=(
@@ -378,7 +400,7 @@
         Remove all remaining invite entries for this home.
         """
         inv = schema.INVITE
-        kwds = { "HomeResourceID" : self._resourceID }
+        kwds = {"HomeResourceID" : self._resourceID}
         yield Delete(
             From=inv,
             Where=(inv.HOME_RESOURCE_ID == Parameter("HomeResourceID"))
@@ -431,15 +453,16 @@
         different child classes to have their own type specific data, but still make use of the
         common base logic.
         """
-        
+
         # Common behavior is to have created and modified
-        
+
         return (
             cls._homeChildMetaDataSchema.CREATED,
             cls._homeChildMetaDataSchema.MODIFIED,
             cls._homeChildMetaDataSchema.SUPPORTED_COMPONENTS,
         )
-        
+
+
     @classmethod
     def metadataAttributes(cls):
         """
@@ -447,15 +470,16 @@
         different child classes to have their own type specific data, but still make use of the
         common base logic.
         """
-        
+
         # Common behavior is to have created and modified
-        
+
         return (
             "_created",
             "_modified",
             "_supportedComponents",
         )
-        
+
+
     @property
     def _calendarHome(self):
         return self._home
@@ -463,9 +487,8 @@
 
     # FIXME: resource type is DAV.  This doesn't belong in the data store.  -wsv
     def resourceType(self):
-        return ResourceType.calendar #@UndefinedVariable
+        return ResourceType.calendar # @UndefinedVariable
 
-
     ownerCalendarHome = CommonHomeChild.ownerHome
     viewerCalendarHome = CommonHomeChild.viewerHome
     calendarObjects = CommonHomeChild.objectResources
@@ -511,15 +534,18 @@
             cacheKey = queryCacher.keyForHomeChildMetaData(self._resourceID)
             yield queryCacher.invalidateAfterCommit(self._txn, cacheKey)
 
+
     def getSupportedComponents(self):
         return self._supportedComponents
 
+
     def isSupportedComponent(self, componentType):
         if self._supportedComponents:
             return componentType.upper() in self._supportedComponents.split(",")
         else:
             return True
 
+
     def initPropertyStore(self, props):
         # Setup peruser special properties
         props.setSpecialProperties(
@@ -533,6 +559,7 @@
             ),
         )
 
+
     # FIXME: this is DAV-ish.  Data store calendar objects don't have
     # mime types.  -wsv
     def contentType(self):
@@ -541,6 +568,7 @@
         """
         return MimeType.fromString("text/calendar; charset=utf-8")
 
+
     @inlineCallbacks
     def splitCollectionByComponentTypes(self):
         """
@@ -549,7 +577,7 @@
         on any new calendars created. Also restrict the new calendars to only the one appropriate component type. Return
         the number of splits done.
         """
-        
+
         # First see how many different component types there are
         split_count = 0
         components = yield self._countComponentTypes()
@@ -560,11 +588,11 @@
             yield self.setSupportedComponents(component.upper())
 
             returnValue(split_count)
-        
+
         # We will leave the component type with the highest count in the current calendar and create new calendars
         # for the others which will be moved over
-        maxComponent = max(components, key=lambda x:x[1])[0]
-        
+        maxComponent = max(components, key=lambda x: x[1])[0]
+
         for component, _ignore_count in components:
             if component == maxComponent:
                 continue
@@ -576,12 +604,13 @@
 
         returnValue(split_count)
 
+
     @inlineCallbacks
     def _countComponentTypes(self):
         """
         Count each component type in this calendar.
-        
-        @return: a C{tuple} of C{tuple} containing the component type name and count. 
+
+        @return: a C{tuple} of C{tuple} containing the component type name and count.
         """
 
         ob = self._objectSchema
@@ -595,17 +624,18 @@
         rows = yield _componentsQuery.on(self._txn, calID=self._resourceID)
         result = tuple([(componentType, componentCount) for componentType, componentCount in sorted(rows, key=lambda x:x[0])])
         returnValue(result)
-        
+
+
     @inlineCallbacks
     def _splitComponentType(self, component):
         """
         Create a new calendar and move all components of the specified component type into the new one.
         Make sure properties and sharing state is preserved on the new calendar.
-        
+
         @param component: Component type to split out
         @type component: C{str}
         """
-        
+
         # Create the new calendar
         try:
             newcalendar = yield self._home.createCalendarWithName("%s-%s" % (self._name, component.lower(),))
@@ -613,25 +643,26 @@
             # If the name we want exists, try repeating with up to ten more
             for ctr in range(10):
                 try:
-                    newcalendar = yield self._home.createCalendarWithName("%s-%s-%d" % (self._name, component.lower(), ctr+1,))
+                    newcalendar = yield self._home.createCalendarWithName("%s-%s-%d" % (self._name, component.lower(), ctr + 1,))
                 except HomeChildNameAlreadyExistsError:
                     continue
             else:
                 # At this point we are stuck
                 raise HomeChildNameNotAllowedError
-        
+
         # Restrict calendar to single component type
         yield newcalendar.setSupportedComponents(component.upper())
-        
+
         # Transfer properties over
         yield newcalendar._properties.copyAllProperties(self._properties)
-        
+
         # Transfer sharing
         yield self._transferSharingDetails(newcalendar, component)
-        
+
         # Now move calendar data over
         yield self._transferCalendarObjects(newcalendar, component)
-        
+
+
     @inlineCallbacks
     def _transferSharingDetails(self, newcalendar, component):
         """
@@ -646,22 +677,23 @@
             Where=(cb.CALENDAR_RESOURCE_ID == Parameter('calID')).And(
                 cb.CALENDAR_HOME_RESOURCE_ID != Parameter('homeID'))
         )
-        
+
         rows = yield _bindQuery.on(
             self._txn,
             calID=self._resourceID,
             homeID=self._home._resourceID,
         )
-        
+
         if len(rows) == 0:
             returnValue(None)
-        
+
         for row in rows:
             columnMap = dict(zip(columns, row))
             columnMap[cb.CALENDAR_RESOURCE_ID] = newcalendar._resourceID
             columnMap[cb.CALENDAR_RESOURCE_NAME] = "%s-%s" % (columnMap[cb.CALENDAR_RESOURCE_NAME], component.lower(),)
-            yield Insert(columnMap).on(self._txn)   
+            yield Insert(columnMap).on(self._txn)
 
+
     @inlineCallbacks
     def _transferCalendarObjects(self, newcalendar, component):
         """
@@ -682,17 +714,18 @@
             calID=self._resourceID,
             componentType=component,
         )
-        
+
         if len(rows) == 0:
             returnValue(None)
-        
+
         for row in rows:
             resourceID = row[0]
             child = yield self.objectResourceWithID(resourceID)
             yield self.moveObjectResource(child, newcalendar)
 
+
     @classproperty
-    def _moveTimeRangeUpdateQuery(cls): #@NoSelf
+    def _moveTimeRangeUpdateQuery(cls): # @NoSelf
         """
         DAL query to update a child to be in a new parent.
         """
@@ -702,6 +735,7 @@
             Where=tr.CALENDAR_OBJECT_RESOURCE_ID == Parameter("resourceID")
         )
 
+
     @inlineCallbacks
     def _movedObjectResource(self, child, newparent):
         """
@@ -713,6 +747,7 @@
             resourceID=child._resourceID
         )
 
+
     def unshare(self):
         """
         Unshares a collection, regardless of which "direction" it was shared.
@@ -743,7 +778,7 @@
     Component.ACCESS_CONFIDENTIAL: 3,
     Component.ACCESS_RESTRICTED  : 4,
 }
-accesstype_to_accessMode = dict([(v, k) for k,v in accessMode_to_type.items()])
+accesstype_to_accessMode = dict([(v, k) for k, v in accessMode_to_type.items()])
 
 def _pathToName(path):
     return path.rsplit(".", 1)[0]
@@ -768,7 +803,6 @@
         self.scheduleEtags = metadata.get("scheduleEtags", "")
         self.hasPrivateComment = metadata.get("hasPrivateComment", False)
 
-
     _allColumns = [
         _objectSchema.RESOURCE_ID,
         _objectSchema.RESOURCE_NAME,
@@ -854,7 +888,7 @@
         # freebusy related properties have changed (e.g. an attendee reply and refresh). In those cases
         # the component will have a special attribute present to let us know to suppress the instance indexing.
         instanceIndexingRequired = not hasattr(component, "noInstanceIndexing") or inserting or reCreate
-        
+
         if instanceIndexingRequired:
 
             # Decide how far to expand based on the component. doInstanceIndexing will indicate whether we
@@ -862,28 +896,28 @@
             # operation.
             doInstanceIndexing = False
             master = component.masterComponent()
-            if ( master is None or not component.isRecurring() ):
+            if (master is None or not component.isRecurring()):
                 # When there is no master we have a set of overridden components -
                 #   index them all.
                 # When there is one instance - index it.
                 expand = PyCalendarDateTime(2100, 1, 1, 0, 0, 0, tzid=PyCalendarTimezone(utc=True))
                 doInstanceIndexing = True
             else:
-    
+
                 # If migrating or re-creating or config option for delayed indexing is off, always index
                 if reCreate or txn._migrating or (not config.FreeBusyIndexDelayedExpand and not isInboxItem):
                     doInstanceIndexing = True
-    
+
                 # Duration into the future through which recurrences are expanded in the index
                 # by default.  This is a caching parameter which affects the size of the index;
                 # it does not affect search results beyond this period, but it may affect
                 # performance of such a search.
                 expand = (PyCalendarDateTime.getToday() +
                           PyCalendarDuration(days=config.FreeBusyIndexExpandAheadDays))
-    
+
                 if expand_until and expand_until > expand:
                     expand = expand_until
-    
+
                 # Maximum duration into the future through which recurrences are expanded in the
                 # index.  This is a caching parameter which affects the size of the index; it
                 # does not affect search results beyond this period, but it may affect
@@ -899,7 +933,7 @@
                 if expand > (PyCalendarDateTime.getToday() +
                              PyCalendarDuration(days=config.FreeBusyIndexExpandMaxDays)):
                     raise IndexedSearchException
-    
+
             if config.FreeBusyIndexLowerLimitDays:
                 truncateLowerLimit = PyCalendarDateTime.getToday()
                 truncateLowerLimit.offsetDay(-config.FreeBusyIndexLowerLimitDays)
@@ -915,7 +949,7 @@
             except InvalidOverriddenInstanceError, e:
                 self.log_error("Invalid instance %s when indexing %s in %s" %
                                (e.rid, self._name, self._calendar,))
-    
+
                 if txn._migrating:
                     # TODO: fix the data here by re-writing component then re-index
                     instances = component.expandTimeRanges(expand, lowerLimit=truncateLowerLimit, ignoreInvalidInstances=True)
@@ -923,7 +957,7 @@
                     recurrenceLowerLimit = instances.lowerLimit
                 else:
                     raise
-    
+
             # Now coerce indexing to off if needed
             if not doInstanceIndexing:
                 instances = None
@@ -1003,7 +1037,7 @@
                         Where=co.RESOURCE_ID == self._resourceID
                     ).on(txn)
                 )[0][0]
-                
+
                 # Need to wipe the existing time-range for this and rebuild if required
                 if instanceIndexingRequired:
                     yield Delete(
@@ -1029,8 +1063,8 @@
 
         if instanceIndexingRequired and doInstanceIndexing:
             yield self._addInstances(component, instances, truncateLowerLimit, txn)
-    
-    
+
+
     @inlineCallbacks
     def _addInstances(self, component, instances, truncateLowerLimit, txn):
         """
@@ -1052,18 +1086,18 @@
             instance = instances[key]
             start = instance.start
             end = instance.end
-            float = instance.start.floating()
+            floating = instance.start.floating()
             transp = instance.component.propertyValue("TRANSP") == "TRANSPARENT"
             fbtype = instance.component.getFBType()
             start.setTimezoneUTC(True)
             end.setTimezoneUTC(True)
 
-            # Ignore if below the lower limit            
+            # Ignore if below the lower limit
             if truncateLowerLimit and end < truncateLowerLimit:
                 lowerLimitApplied = True
                 continue
 
-            yield self._addInstanceDetails(component, instance.rid, start, end, float, transp, fbtype, txn)
+            yield self._addInstanceDetails(component, instance.rid, start, end, floating, transp, fbtype, txn)
 
         # For truncated items we insert a tomb stone lower bound so that a time-range
         # query with just an end bound will match
@@ -1075,7 +1109,7 @@
         # Special - for unbounded recurrence we insert a value for "infinity"
         # that will allow an open-ended time-range to always match it.
         # We also need to add the "infinity" value if the event was bounded but
-        # starts after the future expansion cut-off limit. 
+        # starts after the future expansion cut-off limit.
         if component.isRecurringUnbounded() or instances.limit and len(instances.instances) == 0:
             start = PyCalendarDateTime(2100, 1, 1, 0, 0, 0, tzid=PyCalendarTimezone(utc=True))
             end = PyCalendarDateTime(2100, 1, 1, 1, 0, 0, tzid=PyCalendarTimezone(utc=True))
@@ -1083,7 +1117,7 @@
 
 
     @inlineCallbacks
-    def _addInstanceDetails(self, component, rid, start, end, float, transp, fbtype, txn):
+    def _addInstanceDetails(self, component, rid, start, end, floating, transp, fbtype, txn):
 
         tr = schema.TIME_RANGE
         tpy = schema.TRANSPARENCY
@@ -1091,7 +1125,7 @@
         instanceid = (yield Insert({
             tr.CALENDAR_RESOURCE_ID        : self._calendar._resourceID,
             tr.CALENDAR_OBJECT_RESOURCE_ID : self._resourceID,
-            tr.FLOATING                    : float,
+            tr.FLOATING                    : floating,
             tr.START_DATE                  : pyCalendarTodatetime(start),
             tr.END_DATE                    : pyCalendarTodatetime(end),
             tr.FBTYPE                      : icalfbtype_to_indexfbtype.get(fbtype, icalfbtype_to_indexfbtype["FREE"]),
@@ -1142,13 +1176,13 @@
 
 
     @classproperty
-    def _recurrenceMinMaxByIDQuery(cls): #@NoSelf
+    def _recurrenceMinMaxByIDQuery(cls): # @NoSelf
         """
         DAL query to load RECURRANCE_MIN, RECURRANCE_MAX via an object's resource ID.
         """
         co = schema.CALENDAR_OBJECT
         return Select(
-            [co.RECURRANCE_MIN, co.RECURRANCE_MAX,],
+            [co.RECURRANCE_MIN, co.RECURRANCE_MAX, ],
             From=co,
             Where=co.RESOURCE_ID == Parameter("resourceID"),
         )
@@ -1159,7 +1193,7 @@
         """
         Get the RECURRANCE_MIN, RECURRANCE_MAX value from the database. Occasionally we might need to do an
         update to time-range data via a separate transaction, so we allow that to be passed in.
-    
+
         @return: L{PyCalendarDateTime} result
         """
         # Setup appropriate txn
@@ -1176,7 +1210,7 @@
 
 
     @classproperty
-    def _instanceQuery(cls): #@NoSelf
+    def _instanceQuery(cls): # @NoSelf
         """
         DAL query to load TIME_RANGE data via an object's resource ID.
         """
@@ -1196,7 +1230,7 @@
     def instances(self, txn=None):
         """
         Get the set of instances from the database.
-    
+
         @return: C{list} result
         """
         # Setup appropriate txn
@@ -1223,9 +1257,11 @@
         metadata["hasPrivateComment"] = self.hasPrivateComment
         return metadata
 
+
     def _get_accessMode(self):
         return accesstype_to_accessMode[self._access]
 
+
     def _set_accessMode(self, value):
         self._access = accessMode_to_type[value]
 
@@ -1234,6 +1270,7 @@
     def _get_isScheduleObject(self):
         return self._schedule_object
 
+
     def _set_isScheduleObject(self, value):
         self._schedule_object = value
 
@@ -1242,6 +1279,7 @@
     def _get_scheduleTag(self):
         return self._schedule_tag
 
+
     def _set_scheduleTag(self, value):
         self._schedule_tag = value
 
@@ -1250,6 +1288,7 @@
     def _get_scheduleEtags(self):
         return tuple(self._schedule_etags.split(",")) if self._schedule_etags else ()
 
+
     def _set_scheduleEtags(self, value):
         self._schedule_etags = ",".join(value) if value else ""
 
@@ -1258,38 +1297,99 @@
     def _get_hasPrivateComment(self):
         return self._private_comments
 
+
     def _set_hasPrivateComment(self, value):
         self._private_comments = value
 
     hasPrivateComment = property(_get_hasPrivateComment, _set_hasPrivateComment)
 
     @inlineCallbacks
+    def addAttachment(self, pathpattern, rids, content_type, filename, stream):
+
+        # First write the data stream
+
+        # Create the managed attachment record, then stream the request
+        # body into its storage transport
+        try:
+            attachment = (yield self.createManagedAttachment())
+            t = attachment.store(content_type, filename)
+            yield readStream(stream, t.write)
+        except Exception, e:
+            self.log_error("Unable to store attachment: %s" % (e,))
+            raise AttachmentStoreFailed
+        yield t.loseConnection()
+
+        # Now try and adjust the actual calendar data
+        calendar = (yield self.component())
+
+        location = pathpattern % (self._parentCollection.ownerHome().name(), attachment.dropboxID(),)
+        attach = Property("ATTACH", location, params={
+            "MANAGED-ID": attachment.dropboxID(),
+            "MTAG": attachment.md5(),
+            "FMTTYPE": "%s/%s" % (attachment.contentType().mediaType, attachment.contentType().mediaSubtype),
+            "FILENAME": attachment.dispositionName(),
+            "SIZE": str(attachment.size()),
+        }, valuetype=PyCalendarValue.VALUETYPE_URI)
+        if rids is None:
+            calendar.addPropertyToAllComponents(attach)
+
+        # Store the data
+        yield self.setComponent(calendar)
+
+        returnValue((attachment, location,))
+
+
+    def updateAttachment(self, managed_id, content_type, filename, stream):
+        pass
+
+
+    def removeAttachment(self, rids, managed_id):
+        pass
+
+
+    @inlineCallbacks
+    def createManagedAttachment(self):
+
+        # We need to know the resource_ID of the home collection of the owner
+        # (not sharee) of this event
+        sharerHomeID = (yield self._parentCollection.sharerHomeID())
+        managedID = str(uuid.uuid4())
+        returnValue((
+            yield Attachment.create(
+                self._txn, _ATTACHMENT_STATUS_MANAGED, managedID, "data", sharerHomeID, self._resourceID,
+            )
+        ))
+
+
+    @inlineCallbacks
     def createAttachmentWithName(self, name):
 
         # We need to know the resource_ID of the home collection of the owner
         # (not sharee) of this event
         sharerHomeID = (yield self._parentCollection.sharerHomeID())
+        dropboxID = (yield self.dropboxID())
         returnValue((
             yield Attachment.create(
-                self._txn, (yield self.dropboxID()), name, sharerHomeID
+                self._txn, _ATTACHMENT_STATUS_DROPBOX, dropboxID, name, sharerHomeID, self._resourceID,
             )
         ))
 
+
     @inlineCallbacks
     def removeAttachmentWithName(self, name):
         attachment = (yield self.attachmentWithName(name))
         yield attachment.remove()
 
+
     def attachmentWithName(self, name):
         return Attachment.loadWithName(self._txn, self._dropboxID, name)
 
+
     def attendeesCanManageAttachments(self):
         return self._attachment == _ATTACHMENTS_MODE_WRITE
 
-
     dropboxID = dropboxIDFromCalendarObject
 
-
     _attachmentsQuery = Select(
         [schema.ATTACHMENT.PATH],
         From=schema.ATTACHMENT,
@@ -1322,6 +1422,7 @@
             ),
         )
 
+
     # IDataStoreObject
     def contentType(self):
         """
@@ -1335,9 +1436,9 @@
 
     _TEMPORARY_UPLOADS_DIRECTORY = "Temporary"
 
-    def __init__(self, attachment, contentType, creating=False):
+    def __init__(self, attachment, contentType, dispositionName, creating=False):
         super(AttachmentStorageTransport, self).__init__(
-            attachment, contentType)
+            attachment, contentType, dispositionName)
 
         fileDescriptor, fileName = self._temporaryFile()
         # Wrap the file descriptor in a file object we can write to
@@ -1400,6 +1501,7 @@
 
         self._path.moveTo(self._attachment._path)
         self._attachment._contentType = self._contentType
+        self._attachment._dispositionName = self._dispositionName
         self._attachment._md5 = self._hash.hexdigest()
         self._attachment._size = newSize
         att = schema.ATTACHMENT
@@ -1407,10 +1509,11 @@
             sqltime,
             (yield Update(
                 {
-                    att.CONTENT_TYPE : generateContentType(self._contentType),
-                    att.SIZE         : self._attachment._size,
-                    att.MD5          : self._attachment._md5,
-                    att.MODIFIED     : utcNowSQL
+                    att.CONTENT_TYPE    : generateContentType(self._contentType),
+                    att.SIZE            : self._attachment._size,
+                    att.MD5             : self._attachment._md5,
+                    att.MODIFIED        : utcNowSQL,
+                    att.DISPLAYNAME     : self._dispositionName,
                 },
                 Where=(att.PATH == self._attachment.name()).And(
                     att.DROPBOX_ID == self._attachment._dropboxID
@@ -1430,57 +1533,97 @@
 def sqltime(value):
     return datetimeMktime(parseSQLTimestamp(value))
 
+
+
 class Attachment(object):
 
     implements(IAttachment)
 
-    def __init__(self, txn, dropboxID, name, ownerHomeID=None, justCreated=False):
+    def __init__(self, txn, a_id, status, dropboxID, name, ownerHomeID=None, justCreated=False):
         self._txn = txn
+        self._attachmentID = a_id
+        self._attachmentStatus = status
         self._dropboxID = dropboxID
         self._name = name
         self._ownerHomeID = ownerHomeID
         self._size = 0
+        self._created = None
+        self._modified = None
         self._justCreated = justCreated
 
 
     @classmethod
-    def _attachmentPathRoot(cls, txn, dropboxID):
+    def _attachmentPathRoot(cls, txn, status, dropboxID):
         attachmentRoot = txn._store.attachmentsPath
 
-        # Use directory hashing scheme based on MD5 of dropboxID
-        hasheduid = hashlib.md5(dropboxID).hexdigest()
-        return attachmentRoot.child(hasheduid[0:2]).child(
-            hasheduid[2:4]).child(hasheduid)
+        # Use directory hashing scheme based on MD5 of dropboxID if using dropbox, else
+        # just use dropboxID as-is if managed (since we know it is a uuid in that case)
+        hasheduid = hashlib.md5(dropboxID).hexdigest() if status == _ATTACHMENT_STATUS_DROPBOX else dropboxID
+        return attachmentRoot.child(hasheduid[0:2]).child(hasheduid[2:4]).child(hasheduid)
 
 
     @classmethod
     @inlineCallbacks
-    def create(cls, txn, dropboxID, name, ownerHomeID):
+    def create(cls, txn, status, dropboxID, name, ownerHomeID, referencedBy):
+        """
+        Create a new Attachment object.
 
+        @param txn: The transaction to use
+        @type txn: L{CommonStoreTransaction}
+        @param status: the type of attachment (dropbox or managed)
+        @type status: C{int}
+        @param dropboxID: the identifier for the attachment (dropbox id or managed id)
+        @type dropboxID: C{str}
+        @param name: the name of the attachment
+        @type name: C{str}
+        @param ownerHomeID: the resource-id of the home collection of the attachment owner
+        @type ownerHomeID: C{int}
+        @param referencedBy: the resource-id of the calendar object referencing the attachment (managed only)
+        @type referencedBy: C{int}
+        """
+
         # File system paths need to exist
         try:
-            cls._attachmentPathRoot(txn, dropboxID).makedirs()
+            cls._attachmentPathRoot(txn, status, dropboxID).makedirs()
         except:
             pass
 
         # Now create the DB entry
-        attachment = cls(txn, dropboxID, name, ownerHomeID, True)
         att = schema.ATTACHMENT
-        yield Insert({
+        rows = (yield Insert({
             att.CALENDAR_HOME_RESOURCE_ID : ownerHomeID,
+            att.STATUS                    : status,
             att.DROPBOX_ID                : dropboxID,
             att.CONTENT_TYPE              : "",
             att.SIZE                      : 0,
             att.MD5                       : "",
-            att.PATH                      : name
-        }).on(txn)
+            att.PATH                      : name,
+            att.DISPLAYNAME               : None,
+        }, Return=(att.ATTACHMENT_ID, att.CREATED, att.MODIFIED)).on(txn))
+
+        row_iter = iter(rows[0])
+        a_id = row_iter.next()
+        created = sqltime(row_iter.next())
+        modified = sqltime(row_iter.next())
+
+        # Create the attachment<->calendar object relationship for managed attachments
+        if status == _ATTACHMENT_STATUS_MANAGED:
+            attco = schema.ATTACHMENT_CALENDAR_OBJECT
+            rows = (yield Insert({
+                attco.ATTACHMENT_ID               : a_id,
+                attco.CALENDAR_OBJECT_RESOURCE_ID : referencedBy,
+            }).on(txn))
+
+        attachment = cls(txn, a_id, status, dropboxID, name, ownerHomeID, True)
+        attachment._created = created
+        attachment._modified = modified
         returnValue(attachment)
 
 
     @classmethod
     @inlineCallbacks
-    def loadWithName(cls, txn, dropboxID, name):
-        attachment = cls(txn, dropboxID, name)
+    def loadWithName(cls, txn, dropboxID, name="data"):
+        attachment = cls(txn, None, None, dropboxID, name)
         attachment = (yield attachment.initFromStore())
         returnValue(attachment)
 
@@ -1493,47 +1636,68 @@
         @return: C{True} if this attachment exists, C{False} otherwise.
         """
         att = schema.ATTACHMENT
-        rows = (yield Select([att.CALENDAR_HOME_RESOURCE_ID, att.CONTENT_TYPE,
-                              att.SIZE, att.MD5, att.CREATED, att.MODIFIED],
-                             From=att,
-                             Where=(att.DROPBOX_ID == self._dropboxID).And(
-                                 att.PATH == self._name)).on(self._txn))
+        rows = (yield Select(
+            [
+                att.ATTACHMENT_ID,
+                att.STATUS,
+                att.CALENDAR_HOME_RESOURCE_ID,
+                att.CONTENT_TYPE,
+                att.SIZE,
+                att.MD5,
+                att.CREATED,
+                att.MODIFIED,
+                att.DISPLAYNAME,
+            ],
+            From=att,
+            Where=(att.DROPBOX_ID == self._dropboxID).And(
+                   att.PATH == self._name)
+        ).on(self._txn))
+
         if not rows:
             returnValue(None)
-        self._ownerHomeID = rows[0][0]
-        self._contentType = MimeType.fromString(rows[0][1])
-        self._size = rows[0][2]
-        self._md5 = rows[0][3]
-        self._created = sqltime(rows[0][4])
-        self._modified = sqltime(rows[0][5])
+
+        row_iter = iter(rows[0])
+        self._attachmentID = row_iter.next()
+        self._attachmentStatus = row_iter.next()
+        self._ownerHomeID = row_iter.next()
+        self._contentType = MimeType.fromString(row_iter.next())
+        self._size = row_iter.next()
+        self._md5 = row_iter.next()
+        self._created = sqltime(row_iter.next())
+        self._modified = sqltime(row_iter.next())
+        self._dispositionName = row_iter.next()
         returnValue(self)
 
 
+    def dropboxID(self):
+        return self._dropboxID
+
+
+    def isManaged(self):
+        return self._attachmentStatus == _ATTACHMENT_STATUS_MANAGED
+
+
     def name(self):
         return self._name
 
 
     @property
     def _path(self):
-        attachmentRoot = self._txn._store.attachmentsPath
-        # Use directory hashing scheme based on MD5 of dropboxID
-        hasheduid = hashlib.md5(self._dropboxID).hexdigest()
-        return attachmentRoot.child(hasheduid[0:2]).child(
-            hasheduid[2:4]).child(hasheduid).child(self.name())
+        attachmentRoot = self._attachmentPathRoot(self._txn, self._attachmentStatus, self._dropboxID)
+        return attachmentRoot.child(self.name())
 
 
     def properties(self):
         pass # stub
 
 
-    def store(self, contentType):
-        return AttachmentStorageTransport(self, contentType, self._justCreated)
+    def store(self, contentType, dispositionName=None):
+        return AttachmentStorageTransport(self, contentType, dispositionName, self._justCreated)
 
 
     def retrieve(self, protocol):
         return AttachmentRetrievalTransport(self._path).start(protocol)
 
-
     _removeStatement = Delete(
         From=schema.ATTACHMENT,
         Where=(schema.ATTACHMENT.DROPBOX_ID == Parameter("dropboxID")).And(
@@ -1586,4 +1750,9 @@
         return self._modified
 
 
+    # IAttachment
+    def dispositionName(self):
+        return self._dispositionName
+
+
 Calendar._objectResourceClass = CalendarObject

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/datastore/util.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/datastore/util.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/datastore/util.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -468,7 +468,7 @@
 
     contentTypes = loadMimeTypes()
 
-    def __init__(self, attachment, contentType):
+    def __init__(self, attachment, contentType, dispositionName):
         """
         Create a storage transport with a reference to an L{IAttachment} and a
         L{twext.web2.http_headers.MimeType}.
@@ -477,9 +477,10 @@
         self._clock = reactor
         self._attachment = attachment
         self._contentType = contentType
+        self._dispositionName = dispositionName
         self._producer = None
 
-        # Make sure we have some kind of contrent-type
+        # Make sure we have some kind of content-type
         if self._contentType is None:
             self._contentType = http_headers.MimeType.fromString(getType(self._attachment.name(), self.contentTypes))
 

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/icalendarstore.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/icalendarstore.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/txdav/caldav/icalendarstore.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -1,5 +1,5 @@
 # -*- test-case-name: txdav.caldav.datastore -*-
-##
+# #
 # Copyright (c) 2010-2012 Apple Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +13,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-##
+# #
 
 """
 Calendar store interfaces
@@ -56,6 +56,13 @@
 
 
 
+class AttachmentStoreFailed(Exception):
+    """
+    Unable to store an attachment.
+    """
+
+
+
 class QuotaExceeded(Exception):
     """
     The quota for a particular user has been exceeded.
@@ -528,6 +535,63 @@
         """
 
 
+    #
+    # New managed attachment APIs that supersede dropbox
+    #
+
+    def addAttachment(rids, content_type, filename, stream):
+        """
+        Add a managed attachment to the calendar data.
+
+        @param rids: set of RECURRENCE-ID values (not adjusted for UTC or TZID offset) to add the
+            new attachment to. The server must create necessary overrides if none already exist.
+        @type rids: C{iterable}
+        @param content_type: content-type information for the attachment data.
+        @type content_type: L{MimeType}
+        @param filename: display file name to use for the attachment.
+        @type filename: C{str}
+        @param stream: stream from which attachment data can be retrieved.
+        @type stream: L{IStream}
+
+        @raise AttachmentStoreFailed: if the attachment data cannot be stored.
+        """
+
+
+    def updateAttachment(managed_id, content_type, filename, stream):
+        """
+        Update an existing managed attachment in the calendar data.
+
+        @param managed_id: the identifier of the attachment to update.
+        @type managed_id: C{str}
+        @param content_type: content-type information for the attachment data.
+        @type content_type: L{MimeType}
+        @param filename: display file name to use for the attachment.
+        @type filename: C{str}
+        @param stream: stream from which attachment data can be retrieved.
+        @type stream: L{IStream}
+
+        @raise AttachmentStoreFailed: if the attachment data cannot be updated.
+        """
+
+
+    def removeAttachment(rids, managed_id):
+        """
+        Remove an existing managed attachment from the calendar data.
+
+        @param rids: set of RECURRENCE-ID values (not adjusted for UTC or TZID offset) to remove the
+            attachment from. The server must create necessary overrides if none already exist.
+        @type rids: C{iterable}
+        @param managed_id: the identifier of the attachment to remove.
+        @type managed_id: C{str}
+
+        @raise AttachmentStoreFailed: if the attachment cannot be removed.
+        """
+
+    #
+    # The following APIs are for the older Dropbox protocol, which is now deprecated in favor of
+    # managed attachments
+    #
+
     def dropboxID():
         """
         An identifier, unique to the calendar home, that specifies a location
@@ -660,3 +724,11 @@
             that the stream is complete to its C{connectionLost} method.
         @type protocol: L{IProtocol}
         """
+
+    def dispositionName():
+        """
+        The content-disposition filename for the attachment. Note that this is not necessarily the same as
+        the path name used to store the attachment.
+
+        @rtype: C{str}
+        """

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/current.sql
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/current.sql	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/current.sql	2012-10-31 20:12:45 UTC (rev 9998)
@@ -46,7 +46,7 @@
 create table CALENDAR_HOME (
   RESOURCE_ID      integer      primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
   OWNER_UID        varchar(255) not null unique,                                 -- implicit index
-  DATAVERSION	   integer      default 0 not null
+  DATAVERSION      integer      default 0 not null
 );
 
 ----------------------------
@@ -306,22 +306,46 @@
 -- Attachment --
 ----------------
 
+create sequence ATTACHMENT_ID_SEQ;
+
 create table ATTACHMENT (
-  CALENDAR_HOME_RESOURCE_ID   integer       not null references CALENDAR_HOME,
-  DROPBOX_ID                  varchar(255)  not null,
-  CONTENT_TYPE                varchar(255)  not null,
-  SIZE                        integer       not null,
-  MD5                         char(32)      not null,
+  ATTACHMENT_ID               integer           primary key default nextval('ATTACHMENT_ID_SEQ'), -- implicit index
+  STATUS                      integer default 0 not null,
+  CALENDAR_HOME_RESOURCE_ID   integer           not null references CALENDAR_HOME,
+  DROPBOX_ID                  varchar(255)      not null,
+  CONTENT_TYPE                varchar(255)      not null,
+  SIZE                        integer           not null,
+  MD5                         char(32)          not null,
   CREATED                     timestamp default timezone('UTC', CURRENT_TIMESTAMP),
   MODIFIED                    timestamp default timezone('UTC', CURRENT_TIMESTAMP),
-  PATH                        varchar(1024) not null,
+  PATH                        varchar(1024)     not null,
+  DISPLAYNAME                 varchar(255),
 
-  primary key(DROPBOX_ID, PATH) --implicit index
+  unique(DROPBOX_ID, PATH) --implicit index
 );
 
 create index ATTACHMENT_CALENDAR_HOME_RESOURCE_ID on
   ATTACHMENT(CALENDAR_HOME_RESOURCE_ID);
 
+-- Many-to-many relationship between attachments and calendar objects
+create table ATTACHMENT_CALENDAR_OBJECT (
+  ATTACHMENT_ID                  integer not null references ATTACHMENT on delete cascade,
+  CALENDAR_OBJECT_RESOURCE_ID    integer not null references CALENDAR_OBJECT on delete cascade,
+
+  primary key(ATTACHMENT_ID, CALENDAR_OBJECT_RESOURCE_ID) -- implicit index
+);
+
+-- Enumeration of attachment status
+
+create table ATTACHMENT_STATUS (
+  ID          integer     primary key,
+  DESCRIPTION varchar(16) not null unique
+);
+
+insert into ATTACHMENT_STATUS values (0, 'dropbox');
+insert into ATTACHMENT_STATUS values (1, 'managed');
+
+
 -----------------------
 -- Resource Property --
 -----------------------
@@ -343,7 +367,7 @@
 create table ADDRESSBOOK_HOME (
   RESOURCE_ID      integer      primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
   OWNER_UID        varchar(255) not null unique,                                -- implicit index
-  DATAVERSION	   integer      default 0 not null
+  DATAVERSION      integer      default 0 not null
 );
 
 -------------------------------
@@ -513,6 +537,6 @@
   VALUE                         varchar(255)
 );
 
-insert into CALENDARSERVER values ('VERSION', '12');
+insert into CALENDARSERVER values ('VERSION', '13');
 insert into CALENDARSERVER values ('CALENDAR-DATAVERSION', '3');
 insert into CALENDARSERVER values ('ADDRESSBOOK-DATAVERSION', '1');

Added: CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/oracle-dialect/v12.sql
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/oracle-dialect/v12.sql	                        (rev 0)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/oracle-dialect/v12.sql	2012-10-31 20:12:45 UTC (rev 9998)
@@ -0,0 +1,370 @@
+create sequence RESOURCE_ID_SEQ;
+create sequence INSTANCE_ID_SEQ;
+create sequence REVISION_SEQ;
+create table NODE_INFO (
+    "HOSTNAME" nvarchar2(255),
+    "PID" integer not null,
+    "PORT" integer not null,
+    "TIME" timestamp default CURRENT_TIMESTAMP at time zone 'UTC' not null, 
+    primary key("HOSTNAME", "PORT")
+);
+
+create table CALENDAR_HOME (
+    "RESOURCE_ID" integer primary key,
+    "OWNER_UID" nvarchar2(255) unique,
+    "DATAVERSION" integer default 0 not null
+);
+
+create table CALENDAR_HOME_METADATA (
+    "RESOURCE_ID" integer primary key references CALENDAR_HOME on delete cascade,
+    "QUOTA_USED_BYTES" integer default 0 not null,
+    "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+    "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC'
+);
+
+create table CALENDAR (
+    "RESOURCE_ID" integer primary key
+);
+
+create table CALENDAR_METADATA (
+    "RESOURCE_ID" integer primary key references CALENDAR on delete cascade,
+    "SUPPORTED_COMPONENTS" nvarchar2(255) default null,
+    "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+    "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC'
+);
+
+create table INVITE (
+    "INVITE_UID" nvarchar2(255),
+    "NAME" nvarchar2(255),
+    "RECIPIENT_ADDRESS" nvarchar2(255),
+    "HOME_RESOURCE_ID" integer not null,
+    "RESOURCE_ID" integer not null
+);
+
+create table NOTIFICATION_HOME (
+    "RESOURCE_ID" integer primary key,
+    "OWNER_UID" nvarchar2(255) unique
+);
+
+create table NOTIFICATION (
+    "RESOURCE_ID" integer primary key,
+    "NOTIFICATION_HOME_RESOURCE_ID" integer not null references NOTIFICATION_HOME,
+    "NOTIFICATION_UID" nvarchar2(255),
+    "XML_TYPE" nvarchar2(255),
+    "XML_DATA" nclob,
+    "MD5" nchar(32),
+    "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+    "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC', 
+    unique("NOTIFICATION_UID", "NOTIFICATION_HOME_RESOURCE_ID")
+);
+
+create table CALENDAR_BIND (
+    "CALENDAR_HOME_RESOURCE_ID" integer not null references CALENDAR_HOME,
+    "CALENDAR_RESOURCE_ID" integer not null references CALENDAR on delete cascade,
+    "CALENDAR_RESOURCE_NAME" nvarchar2(255),
+    "BIND_MODE" integer not null,
+    "BIND_STATUS" integer not null,
+    "SEEN_BY_OWNER" integer not null,
+    "SEEN_BY_SHAREE" integer not null,
+    "MESSAGE" nclob, 
+    primary key("CALENDAR_HOME_RESOURCE_ID", "CALENDAR_RESOURCE_ID"), 
+    unique("CALENDAR_HOME_RESOURCE_ID", "CALENDAR_RESOURCE_NAME")
+);
+
+create table CALENDAR_BIND_MODE (
+    "ID" integer primary key,
+    "DESCRIPTION" nvarchar2(16) unique
+);
+
+insert into CALENDAR_BIND_MODE (DESCRIPTION, ID) values ('own', 0);
+insert into CALENDAR_BIND_MODE (DESCRIPTION, ID) values ('read', 1);
+insert into CALENDAR_BIND_MODE (DESCRIPTION, ID) values ('write', 2);
+insert into CALENDAR_BIND_MODE (DESCRIPTION, ID) values ('direct', 3);
+create table CALENDAR_BIND_STATUS (
+    "ID" integer primary key,
+    "DESCRIPTION" nvarchar2(16) unique
+);
+
+insert into CALENDAR_BIND_STATUS (DESCRIPTION, ID) values ('invited', 0);
+insert into CALENDAR_BIND_STATUS (DESCRIPTION, ID) values ('accepted', 1);
+insert into CALENDAR_BIND_STATUS (DESCRIPTION, ID) values ('declined', 2);
+insert into CALENDAR_BIND_STATUS (DESCRIPTION, ID) values ('invalid', 3);
+create table CALENDAR_OBJECT (
+    "RESOURCE_ID" integer primary key,
+    "CALENDAR_RESOURCE_ID" integer not null references CALENDAR on delete cascade,
+    "RESOURCE_NAME" nvarchar2(255),
+    "ICALENDAR_TEXT" nclob,
+    "ICALENDAR_UID" nvarchar2(255),
+    "ICALENDAR_TYPE" nvarchar2(255),
+    "ATTACHMENTS_MODE" integer default 0 not null,
+    "DROPBOX_ID" nvarchar2(255),
+    "ORGANIZER" nvarchar2(255),
+    "ORGANIZER_OBJECT" integer references CALENDAR_OBJECT,
+    "RECURRANCE_MIN" date,
+    "RECURRANCE_MAX" date,
+    "ACCESS" integer default 0 not null,
+    "SCHEDULE_OBJECT" integer default 0,
+    "SCHEDULE_TAG" nvarchar2(36) default null,
+    "SCHEDULE_ETAGS" nclob default null,
+    "PRIVATE_COMMENTS" integer default 0 not null,
+    "MD5" nchar(32),
+    "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+    "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC', 
+    unique("CALENDAR_RESOURCE_ID", "RESOURCE_NAME")
+);
+
+create table CALENDAR_OBJECT_ATTACHMENTS_MO (
+    "ID" integer primary key,
+    "DESCRIPTION" nvarchar2(16) unique
+);
+
+insert into CALENDAR_OBJECT_ATTACHMENTS_MO (DESCRIPTION, ID) values ('none', 0);
+insert into CALENDAR_OBJECT_ATTACHMENTS_MO (DESCRIPTION, ID) values ('read', 1);
+insert into CALENDAR_OBJECT_ATTACHMENTS_MO (DESCRIPTION, ID) values ('write', 2);
+create table CALENDAR_ACCESS_TYPE (
+    "ID" integer primary key,
+    "DESCRIPTION" nvarchar2(32) unique
+);
+
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('', 0);
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('public', 1);
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('private', 2);
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('confidential', 3);
+insert into CALENDAR_ACCESS_TYPE (DESCRIPTION, ID) values ('restricted', 4);
+create table TIME_RANGE (
+    "INSTANCE_ID" integer primary key,
+    "CALENDAR_RESOURCE_ID" integer not null references CALENDAR on delete cascade,
+    "CALENDAR_OBJECT_RESOURCE_ID" integer not null references CALENDAR_OBJECT on delete cascade,
+    "FLOATING" integer not null,
+    "START_DATE" timestamp not null,
+    "END_DATE" timestamp not null,
+    "FBTYPE" integer not null,
+    "TRANSPARENT" integer not null
+);
+
+create table FREE_BUSY_TYPE (
+    "ID" integer primary key,
+    "DESCRIPTION" nvarchar2(16) unique
+);
+
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('unknown', 0);
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('free', 1);
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('busy', 2);
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('busy-unavailable', 3);
+insert into FREE_BUSY_TYPE (DESCRIPTION, ID) values ('busy-tentative', 4);
+create table TRANSPARENCY (
+    "TIME_RANGE_INSTANCE_ID" integer not null references TIME_RANGE on delete cascade,
+    "USER_ID" nvarchar2(255),
+    "TRANSPARENT" integer not null
+);
+
+create table ATTACHMENT (
+    "CALENDAR_HOME_RESOURCE_ID" integer not null references CALENDAR_HOME,
+    "DROPBOX_ID" nvarchar2(255),
+    "CONTENT_TYPE" nvarchar2(255),
+    "SIZE" integer not null,
+    "MD5" nchar(32),
+    "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+    "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+    "PATH" nvarchar2(1024), 
+    primary key("DROPBOX_ID", "PATH")
+);
+
+create table RESOURCE_PROPERTY (
+    "RESOURCE_ID" integer not null,
+    "NAME" nvarchar2(255),
+    "VALUE" nclob,
+    "VIEWER_UID" nvarchar2(255), 
+    primary key("RESOURCE_ID", "NAME", "VIEWER_UID")
+);
+
+create table ADDRESSBOOK_HOME (
+    "RESOURCE_ID" integer primary key,
+    "OWNER_UID" nvarchar2(255) unique,
+    "DATAVERSION" integer default 0 not null
+);
+
+create table ADDRESSBOOK_HOME_METADATA (
+    "RESOURCE_ID" integer primary key references ADDRESSBOOK_HOME on delete cascade,
+    "QUOTA_USED_BYTES" integer default 0 not null,
+    "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+    "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC'
+);
+
+create table ADDRESSBOOK (
+    "RESOURCE_ID" integer primary key
+);
+
+create table ADDRESSBOOK_METADATA (
+    "RESOURCE_ID" integer primary key references ADDRESSBOOK on delete cascade,
+    "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+    "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC'
+);
+
+create table ADDRESSBOOK_BIND (
+    "ADDRESSBOOK_HOME_RESOURCE_ID" integer not null references ADDRESSBOOK_HOME,
+    "ADDRESSBOOK_RESOURCE_ID" integer not null references ADDRESSBOOK on delete cascade,
+    "ADDRESSBOOK_RESOURCE_NAME" nvarchar2(255),
+    "BIND_MODE" integer not null,
+    "BIND_STATUS" integer not null,
+    "SEEN_BY_OWNER" integer not null,
+    "SEEN_BY_SHAREE" integer not null,
+    "MESSAGE" nclob, 
+    primary key("ADDRESSBOOK_HOME_RESOURCE_ID", "ADDRESSBOOK_RESOURCE_ID"), 
+    unique("ADDRESSBOOK_HOME_RESOURCE_ID", "ADDRESSBOOK_RESOURCE_NAME")
+);
+
+create table ADDRESSBOOK_OBJECT (
+    "RESOURCE_ID" integer primary key,
+    "ADDRESSBOOK_RESOURCE_ID" integer not null references ADDRESSBOOK on delete cascade,
+    "RESOURCE_NAME" nvarchar2(255),
+    "VCARD_TEXT" nclob,
+    "VCARD_UID" nvarchar2(255),
+    "MD5" nchar(32),
+    "CREATED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC',
+    "MODIFIED" timestamp default CURRENT_TIMESTAMP at time zone 'UTC', 
+    unique("ADDRESSBOOK_RESOURCE_ID", "RESOURCE_NAME"), 
+    unique("ADDRESSBOOK_RESOURCE_ID", "VCARD_UID")
+);
+
+create table CALENDAR_OBJECT_REVISIONS (
+    "CALENDAR_HOME_RESOURCE_ID" integer not null references CALENDAR_HOME,
+    "CALENDAR_RESOURCE_ID" integer references CALENDAR,
+    "CALENDAR_NAME" nvarchar2(255) default null,
+    "RESOURCE_NAME" nvarchar2(255),
+    "REVISION" integer not null,
+    "DELETED" integer not null
+);
+
+create table ADDRESSBOOK_OBJECT_REVISIONS (
+    "ADDRESSBOOK_HOME_RESOURCE_ID" integer not null references ADDRESSBOOK_HOME,
+    "ADDRESSBOOK_RESOURCE_ID" integer references ADDRESSBOOK,
+    "ADDRESSBOOK_NAME" nvarchar2(255) default null,
+    "RESOURCE_NAME" nvarchar2(255),
+    "REVISION" integer not null,
+    "DELETED" integer not null
+);
+
+create table NOTIFICATION_OBJECT_REVISIONS (
+    "NOTIFICATION_HOME_RESOURCE_ID" integer not null references NOTIFICATION_HOME on delete cascade,
+    "RESOURCE_NAME" nvarchar2(255),
+    "REVISION" integer not null,
+    "DELETED" integer not null, 
+    unique("NOTIFICATION_HOME_RESOURCE_ID", "RESOURCE_NAME")
+);
+
+create table APN_SUBSCRIPTIONS (
+    "TOKEN" nvarchar2(255),
+    "RESOURCE_KEY" nvarchar2(255),
+    "MODIFIED" integer not null,
+    "SUBSCRIBER_GUID" nvarchar2(255),
+    "USER_AGENT" nvarchar2(255) default null,
+    "IP_ADDR" nvarchar2(255) default null, 
+    primary key("TOKEN", "RESOURCE_KEY")
+);
+
+create table CALENDARSERVER (
+    "NAME" nvarchar2(255) primary key,
+    "VALUE" nvarchar2(255)
+);
+
+insert into CALENDARSERVER (NAME, VALUE) values ('VERSION', '12');
+insert into CALENDARSERVER (NAME, VALUE) values ('CALENDAR-DATAVERSION', '3');
+insert into CALENDARSERVER (NAME, VALUE) values ('ADDRESSBOOK-DATAVERSION', '1');
+create index INVITE_INVITE_UID_9b0902ff on INVITE (
+    INVITE_UID
+);
+
+create index INVITE_RESOURCE_ID_b36ddc23 on INVITE (
+    RESOURCE_ID
+);
+
+create index INVITE_HOME_RESOURCE__e9bdf77e on INVITE (
+    HOME_RESOURCE_ID
+);
+
+create index NOTIFICATION_NOTIFICA_f891f5f9 on NOTIFICATION (
+    NOTIFICATION_HOME_RESOURCE_ID
+);
+
+create index CALENDAR_BIND_RESOURC_e57964d4 on CALENDAR_BIND (
+    CALENDAR_RESOURCE_ID
+);
+
+create index CALENDAR_OBJECT_CALEN_a9a453a9 on CALENDAR_OBJECT (
+    CALENDAR_RESOURCE_ID,
+    ICALENDAR_UID
+);
+
+create index CALENDAR_OBJECT_CALEN_96e83b73 on CALENDAR_OBJECT (
+    CALENDAR_RESOURCE_ID,
+    RECURRANCE_MAX
+);
+
+create index CALENDAR_OBJECT_ORGAN_7ce24750 on CALENDAR_OBJECT (
+    ORGANIZER_OBJECT
+);
+
+create index CALENDAR_OBJECT_DROPB_de041d80 on CALENDAR_OBJECT (
+    DROPBOX_ID
+);
+
+create index TIME_RANGE_CALENDAR_R_beb6e7eb on TIME_RANGE (
+    CALENDAR_RESOURCE_ID
+);
+
+create index TIME_RANGE_CALENDAR_O_acf37bd1 on TIME_RANGE (
+    CALENDAR_OBJECT_RESOURCE_ID
+);
+
+create index TRANSPARENCY_TIME_RAN_5f34467f on TRANSPARENCY (
+    TIME_RANGE_INSTANCE_ID
+);
+
+create index ATTACHMENT_CALENDAR_H_0078845c on ATTACHMENT (
+    CALENDAR_HOME_RESOURCE_ID
+);
+
+create index ADDRESSBOOK_BIND_RESO_205aa75c on ADDRESSBOOK_BIND (
+    ADDRESSBOOK_RESOURCE_ID
+);
+
+create index CALENDAR_OBJECT_REVIS_3a3956c4 on CALENDAR_OBJECT_REVISIONS (
+    CALENDAR_HOME_RESOURCE_ID,
+    CALENDAR_RESOURCE_ID
+);
+
+create index CALENDAR_OBJECT_REVIS_2643d556 on CALENDAR_OBJECT_REVISIONS (
+    CALENDAR_RESOURCE_ID,
+    RESOURCE_NAME
+);
+
+create index CALENDAR_OBJECT_REVIS_265c8acf on CALENDAR_OBJECT_REVISIONS (
+    CALENDAR_RESOURCE_ID,
+    REVISION
+);
+
+create index ADDRESSBOOK_OBJECT_RE_f460d62d on ADDRESSBOOK_OBJECT_REVISIONS (
+    ADDRESSBOOK_HOME_RESOURCE_ID,
+    ADDRESSBOOK_RESOURCE_ID
+);
+
+create index ADDRESSBOOK_OBJECT_RE_9a848f39 on ADDRESSBOOK_OBJECT_REVISIONS (
+    ADDRESSBOOK_RESOURCE_ID,
+    RESOURCE_NAME
+);
+
+create index ADDRESSBOOK_OBJECT_RE_cb101e6b on ADDRESSBOOK_OBJECT_REVISIONS (
+    ADDRESSBOOK_RESOURCE_ID,
+    REVISION
+);
+
+create index NOTIFICATION_OBJECT_R_036a9cee on NOTIFICATION_OBJECT_REVISIONS (
+    NOTIFICATION_HOME_RESOURCE_ID,
+    REVISION
+);
+
+create index APN_SUBSCRIPTIONS_RES_9610d78e on APN_SUBSCRIPTIONS (
+    RESOURCE_KEY
+);
+

Added: CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/postgres-dialect/v12.sql
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/postgres-dialect/v12.sql	                        (rev 0)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/old/postgres-dialect/v12.sql	2012-10-31 20:12:45 UTC (rev 9998)
@@ -0,0 +1,518 @@
+-- -*- test-case-name: txdav.caldav.datastore.test.test_sql,txdav.carddav.datastore.test.test_sql -*-
+
+----
+-- Copyright (c) 2010-2012 Apple Inc. All rights reserved.
+--
+-- Licensed under the Apache License, Version 2.0 (the "License");
+-- you may not use this file except in compliance with the License.
+-- You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+----
+
+-----------------
+-- Resource ID --
+-----------------
+
+-- Single shared sequence supplying the synthetic RESOURCE_ID primary keys
+-- for the home/collection/object tables defined below.
+create sequence RESOURCE_ID_SEQ;
+
+-------------------------
+-- Cluster Bookkeeping --
+-------------------------
+
+-- Information about a process connected to this database.
+
+-- Note that this must match the node info schema in twext.enterprise.queue.
+create table NODE_INFO (
+  HOSTNAME  varchar(255) not null,
+  PID       integer not null,
+  PORT      integer not null,
+  TIME      timestamp not null default timezone('UTC', CURRENT_TIMESTAMP), -- server-side UTC default
+
+  primary key(HOSTNAME, PORT)
+);
+
+
+-------------------
+-- Calendar Home --
+-------------------
+
+-- One row per owner; the root of a user's calendar data.
+create table CALENDAR_HOME (
+  RESOURCE_ID      integer      primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+  OWNER_UID        varchar(255) not null unique,                                 -- implicit index
+  -- Per-home data version; presumably checked against the
+  -- 'CALENDAR-DATAVERSION' row of CALENDARSERVER by data upgraders -- confirm.
+  DATAVERSION	   integer      default 0 not null
+);
+
+----------------------------
+-- Calendar Home Metadata --
+----------------------------
+
+-- Mutable bookkeeping split out of CALENDAR_HOME; one-to-one with it and
+-- deleted along with it (on delete cascade).
+create table CALENDAR_HOME_METADATA (
+  RESOURCE_ID      integer      primary key references CALENDAR_HOME on delete cascade, -- implicit index
+  QUOTA_USED_BYTES integer      default 0 not null,
+  CREATED          timestamp    default timezone('UTC', CURRENT_TIMESTAMP),
+  MODIFIED         timestamp    default timezone('UTC', CURRENT_TIMESTAMP)
+);
+
+--------------
+-- Calendar --
+--------------
+
+-- A calendar collection is just an id; naming/ownership lives in
+-- CALENDAR_BIND, mutable properties in CALENDAR_METADATA.
+create table CALENDAR (
+  RESOURCE_ID integer   primary key default nextval('RESOURCE_ID_SEQ') -- implicit index
+);
+
+
+-----------------------
+-- Calendar Metadata --
+-----------------------
+
+create table CALENDAR_METADATA (
+  RESOURCE_ID           integer   primary key references CALENDAR on delete cascade, -- implicit index
+  SUPPORTED_COMPONENTS  varchar(255) default null,
+  CREATED               timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+  MODIFIED              timestamp default timezone('UTC', CURRENT_TIMESTAMP)
+);
+
+
+------------------------
+-- Sharing Invitation --
+------------------------
+
+-- Share invitations.  NOTE(review): unlike the bind tables, no primary-key
+-- or foreign-key constraints are declared here; integrity is presumably
+-- enforced at the application layer -- confirm.
+create table INVITE (
+    INVITE_UID         varchar(255) not null,
+    NAME               varchar(255) not null,
+    RECIPIENT_ADDRESS  varchar(255) not null,
+    HOME_RESOURCE_ID   integer      not null,
+    RESOURCE_ID        integer      not null
+
+    -- Need primary key on (INVITE_UID, NAME, RECIPIENT_ADDRESS)?
+);
+
+create index INVITE_INVITE_UID on INVITE(INVITE_UID);
+create index INVITE_RESOURCE_ID on INVITE(RESOURCE_ID);
+create index INVITE_HOME_RESOURCE_ID on INVITE(HOME_RESOURCE_ID);
+
+---------------------------
+-- Sharing Notifications --
+---------------------------
+
+-- One notification home per owner, holding notification resources.
+create table NOTIFICATION_HOME (
+  RESOURCE_ID integer      primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+  OWNER_UID   varchar(255) not null unique                                 -- implicit index
+);
+
+-- A notification: XML payload plus its type and MD5 for ETag-style checks.
+create table NOTIFICATION (
+  RESOURCE_ID                   integer      primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+  NOTIFICATION_HOME_RESOURCE_ID integer      not null references NOTIFICATION_HOME,
+  NOTIFICATION_UID              varchar(255) not null,
+  XML_TYPE                      varchar(255) not null,
+  XML_DATA                      text         not null,
+  MD5                           char(32)     not null,
+  CREATED                       timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+  MODIFIED                      timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+
+  unique(NOTIFICATION_UID, NOTIFICATION_HOME_RESOURCE_ID) -- implicit index
+);
+
+create index NOTIFICATION_NOTIFICATION_HOME_RESOURCE_ID on
+  NOTIFICATION(NOTIFICATION_HOME_RESOURCE_ID);
+
+-------------------
+-- Calendar Bind --
+-------------------
+
+-- Joins CALENDAR_HOME and CALENDAR
+
+-- One row per (home, calendar) pairing, covering both ownership and shares;
+-- mode/status values are enumerated in the two tables below.
+create table CALENDAR_BIND (
+  CALENDAR_HOME_RESOURCE_ID integer      not null references CALENDAR_HOME,
+  CALENDAR_RESOURCE_ID      integer      not null references CALENDAR on delete cascade,
+
+  -- An invitation which hasn't been accepted yet will not yet have a resource
+  -- name, so this field may be null.
+
+  CALENDAR_RESOURCE_NAME    varchar(255),
+  BIND_MODE                 integer      not null, -- enum CALENDAR_BIND_MODE
+  BIND_STATUS               integer      not null, -- enum CALENDAR_BIND_STATUS
+  SEEN_BY_OWNER             boolean      not null,
+  SEEN_BY_SHAREE            boolean      not null,
+  MESSAGE                   text,
+
+  primary key(CALENDAR_HOME_RESOURCE_ID, CALENDAR_RESOURCE_ID), -- implicit index
+  unique(CALENDAR_HOME_RESOURCE_ID, CALENDAR_RESOURCE_NAME)     -- implicit index
+);
+
+-- Reverse lookup: all binds for a given calendar.
+create index CALENDAR_BIND_RESOURCE_ID on CALENDAR_BIND(CALENDAR_RESOURCE_ID);
+
+-- Enumeration of calendar bind modes
+
+create table CALENDAR_BIND_MODE (
+  ID          integer     primary key,
+  DESCRIPTION varchar(16) not null unique
+);
+
+insert into CALENDAR_BIND_MODE values (0, 'own'  );
+insert into CALENDAR_BIND_MODE values (1, 'read' );
+insert into CALENDAR_BIND_MODE values (2, 'write');
+insert into CALENDAR_BIND_MODE values (3, 'direct');
+
+-- Enumeration of statuses
+
+create table CALENDAR_BIND_STATUS (
+  ID          integer     primary key,
+  DESCRIPTION varchar(16) not null unique
+);
+
+insert into CALENDAR_BIND_STATUS values (0, 'invited' );
+insert into CALENDAR_BIND_STATUS values (1, 'accepted');
+insert into CALENDAR_BIND_STATUS values (2, 'declined');
+insert into CALENDAR_BIND_STATUS values (3, 'invalid');
+
+
+---------------------
+-- Calendar Object --
+---------------------
+
+-- One row per stored iCalendar resource.
+create table CALENDAR_OBJECT (
+  RESOURCE_ID          integer      primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+  CALENDAR_RESOURCE_ID integer      not null references CALENDAR on delete cascade,
+  RESOURCE_NAME        varchar(255) not null,
+  ICALENDAR_TEXT       text         not null,
+  ICALENDAR_UID        varchar(255) not null,
+  ICALENDAR_TYPE       varchar(255) not null,
+  ATTACHMENTS_MODE     integer      default 0 not null, -- enum CALENDAR_OBJECT_ATTACHMENTS_MODE
+  DROPBOX_ID           varchar(255), -- presumably matches ATTACHMENT.DROPBOX_ID -- confirm
+  ORGANIZER            varchar(255),
+  ORGANIZER_OBJECT     integer      references CALENDAR_OBJECT, -- self-reference to the organizer's copy
+  -- NOTE(review): "RECURRANCE" misspelling is part of the frozen v12 schema;
+  -- do not rename in this snapshot.
+  RECURRANCE_MIN       date,        -- minimum date that recurrences have been expanded to.
+  RECURRANCE_MAX       date,        -- maximum date that recurrences have been expanded to.
+  ACCESS               integer      default 0 not null, -- enum CALENDAR_ACCESS_TYPE (below)
+  SCHEDULE_OBJECT      boolean      default false,
+  SCHEDULE_TAG         varchar(36)  default null,
+  SCHEDULE_ETAGS       text         default null,
+  PRIVATE_COMMENTS     boolean      default false not null,
+  MD5                  char(32)     not null,
+  CREATED              timestamp    default timezone('UTC', CURRENT_TIMESTAMP),
+  MODIFIED             timestamp    default timezone('UTC', CURRENT_TIMESTAMP),
+
+  unique(CALENDAR_RESOURCE_ID, RESOURCE_NAME) -- implicit index
+
+  -- since the 'inbox' is a 'calendar resource' for the purpose of storing
+  -- calendar objects, this constraint has to be selectively enforced by the
+  -- application layer.
+
+  -- unique(CALENDAR_RESOURCE_ID, ICALENDAR_UID)
+);
+
+create index CALENDAR_OBJECT_CALENDAR_RESOURCE_ID_AND_ICALENDAR_UID on
+  CALENDAR_OBJECT(CALENDAR_RESOURCE_ID, ICALENDAR_UID);
+
+create index CALENDAR_OBJECT_CALENDAR_RESOURCE_ID_RECURRANCE_MAX on
+  CALENDAR_OBJECT(CALENDAR_RESOURCE_ID, RECURRANCE_MAX);
+
+create index CALENDAR_OBJECT_ORGANIZER_OBJECT on
+  CALENDAR_OBJECT(ORGANIZER_OBJECT);
+
+create index CALENDAR_OBJECT_DROPBOX_ID on
+  CALENDAR_OBJECT(DROPBOX_ID);
+
+-- Enumeration of attachment modes
+
+create table CALENDAR_OBJECT_ATTACHMENTS_MODE (
+  ID          integer     primary key,
+  DESCRIPTION varchar(16) not null unique
+);
+
+insert into CALENDAR_OBJECT_ATTACHMENTS_MODE values (0, 'none' );
+insert into CALENDAR_OBJECT_ATTACHMENTS_MODE values (1, 'read' );
+insert into CALENDAR_OBJECT_ATTACHMENTS_MODE values (2, 'write');
+
+
+-- Enumeration of calendar access types
+
+create table CALENDAR_ACCESS_TYPE (
+  ID          integer     primary key,
+  DESCRIPTION varchar(32) not null unique
+);
+
+insert into CALENDAR_ACCESS_TYPE values (0, ''             );
+insert into CALENDAR_ACCESS_TYPE values (1, 'public'       );
+insert into CALENDAR_ACCESS_TYPE values (2, 'private'      );
+insert into CALENDAR_ACCESS_TYPE values (3, 'confidential' );
+insert into CALENDAR_ACCESS_TYPE values (4, 'restricted'   );
+
+-----------------
+-- Instance ID --
+-----------------
+
+create sequence INSTANCE_ID_SEQ;
+
+
+----------------
+-- Time Range --
+----------------
+
+-- One row per expanded instance of a calendar object, used for time-based
+-- queries; rows vanish with their calendar or object (on delete cascade).
+create table TIME_RANGE (
+  INSTANCE_ID                 integer        primary key default nextval('INSTANCE_ID_SEQ'), -- implicit index
+  CALENDAR_RESOURCE_ID        integer        not null references CALENDAR on delete cascade,
+  CALENDAR_OBJECT_RESOURCE_ID integer        not null references CALENDAR_OBJECT on delete cascade,
+  FLOATING                    boolean        not null,
+  START_DATE                  timestamp      not null,
+  END_DATE                    timestamp      not null,
+  FBTYPE                      integer        not null, -- enum FREE_BUSY_TYPE
+  TRANSPARENT                 boolean        not null
+);
+
+create index TIME_RANGE_CALENDAR_RESOURCE_ID on
+  TIME_RANGE(CALENDAR_RESOURCE_ID);
+create index TIME_RANGE_CALENDAR_OBJECT_RESOURCE_ID on
+  TIME_RANGE(CALENDAR_OBJECT_RESOURCE_ID);
+
+
+-- Enumeration of free/busy types
+
+create table FREE_BUSY_TYPE (
+  ID          integer     primary key,
+  DESCRIPTION varchar(16) not null unique
+);
+
+insert into FREE_BUSY_TYPE values (0, 'unknown'         );
+insert into FREE_BUSY_TYPE values (1, 'free'            );
+insert into FREE_BUSY_TYPE values (2, 'busy'            );
+insert into FREE_BUSY_TYPE values (3, 'busy-unavailable');
+insert into FREE_BUSY_TYPE values (4, 'busy-tentative'  );
+
+
+------------------
+-- Transparency --
+------------------
+
+-- Per-user transparency override for a TIME_RANGE instance.  NOTE(review):
+-- no primary key or unique(TIME_RANGE_INSTANCE_ID, USER_ID) is declared --
+-- presumably enforced by the application layer; confirm.
+create table TRANSPARENCY (
+  TIME_RANGE_INSTANCE_ID      integer      not null references TIME_RANGE on delete cascade,
+  USER_ID                     varchar(255) not null,
+  TRANSPARENT                 boolean      not null
+);
+
+create index TRANSPARENCY_TIME_RANGE_INSTANCE_ID on
+  TRANSPARENCY(TIME_RANGE_INSTANCE_ID);
+
+----------------
+-- Attachment --
+----------------
+
+-- Dropbox-style attachment metadata (v12: keyed by dropbox id + path; the
+-- file content itself is stored outside the database).
+create table ATTACHMENT (
+  CALENDAR_HOME_RESOURCE_ID   integer       not null references CALENDAR_HOME,
+  DROPBOX_ID                  varchar(255)  not null,
+  CONTENT_TYPE                varchar(255)  not null,
+  SIZE                        integer       not null,
+  MD5                         char(32)      not null,
+  CREATED                     timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+  MODIFIED                    timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+  PATH                        varchar(1024) not null,
+
+  primary key(DROPBOX_ID, PATH) -- implicit index
+);
+
+create index ATTACHMENT_CALENDAR_HOME_RESOURCE_ID on
+  ATTACHMENT(CALENDAR_HOME_RESOURCE_ID);
+
+-----------------------
+-- Resource Property --
+-----------------------
+
+-- Arbitrary (dead) properties attached to any resource id.  NOTE(review):
+-- VIEWER_UID is declared nullable but participates in the primary key, which
+-- PostgreSQL forces to NOT NULL -- so a NULL "shared" viewer cannot actually
+-- be stored; confirm whether that is intended.
+create table RESOURCE_PROPERTY (
+  RESOURCE_ID integer      not null, -- foreign key: *.RESOURCE_ID
+  NAME        varchar(255) not null,
+  VALUE       text         not null, -- FIXME: xml?
+  VIEWER_UID  varchar(255),
+
+  primary key(RESOURCE_ID, NAME, VIEWER_UID) -- implicit index
+);
+
+
+----------------------
+-- AddressBook Home --
+----------------------
+
+-- Mirrors CALENDAR_HOME for addressbook data.
+create table ADDRESSBOOK_HOME (
+  RESOURCE_ID      integer      primary key default nextval('RESOURCE_ID_SEQ'), -- implicit index
+  OWNER_UID        varchar(255) not null unique,                                -- implicit index
+  DATAVERSION	   integer      default 0 not null
+);
+
+-------------------------------
+-- AddressBook Home Metadata --
+-------------------------------
+
+create table ADDRESSBOOK_HOME_METADATA (
+  RESOURCE_ID      integer      primary key references ADDRESSBOOK_HOME on delete cascade, -- implicit index
+  QUOTA_USED_BYTES integer      default 0 not null,
+  CREATED          timestamp    default timezone('UTC', CURRENT_TIMESTAMP),
+  MODIFIED         timestamp    default timezone('UTC', CURRENT_TIMESTAMP)
+);
+
+-----------------
+-- AddressBook --
+-----------------
+
+create table ADDRESSBOOK (
+  RESOURCE_ID integer   primary key default nextval('RESOURCE_ID_SEQ') -- implicit index
+);
+
+
+--------------------------
+-- AddressBook Metadata --
+--------------------------
+
+create table ADDRESSBOOK_METADATA (
+  RESOURCE_ID integer   primary key references ADDRESSBOOK on delete cascade, -- implicit index
+  CREATED     timestamp default timezone('UTC', CURRENT_TIMESTAMP),
+  MODIFIED    timestamp default timezone('UTC', CURRENT_TIMESTAMP)
+);
+
+
+----------------------
+-- AddressBook Bind --
+----------------------
+
+-- Joins ADDRESSBOOK_HOME and ADDRESSBOOK
+
+-- Mirrors CALENDAR_BIND; note it reuses the CALENDAR_BIND_MODE/STATUS
+-- enumerations rather than defining addressbook-specific ones.
+create table ADDRESSBOOK_BIND (
+  ADDRESSBOOK_HOME_RESOURCE_ID integer      not null references ADDRESSBOOK_HOME,
+  ADDRESSBOOK_RESOURCE_ID      integer      not null references ADDRESSBOOK on delete cascade,
+
+  -- An invitation which hasn't been accepted yet will not yet have a resource
+  -- name, so this field may be null.
+
+  ADDRESSBOOK_RESOURCE_NAME    varchar(255),
+  BIND_MODE                    integer      not null, -- enum CALENDAR_BIND_MODE
+  BIND_STATUS                  integer      not null, -- enum CALENDAR_BIND_STATUS
+  SEEN_BY_OWNER                boolean      not null,
+  SEEN_BY_SHAREE               boolean      not null,
+  MESSAGE                      text,                  -- FIXME: xml?
+
+  primary key(ADDRESSBOOK_HOME_RESOURCE_ID, ADDRESSBOOK_RESOURCE_ID), -- implicit index
+  unique(ADDRESSBOOK_HOME_RESOURCE_ID, ADDRESSBOOK_RESOURCE_NAME)     -- implicit index
+);
+
+create index ADDRESSBOOK_BIND_RESOURCE_ID on
+  ADDRESSBOOK_BIND(ADDRESSBOOK_RESOURCE_ID);
+
+-- One row per stored vCard.  Unlike CALENDAR_OBJECT, per-collection UID
+-- uniqueness is enforced directly in the schema here.
+create table ADDRESSBOOK_OBJECT (
+  RESOURCE_ID             integer      primary key default nextval('RESOURCE_ID_SEQ'),    -- implicit index
+  ADDRESSBOOK_RESOURCE_ID integer      not null references ADDRESSBOOK on delete cascade,
+  RESOURCE_NAME           varchar(255) not null,
+  VCARD_TEXT              text         not null,
+  VCARD_UID               varchar(255) not null,
+  MD5                     char(32)     not null,
+  CREATED                 timestamp    default timezone('UTC', CURRENT_TIMESTAMP),
+  MODIFIED                timestamp    default timezone('UTC', CURRENT_TIMESTAMP),
+
+  unique(ADDRESSBOOK_RESOURCE_ID, RESOURCE_NAME), -- implicit index
+  unique(ADDRESSBOOK_RESOURCE_ID, VCARD_UID)      -- implicit index
+);
+
+---------------
+-- Revisions --
+---------------
+
+-- Shared monotonically-increasing revision counter for all revision tables.
+create sequence REVISION_SEQ;
+
+
+--------------------------------
+-- Calendar Object Revisions  --
+--------------------------------
+
+-- Change log for calendar object resources.  NOTE(review):
+-- CALENDAR_RESOURCE_ID is nullable (no "not null"), presumably so entries
+-- can outlive a deleted collection -- confirm.
+create table CALENDAR_OBJECT_REVISIONS (
+  CALENDAR_HOME_RESOURCE_ID integer      not null references CALENDAR_HOME,
+  CALENDAR_RESOURCE_ID      integer      references CALENDAR,
+  CALENDAR_NAME             varchar(255) default null,
+  RESOURCE_NAME             varchar(255),
+  REVISION                  integer      default nextval('REVISION_SEQ') not null,
+  DELETED                   boolean      not null
+);
+
+create index CALENDAR_OBJECT_REVISIONS_HOME_RESOURCE_ID_CALENDAR_RESOURCE_ID
+  on CALENDAR_OBJECT_REVISIONS(CALENDAR_HOME_RESOURCE_ID, CALENDAR_RESOURCE_ID);
+
+create index CALENDAR_OBJECT_REVISIONS_RESOURCE_ID_RESOURCE_NAME
+  on CALENDAR_OBJECT_REVISIONS(CALENDAR_RESOURCE_ID, RESOURCE_NAME);
+
+create index CALENDAR_OBJECT_REVISIONS_RESOURCE_ID_REVISION
+  on CALENDAR_OBJECT_REVISIONS(CALENDAR_RESOURCE_ID, REVISION);
+
+----------------------------------
+-- AddressBook Object Revisions --
+----------------------------------
+
+-- Mirrors CALENDAR_OBJECT_REVISIONS for addressbook objects.
+create table ADDRESSBOOK_OBJECT_REVISIONS (
+  ADDRESSBOOK_HOME_RESOURCE_ID integer      not null references ADDRESSBOOK_HOME,
+  ADDRESSBOOK_RESOURCE_ID      integer      references ADDRESSBOOK,
+  ADDRESSBOOK_NAME             varchar(255) default null,
+  RESOURCE_NAME                varchar(255),
+  REVISION                     integer      default nextval('REVISION_SEQ') not null,
+  DELETED                      boolean      not null
+);
+
+create index ADDRESSBOOK_OBJECT_REVISIONS_HOME_RESOURCE_ID_ADDRESSBOOK_RESOURCE_ID
+  on ADDRESSBOOK_OBJECT_REVISIONS(ADDRESSBOOK_HOME_RESOURCE_ID, ADDRESSBOOK_RESOURCE_ID);
+
+create index ADDRESSBOOK_OBJECT_REVISIONS_RESOURCE_ID_RESOURCE_NAME
+  on ADDRESSBOOK_OBJECT_REVISIONS(ADDRESSBOOK_RESOURCE_ID, RESOURCE_NAME);
+
+create index ADDRESSBOOK_OBJECT_REVISIONS_RESOURCE_ID_REVISION
+  on ADDRESSBOOK_OBJECT_REVISIONS(ADDRESSBOOK_RESOURCE_ID, REVISION);
+
+-----------------------------------
+-- Notification Object Revisions --
+-----------------------------------
+
+create table NOTIFICATION_OBJECT_REVISIONS (
+  NOTIFICATION_HOME_RESOURCE_ID integer      not null references NOTIFICATION_HOME on delete cascade,
+  RESOURCE_NAME                 varchar(255),
+  REVISION                      integer      default nextval('REVISION_SEQ') not null,
+  DELETED                       boolean      not null,
+
+  unique(NOTIFICATION_HOME_RESOURCE_ID, RESOURCE_NAME) -- implicit index
+);
+
+create index NOTIFICATION_OBJECT_REVISIONS_RESOURCE_ID_REVISION
+  on NOTIFICATION_OBJECT_REVISIONS(NOTIFICATION_HOME_RESOURCE_ID, REVISION);
+
+-------------------------------------------
+-- Apple Push Notification Subscriptions --
+-------------------------------------------
+
+-- One row per (device token, resource key) push subscription.
+create table APN_SUBSCRIPTIONS (
+  TOKEN                         varchar(255) not null,
+  RESOURCE_KEY                  varchar(255) not null,
+  MODIFIED                      integer not null, -- NOTE(review): integer, not timestamp -- presumably an epoch value; confirm
+  SUBSCRIBER_GUID               varchar(255) not null,
+  USER_AGENT                    varchar(255) default null,
+  IP_ADDR                       varchar(255) default null,
+
+  primary key(TOKEN, RESOURCE_KEY) -- implicit index
+);
+
+create index APN_SUBSCRIPTIONS_RESOURCE_KEY
+   on APN_SUBSCRIPTIONS(RESOURCE_KEY);
+
+
+--------------------
+-- Schema Version --
+--------------------
+
+create table CALENDARSERVER (
+  NAME                          varchar(255) primary key, -- implicit index
+  VALUE                         varchar(255)
+);
+
+insert into CALENDARSERVER values ('VERSION', '12');
+insert into CALENDARSERVER values ('CALENDAR-DATAVERSION', '3');
+insert into CALENDARSERVER values ('ADDRESSBOOK-DATAVERSION', '1');

Added: CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/upgrades/postgres-dialect/upgrade_from_12_to_13.sql
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/upgrades/postgres-dialect/upgrade_from_12_to_13.sql	                        (rev 0)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_schema/upgrades/postgres-dialect/upgrade_from_12_to_13.sql	2012-10-31 20:12:45 UTC (rev 9998)
@@ -0,0 +1,52 @@
+----
+-- Copyright (c) 2012 Apple Inc. All rights reserved.
+--
+-- Licensed under the Apache License, Version 2.0 (the "License");
+-- you may not use this file except in compliance with the License.
+-- You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+----
+
+---------------------------------------------------
+-- Upgrade database schema from VERSION 12 to 13 --
+---------------------------------------------------
+
+
+-- Attachment related updates
+
+-- New surrogate-key sequence for attachments (v12 keyed them by
+-- (DROPBOX_ID, PATH) only).
+create sequence ATTACHMENT_ID_SEQ;
+
+
+-- Re-key ATTACHMENT for managed attachments: drop the old composite primary
+-- key, add a sequence-backed ATTACHMENT_ID (existing rows are filled in via
+-- the column default), a DISPLAYNAME, and a STATUS discriminator (default 0
+-- = 'dropbox', correct for all pre-existing rows).  The old key pair is
+-- retained as a unique constraint.
+alter table ATTACHMENT
+ drop constraint ATTACHMENT_PKEY,
+ add column ATTACHMENT_ID integer primary key default nextval('ATTACHMENT_ID_SEQ'),
+ add column DISPLAYNAME varchar(255),
+ add column STATUS integer default 0 not null,
+ add unique(DROPBOX_ID, PATH);
+
+-- Many-to-many link between attachments and the calendar objects that
+-- reference them; rows are removed when either side goes away.
+create table ATTACHMENT_CALENDAR_OBJECT (
+  ATTACHMENT_ID                  integer not null references ATTACHMENT on delete cascade,
+  CALENDAR_OBJECT_RESOURCE_ID    integer not null references CALENDAR_OBJECT on delete cascade,
+
+  primary key(ATTACHMENT_ID, CALENDAR_OBJECT_RESOURCE_ID) -- implicit index
+);
+
+-- Enumeration of attachment kinds: legacy dropbox vs. new managed.
+create table ATTACHMENT_STATUS (
+  ID          integer     primary key,
+  DESCRIPTION varchar(16) not null unique
+);
+
+insert into ATTACHMENT_STATUS values (0, 'dropbox');
+insert into ATTACHMENT_STATUS values (1, 'managed');
+
+
+-- Now update the version
+-- No data upgrades
+update CALENDARSERVER set VALUE = '13' where NAME = 'VERSION';

Modified: CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_tables.py
===================================================================
--- CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_tables.py	2012-10-31 20:11:19 UTC (rev 9997)
+++ CalendarServer/branches/users/cdaboo/managed-attachments/txdav/common/datastore/sql_tables.py	2012-10-31 20:12:45 UTC (rev 9998)
@@ -1,5 +1,5 @@
 # -*- test-case-name: txdav.common.datastore.test.test_sql_tables -*-
-##
+# #
 # Copyright (c) 2010-2012 Apple Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,7 +13,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-##
+# #
 
 """
 SQL Table definitions.
@@ -36,7 +36,7 @@
     """
     Generate the global L{SchemaSyntax}.
     """
-    
+
     if version is None:
         pathObj = getModule(__name__).filePath.sibling("sql_schema").child("current.sql")
     else:
@@ -102,7 +102,7 @@
     result = {}
     for tableRole, tableDictionary in kw.items():
         result.update([("%s:%s" % (tableRole, k), v)
-                       for k,v in tableDictionary.items()])
+                       for k, v in tableDictionary.items()])
     return result
 
 
@@ -114,8 +114,8 @@
     """
     result = {}
     result['name'] = tableSyntax.model.name
-    #pkey = tableSyntax.model.primaryKey
-    #if pkey is not None:
+    # pkey = tableSyntax.model.primaryKey
+    # if pkey is not None:
     #    default = pkey.default
     #    if isinstance(default, Sequence):
     #        result['sequence'] = default.name
@@ -147,18 +147,18 @@
     schema.CALENDAR_BIND_STATUS.ID
 )
 
-_BIND_STATUS_INVITED  = _bindStatus('invited')
+_BIND_STATUS_INVITED = _bindStatus('invited')
 _BIND_STATUS_ACCEPTED = _bindStatus('accepted')
 _BIND_STATUS_DECLINED = _bindStatus('declined')
-_BIND_STATUS_INVALID  = _bindStatus('invalid')
+_BIND_STATUS_INVALID = _bindStatus('invalid')
 
 _attachmentsMode = _schemaConstants(
     schema.CALENDAR_OBJECT_ATTACHMENTS_MODE.DESCRIPTION,
     schema.CALENDAR_OBJECT_ATTACHMENTS_MODE.ID
 )
 
-_ATTACHMENTS_MODE_NONE  = _attachmentsMode('none')
-_ATTACHMENTS_MODE_READ  = _attachmentsMode('read')
+_ATTACHMENTS_MODE_NONE = _attachmentsMode('none')
+_ATTACHMENTS_MODE_READ = _attachmentsMode('read')
 _ATTACHMENTS_MODE_WRITE = _attachmentsMode('write')
 
 
@@ -174,21 +174,31 @@
 _BIND_MODE_DIRECT = _bindMode('direct')
 
 
+_attachmentStatus = _schemaConstants(
+    schema.ATTACHMENT_STATUS.DESCRIPTION,
+    schema.ATTACHMENT_STATUS.ID
+)
+
+
+_ATTACHMENT_STATUS_DROPBOX = _attachmentStatus('dropbox')
+_ATTACHMENT_STATUS_MANAGED = _attachmentStatus('managed')
+
+
 # Compatibility tables for string formatting:
-CALENDAR_HOME_TABLE                 = _S(schema.CALENDAR_HOME)
-CALENDAR_HOME_METADATA_TABLE        = _S(schema.CALENDAR_HOME_METADATA)
-ADDRESSBOOK_HOME_TABLE              = _S(schema.ADDRESSBOOK_HOME)
-ADDRESSBOOK_HOME_METADATA_TABLE     = _S(schema.ADDRESSBOOK_HOME_METADATA)
-NOTIFICATION_HOME_TABLE             = _S(schema.NOTIFICATION_HOME)
-CALENDAR_TABLE                      = _S(schema.CALENDAR)
-ADDRESSBOOK_TABLE                   = _S(schema.ADDRESSBOOK)
-CALENDAR_BIND_TABLE                 = _S(schema.CALENDAR_BIND)
-ADDRESSBOOK_BIND_TABLE              = _S(schema.ADDRESSBOOK_BIND)
-CALENDAR_OBJECT_REVISIONS_TABLE     = _S(schema.CALENDAR_OBJECT_REVISIONS)
-ADDRESSBOOK_OBJECT_REVISIONS_TABLE  = _S(schema.ADDRESSBOOK_OBJECT_REVISIONS)
+CALENDAR_HOME_TABLE = _S(schema.CALENDAR_HOME)
+CALENDAR_HOME_METADATA_TABLE = _S(schema.CALENDAR_HOME_METADATA)
+ADDRESSBOOK_HOME_TABLE = _S(schema.ADDRESSBOOK_HOME)
+ADDRESSBOOK_HOME_METADATA_TABLE = _S(schema.ADDRESSBOOK_HOME_METADATA)
+NOTIFICATION_HOME_TABLE = _S(schema.NOTIFICATION_HOME)
+CALENDAR_TABLE = _S(schema.CALENDAR)
+ADDRESSBOOK_TABLE = _S(schema.ADDRESSBOOK)
+CALENDAR_BIND_TABLE = _S(schema.CALENDAR_BIND)
+ADDRESSBOOK_BIND_TABLE = _S(schema.ADDRESSBOOK_BIND)
+CALENDAR_OBJECT_REVISIONS_TABLE = _S(schema.CALENDAR_OBJECT_REVISIONS)
+ADDRESSBOOK_OBJECT_REVISIONS_TABLE = _S(schema.ADDRESSBOOK_OBJECT_REVISIONS)
 NOTIFICATION_OBJECT_REVISIONS_TABLE = _S(schema.NOTIFICATION_OBJECT_REVISIONS)
-CALENDAR_OBJECT_TABLE               = _S(schema.CALENDAR_OBJECT)
-ADDRESSBOOK_OBJECT_TABLE            = _S(schema.ADDRESSBOOK_OBJECT)
+CALENDAR_OBJECT_TABLE = _S(schema.CALENDAR_OBJECT)
+ADDRESSBOOK_OBJECT_TABLE = _S(schema.ADDRESSBOOK_OBJECT)
 
 # Some combined tables used in join-string-formatting.
 CALENDAR_AND_CALENDAR_BIND = _combine(CHILD=CALENDAR_TABLE,
@@ -322,12 +332,12 @@
                         elif default is False:
                             default = 0
                         out.write(" " + repr(default))
-            if ( (not column.model.canBeNull())
+            if ((not column.model.canBeNull())
                  # Oracle treats empty strings as NULLs, so we have to accept
                  # NULL values in columns of a string type.  Other types should
                  # be okay though.
                  and typeName not in ('varchar', 'nclob', 'char', 'nchar',
-                                      'nvarchar', 'nvarchar2') ):
+                                      'nvarchar', 'nvarchar2')):
                 out.write(' not null')
             if [column.model] in list(table.model.uniques()):
                 out.write(' unique')
@@ -387,11 +397,8 @@
 if __name__ == '__main__':
     import sys
     if len(sys.argv) == 2:
-        # Argument is the name of a old/postgres-dialect file (without the .sql suffix), e.g. "v4" 
+        # Argument is the name of a old/postgres-dialect file (without the .sql suffix), e.g. "v4"
         schema = _populateSchema(sys.argv[1])
     else:
         schema = _populateSchema()
     _translateSchema(sys.stdout, schema=schema)
-
-
-
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.macosforge.org/pipermail/calendarserver-changes/attachments/20121031/c1ce6031/attachment-0001.html>


More information about the calendarserver-changes mailing list