[CalendarServer-changes] [9458] CalendarServer/branches/users/gaya/inviteclean

source_changes at macosforge.org
Mon Jul 16 17:20:25 PDT 2012


Revision: 9458
          http://trac.macosforge.org/projects/calendarserver/changeset/9458
Author:   gaya at apple.com
Date:     2012-07-16 17:20:24 -0700 (Mon, 16 Jul 2012)
Log Message:
-----------
update to trunk, so that free-busy works

Modified Paths:
--------------
    CalendarServer/branches/users/gaya/inviteclean/calendarserver/accesslog.py
    CalendarServer/branches/users/gaya/inviteclean/calendarserver/tools/calverify.py
    CalendarServer/branches/users/gaya/inviteclean/calendarserver/tools/test/test_calverify.py
    CalendarServer/branches/users/gaya/inviteclean/conf/caldavd-test.plist
    CalendarServer/branches/users/gaya/inviteclean/contrib/migration/calendarmigrator.py
    CalendarServer/branches/users/gaya/inviteclean/contrib/migration/test/test_migrator.py
    CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/config.dist.plist
    CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/config.plist
    CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/ical.py
    CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/population.py
    CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/profiles.py
    CalendarServer/branches/users/gaya/inviteclean/contrib/performance/stats.py
    CalendarServer/branches/users/gaya/inviteclean/contrib/performance/test_stats.py
    CalendarServer/branches/users/gaya/inviteclean/contrib/tools/protocolanalysis.py
    CalendarServer/branches/users/gaya/inviteclean/contrib/tools/request_monitor.py
    CalendarServer/branches/users/gaya/inviteclean/twext/web2/log.py
    CalendarServer/branches/users/gaya/inviteclean/twext/web2/server.py
    CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/ldapdirectory.py
    CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/principal.py
    CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/test/test_ldapdirectory.py
    CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/ical.py
    CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/method/report_common.py
    CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/scheduling/ischedule.py
    CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/scheduling/scheduler.py
    CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/stdconfig.py

Modified: CalendarServer/branches/users/gaya/inviteclean/calendarserver/accesslog.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/calendarserver/accesslog.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/calendarserver/accesslog.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -1,5 +1,5 @@
 ##
-# Copyright (c) 2006-2009 Apple Inc. All rights reserved.
+# Copyright (c) 2006-2012 Apple Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -124,8 +124,24 @@
                 formats = [
                     format,
                     # Performance monitoring extensions
-                    'i=%(serverInstance)s t=%(timeSpent).1f or=%(outstandingRequests)s',
+                    'i=%(serverInstance)s or=%(outstandingRequests)s',
                 ]
+
+                # Tags for time stamps collected along the way - the first one in the list is the initial
+                # time for request creation - we use that to track the entire request/response time
+                nowtime = time.time()
+                if config.EnableExtendedTimingAccessLog:
+                    basetime = request.timeStamps[0][1]
+                    request.timeStamps[0] = ("t", time.time(),)
+                    for tag, timestamp in request.timeStamps:
+                        formats.append("%s=%.1f" % (tag, (timestamp - basetime) * 1000))
+                        if tag != "t":
+                            basetime = timestamp
+                    if len(request.timeStamps) > 1:
+                        formats.append("%s=%.1f" % ("t-log", (nowtime - basetime) * 1000))
+                else:
+                    formats.append("%s=%.1f" % ("t", (nowtime - request.timeStamps[0][1]) * 1000))
+
                 if hasattr(request, "extendedLogItems"):
                     for k, v in request.extendedLogItems.iteritems():
                         k = str(k).replace('"', "%22")
@@ -158,7 +174,6 @@
                 "referer"             : request.headers.getHeader("referer", "-"),
                 "userAgent"           : request.headers.getHeader("user-agent", "-"),
                 "serverInstance"      : config.LogID,
-                "timeSpent"           : (time.time() - request.initTime) * 1000,
                 "outstandingRequests" : request.chanRequest.channel.factory.outstandingRequests,
                 "fwd"                 : forwardedFor,
             }
@@ -199,7 +214,7 @@
         self.logpath = logpath
         self.globalHitCount = 0 
         self.globalHitHistory = [] 
-        for i in range(0, config.GlobalStatsLoggingFrequency + 1): 
+        for _ignore in range(0, config.GlobalStatsLoggingFrequency + 1): 
             self.globalHitHistory.append({"time":int(time.time()), "hits":0})
 
     def logMessage(self, message, allowrotate=True):
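
The accesslog.py hunk above replaces the single timeSpent figure with per-phase tags derived from request.timeStamps, a list of (tag, timestamp) pairs whose first entry records request creation. A rough standalone sketch of that formatting logic (the timing_tags helper and the phase tag names in the example are invented for illustration, and extended timing is assumed to be enabled):

    import time

    def timing_tags(timeStamps, nowtime=None):
        # timeStamps: list of (tag, epoch-seconds) pairs; the first entry marks
        # request creation. Returns "tag=milliseconds" strings in the style of
        # the extended access log: total time first ("t"), then chained
        # per-phase deltas, then the gap between the last phase and logging.
        if nowtime is None:
            nowtime = time.time()
        basetime = timeStamps[0][1]
        tags = []
        for tag, timestamp in [("t", nowtime)] + timeStamps[1:]:
            tags.append("%s=%.1f" % (tag, (timestamp - basetime) * 1000))
            if tag != "t":
                basetime = timestamp
        if len(timeStamps) > 1:
            tags.append("t-log=%.1f" % ((nowtime - basetime) * 1000))
        return tags

    # Request created at 0s, hypothetical phases stamped at 2ms and 7ms,
    # log line written at 8ms:
    #   timing_tags([("t", 0.0), ("phase-a", 0.002), ("phase-b", 0.007)], nowtime=0.008)
    #   -> ['t=8.0', 'phase-a=2.0', 'phase-b=5.0', 't-log=1.0']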

Modified: CalendarServer/branches/users/gaya/inviteclean/calendarserver/tools/calverify.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/calendarserver/tools/calverify.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/calendarserver/tools/calverify.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -50,8 +50,7 @@
 from twext.enterprise.dal.syntax import Select, Parameter, Count
 from twisted.application.service import Service
 from twisted.internet.defer import inlineCallbacks, returnValue, succeed
-from twisted.python import log
-from twisted.python.text import wordWrap
+from twisted.python import log, usage
 from twisted.python.usage import Options
 from twistedcaldav import caldavxml
 from twistedcaldav.dateops import pyCalendarTodatetime
@@ -65,15 +64,111 @@
 from txdav.common.icommondatastore import InternalDataStoreError
 import base64
 import collections
-import os
 import sys
 import time
 import traceback
 import uuid
 
-VERSION = "5"
+# Monkey patch
+def new_validRecurrenceIDs(self, doFix=True):
 
-def usage(e=None):
+    fixed = []
+    unfixed = []
+
+    # Detect invalid occurrences and fix by adding RDATEs for them
+    master = self.masterComponent()
+    if master is not None:
+        # Get the set of all recurrence IDs
+        all_rids = set(self.getComponentInstances())
+        if None in all_rids:
+            all_rids.remove(None)
+
+        # If the master has no recurrence properties treat any other components as invalid
+        if master.isRecurring():
+            
+            # Remove all EXDATEs with a matching RECURRENCE-ID. Do this before we start
+            # processing of valid instances just in case the matching R-ID is also not valid and
+            # thus will need RDATE added. 
+            exdates = {}
+            for property in list(master.properties("EXDATE")):
+                for exdate in property.value():
+                    exdates[exdate.getValue()] = property
+            for rid in all_rids:
+                if rid in exdates:
+                    if doFix:
+                        property = exdates[rid]
+                        for value in property.value():
+                            if value.getValue() == rid:
+                                property.value().remove(value)
+                                break
+                        master.removeProperty(property)
+                        if len(property.value()) > 0:
+                            master.addProperty(property)
+                        del exdates[rid]
+                        fixed.append("Removed EXDATE for valid override: %s" % (rid,))
+                    else:
+                        unfixed.append("EXDATE for valid override: %s" % (rid,))
+            
+            # Get the set of all valid recurrence IDs
+            valid_rids = self.validInstances(all_rids, ignoreInvalidInstances=True)
+
+            # Get the set of all RDATEs and add those to the valid set
+            rdates = []
+            for property in master.properties("RDATE"):
+                rdates.extend([_rdate.getValue() for _rdate in property.value()])
+            valid_rids.update(set(rdates))
+
+
+            # Remove EXDATEs predating master
+            dtstart = master.propertyValue("DTSTART")
+            if dtstart is not None:
+                for property in list(master.properties("EXDATE")):
+                    newValues = []
+                    changed = False
+                    for exdate in property.value():
+                        exdateValue = exdate.getValue()
+                        if exdateValue < dtstart:
+                            if doFix:
+                                fixed.append("Removed earlier EXDATE: %s" % (exdateValue,))
+                            else:
+                                unfixed.append("EXDATE earlier than master: %s" % (exdateValue,))
+                            changed = True
+                        else:
+                            newValues.append(exdateValue)
+
+                    if changed and doFix:
+                        # Remove the property...
+                        master.removeProperty(property)
+                        if newValues:
+                            # ...and add it back only if it still has values
+                            property.setValue(newValues)
+                            master.addProperty(property)
+
+
+        else:
+            valid_rids = set()
+
+        # Determine the invalid recurrence IDs by set subtraction
+        invalid_rids = all_rids - valid_rids
+
+        # Add RDATEs for the invalid ones, or remove any EXDATE.
+        for invalid_rid in invalid_rids:
+            brokenComponent = self.overriddenComponent(invalid_rid)
+            brokenRID = brokenComponent.propertyValue("RECURRENCE-ID")
+            if doFix:
+                master.addProperty(Property("RDATE", [brokenRID,]))
+                fixed.append("Added RDATE for invalid occurrence: %s" %
+                    (brokenRID,))
+            else:
+                unfixed.append("Invalid occurrence: %s" % (brokenRID,))
+
+    return fixed, unfixed
+
+Component.validRecurrenceIDs = new_validRecurrenceIDs
+
+VERSION = "6"
+
+def printusage(e=None):
     if e:
         print e
         print ""
@@ -87,17 +182,51 @@
         sys.exit(0)
 
 
-description = ''.join(
-    wordWrap(
-        """
-        Usage: calendarserver_verify_data [options] [input specifiers]
-        """,
-        int(os.environ.get('COLUMNS', '80'))
-    )
-)
-description += "\nVersion: %s" % (VERSION,)
+description = """
+Usage: calendarserver_verify_data [options]
+Version: %s
 
+This tool scans the calendar store to look for and correct any
+problems.
 
+OPTIONS:
+
+Modes of operation:
+
+-h                  : print help and exit.
+--ical              : verify iCalendar data.
+--mismatch          : verify scheduling state.
+--missing           : display orphaned calendar homes - can be used
+                      with either --ical or --mismatch.
+
+--nuke PATH|RID     : remove specific calendar resources - can
+                      only be used by itself. PATH is the full
+                      /calendars/__uids__/XXX/YYY/ZZZ.ics object
+                      resource path, RID is the SQL DB resource-id.
+
+Options for all modes:
+
+--fix      : changes are only made when this is present.        
+--config   : caldavd.plist file for the server.
+-v         : verbose logging
+
+Options for --ical:
+
+--badcua   : only look for data with bad CALENDARSERVER-OLD-CUA.
+--nobase64 : do not apply base64 encoding to CALENDARSERVER-OLD-CUA.
+--uuid     : only scan specified calendar homes. Can be a partial GUID
+             to scan all GUIDs with that as a prefix.
+--uid      : scan only calendar data with the specific iCalendar UID.
+
+Options for --mismatch:
+
+--uid      : look for mismatches with the specified iCalendar UID only.
+--details  : log extended details on each mismatch.
+--tzid     : timezone to adjust details to.
+
+""" % (VERSION,)
+
+
 def safePercent(x, y, multiplier=100.0):
     return ((multiplier * x) / y) if y else 0
 
@@ -123,9 +252,9 @@
 
     optParameters = [
         ['config', 'f', DEFAULT_CONFIG_FILE, "Specify caldavd.plist configuration path."],
-        ['data', 'd', "./calverify-data", "Path where ancillary data is stored."],
         ['uuid', 'u', "", "Only check this user."],
         ['uid', 'U', "", "Only this event UID."],
+        ['nuke', 'e', "", "Remove event given its path"]
     ]
 
 
@@ -133,6 +262,8 @@
         super(CalVerifyOptions, self).__init__()
         self.outputName = '-'
 
+    def getUsage(self, width=None):
+        return ""
 
     def opt_output(self, filename):
         """
@@ -153,7 +284,6 @@
             return open(self.outputName, 'wb')
 
 
-
 class CalVerifyService(Service, object):
     """
     Service which runs, exports the appropriate records, then stops the reactor.
@@ -216,14 +346,17 @@
         self.output.write("\n---- CalVerify version: %s ----\n" % (VERSION,))
 
         try:
-            if self.options["missing"]:
-                yield self.doOrphans()
-                
-            if self.options["mismatch"] or self.options["ical"] or self.options["badcua"]:
-                yield self.doScan(self.options["ical"] or self.options["badcua"], self.options["mismatch"], self.options["fix"])
+            if self.options["nuke"]:
+                yield self.doNuke()
+            else:
+                if self.options["missing"]:
+                    yield self.doOrphans()
+                    
+                if self.options["mismatch"] or self.options["ical"] or self.options["badcua"]:
+                    yield self.doScan(self.options["ical"] or self.options["badcua"], self.options["mismatch"], self.options["fix"])
+    
+                self.printSummary()
 
-            self.printSummary()
-
             self.output.close()
         except:
             log.err()
@@ -232,6 +365,51 @@
 
 
     @inlineCallbacks
+    def doNuke(self):
+        """
+        Remove a resource using either its path or resource id. When doing this do not
+        read the iCalendar data which may be corrupt.
+        """
+
+        self.output.write("\n---- Removing calendar resource ----\n")
+        self.txn = self.store.newTransaction()
+
+        nuke = self.options["nuke"]
+        if nuke.startswith("/calendars/__uids__/"):
+            pathbits = nuke.split("/")
+            if len(pathbits) != 6:
+                printusage("Not a valid calendar object resource path: %s" % (nuke,))
+            homeName = pathbits[3]
+            calendarName = pathbits[4]
+            resourceName = pathbits[5]
+            
+            rid = yield self.getResourceID(homeName, calendarName, resourceName)
+            if rid is None:
+                yield self.txn.commit()
+                self.txn = None
+                self.output.write("\n")
+                self.output.write("Path does not exist. Nothing nuked.\n")
+                returnValue(None)
+            rid = int(rid)
+        else:
+            try:
+                rid = int(nuke)
+            except ValueError:
+                printusage("nuke argument must be a calendar object path or an SQL resource-id")
+        
+        if self.options["fix"]:
+            result = yield self.fixByRemovingEvent(rid)
+            if result:
+                self.output.write("\n")
+                self.output.write("Removed resource: %s.\n" % (rid,))
+        else:
+            self.output.write("\n")
+            self.output.write("Resource: %s.\n" % (rid,))
+        yield self.txn.commit()
+        self.txn = None
+            
+        
+    @inlineCallbacks
     def doOrphans(self):
         """
         Report on home collections for which there are no directory records, or record is for user on
@@ -575,6 +753,30 @@
         returnValue(rows[0])
 
 
+    @inlineCallbacks
+    def getResourceID(self, home, calendar, resource):
+        co = schema.CALENDAR_OBJECT
+        cb = schema.CALENDAR_BIND
+        ch = schema.CALENDAR_HOME
+        
+        kwds = {
+            "home":home,
+            "calendar":calendar,
+            "resource":resource,
+        }
+        rows = (yield Select(
+            [co.RESOURCE_ID],
+            From=ch.join(
+                cb, type="inner", on=(ch.RESOURCE_ID == cb.CALENDAR_HOME_RESOURCE_ID)).join(
+                co, type="inner", on=(cb.CALENDAR_RESOURCE_ID == co.CALENDAR_RESOURCE_ID)),
+            Where=(ch.OWNER_UID == Parameter("home")).And(
+                cb.CALENDAR_RESOURCE_NAME == Parameter("calendar")).And(
+                co.RESOURCE_NAME == Parameter("resource")
+            ),
+        ).on(self.txn, **kwds))
+        returnValue(rows[0][0] if rows else None)
+
+    
     def buildResourceInfo(self, rows, onlyOrganizer=False, onlyAttendee=False):
         skipped = 0
         inboxes = 0
@@ -672,18 +874,18 @@
         yield self.txn.commit()
         self.txn = None
         if self.options["verbose"]:
-                    self.output.write((
-                        "\r" + 
-                        ("%s" % badlen).rjust(rjust) +
-                        ("%s" % count).rjust(rjust) +
-                        ("%s" % total).rjust(rjust) +
-                        ("%d%%" % safePercent(count, total)).rjust(rjust)
-                    ).ljust(80) + "\n")
+            self.output.write((
+                "\r" + 
+                ("%s" % badlen).rjust(rjust) +
+                ("%s" % count).rjust(rjust) +
+                ("%s" % total).rjust(rjust) +
+                ("%d%%" % safePercent(count, total)).rjust(rjust)
+            ).ljust(80) + "\n")
         
         # Print table of results
         table = tables.Table()
         table.addHeader(("Owner", "Event UID", "RID", "Problem",))
-        for item in results_bad:
+        for item in sorted(results_bad, key=lambda x:(x[0],x[1])):
             owner, uid, resid, message = item
             owner_record = self.directoryService().recordWithGUID(owner)
             table.addRow((
@@ -704,7 +906,7 @@
             diff_time = time.time() - t
             self.output.write("Time: %.2f s  Average: %.1f ms/resource\n" % (
                 diff_time,
-                (1000.0 * diff_time) / total,
+                safePercent(diff_time, total, 1000.0),
             ))
 
     errorPrefix = "Calendar data had unfixable problems:\n  "
@@ -1654,16 +1856,22 @@
     if reactor is None:
         from twisted.internet import reactor
     options = CalVerifyOptions()
-    options.parseOptions(argv[1:])
     try:
+        options.parseOptions(argv[1:])
+    except usage.UsageError, e:
+        printusage(e)
+
+    try:
         output = options.openOutput()
     except IOError, e:
         stderr.write("Unable to open output file for writing: %s\n" % (e))
         sys.exit(1)
+
     def makeService(store):
         from twistedcaldav.config import config
         config.TransactionTimeoutSeconds = 0
         return CalVerifyService(store, options, output, reactor, config)
+
     utilityMain(options['config'], makeService, reactor)
 
 if __name__ == '__main__':
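
Most of the calverify.py changes above are plumbing for the new --nuke mode and the hand-written help text; the monkey-patched validRecurrenceIDs is the substantive part. It reduces to set arithmetic over recurrence ids: anything the master's rule or RDATEs can produce is valid, any overridden instance outside that set needs an RDATE, and EXDATEs that shadow an existing override or predate DTSTART are removed. A toy illustration of that arithmetic with plain strings instead of pyCalendar objects (classify_overrides is invented for this sketch; the sample values come from the BAD11 test case added below):

    def classify_overrides(all_rids, rule_generated, rdates, exdates, dtstart):
        # all_rids:       RECURRENCE-IDs of the overridden components present
        # rule_generated: instances the master's RRULE actually produces
        # rdates/exdates: values of the master's RDATE / EXDATE properties
        valid = set(rule_generated) | set(rdates)
        invalid = set(all_rids) - valid                 # add an RDATE for these
        shadowing = set(exdates) & set(all_rids)        # EXDATE hides an override
        early = set(e for e in exdates if e < dtstart)  # EXDATE before DTSTART
        return invalid, shadowing | early               # (RDATEs to add, EXDATEs to drop)

    # BAD11_ICS below is a weekly event starting 20100307T111500Z with an
    # override at 20100314T111500Z that is also EXDATEd:
    #   classify_overrides(
    #       all_rids={"20100314T111500Z"},
    #       rule_generated={"20100307T111500Z", "20100314T111500Z"},
    #       rdates=[], exdates=["20100314T111500Z"], dtstart="20100307T111500Z")
    #   -> (set(), set(["20100314T111500Z"]))  # nothing to RDATE; drop the EXDATE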

Modified: CalendarServer/branches/users/gaya/inviteclean/calendarserver/tools/test/test_calverify.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/calendarserver/tools/test/test_calverify.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/calendarserver/tools/test/test_calverify.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -359,8 +359,39 @@
 END:VCALENDAR
 """.replace("\n", "\r\n")
 
+# Bad recurrence EXDATE
+BAD11_ICS = """BEGIN:VCALENDAR
+VERSION:2.0
+PRODID:-//Apple Inc.//iCal 4.0.1//EN
+CALSCALE:GREGORIAN
+BEGIN:VEVENT
+CREATED:20100303T181216Z
+UID:BAD11
+DTEND:20100307T151500Z
+TRANSP:OPAQUE
+SUMMARY:Ancient event
+DTSTART:20100307T111500Z
+DTSTAMP:20100303T181220Z
+EXDATE:20100314T111500Z
+RRULE:FREQ=WEEKLY
+SEQUENCE:2
+END:VEVENT
+BEGIN:VEVENT
+CREATED:20100303T181216Z
+UID:BAD11
+RECURRENCE-ID:20100314T111500Z
+DTEND:20100314T151500Z
+TRANSP:OPAQUE
+SUMMARY:Ancient event
+DTSTART:20100314T111500Z
+DTSTAMP:20100303T181220Z
+SEQUENCE:2
+END:VEVENT
+END:VCALENDAR
+""".replace("\n", "\r\n")
 
 
+
 class CalVerifyDataTests(CommonCommonTests, unittest.TestCase):
     """
     Tests calverify for iCalendar data problems.
@@ -388,6 +419,7 @@
                 "ok8.ics"  : (OK8_ICS, metadata,),
                 "bad9.ics" : (BAD9_ICS, metadata,),
                 "bad10.ics" : (BAD10_ICS, metadata,),
+                "bad11.ics" : (BAD11_ICS, metadata,),
             }
         },
     }
@@ -488,7 +520,7 @@
         calverify.emailDomain = "example.com"
         yield calverify.doScan(True, False, False)
 
-        self.assertEqual(calverify.results["Number of events to process"], 11)
+        self.assertEqual(calverify.results["Number of events to process"], 12)
         self.verifyResultsByUID(calverify.results["Bad iCalendar data"], set((
             ("home1", "BAD1",),
             ("home1", "BAD2",),
@@ -499,6 +531,7 @@
             ("home1", "BAD7",),
             ("home1", "BAD9",),
             ("home1", "BAD10",),
+            ("home1", "BAD11",),
         )))
 
         sync_token_new = (yield (yield self.calendarUnderTest()).syncToken())
@@ -532,7 +565,7 @@
         calverify.emailDomain = "example.com"
         yield calverify.doScan(True, False, True)
 
-        self.assertEqual(calverify.results["Number of events to process"], 11)
+        self.assertEqual(calverify.results["Number of events to process"], 12)
         self.verifyResultsByUID(calverify.results["Bad iCalendar data"], set((
             ("home1", "BAD1",),
             ("home1", "BAD2",),
@@ -543,6 +576,7 @@
             ("home1", "BAD7",),
             ("home1", "BAD9",),
             ("home1", "BAD10",),
+            ("home1", "BAD11",),
         )))
 
         # Do scan
@@ -550,7 +584,7 @@
         calverify.emailDomain = "example.com"
         yield calverify.doScan(True, False, False)
 
-        self.assertEqual(calverify.results["Number of events to process"], 11)
+        self.assertEqual(calverify.results["Number of events to process"], 12)
         self.verifyResultsByUID(calverify.results["Bad iCalendar data"], set((
             ("home1", "BAD1",),
         )))
@@ -595,7 +629,7 @@
         calverify.emailDomain = "example.com"
         yield calverify.doScan(True, False, False)
 
-        self.assertEqual(calverify.results["Number of events to process"], 11)
+        self.assertEqual(calverify.results["Number of events to process"], 12)
         self.verifyResultsByUID(calverify.results["Bad iCalendar data"], set((
             ("home1", "BAD4",),
             ("home1", "BAD5",),
@@ -636,7 +670,7 @@
         calverify.emailDomain = "example.com"
         yield calverify.doScan(True, False, True)
 
-        self.assertEqual(calverify.results["Number of events to process"], 11)
+        self.assertEqual(calverify.results["Number of events to process"], 12)
         self.verifyResultsByUID(calverify.results["Bad iCalendar data"], set((
             ("home1", "BAD4",),
             ("home1", "BAD5",),
@@ -651,7 +685,7 @@
         calverify.emailDomain = "example.com"
         yield calverify.doScan(True, False, False)
 
-        self.assertEqual(calverify.results["Number of events to process"], 11)
+        self.assertEqual(calverify.results["Number of events to process"], 12)
         self.verifyResultsByUID(calverify.results["Bad iCalendar data"], set((
         )))
 

Modified: CalendarServer/branches/users/gaya/inviteclean/conf/caldavd-test.plist
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/conf/caldavd-test.plist	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/conf/caldavd-test.plist	2012-07-17 00:20:24 UTC (rev 9458)
@@ -267,7 +267,9 @@
               <key>fullName</key>
               <string>cn</string>
               <key>emailAddresses</key>
-              <string>mail</string>
+              <array>
+                  <string>mail</string>
+              </array>
               <key>firstName</key>
               <string>givenName</string>
               <key>lastName</key>
@@ -291,7 +293,9 @@
               <key>fullName</key>
               <string>cn</string>
               <key>emailAddresses</key>
-              <string>mail</string>
+              <array>
+                  <string>mail</string>
+              </array>
               <key>firstName</key>
               <string>givenName</string>
               <key>lastName</key>

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/migration/calendarmigrator.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/migration/calendarmigrator.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/migration/calendarmigrator.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -596,7 +596,8 @@
                 newDocumentRoot = os.path.join(newDataRoot, newDocumentRootValue)
                 # Move aside whatever is there
                 if diskAccessor.exists(newDataRoot):
-                    diskAccessor.rename(newDataRoot, newDataRoot + ".bak")
+                    renameTo = nextAvailable(newDataRoot + ".bak", diskAccessor=diskAccessor)
+                    diskAccessor.rename(newDataRoot, renameTo)
 
                 if diskAccessor.exists(absolutePathWithRoot(sourceRoot, oldCalDataRootValueProcessed)):
                     diskAccessor.ditto(
@@ -768,6 +769,31 @@
         return os.path.join(root, path)
 
 
+def nextAvailable(path, diskAccessor=None):
+    """
+    If path doesn't exist, return path.  Otherwise return the first path name
+    following the path.NNN pattern that doesn't exist, where NNN starts at 1
+    and increments until a non-existent path name is determined.
+
+    @param path: path to examine
+    @type path: C{str}
+    @returns: non-existent path name C{str}
+    """
+
+    if diskAccessor is None:
+        diskAccessor = DiskAccessor()
+
+    if not diskAccessor.exists(path):
+        return path
+
+    i = 1
+    while(True):
+        newPath = "%s.%d" % (path, i)
+        if not diskAccessor.exists(newPath):
+            return newPath
+        i += 1
+
+
 class DiskAccessor(object):
     """
     A wrapper around various disk access methods so that unit tests can easily
@@ -795,7 +821,7 @@
             return os.rename(before, after)
         except OSError:
             # Can't rename because it's cross-volume; must copy/delete
-            shutil.copy2(before, after)
+            self.ditto(before, after)
             return os.remove(before)
 
     def isfile(self, path):
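
The new nextAvailable() above simply probes path, path.1, path.2, ... and returns the first name that does not yet exist, so repeated migrations accumulate distinct .bak, .bak.1, .bak.2 backups instead of overwriting the previous one. A quick self-contained illustration against an in-memory set of existing paths (the real function goes through a DiskAccessor, and the paths here are placeholders):

    existing = set([
        "/Volumes/External/CS/Data.bak",      # placeholder backup paths
        "/Volumes/External/CS/Data.bak.1",
    ])

    def exists(path):
        return path in existing

    def next_available(path):
        # Same probing as nextAvailable(): the path itself if free, otherwise
        # path.1, path.2, ... until an unused name turns up.
        if not exists(path):
            return path
        i = 1
        while True:
            candidate = "%s.%d" % (path, i)
            if not exists(candidate):
                return candidate
            i += 1

    print next_available("/Volumes/External/CS/Data.bak")   # -> .../Data.bak.2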

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/migration/test/test_migrator.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/migration/test/test_migrator.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/migration/test/test_migrator.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -17,7 +17,7 @@
 import twistedcaldav.test.util
 from contrib.migration.calendarmigrator import (
     mergePlist, examinePreviousSystem, relocateData, relativize, isServiceDisabled,
-    ServiceStateError
+    ServiceStateError, nextAvailable
 )
 import contrib.migration.calendarmigrator
 
@@ -1039,6 +1039,9 @@
 
                 "/Volumes/External/CalendarServer/Documents/calendars/" : True,
                 "/Volumes/External/CalendarServer/Calendar and Contacts Data/" : True,
+                "/Volumes/External/CalendarServer/Calendar and Contacts Data.bak/" : True,
+                "/Volumes/External/CalendarServer/Calendar and Contacts Data.bak.1/" : True,
+                "/Volumes/External/CalendarServer/Calendar and Contacts Data.bak.2/" : True,
                 "/Library/Server/Previous/Library/CalendarServer/Data/" : True,
                 "/Volumes/External/AddressBookServer/Documents/addressbooks/" : True,
                 "/Library/Server/Previous/Library/AddressBookServer/Data/" : True,
@@ -1061,7 +1064,7 @@
             [   # expected DiskAccessor history
                 ('rename',
                  '/Volumes/External/CalendarServer/Calendar and Contacts Data',
-                 '/Volumes/External/CalendarServer/Calendar and Contacts Data.bak'),
+                 '/Volumes/External/CalendarServer/Calendar and Contacts Data.bak.3'),
                 ('ditto', '/Library/Server/Previous/Library/CalendarServer/Data', '/Volumes/External/CalendarServer/Calendar and Contacts Data'),
                 ('rename', '/Volumes/External/CalendarServer/Documents', '/Volumes/External/CalendarServer/Calendar and Contacts Data/Documents'),
                 ('chown-recursive', '/Volumes/External/CalendarServer/Calendar and Contacts Data', FakeUser.pw_uid, FakeGroup.gr_gid),
@@ -1505,6 +1508,18 @@
             self.assertEquals(history, accessor.history)
 
 
+    def test_nextAvailable(self):
+        data = [
+            ( { }, "a.bak" ),
+            ( { "a.bak": True }, "a.bak.1" ),
+            ( { "a.bak": True, "a.bak.1" : True }, "a.bak.2" ),
+        ]
+        for paths, expected in data:
+            accessor = StubDiskAccessor(paths)
+            actual = nextAvailable("a.bak", diskAccessor=accessor)
+            self.assertEquals(actual, expected)
+
+
     def test_stubDiskAccessor(self):
 
         paths = {

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/config.dist.plist
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/config.dist.plist	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/config.dist.plist	2012-07-17 00:20:24 UTC (rev 9458)
@@ -210,6 +210,52 @@
 									<string>America/Los_Angeles</string>
 								</dict>
 							</dict>
+
+							<!-- Define how recurrences are created. -->
+							<key>recurrenceDistribution</key>
+							<dict>
+
+								<!-- This distribution is pretty specialized.  We have a fixed set of
+								     RRULEs defined for this distribution and pick each based on a
+								     weight. -->
+								<key>type</key>
+								<string>contrib.performance.stats.RecurrenceDistribution</string>
+
+								<key>params</key>
+								<dict>
+									<!-- False to disable RRULEs -->
+									<key>allowRecurrence</key>
+									<true/>
+
+									<!-- These are the weights for the specific set of RRULEs. -->
+									<key>weights</key>
+									<dict>
+										<!-- Half of all events will be non-recurring -->
+										<key>none</key>
+										<integer>50</integer>
+										
+										<!-- Daily and weekly are pretty common -->
+										<key>daily</key>
+										<integer>10</integer>
+										<key>weekly</key>
+										<integer>20</integer>
+										
+										<!-- Monthly, yearly, daily & weekly limit not so common -->
+										<key>monthly</key>
+										<integer>2</integer>
+										<key>yearly</key>
+										<integer>1</integer>
+										<key>dailylimit</key>
+										<integer>2</integer>
+										<key>weeklylimit</key>
+										<integer>5</integer>
+										
+										<!-- Work days pretty common -->
+										<key>workdays</key>
+										<integer>10</integer>
+									</dict>
+								</dict>
+							</dict>
 						</dict>
 					</dict>
 
@@ -336,8 +382,8 @@
 									<!-- maximum -->
 									<key>maximum</key>
 									<real>100</real>
-						</dict>
-					</dict>
+								</dict>
+							</dict>
 
 							<!-- Define how start times (DTSTART) for the randomly generated events 
 								will be selected. This is an example of a "Distribution" parameter. The value 
@@ -378,6 +424,52 @@
 									<string>America/Los_Angeles</string>
 								</dict>
 							</dict>
+
+							<!-- Define how recurrences are created. -->
+							<key>recurrenceDistribution</key>
+							<dict>
+
+								<!-- This distribution is pretty specialized.  We have a fixed set of
+								     RRULEs defined for this distribution and pick each based on a
+								     weight. -->
+								<key>type</key>
+								<string>contrib.performance.stats.RecurrenceDistribution</string>
+
+								<key>params</key>
+								<dict>
+									<!-- False to disable RRULEs -->
+									<key>allowRecurrence</key>
+									<true/>
+
+									<!-- These are the weights for the specific set of RRULEs. -->
+									<key>weights</key>
+									<dict>
+										<!-- Half of all events will be non-recurring -->
+										<key>none</key>
+										<integer>50</integer>
+										
+										<!-- Daily and weekly are pretty common -->
+										<key>daily</key>
+										<integer>10</integer>
+										<key>weekly</key>
+										<integer>20</integer>
+										
+										<!-- Monthly, yearly, daily & weekly limit not so common -->
+										<key>monthly</key>
+										<integer>2</integer>
+										<key>yearly</key>
+										<integer>1</integer>
+										<key>dailylimit</key>
+										<integer>2</integer>
+										<key>weeklylimit</key>
+										<integer>5</integer>
+										
+										<!-- Work days pretty common -->
+										<key>workdays</key>
+										<integer>10</integer>
+									</dict>
+								</dict>
+							</dict>
 						</dict>
 					</dict>
 

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/config.plist
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/config.plist	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/config.plist	2012-07-17 00:20:24 UTC (rev 9458)
@@ -204,6 +204,52 @@
 									<string>America/Los_Angeles</string>
 								</dict>
 							</dict>
+
+							<!-- Define how recurrences are created. -->
+							<key>recurrenceDistribution</key>
+							<dict>
+
+								<!-- This distribution is pretty specialized.  We have a fixed set of
+								     RRULEs defined for this distribution and pick each based on a
+								     weight. -->
+								<key>type</key>
+								<string>contrib.performance.stats.RecurrenceDistribution</string>
+
+								<key>params</key>
+								<dict>
+									<!-- False to disable RRULEs -->
+									<key>allowRecurrence</key>
+									<true/>
+
+									<!-- These are the weights for the specific set of RRULEs. -->
+									<key>weights</key>
+									<dict>
+										<!-- Half of all events will be non-recurring -->
+										<key>none</key>
+										<integer>50</integer>
+										
+										<!-- Daily and weekly are pretty common -->
+										<key>daily</key>
+										<integer>10</integer>
+										<key>weekly</key>
+										<integer>20</integer>
+										
+										<!-- Monthly, yearly, daily & weekly limit not so common -->
+										<key>monthly</key>
+										<integer>2</integer>
+										<key>yearly</key>
+										<integer>1</integer>
+										<key>dailylimit</key>
+										<integer>2</integer>
+										<key>weeklylimit</key>
+										<integer>5</integer>
+										
+										<!-- Work days pretty common -->
+										<key>workdays</key>
+										<integer>10</integer>
+									</dict>
+								</dict>
+							</dict>
 						</dict>
 					</dict>
 
@@ -372,6 +418,52 @@
 									<string>America/Los_Angeles</string>
 								</dict>
 							</dict>
+
+							<!-- Define how recurrences are created. -->
+							<key>recurrenceDistribution</key>
+							<dict>
+
+								<!-- This distribution is pretty specialized.  We have a fixed set of
+								     RRULEs defined for this distribution and pick each based on a
+								     weight. -->
+								<key>type</key>
+								<string>contrib.performance.stats.RecurrenceDistribution</string>
+
+								<key>params</key>
+								<dict>
+									<!-- False to disable RRULEs -->
+									<key>allowRecurrence</key>
+									<true/>
+
+									<!-- These are the weights for the specific set of RRULEs. -->
+									<key>weights</key>
+									<dict>
+										<!-- Half of all events will be non-recurring -->
+										<key>none</key>
+										<integer>50</integer>
+										
+										<!-- Daily and weekly are pretty common -->
+										<key>daily</key>
+										<integer>10</integer>
+										<key>weekly</key>
+										<integer>20</integer>
+										
+										<!-- Monthly, yearly, daily & weekly limit not so common -->
+										<key>monthly</key>
+										<integer>2</integer>
+										<key>yearly</key>
+										<integer>1</integer>
+										<key>dailylimit</key>
+										<integer>2</integer>
+										<key>weeklylimit</key>
+										<integer>5</integer>
+										
+										<!-- Work days pretty common -->
+										<key>workdays</key>
+										<integer>10</integer>
+									</dict>
+								</dict>
+							</dict>
 						</dict>
 					</dict>
 

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/ical.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/ical.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/ical.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -342,7 +342,7 @@
         calendarHomePollInterval=None,
         supportPush=True,
         supportAmpPush=True,
-        ampPushHost="localhost",
+        ampPushHost=None,
         ampPushPort=62311,
     ):
         
@@ -366,6 +366,8 @@
         self.supportPush = supportPush
 
         self.supportAmpPush = supportAmpPush
+        if ampPushHost is None:
+            ampPushHost = urlparse(self.root)[1].split(":")[0]
         self.ampPushHost = ampPushHost
         self.ampPushPort = ampPushPort
 
@@ -886,7 +888,11 @@
     
             multistatus = yield self._eventReport(calendar.url, batchedHrefs)
             for responseHref in batchedHrefs:
-                res = multistatus[responseHref]
+                try:
+                    res = multistatus[responseHref]
+                except KeyError:
+                    # Resource might have been deleted
+                    continue
                 if res.getStatus() == 200:
                     text = res.getTextProperties()
                     etag = text[davxml.getetag]
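
With the ical.py change above, a client that is not handed an explicit ampPushHost now derives it from the network location of its root URL instead of assuming localhost. The expression is the plain urlparse netloc split from the standard library, which can be checked in isolation (the URL below is a placeholder):

    from urlparse import urlparse   # Python 2 stdlib, as used by the load tester

    root = "https://caldav.example.com:8443/principals/"
    ampPushHost = urlparse(root)[1].split(":")[0]
    print ampPushHost               # -> caldav.example.com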

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/population.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/population.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/population.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -381,7 +381,7 @@
         # Load parameters from config 
         if "thresholdsPath" in params:
             jsondata = json.load(open(params["thresholdsPath"]))
-        if "thresholds" in params:
+        elif "thresholds" in params:
             jsondata = params["thresholds"]
         else:
             jsondata = self._thresholds_default

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/profiles.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/profiles.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/performance/loadtest/profiles.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -37,7 +37,7 @@
 from twistedcaldav.ical import Property, Component
 
 from contrib.performance.stats import NearFutureDistribution, NormalDistribution, UniformDiscreteDistribution, mean, median
-from contrib.performance.stats import LogNormalDistribution
+from contrib.performance.stats import LogNormalDistribution, RecurrenceDistribution
 from contrib.performance.loadtest.logger import SummarizingMixin
 from contrib.performance.loadtest.ical import IncorrectResponseCode
 
@@ -298,7 +298,8 @@
             15 * 60, 30 * 60,
             45 * 60, 60 * 60,
             120 * 60
-        ])
+        ]),
+        recurrenceDistribution=RecurrenceDistribution(False),
     ):
         self.enabled = enabled
         self._sendInvitationDistribution = sendInvitationDistribution
@@ -306,6 +307,7 @@
         self._inviteeCountDistribution = inviteeCountDistribution
         self._eventStartDistribution = eventStartDistribution
         self._eventDurationDistribution = eventDurationDistribution
+        self._recurrenceDistribution = recurrenceDistribution
 
 
     def run(self):
@@ -386,6 +388,10 @@
             vevent.replaceProperty(Property("DTSTART", dtstart))
             vevent.replaceProperty(Property("DTEND", dtend))
             vevent.replaceProperty(Property("UID", uid))
+            
+            rrule = self._recurrenceDistribution.sample()
+            if rrule is not None:
+                vevent.addProperty(Property(None, None, None, pycalendar=rrule))
 
             vevent.addProperty(self._client._makeSelfOrganizer())
             vevent.addProperty(self._client._makeSelfAttendee())
@@ -582,12 +588,14 @@
             15 * 60, 30 * 60,
             45 * 60, 60 * 60,
             120 * 60
-        ])
+        ]),
+        recurrenceDistribution=RecurrenceDistribution(False),
     ):
         self.enabled = enabled
         self._interval = interval
         self._eventStartDistribution = eventStartDistribution
         self._eventDurationDistribution = eventDurationDistribution
+        self._recurrenceDistribution = recurrenceDistribution
 
 
     def run(self):
@@ -618,6 +626,10 @@
             vevent.replaceProperty(Property("DTSTART", dtstart))
             vevent.replaceProperty(Property("DTEND", dtend))
             vevent.replaceProperty(Property("UID", uid))
+            
+            rrule = self._recurrenceDistribution.sample()
+            if rrule is not None:
+                vevent.addProperty(Property(None, None, None, pycalendar=rrule))
 
             href = '%s%s.ics' % (calendar.url, uid)
             d = self._client.addEvent(href, vcalendar)
@@ -735,7 +747,7 @@
         # Load parameters from config 
         if "thresholdsPath" in params:
             jsondata = json.load(open(params["thresholdsPath"]))
-        if "thresholds" in params:
+        elif "thresholds" in params:
             jsondata = params["thresholds"]
         else:
             jsondata = self._thresholds_default

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/performance/stats.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/performance/stats.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/performance/stats.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -24,6 +24,7 @@
 from pycalendar.datetime import PyCalendarDateTime
 from pycalendar.duration import PyCalendarDuration
 from pycalendar.timezone import PyCalendarTimezone
+from pycalendar.property import PyCalendarProperty
 
 NANO = 1000000000.0
 
@@ -222,7 +223,7 @@
 
 
 class IPopulation(Interface):
-    def sample():
+    def sample(): #@NoSelf
         pass
 
 
@@ -234,14 +235,16 @@
 
     compareAttributes = ['_values']
 
-    def __init__(self, values):
+    def __init__(self, values, randomize=True):
         self._values = values
+        self._randomize = randomize
         self._refill()
 
 
     def _refill(self):
         self._remaining = self._values[:]
-        random.shuffle(self._remaining)
+        if self._randomize:
+            random.shuffle(self._remaining)
 
 
     def sample(self):
@@ -378,6 +381,42 @@
             offset.setDuration(offset.getTotalSeconds() - (end - start).getTotalSeconds())
             beginning = end
 
+class RecurrenceDistribution(object, FancyEqMixin):
+    compareAttributes = ["_allowRecurrence", "_weights"]
+
+    _model_rrules = {
+        "none":        None,
+        "daily":       "RRULE:FREQ=DAILY",
+        "weekly":      "RRULE:FREQ=WEEKLY",
+        "monthly":     "RRULE:FREQ=MONTHLY",
+        "yearly":      "RRULE:FREQ=YEARLY",
+        "dailylimit":  "RRULE:FREQ=DAILY;COUNT=14",
+        "weeklylimit": "RRULE:FREQ=WEEKLY;COUNT=4",
+        "workdays":    "RRULE:FREQ=DAILY;BYDAY=MO,TU,WE,TH,FR"
+    } 
+
+    def __init__(self, allowRecurrence, weights={}):
+        self._allowRecurrence = allowRecurrence
+        self._rrules = []
+        if self._allowRecurrence:
+            for rrule, count in sorted(weights.items(), key=lambda x:x[0]):
+                for _ignore in range(count):
+                    self._rrules.append(self._model_rrules[rrule])
+        self._helperDistribution = UniformIntegerDistribution(0, len(self._rrules)-1)
+
+
+    def sample(self):
+        
+        if self._allowRecurrence:
+            index = self._helperDistribution.sample()
+            rrule = self._rrules[index]
+            if rrule:
+                prop = PyCalendarProperty()
+                prop.parse(rrule)
+                return prop
+        
+        return None
+
 if __name__ == '__main__':
     
     from collections import defaultdict
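
RecurrenceDistribution above expands its weights dict into a flat list with one entry per unit of weight ("none" standing for no RRULE) and then samples an index uniformly, so each RRULE's probability is weight/total. A small usage sketch, assuming contrib.performance.stats is importable and reusing the weights from the load-test plists earlier in this change (they sum to 100, so each weight reads directly as a percentage):

    from contrib.performance.stats import RecurrenceDistribution

    weights = {
        "none": 50, "daily": 10, "weekly": 20, "monthly": 2,
        "yearly": 1, "dailylimit": 2, "weeklylimit": 5, "workdays": 10,
    }
    dist = RecurrenceDistribution(True, weights)

    # Roughly half the samples come back as None (no recurrence); the rest are
    # parsed PyCalendarProperty RRULEs ready to be attached to a VEVENT.
    for _ignore in range(10):
        rrule = dist.sample()
        print "one-off event" if rrule is None else "recurring event"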

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/performance/test_stats.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/performance/test_stats.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/performance/test_stats.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -1,5 +1,5 @@
 ##
-# Copyright (c) 2010 Apple Inc. All rights reserved.
+# Copyright (c) 2010-2012 Apple Inc. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -18,7 +18,8 @@
 
 from stats import (
     SQLDuration, LogNormalDistribution, UniformDiscreteDistribution,
-    UniformIntegerDistribution, WorkDistribution, quantize)
+    UniformIntegerDistribution, WorkDistribution, quantize,
+    RecurrenceDistribution)
 from pycalendar.datetime import PyCalendarDateTime
 from pycalendar.timezone import PyCalendarTimezone
 
@@ -91,6 +92,25 @@
     #test_workdistribution.todo = "Somehow timezones mess this up"
 
 
+    def test_recurrencedistribution(self):
+        dist = RecurrenceDistribution(False)
+        for _ignore in range(100):
+            value = dist.sample()
+            self.assertTrue(value is None)
+
+        dist = RecurrenceDistribution(True, {"daily":1, "none":2, "weekly":1})
+        dist._helperDistribution = UniformDiscreteDistribution([0, 3, 2, 1, 0], randomize=False)
+        value = dist.sample()
+        self.assertTrue(value is not None)
+        value = dist.sample()
+        self.assertTrue(value is None)
+        value = dist.sample()
+        self.assertTrue(value is None)
+        value = dist.sample()
+        self.assertTrue(value is not None)
+        value = dist.sample()
+        self.assertTrue(value is not None)
+
     def test_uniform(self):
         dist = UniformIntegerDistribution(-5, 10)
         for _ignore_i in range(100):

Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/tools/protocolanalysis.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/tools/protocolanalysis.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/tools/protocolanalysis.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -16,7 +16,6 @@
 ##
 
 from gzip import GzipFile
-import math
 import collections
 import datetime
 import getopt
@@ -203,8 +202,9 @@
 
     class LogLine(object):
         
-        def __init__(self, userid, logDateTime, logTime, method, uri, status, reqbytes, referer, client, extended):
+        def __init__(self, ipaddr, userid, logDateTime, logTime, method, uri, status, reqbytes, referer, client, extended):
 
+            self.ipaddr = ipaddr
             self.userid = userid
             self.logDateTime = logDateTime
             self.logTime = logTime
@@ -230,7 +230,7 @@
         self.startHour = startHour
         self.endHour = endHour
         self.utcoffset = utcoffset
-        self.logStart = None
+        self.adjustHour = None
         self.filterByUser = filterByUser
         self.filterByClient = filterByClient
         self.ignoreNonHTTPMethods = ignoreNonHTTPMethods
@@ -247,7 +247,7 @@
 
         self.hourlyTotals = [[0, 0, 0, collections.defaultdict(int), 0.0,] for _ignore in xrange(self.timeBucketCount)]
         
-        self.clientTotals = collections.defaultdict(lambda:[0, set()])
+        self.clientTotals = collections.defaultdict(lambda:[0, set(), set()])
         self.clientIDMap = {}
         self.clientByMethodCount = collections.defaultdict(lambda:collections.defaultdict(int))
         self.clientIDByMethodCount = {}
@@ -285,9 +285,9 @@
         self.otherUserCalendarRequests = {}
 
         self.currentLine = None
-        self.linesRead = 0
+        self.linesRead = collections.defaultdict(int)
         
-    def analyzeLogFile(self, logFilePath, lineCtr):
+    def analyzeLogFile(self, logFilePath):
         fpath = os.path.expanduser(logFilePath)
         if fpath.endswith(".gz"):
             f = GzipFile(fpath)
@@ -296,11 +296,12 @@
             
         self.maxIndex = (self.endHour - self.startHour + 1) * 60 / self.resolutionMinutes
         try:
+            lineCtr = 0
             for line in f:
                 lineCtr += 1
-                if lineCtr <= self.linesRead:
+                if lineCtr <= self.linesRead[logFilePath]:
                     continue
-                self.linesRead += 1
+                self.linesRead[logFilePath] += 1
                 if line.startswith("Log"):
                     continue
         
@@ -318,9 +319,9 @@
                 logHour = int(self.currentLine.logTime[0:2])
                 logMinute = int(self.currentLine.logTime[3:5])
                 
-                if self.logStart is None:
-                    self.logStart = logHour
-                hourFromStart = logHour - self.logStart - self.startHour
+                if self.adjustHour is None:
+                    self.adjustHour = self.startHour if self.startHour is not None else logHour
+                hourFromStart = logHour - self.adjustHour
                 if hourFromStart < 0:
                     hourFromStart += 24
                 if logHour < self.startHour:
@@ -367,8 +368,10 @@
                 if not is503:
                     self.clientTotals[" TOTAL"][0] += 1
                     self.clientTotals[" TOTAL"][1].add(self.currentLine.userid)
+                    self.clientTotals[" TOTAL"][2].add(self.currentLine.ipaddr)
                     self.clientTotals[adjustedClient][0] += 1
                     self.clientTotals[adjustedClient][1].add(self.currentLine.userid)
+                    self.clientTotals[adjustedClient][2].add(self.currentLine.ipaddr)
                     
                     self.clientByMethodCount[" TOTAL"][" TOTAL"] += 1
                     self.clientByMethodCount[" TOTAL"][adjustedMethod] += 1
@@ -509,12 +512,12 @@
         for client, data in self.clientByMethodCount.iteritems():
             self.clientIDByMethodCount[self.clientIDMap[client]] = data
 
-        return lineCtr
-
     def parseLine(self, line):
     
         startPos = line.find("- ")
         endPos = line.find(" [")
+        
+        ipaddr = line[0:startPos-2]
         userid = line[startPos+2:endPos]
         
         startPos = endPos + 1
@@ -574,7 +577,7 @@
             items = line[startPos:].split()
             extended = dict([item.split('=') for item in items])
     
-        self.currentLine = CalendarServerLogAnalyzer.LogLine(userid, logDateTime, logTime, method, uri, status, reqbytes, referrer, client, extended)
+        self.currentLine = CalendarServerLogAnalyzer.LogLine(ipaddr, userid, logDateTime, logTime, method, uri, status, reqbytes, referrer, client, extended)
     
     def getClientAdjustedName(self):
     
@@ -838,47 +841,49 @@
         return key
 
     # Determine method weight 1 - 10
-    weighting = {
-        "ACL": lambda x: 3,
-        "DELETE" : lambda x: 5,
-        "GET" : lambda x:  3 * (1 + x.bytes / (1024 * 1024)),
-        METHOD_GET_DROPBOX : lambda x: 3 * (1 + x.bytes / (1024 * 1024)),
-        "HEAD" : lambda x: 1,
-        "MKCALENDAR" : lambda x: 2,
-        "MKCOL" : lambda x: 2,
-        "MOVE" : lambda x: 3,
-        "OPTIONS" : lambda x: 1,
-        METHOD_POST_FREEBUSY : lambda x: 5 * int(x.extended.get("recipients", 1)),
-        METHOD_PUT_ORGANIZER : lambda x: 5 * int(x.extended.get("recipients", 1)),
-        METHOD_PUT_ATTENDEE : lambda x: 5 * int(x.extended.get("recipients", 1)),
-        "PROPFIND" : lambda x: 3 * int(x.extended.get("responses", 1)),
-        METHOD_PROPFIND_CALENDAR : lambda x: 5 * (int(math.log10(float(x.extended.get("responses", 1)))) + 1),
-        METHOD_PROPFIND_CALENDAR_HOME : lambda x: 5 * (int(math.log10(float(x.extended.get("responses", 1)))) + 1),
-        "PROPFIND inbox" : lambda x: 5 * (int(math.log10(float(x.extended.get("responses", 1)))) + 1),
-        METHOD_PROPFIND_PRINCIPALS : lambda x: 5 * (int(math.log10(float(x.extended.get("responses", 1)))) + 1),
-        METHOD_PROPFIND_CACHED_CALENDAR_HOME : lambda x: 2,
-        METHOD_PROPFIND_CACHED_PRINCIPALS : lambda x: 2,
-        "PROPPATCH" : lambda x: 4,
-        METHOD_PROPPATCH_CALENDAR : lambda x:8,
-        METHOD_PUT_ICS : lambda x: 4,
-        METHOD_PUT_ORGANIZER : lambda x: 8,
-        METHOD_PUT_ATTENDEE : lambda x: 6,
-        METHOD_PUT_DROPBOX : lambda x: 10,
-        "REPORT" : lambda x: 5,
-        METHOD_REPORT_CALENDAR_MULTIGET : lambda x: 5 * int(x.extended.get("rcount", 1)),
-        METHOD_REPORT_CALENDAR_QUERY : lambda x: 4 * int(x.extended.get("responses", 1)),
-        METHOD_REPORT_EXPAND_P : lambda x: 5,
-        "REPORT principal-match" : lambda x: 5,
-    }
+#    weighting = {
+#        "ACL": lambda x: 3,
+#        "DELETE" : lambda x: 5,
+#        "GET" : lambda x:  3 * (1 + x.bytes / (1024 * 1024)),
+#        METHOD_GET_DROPBOX : lambda x: 3 * (1 + x.bytes / (1024 * 1024)),
+#        "HEAD" : lambda x: 1,
+#        "MKCALENDAR" : lambda x: 2,
+#        "MKCOL" : lambda x: 2,
+#        "MOVE" : lambda x: 3,
+#        "OPTIONS" : lambda x: 1,
+#        METHOD_POST_FREEBUSY : lambda x: 5 * int(x.extended.get("recipients", 1)),
+#        METHOD_PUT_ORGANIZER : lambda x: 5 * int(x.extended.get("recipients", 1)),
+#        METHOD_PUT_ATTENDEE : lambda x: 5 * int(x.extended.get("recipients", 1)),
+#        "PROPFIND" : lambda x: 3 * int(x.extended.get("responses", 1)),
+#        METHOD_PROPFIND_CALENDAR : lambda x: 5 * (int(math.log10(float(x.extended.get("responses", 1)))) + 1),
+#        METHOD_PROPFIND_CALENDAR_HOME : lambda x: 5 * (int(math.log10(float(x.extended.get("responses", 1)))) + 1),
+#        "PROPFIND inbox" : lambda x: 5 * (int(math.log10(float(x.extended.get("responses", 1)))) + 1),
+#        METHOD_PROPFIND_PRINCIPALS : lambda x: 5 * (int(math.log10(float(x.extended.get("responses", 1)))) + 1),
+#        METHOD_PROPFIND_CACHED_CALENDAR_HOME : lambda x: 2,
+#        METHOD_PROPFIND_CACHED_PRINCIPALS : lambda x: 2,
+#        "PROPPATCH" : lambda x: 4,
+#        METHOD_PROPPATCH_CALENDAR : lambda x:8,
+#        METHOD_PUT_ICS : lambda x: 4,
+#        METHOD_PUT_ORGANIZER : lambda x: 8,
+#        METHOD_PUT_ATTENDEE : lambda x: 6,
+#        METHOD_PUT_DROPBOX : lambda x: 10,
+#        "REPORT" : lambda x: 5,
+#        METHOD_REPORT_CALENDAR_MULTIGET : lambda x: 5 * int(x.extended.get("rcount", 1)),
+#        METHOD_REPORT_CALENDAR_QUERY : lambda x: 4 * int(x.extended.get("responses", 1)),
+#        METHOD_REPORT_EXPAND_P : lambda x: 5,
+#        "REPORT principal-match" : lambda x: 5,
+#    }
+    weighting = {}
 
     def userAnalysis(self, adjustedMethod):
         
         if self.currentLine.userid == "-":
             return
-        try:
-            self.userWeights[self.currentLine.userid] += self.weighting[adjustedMethod](self.currentLine)
-        except KeyError:
-            self.userWeights[self.currentLine.userid] += 5
+#        try:
+#            self.userWeights[self.currentLine.userid] += self.weighting[adjustedMethod](self.currentLine)
+#        except KeyError:
+#            self.userWeights[self.currentLine.userid] += 5
+        self.userWeights[self.currentLine.userid] += 1
         
         
         responseTime = float(self.currentLine.extended.get("t", 0.0))
@@ -974,8 +979,8 @@
             #print "User Interaction Counts"
             #self.printUserInteractionCounts(doTabs)
     
-            #print "User Weights (top 100)"
-            #self.printUserWeights(doTabs)
+            print "User Weights (top 100)"
+            self.printUserWeights(doTabs)
     
             #print "User Response times"
             #self.printUserResponseTimes(doTabs)
@@ -991,7 +996,7 @@
             table.addRow(("Filtered to user:", self.filterByUser,))
         if self.filterByClient:
             table.addRow(("Filtered to client:", self.filterByClient,))
-        table.addRow(("Lines Analyzed:", self.linesRead,))
+        table.addRow(("Lines Analyzed:", sum(self.linesRead.values()),))
     
         table.printTabDelimitedData() if doTabs else table.printTable()
         print ""
@@ -1003,7 +1008,7 @@
         totalminutes = index * self.resolutionMinutes
         
         offsethour, minute = divmod(totalminutes, 60)
-        localhour = divmod(offsethour + self.logStart + self.startHour + self.utcoffset, 24)[1]
+        localhour = divmod(offsethour + self.adjustHour + self.utcoffset, 24)[1]
         utchour = divmod(localhour - self.loggedUTCOffset - self.utcoffset, 24)[1]
         
         # Clip to select hour range
@@ -1013,12 +1018,10 @@
         
         table = tables.Table()
         table.addHeader(
-            ("Local (UTC)", "Total",    "Av. Requests", "Av. Response",) if summary else
-            ("Local (UTC)", "Total",    "Av. Requests", "Av. Queue", "Max. Queue", "Av. Response",)
+            ("Local (UTC)", "Total",    "Av. Requests", "Av. Response", "Av. Queue",)
         )
         table.addHeader(
-            ("",            "Requests", "Per Second",   "Time(ms)",) if summary else
-            ("",            "Requests", "Per Second",   "Depth",     "Depth (# queues)",      "Time(ms)",)
+            ("",            "Requests", "Per Second",   "Time(ms)",     "Depth")
         )
         table.setDefaultColumnFormats(
             (
@@ -1026,48 +1029,30 @@
                 tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
                 tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
                 tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-            ) if summary else
-            (
-                tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY), 
-                tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-                tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-                tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-                tables.Table.ColumnFormat("%d (%2d)", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-                tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
+                tables.Table.ColumnFormat("%.2f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
             )
         )
     
         totalRequests = 0
         totalDepth = 0
-        totalMaxDepth = 0
         totalTime = 0.0
         for ctr in xrange(self.timeBucketCount):
             hour = self.getHourFromIndex(ctr)
             if hour is None:
                 continue
             value = self.hourlyTotals[ctr]
-            countRequests, _ignore503, countDepth, maxDepth, countTime = value
-            maxDepthAll = max(maxDepth.values()) if maxDepth.values() else 0
-            maxDepthCount = list(maxDepth.values()).count(maxDepthAll)
+            countRequests, _ignore503, countDepth, _ignore_maxDepth, countTime = value
             table.addRow(
                 (
                     hour,
                     countRequests,
                     (1.0 * countRequests) / self.resolutionMinutes / 60,
                     safePercent(countTime, countRequests, 1.0),
-                ) if summary else
-                (
-                    hour,
-                    countRequests,
-                    (1.0 * countRequests) / self.resolutionMinutes / 60,
-                    safePercent(countDepth, countRequests, 1),
-                    (maxDepthAll, maxDepthCount,),
-                    safePercent(countTime, countRequests, 1.0),
+                    safePercent(float(countDepth), countRequests, 1),
                 )
             )
             totalRequests += countRequests
             totalDepth += countDepth
-            totalMaxDepth = max(totalMaxDepth, maxDepthAll)
             totalTime += countTime
     
         table.addFooter(
@@ -1076,14 +1061,7 @@
                 totalRequests,
                 (1.0 * totalRequests) / self.timeBucketCount / self.resolutionMinutes / 60,
                 safePercent(totalTime, totalRequests, 1.0),
-            ) if summary else
-            (
-                "Total:",
-                totalRequests,
-                (1.0 * totalRequests) / self.timeBucketCount / self.resolutionMinutes / 60,
-                safePercent(totalDepth, totalRequests, 1),
-                totalMaxDepth,
-                safePercent(totalTime, totalRequests, 1.0),
+                safePercent(float(totalDepth), totalRequests, 1),
             ),
             columnFormats=
             (
@@ -1091,14 +1069,7 @@
                 tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
                 tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
                 tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-            ) if summary else
-            (
-                tables.Table.ColumnFormat("%s"), 
-                tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-                tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-                tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-                tables.Table.ColumnFormat("%d     ", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
-                tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
+                tables.Table.ColumnFormat("%.2f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
             )
         )
     
@@ -1116,7 +1087,8 @@
             tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
         ))
     
-        table.addHeader(("Client", "Total", "Unique", "Total/Unique"))
+        table.addHeader(("Client", "Total", "Unique", "Unique"))
+        table.addHeader((      "",      "", "Users",  "IP addrs"))
         for title, clientData in sorted(self.clientTotals.iteritems(), key=lambda x:x[0].lower()):
             if title == " TOTAL":
                 continue
@@ -1124,14 +1096,14 @@
                 title,
                 "%d (%2d%%)" % (clientData[0], safePercent(clientData[0], self.clientTotals[" TOTAL"][0]),),
                 "%d (%2d%%)" % (len(clientData[1]), safePercent(len(clientData[1]), len(self.clientTotals[" TOTAL"][1])),),
-                "%d" % (safePercent(clientData[0], len(clientData[1]), 1),),
+                "%d (%2d%%)" % (len(clientData[2]), safePercent(len(clientData[2]), len(self.clientTotals[" TOTAL"][2])),),
             ))
     
         table.addFooter((
             "All",
             "%d      " % (self.clientTotals[" TOTAL"][0],),
             "%d      " % (len(self.clientTotals[" TOTAL"][1]),),
-            "",
+            "%d      " % (len(self.clientTotals[" TOTAL"][2]),),
         ))
     
         table.printTabDelimitedData() if doTabs else table.printTable()
@@ -1575,28 +1547,30 @@
         
         self.analyzers = analyzers
     
-    def printAll(self, doTabs):
+    def printAll(self, doTabs, summary):
         
         self.printInfo(doTabs)
 
         print "Load Analysis Differences"
-        self.printLoadAnalysisDetails(doTabs)
+        #self.printLoadAnalysisDetails(doTabs)
+        self.printHourlyTotals(doTabs)
 
-        print "Client Differences"
-        self.printClientTotals(doTabs)
+        if not summary:
+            print "Client Differences"
+            self.printClientTotals(doTabs)
+    
+            print "Protocol Count Differences"
+            self.printMethodCountDetails(doTabs)
+    
+            print "Average Response Time Differences"
+            self.printMethodTimingDetails("clientByMethodAveragedTime", doTabs)
+    
+            print "Total Response Time Differences"
+            self.printMethodTimingDetails("clientByMethodTotalTime", doTabs)
+            
+            print "Average Response Count Differences"
+            self.printResponseCountDetails(doTabs)
 
-        print "Protocol Count Differences"
-        self.printMethodCountDetails(doTabs)
-
-        print "Average Response Time Differences"
-        self.printMethodTimingDetails("clientByMethodAveragedTime", doTabs)
-
-        print "Total Response Time Differences"
-        self.printMethodTimingDetails("clientByMethodTotalTime", doTabs)
-        
-        print "Average Response Count Differences"
-        self.printResponseCountDetails(doTabs)
-
     def printInfo(self, doTabs):
         
         table = tables.Table()
@@ -1620,34 +1594,151 @@
             title = "#%d %s" % (ctr+1, analyzer.startLog[0:11],)
 
             totalRequests = 0
-            total503 = 0
-            totalDepth = 0
             totalTime = 0.0
-            for ctr2 in xrange(self.timeBucketCount):
+            for ctr2 in xrange(analyzer.timeBucketCount):
+                hour = analyzer.getHourFromIndex(ctr2)
+                if hour is None:
+                    continue
                 value = analyzer.hourlyTotals[ctr2]
-                countRequests, count503, countDepth, _ignore_maxDepth, countTime = value
+                countRequests, _ignore503, _ignore_countDepth, _ignore_maxDepth, countTime = value
                 totalRequests += countRequests
-                total503 += count503
-                totalDepth += countDepth
                 totalTime += countTime
 
             byCategory[title]["#1 Total Requests"] = "%d" % (totalRequests,)
-            byCategory[title]["#2 503 Count"] = "%d (%2d%%)" % (total503, safePercent(total503, totalRequests),)
-            byCategory[title]["#3 Av. Queue Depth"] = "%d" % (safePercent(totalDepth, totalRequests, 1),)
-            byCategory[title]["#4 Av. Response Time (ms)"] = "%.1f" % (safePercent(totalTime, totalRequests, 1.0),)
+            byCategory[title]["#2 Av. Response Time (ms)"] = "%.1f" % (safePercent(totalTime, totalRequests, 1.0),)
             
             if ctr == 0:
-                firstData = (totalRequests, total503, safePercent(totalDepth, totalRequests, 1.0), safePercent(totalTime, totalRequests, 1.0),)
-            lastData = (totalRequests, total503, safePercent(totalDepth, totalRequests, 1.0), safePercent(totalTime, totalRequests, 1.0),)
+                firstData = (totalRequests, safePercent(totalTime, totalRequests, 1.0),)
+            lastData = (totalRequests, safePercent(totalTime, totalRequests, 1.0),)
 
         title = "Difference"
         byCategory[title]["#1 Total Requests"] = "%+d (%+.1f%%)" % (lastData[0] - firstData[0], safePercent(lastData[0] - firstData[0], firstData[0], 100.0),)
-        byCategory[title]["#2 503 Count"] = "%+d (%+.1f%%)" % (lastData[1] - firstData[1], safePercent((1.0 * lastData[1]) / lastData[0] - (1.0 * firstData[1]) / firstData[0], (1.0 * firstData[1]) / firstData[0], 100.0),)
-        byCategory[title]["#3 Av. Queue Depth"] = "%+d (%+.1f%%)" % (lastData[2] - firstData[2], safePercent(lastData[2] - firstData[2], firstData[2], 100.0),)
-        byCategory[title]["#4 Av. Response Time (ms)"] = "%+.1f (%+.1f%%)" % (lastData[3] - firstData[3], safePercent(lastData[3] - firstData[3], firstData[3], 100.0),)
+        byCategory[title]["#2 Av. Response Time (ms)"] = "%+.1f (%+.1f%%)" % (lastData[1] - firstData[1], safePercent(lastData[1] - firstData[1], firstData[1], 100.0),)
 
         self.printDictDictTable(byCategory, doTabs)
 
+    def printHourlyTotals(self, doTabs):
+        
+        table = tables.Table()
+        hdr1 = [""]
+        hdr2 = ["Local (UTC)"]
+        hdr3 = [""]
+        fmt1 = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY)]
+        fmt23 = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY)]
+        for ctr, analyzer in enumerate(self.analyzers):
+            title = "#%d %s" % (ctr+1, analyzer.startLog[0:11],)
+            hdr1.extend([title, "", ""])
+            hdr2.extend(["Total",    "Av. Requests", "Av. Response"])
+            hdr3.extend(["Requests", "Per Second",   "Time(ms)"])
+            fmt1.extend([
+                tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY, span=3),
+                None,
+                None,
+            ])
+            fmt23.extend([
+                tables.Table.ColumnFormat(),
+                tables.Table.ColumnFormat(),
+                tables.Table.ColumnFormat(),
+            ])
+        title = "Difference"
+        hdr1.extend([title, "", ""])
+        hdr2.extend(["Total",    "Av. Requests", "Av. Response"])
+        hdr3.extend(["Requests", "Per Second",   "Time(ms)"])
+        fmt1.extend([
+            tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY, span=3),
+            None,
+            None,
+        ])
+        fmt23.extend([
+            tables.Table.ColumnFormat(),
+            tables.Table.ColumnFormat(),
+            tables.Table.ColumnFormat(),
+        ])
+        table.addHeader(hdr1, columnFormats=fmt1)
+        table.addHeaderDivider()
+        table.addHeader(hdr2, columnFormats=fmt23)
+        table.addHeader(hdr3, columnFormats=fmt23)
+
+        fmt = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY)]
+        for ctr in range(len(self.analyzers) + 1):
+            fmt.extend([
+                tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
+                tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
+                tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
+            ])
+        table.setDefaultColumnFormats(fmt)
+    
+        totalRequests = [0] * len(self.analyzers)
+        totalTime = [0.0] * len(self.analyzers)
+        for ctr in xrange(self.analyzers[0].timeBucketCount):
+            hour = self.analyzers[0].getHourFromIndex(ctr)
+            if hour is None:
+                continue
+            
+            diffRequests = None
+            diffRequestRate = None
+            diffTime = None
+            row = [hour]
+            for ctr2, analyzer in enumerate(self.analyzers):
+                value = analyzer.hourlyTotals[ctr]
+                countRequests, _ignore503, _ignore_countDepth, _ignore_maxDepth, countTime = value
+                
+                requestRate = (1.0 * countRequests) / analyzer.resolutionMinutes / 60
+                averageTime = safePercent(countTime, countRequests, 1.0)
+                row.extend([
+                    countRequests,
+                    requestRate,
+                    averageTime,
+                ])
+                totalRequests[ctr2] += countRequests
+                totalTime[ctr2] += countTime
+                
+                diffRequests = countRequests if diffRequests is None else countRequests - diffRequests
+                diffRequestRate = requestRate if diffRequestRate is None else requestRate - diffRequestRate
+                diffTime = averageTime if diffTime is None else averageTime - diffTime
+
+            row.extend([
+                diffRequests,
+                diffRequestRate,
+                diffTime,
+            ])
+            table.addRow(row)
+    
+        ftr = ["Total:"]
+        diffRequests = None
+        diffRequestRate = None
+        diffTime = None
+        for ctr, analyzer in enumerate(self.analyzers):
+            requestRate = (1.0 * totalRequests[ctr]) / analyzer.resolutionMinutes / 60
+            averageTime = safePercent(totalTime[ctr], totalRequests[ctr], 1.0)
+            ftr.extend([
+                totalRequests[ctr],
+                requestRate,
+                averageTime,
+            ])
+            
+            diffRequests = totalRequests[ctr] if diffRequests is None else totalRequests[ctr] - diffRequests
+            diffRequestRate = requestRate if diffRequestRate is None else requestRate - diffRequestRate
+            diffTime = averageTime if diffTime is None else averageTime - diffTime
+
+        ftr.extend([
+            diffRequests,
+            diffRequestRate,
+            diffTime,
+        ])
+
+        fmt = [tables.Table.ColumnFormat("%s")]
+        for ctr in range(len(self.analyzers) + 1):
+            fmt.extend([
+                tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
+                tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
+                tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
+            ])
+        table.addFooter(ftr, columnFormats=fmt)
+    
+        table.printTabDelimitedData() if doTabs else table.printTable()
+        print ""
+    
     def printClientTotals(self, doTabs):
         
         table = tables.Table()
@@ -1856,8 +1947,8 @@
         repeat = False
         summary = False
         resolution = 60
-        startHour = 0
-        endHour = startHour + 23
+        startHour = None
+        endHour = None
         utcoffset = 0
         filterByUser = None
         filterByClient = None
@@ -1904,13 +1995,10 @@
         # Process arguments
         if len(args) == 0:
             args = ("/var/log/caldavd/access.log",)
-        if repeat and len(args) > 1:
-            usage("Must have one argument with --repeat")
 
         pwd = os.getcwd()
 
         analyzers = []
-        ctr = []
         for arg in args:
             arg = os.path.expanduser(arg)
             if not arg.startswith("/"):
@@ -1923,14 +2011,13 @@
            
             if diffMode or not analyzers:
                 analyzers.append(CalendarServerLogAnalyzer(startHour, endHour, utcoffset, resolution, filterByUser, filterByClient))
-                ctr.append(0)
             print "Analyzing: %s" % (arg,)
-            ctr[-1] = analyzers[-1].analyzeLogFile(arg, ctr[-1])
+            analyzers[-1].analyzeLogFile(arg)
 
         if diffMode and len(analyzers) > 1:
             Differ(analyzers).printAll(doTabDelimited, summary)
         else:
-            analyzers[-1].printAll(doTabDelimited, summary)
+            analyzers[0].printAll(doTabDelimited, summary)
             
             if repeat:
                 while True:
@@ -1938,7 +2025,9 @@
                     if again.lower()[0] == "n":
                         break
                     print "\n\n\n"
-                    analyzers[0].analyzeLogFile(arg, ctr[0])
+                    for arg in args:
+                        print "Analyzing: %s" % (arg,)
+                        analyzers[0].analyzeLogFile(arg)
                     analyzers[0].printAll(doTabDelimited, summary)
                 
     except Exception, e:

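For anyone skimming the protocolanalysis.py hunks above: the Differ now drops the queue-depth columns and prints a per-hour comparison where each analyzer contributes request count, request rate and average response time, plus a final difference column. A minimal standalone sketch of that difference computation for two sets of hourly totals (hourly_difference, hours_a and hours_b are invented names for this illustration, not part of the patch):

    def hourly_difference(hours_a, hours_b, resolution_minutes=60):
        # Each entry is (request count, total response time in ms) for one hour.
        rows = []
        for hour, (req_a, time_a), (req_b, time_b) in zip(
                range(len(hours_a)), hours_a, hours_b):
            rate_a = float(req_a) / resolution_minutes / 60
            rate_b = float(req_b) / resolution_minutes / 60
            avg_a = time_a / req_a if req_a else 0.0
            avg_b = time_b / req_b if req_b else 0.0
            rows.append((hour, req_b - req_a, rate_b - rate_a, avg_b - avg_a))
        return rows

    # Example: two days of (request count, total response time ms) per hour
    day1 = [(100, 25000.0), (200, 60000.0)]
    day2 = [(120, 24000.0), (180, 63000.0)]
    for row in hourly_difference(day1, day2):
        print(row)
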
Modified: CalendarServer/branches/users/gaya/inviteclean/contrib/tools/request_monitor.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/contrib/tools/request_monitor.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/contrib/tools/request_monitor.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -25,6 +25,7 @@
 import traceback
 import collections
 import tables
+from cStringIO import StringIO
 
 # Detect which OS this is being run on
 child = Popen(
@@ -555,8 +556,10 @@
                 maxEndTime,
                 "",
             ))
-        table.printTable()
-        print
+            
+        os = StringIO()
+        table.printTable(os=os)
+        print os.getvalue()
 
         if enableListenQueue:
             lqlatency = (lqssl / avgRequests, lqnon / avgRequests,) if avgRequests else (0.0, 0.0,)

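The request_monitor.py hunk renders the table into a cStringIO buffer and emits it with a single print, so the periodically refreshed display is written all at once rather than row by row. A tiny sketch of the same pattern under Python 2, as used by the module (render_counts and its sample data are made up for this example):

    from cStringIO import StringIO

    def render_counts(rows):
        # Build the whole report in memory, then hand back one string.
        buf = StringIO()
        for method, count in rows:
            buf.write("%-20s %6d\n" % (method, count))
        return buf.getvalue()

    print(render_counts([("PROPFIND", 120), ("REPORT", 45)]))
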
Modified: CalendarServer/branches/users/gaya/inviteclean/twext/web2/log.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twext/web2/log.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twext/web2/log.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -1,7 +1,7 @@
 # -*- test-case-name: twext.web2.test.test_log -*-
 ##
 # Copyright (c) 2001-2004 Twisted Matrix Laboratories.
-# Copyright (c) 2010 Apple Computer, Inc. All rights reserved.
+# Copyright (c) 2010-2012 Apple Computer, Inc. All rights reserved.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -90,12 +90,15 @@
         loginfo.bytesSent=length
         loginfo.responseCompleted=success
         loginfo.secondsTaken=time.time()-startTime
-        
+
+        if length:        
+            request.timeStamp("t-resp-wr")
         log.msg(interface=iweb.IRequest, request=request, response=response,
                  loginfo=loginfo)
         # Or just...
         # ILogger(ctx).log(...) ?
 
+    request.timeStamp("t-resp-gen")
     if response.stream:
         response.stream=_LogByteCounter(response.stream, _log)
     else:

Modified: CalendarServer/branches/users/gaya/inviteclean/twext/web2/server.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twext/web2/server.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twext/web2/server.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -1,7 +1,7 @@
 # -*- test-case-name: twext.web2.test.test_server -*-
 ##
 # Copyright (c) 2001-2008 Twisted Matrix Laboratories.
-# Copyright (c) 2010-2011 Apple Computer, Inc. All rights reserved.
+# Copyright (c) 2010-2012 Apple Computer, Inc. All rights reserved.
 #
 # Permission is hereby granted, free of charge, to any person obtaining a copy
 # of this software and associated documentation files (the "Software"), to deal
@@ -192,7 +192,7 @@
 
     def __init__(self, *args, **kw):
         
-        self.initTime = time.time()
+        self.timeStamps = [("t", time.time(),)]
 
         if kw.has_key('site'):
             self.site = kw['site']
@@ -214,6 +214,9 @@
         except AttributeError:
             self.serverInstance = "Unknown"
 
+    def timeStamp(self, tag):
+        self.timeStamps.append((tag, time.time(),))
+
     def addResponseFilter(self, filter, atEnd=False, onlyOnce=False):
         """
         Add a response filter to this request.
@@ -369,12 +372,17 @@
         d = defer.Deferred()
         d.addCallback(self._getChild, self.site.resource, self.postpath)
         d.addCallback(self._rememberResource, "/" + "/".join(quote(s) for s in self.postpath))
+        d.addCallback(self._processTimeStamp)
         d.addCallback(lambda res, req: res.renderHTTP(req), self)
         d.addCallback(self._cbFinishRender)
         d.addErrback(self._processingFailed)
         d.callback(None)
         return d
 
+    def _processTimeStamp(self, res):
+        self.timeStamp("t-req-proc")
+        return res
+
     def preprocessRequest(self):
         """Do any request processing that doesn't follow the normal
         resource lookup procedure. "OPTIONS *" is handled here, for

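The twext.web2 changes replace the single initTime with a list of (tag, time) pairs plus a timeStamp() helper, which log.py and server.py call at interesting points ("t-req-proc", "t-resp-gen", "t-resp-wr"). A self-contained sketch of that bookkeeping, outside the real Request class, showing how per-stage durations fall out of the list (RequestTimer is an invented stand-in):

    import time

    class RequestTimer(object):
        # Stand-in for the real Request: only the timestamp bookkeeping.
        def __init__(self):
            self.timeStamps = [("t", time.time())]

        def timeStamp(self, tag):
            self.timeStamps.append((tag, time.time()))

        def stageDurations(self):
            # (tag, seconds since the previous stamp) for each later stamp.
            return [(tag, t - self.timeStamps[i][1])
                    for i, (tag, t) in enumerate(self.timeStamps[1:])]

    r = RequestTimer()
    time.sleep(0.01)
    r.timeStamp("t-req-proc")
    time.sleep(0.01)
    r.timeStamp("t-resp-gen")
    print(r.stageDurations())
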
Modified: CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/ldapdirectory.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/ldapdirectory.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/ldapdirectory.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -118,7 +118,7 @@
                     "mapping" : { # maps internal record names to LDAP
                         "recordName": "uid",
                         "fullName" : "cn",
-                        "emailAddresses" : "mail",
+                        "emailAddresses" : ["mail"], # multiple LDAP fields supported
                         "firstName" : "givenName",
                         "lastName" : "sn",
                     },
@@ -131,7 +131,7 @@
                     "mapping" : { # maps internal record names to LDAP
                         "recordName": "cn",
                         "fullName" : "cn",
-                        "emailAddresses" : "mail",
+                        "emailAddresses" : ["mail"], # multiple LDAP fields supported
                         "firstName" : "givenName",
                         "lastName" : "sn",
                     },
@@ -146,7 +146,7 @@
                     "mapping" : { # maps internal record names to LDAP
                         "recordName": "cn",
                         "fullName" : "cn",
-                        "emailAddresses" : "mail",
+                        "emailAddresses" : ["mail"], # multiple LDAP fields supported
                         "firstName" : "givenName",
                         "lastName" : "sn",
                     },
@@ -161,7 +161,7 @@
                     "mapping" : { # maps internal record names to LDAP
                         "recordName": "cn",
                         "fullName" : "cn",
-                        "emailAddresses" : "mail",
+                        "emailAddresses" : ["mail"], # multiple LDAP fields supported
                         "firstName" : "givenName",
                         "lastName" : "sn",
                     },
@@ -235,9 +235,14 @@
                 attrSet.add(self.rdnSchema[recordType]["attr"])
             if self.rdnSchema[recordType].get("calendarEnabledAttr", False):
                 attrSet.add(self.rdnSchema[recordType]["calendarEnabledAttr"])
-            for attr in self.rdnSchema[recordType]["mapping"].values():
-                if attr:
-                    attrSet.add(attr)
+            for attrList in self.rdnSchema[recordType]["mapping"].values():
+                if attrList:
+                    # Since emailAddresses can map to multiple LDAP fields,
+                    # support either string or list
+                    if isinstance(attrList, str):
+                        attrList = [attrList]
+                    for attr in attrList:
+                        attrSet.add(attr)
             # Also put the guidAttr attribute into the mappings for each type
             # so recordsMatchingFields can query on guid
             self.rdnSchema[recordType]["mapping"]["guid"] = self.rdnSchema["guidAttr"]
@@ -646,9 +651,10 @@
         """
         results = []
         for key in keys:
-            values = attrs.get(key)
-            if values is not None:
-                results += values
+            if key:
+                values = attrs.get(key)
+                if values is not None:
+                    results += values
         return results
 
 
@@ -686,7 +692,13 @@
                 raise MissingGuidException()
 
         # Find or build email
-        emailAddresses = set(self._getMultipleLdapAttributes(attrs, self.rdnSchema[recordType]["mapping"]["emailAddresses"]))
+        # (The emailAddresses mapping may be a single LDAP field or a list of fields)
+        emailAddressesMappedTo = self.rdnSchema[recordType]["mapping"]["emailAddresses"]
+        # Supporting either string or list for emailAddresses:
+        if isinstance(emailAddressesMappedTo, str):
+            emailAddresses = set(self._getMultipleLdapAttributes(attrs, self.rdnSchema[recordType]["mapping"]["emailAddresses"]))
+        else:
+            emailAddresses = set(self._getMultipleLdapAttributes(attrs, *self.rdnSchema[recordType]["mapping"]["emailAddresses"]))
         emailSuffix = self.rdnSchema[recordType]["emailSuffix"]
 
         if len(emailAddresses) == 0 and emailSuffix:
@@ -722,9 +734,16 @@
 
             # Normalize members if they're in DN form
             if not self.groupSchema["memberIdAttr"]: # empty = dn
-                memberGUIDs = [normalizeDNstr(dnStr) for dnStr in list(memberGUIDs)]
+                guids = list(memberGUIDs)
+                memberGUIDs = []
+                for dnStr in guids:
+                    try:
+                        dnStr = normalizeDNstr(dnStr)
+                        memberGUIDs.append(dnStr)
+                    except Exception, e:
+                        # LDAP returned an illegal DN value, log and ignore it
+                        self.log_warn("Bad LDAP DN: %s" % (dnStr,))
 
-
         elif recordType in (self.recordType_resources,
             self.recordType_locations):
             fullName = self._getUniqueLdapAttribute(attrs, self.rdnSchema[recordType]["mapping"]["fullName"])
@@ -885,7 +904,16 @@
                         ldapEsc(email)
                     )
                 else:
-                    filterstr = "(&%s(mail=%s))" % (filterstr, ldapEsc(email))
+                    # emailAddresses can map to multiple LDAP fields
+                    ldapFields = self.rdnSchema[recordType]["mapping"]["emailAddresses"]
+                    if isinstance(ldapFields, str):
+                        subfilter = "(%s=%s)" % (ldapFields, ldapEsc(email))
+                    else:
+                        subfilter = []
+                        for ldapField in ldapFields:
+                            subfilter.append("(%s=%s)" % (ldapField, ldapEsc(email)))
+                        subfilter = "(|%s)" % ("".join(subfilter))
+                    filterstr = "(&%s%s)" % (filterstr, subfilter)
 
             elif indexType == self.INDEX_TYPE_AUTHID:
                 return
@@ -1157,7 +1185,13 @@
         if ldapField:
             combined.setdefault(field, []).append((value, caseless, matchType))
             value = _convertValue(value, matchType)
-            converted.append("(%s=%s)" % (ldapField, value))
+            if isinstance(ldapField, str):
+                converted.append("(%s=%s)" % (ldapField, value))
+            else:
+                subConverted = []
+                for lf in ldapField:
+                    subConverted.append("(%s=%s)" % (lf, value))
+                converted.append("(|%s)" % "".join(subConverted))
 
     if len(converted) == 0:
         return None

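The ldapdirectory.py changes let an internal field such as emailAddresses map to either a single LDAP attribute or a list of attributes, ORing the per-attribute clauses together when a filter is built. A small sketch of that normalize-then-OR step (normalize_fields and email_filter are invented names; ldap_escape stands in for the real escaping helper):

    def normalize_fields(mapped):
        # Accept either "mail" or ["mail", "emailAliases"].
        return [mapped] if isinstance(mapped, str) else list(mapped)

    def email_filter(mapped, email, ldap_escape=lambda s: s):
        clauses = ["(%s=%s)" % (field, ldap_escape(email))
                   for field in normalize_fields(mapped)]
        return clauses[0] if len(clauses) == 1 else "(|%s)" % "".join(clauses)

    print(email_filter("mail", "user@example.com"))
    # (mail=user@example.com)
    print(email_filter(["mail", "emailAliases"], "user@example.com"))
    # (|(mail=user@example.com)(emailAliases=user@example.com))
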
Modified: CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/principal.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/principal.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/principal.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -631,7 +631,7 @@
         if record.enabledForCalendaring:
             return tag.fillSlots(
                 calendarUserAddresses=formatLinks(
-                    resource.calendarUserAddresses()
+                    sorted(resource.calendarUserAddresses())
                 ),
                 calendarHomes=formatLinks(resource.calendarHomeURLs())
             )
@@ -755,7 +755,7 @@
 
             elif name == "email-address-set":
                 returnValue(customxml.EmailAddressSet(
-                    *[customxml.EmailAddressProperty(addr) for addr in self.record.emailAddresses]
+                    *[customxml.EmailAddressProperty(addr) for addr in sorted(self.record.emailAddresses)]
                 ))
 
         result = (yield super(DirectoryPrincipalResource, self).readProperty(property, request))

Modified: CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/test/test_ldapdirectory.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/test/test_ldapdirectory.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/directory/test/test_ldapdirectory.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -410,7 +410,7 @@
                         "mapping": { # maps internal record names to LDAP
                             "recordName": "uid",
                             "fullName" : "cn",
-                            "emailAddresses" : "mail",
+                            "emailAddresses" : ["mail", "emailAliases"],
                             "firstName" : "givenName",
                             "lastName" : "sn",
                         },
@@ -423,7 +423,7 @@
                         "mapping": { # maps internal record names to LDAP
                             "recordName": "cn",
                             "fullName" : "cn",
-                            "emailAddresses" : "mail",
+                            "emailAddresses" : ["mail", "emailAliases"],
                             "firstName" : "givenName",
                             "lastName" : "sn",
                         },
@@ -438,7 +438,7 @@
                         "mapping": { # maps internal record names to LDAP
                             "recordName": "cn",
                             "fullName" : "cn",
-                            "emailAddresses" : "mail",
+                            "emailAddresses" : ["mail", "emailAliases"],
                             "firstName" : "givenName",
                             "lastName" : "sn",
                         },
@@ -453,7 +453,7 @@
                         "mapping": { # maps internal record names to LDAP
                             "recordName": "cn",
                             "fullName" : "cn",
-                            "emailAddresses" : "mail",
+                            "emailAddresses" : ["mail", "emailAliases"],
                             "firstName" : "givenName",
                             "lastName" : "sn",
                         },
@@ -611,6 +611,30 @@
                      ])
             )
 
+            # Group with illegal DN value in members
+
+            dn = "cn=odtestgrouptop,cn=groups,dc=example,dc=com"
+            guid = '6C6CD280-E6E3-11DF-9492-0800200C9A66'
+            attrs = {
+                'apple-generateduid': [guid],
+                'uniqueMember':
+                    [
+                        'uid=odtestamanda,cn=users,dc=example,dc=com',
+                        'uid=odtestbetty ,cn=users,dc=example,dc=com',
+                        'cn=odtestgroupb+foo,cn=groups,dc=example,dc=com',
+                    ],
+                'cn': ['odtestgrouptop']
+            }
+            record = self.service._ldapResultToRecord(dn, attrs,
+                self.service.recordType_groups)
+            self.assertEquals(record.guid, guid)
+            self.assertEquals(record.memberGUIDs(),
+                set([
+                     'uid=odtestamanda,cn=users,dc=example,dc=com',
+                     'uid=odtestbetty,cn=users,dc=example,dc=com',
+                     ])
+            )
+
             # Resource with delegates and autoSchedule = True
 
             dn = "cn=odtestresource,cn=resources,dc=example,dc=com"

Modified: CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/ical.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/ical.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/ical.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -1419,6 +1419,30 @@
 
             # If the master has no recurrence properties treat any other components as invalid
             if master.isRecurring():
+
+                # Remove all EXDATEs with a matching RECURRENCE-ID. Do this before we start
+                # processing of valid instances just in case the matching R-ID is also not valid and
+                # thus will need RDATE added. 
+                exdates = {}
+                for property in list(master.properties("EXDATE")):
+                    for exdate in property.value():
+                        exdates[exdate.getValue()] = property
+                for rid in all_rids:
+                    if rid in exdates:
+                        if doFix:
+                            property = exdates[rid]
+                            for value in property.value():
+                                if value.getValue() == rid:
+                                    property.value().remove(value)
+                                    break
+                            master.removeProperty(property)
+                            if len(property.value()) > 0:
+                                master.addProperty(property)
+                            del exdates[rid]
+                            fixed.append("Removed EXDATE for valid override: %s" % (rid,))
+                        else:
+                            unfixed.append("EXDATE for valid override: %s" % (rid,))
+                
                 # Get the set of all valid recurrence IDs
                 valid_rids = self.validInstances(all_rids, ignoreInvalidInstances=True)
     
@@ -1428,11 +1452,13 @@
                     rdates.extend([_rdate.getValue() for _rdate in property.value()])
                 valid_rids.update(set(rdates))
 
+
                 # Remove EXDATEs predating master
                 dtstart = master.propertyValue("DTSTART")
                 if dtstart is not None:
                     for property in list(master.properties("EXDATE")):
                         newValues = []
+                        changed = False
                         for exdate in property.value():
                             exdateValue = exdate.getValue()
                             if exdateValue < dtstart:
@@ -1440,10 +1466,11 @@
                                     fixed.append("Removed earlier EXDATE: %s" % (exdateValue,))
                                 else:
                                     unfixed.append("EXDATE earlier than master: %s" % (exdateValue,))
+                                changed = True
                             else:
                                 newValues.append(exdateValue)
 
-                        if doFix:
+                        if changed and doFix:
                             # Remove the property...
                             master.removeProperty(property)
                             if newValues:
@@ -1458,7 +1485,7 @@
             # Determine the invalid recurrence IDs by set subtraction
             invalid_rids = all_rids - valid_rids
 
-            # Add RDATEs for the invalid ones.
+            # Add RDATEs for the invalid ones, or remove any EXDATE.
             for invalid_rid in invalid_rids:
                 brokenComponent = self.overriddenComponent(invalid_rid)
                 brokenRID = brokenComponent.propertyValue("RECURRENCE-ID")
@@ -1927,9 +1954,9 @@
 
     def getExtendedFreeBusy(self):
         """
-        Get the X-CALENDARSEREVR-EXTENDED-FREEBUSY value. Works on either a VCALENDAR or on a component.
+        Get the X-CALENDARSERVER-EXTENDED-FREEBUSY value. Works on either a VCALENDAR or on a component.
         
-        @return: the string value of the X-CALENDARSEREVR-EXTENDED-FREEBUSY property, or None
+        @return: the string value of the X-CALENDARSERVER-EXTENDED-FREEBUSY property, or None
         """
         
         # Extract appropriate sub-component if this is a VCALENDAR
@@ -1940,7 +1967,7 @@
         else:
             try:
                 # Find the primary subcomponent
-                return self.propertyValue("X-CALENDARSEREVR-EXTENDED-FREEBUSY")
+                return self.propertyValue("X-CALENDARSERVER-EXTENDED-FREEBUSY")
             except InvalidICalendarDataError:
                 pass
 

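The ical.py hunk removes any EXDATE whose value matches the RECURRENCE-ID of an existing override before the valid-instance check runs, recording what was fixed or left alone. The same decision, reduced to plain strings and sets rather than PyCalendar objects, looks roughly like this (clean_exdates is an invented helper for illustration):

    def clean_exdates(exdates, override_rids, do_fix=True):
        fixed, unfixed, kept = [], [], []
        for exdate in exdates:
            if exdate in override_rids:
                if do_fix:
                    fixed.append("Removed EXDATE for valid override: %s" % (exdate,))
                else:
                    unfixed.append("EXDATE for valid override: %s" % (exdate,))
                    kept.append(exdate)
            else:
                kept.append(exdate)
        return kept, fixed, unfixed

    kept, fixed, unfixed = clean_exdates(
        ["20120701T120000Z", "20120708T120000Z"],
        set(["20120708T120000Z"]),
    )
    print(kept)   # ['20120701T120000Z']
    print(fixed)  # ['Removed EXDATE for valid override: 20120708T120000Z']
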
Modified: CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/method/report_common.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/method/report_common.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/method/report_common.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -492,12 +492,31 @@
         tz = None
 
     # Look for possible extended free busy information
+    rich_options = {
+        "organizer": False,
+        "delegate": False,
+        "resource": False,
+    }
     do_event_details = False
     if event_details is not None and organizer_principal is not None and userPrincipal is not None:
          
-        # Check of organizer is a delegate of attendee
-        do_event_details = (yield organizer_principal.isProxyFor(userPrincipal))
+        # Check if organizer is attendee
+        if organizer_principal == userPrincipal:
+            do_event_details = True
+            rich_options["organizer"] = True
 
+        # Check if organizer is a delegate of attendee
+        proxy = (yield organizer_principal.isProxyFor(userPrincipal))
+        if config.Scheduling.Options.DelegeteRichFreeBusy and proxy:
+            do_event_details = True
+            rich_options["delegate"] = True
+
+        # Check if attendee is room or resource
+        if config.Scheduling.Options.RoomResourceRichFreeBusy and userPrincipal.getCUType() in ("RESOURCE", "ROOM",):
+            do_event_details = True
+            rich_options["resource"] = True
+
+
     # Try cache
     resources = (yield FBCacheEntry.getCacheEntry(calresource, useruid, timerange)) if config.EnableFreeBusyCache else None
 
@@ -622,7 +641,7 @@
                 if do_event_details:
                     child = (yield request.locateChildResource(calresource, name))
                     calendar = (yield child.iCalendarForUser(request))
-                    _addEventDetails(calendar, event_details, timerange, tzinfo)
+                    _addEventDetails(calendar, event_details, rich_options, timerange, tzinfo)
 
         else:
             child = (yield request.locateChildResource(calresource, name))
@@ -669,11 +688,11 @@
                 if calendar.mainType() == "VEVENT" and do_event_details:
                     child = (yield request.locateChildResource(calresource, name))
                     calendar = (yield child.iCalendarForUser(request))
-                    _addEventDetails(calendar, event_details, timerange, tzinfo)
+                    _addEventDetails(calendar, event_details, rich_options, timerange, tzinfo)
     
     returnValue(matchtotal)
 
-def _addEventDetails(calendar, event_details, timerange, tzinfo):
+def _addEventDetails(calendar, event_details, rich_options, timerange, tzinfo):
     """
     Expand events within the specified time range and limit the set of properties to those allowed for
     delegate extended free busy.
@@ -691,17 +710,24 @@
     # First expand the component
     expanded = calendar.expand(timerange.start, timerange.end, timezone=tzinfo)
 
-    # Remove all but essential properties
-    expanded.filterProperties(keep=(
+    keep_props = (
         "UID",
         "RECURRENCE-ID",
         "DTSTAMP",
         "DTSTART",
         "DTEND",
         "DURATION",
-        "SUMMARY",
-    ))
+    )
 
+    if rich_options["organizer"] or rich_options["delegate"]:
+        keep_props += ("SUMMARY",)
+
+    if rich_options["organizer"] or rich_options["resource"]:
+        keep_props += ("ORGANIZER",)
+
+    # Remove all but essential properties
+    expanded.filterProperties(keep=keep_props)
+
     # Need to remove all child components of VEVENT
     for subcomponent in expanded.subcomponents():
         if subcomponent.name() == "VEVENT":

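The report_common.py change varies which properties survive the extended free-busy expansion according to the rich_options flags: SUMMARY stays for the organizer or a delegate, ORGANIZER for the organizer or a room/resource. The selection on its own, as a small function mirroring the patched logic:

    def freebusy_keep_props(rich_options):
        keep = ("UID", "RECURRENCE-ID", "DTSTAMP", "DTSTART", "DTEND", "DURATION")
        if rich_options["organizer"] or rich_options["delegate"]:
            keep += ("SUMMARY",)
        if rich_options["organizer"] or rich_options["resource"]:
            keep += ("ORGANIZER",)
        return keep

    print(freebusy_keep_props({"organizer": False, "delegate": True, "resource": False}))
    # ('UID', 'RECURRENCE-ID', 'DTSTAMP', 'DTSTART', 'DTEND', 'DURATION', 'SUMMARY')
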
Modified: CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/scheduling/ischedule.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/scheduling/ischedule.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/scheduling/ischedule.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -173,6 +173,12 @@
         
         # Generate an HTTP client request
         try:
+            if not hasattr(self.scheduler.request, "extendedLogItems"):
+                self.request.extendedLogItems = {}
+            if "itip.ischedule" not in self.request.extendedLogItems:
+                self.request.extendedLogItems["itip.ischedule"] = 0
+            self.request.extendedLogItems["itip.ischedule"] += 1
+
             from twisted.internet import reactor
             f = Factory()
             f.protocol = HTTPClientProtocol

Modified: CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/scheduling/scheduler.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/scheduling/scheduler.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/scheduling/scheduler.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -160,7 +160,7 @@
         if not hasattr(self.request, "extendedLogItems"):
             self.request.extendedLogItems = {}
         self.request.extendedLogItems["recipients"] = len(self.recipients)
-        self.request.extendedLogItems["cl"] = str(self.calendar)
+        self.request.extendedLogItems["cl"] = str(len(str(self.calendar)))
     
         # Do some extra authorization checks
         self.checkAuthorization()

Modified: CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/stdconfig.py
===================================================================
--- CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/stdconfig.py	2012-07-16 23:11:10 UTC (rev 9457)
+++ CalendarServer/branches/users/gaya/inviteclean/twistedcaldav/stdconfig.py	2012-07-17 00:20:24 UTC (rev 9458)
@@ -97,7 +97,7 @@
                 "mapping" : { # maps internal record names to LDAP
                     "recordName": "uid",
                     "fullName" : "cn",
-                    "emailAddresses" : "mail",
+                    "emailAddresses" : ["mail"],
                     "firstName" : "givenName",
                     "lastName" : "sn",
                 },
@@ -110,7 +110,7 @@
                 "mapping" : { # maps internal record names to LDAP
                     "recordName": "cn",
                     "fullName" : "cn",
-                    "emailAddresses" : "mail",
+                    "emailAddresses" : ["mail"],
                     "firstName" : "givenName",
                     "lastName" : "sn",
                 },
@@ -125,7 +125,7 @@
                 "mapping" : { # maps internal record names to LDAP
                     "recordName": "cn",
                     "fullName" : "cn",
-                    "emailAddresses" : "mail",
+                    "emailAddresses" : ["mail"],
                     "firstName" : "givenName",
                     "lastName" : "sn",
                 },
@@ -140,7 +140,7 @@
                 "mapping" : { # maps internal record names to LDAP
                     "recordName": "cn",
                     "fullName" : "cn",
-                    "emailAddresses" : "mail",
+                    "emailAddresses" : ["mail"],
                     "firstName" : "givenName",
                     "lastName" : "sn",
                 },
@@ -435,6 +435,7 @@
     "PIDFile"        : "caldavd.pid",
     "RotateAccessLog"   : False,
     "EnableExtendedAccessLog": True,
+    "EnableExtendedTimingAccessLog": False,
     "DefaultLogLevel"   : "",
     "LogLevels"         : {},
     "LogID"             : "",
@@ -653,6 +654,9 @@
             "V1Compatibility"                     : False,  # Allow /path-based CUAs in scheduling replies
             "PrincipalHostAliases"                : [],     # Hostnames matched in http(s) CUAs
             
+            "DelegeteRichFreeBusy"                : True,   # Delegates can get extra info in a freebusy request
+            "RoomResourceRichFreeBusy"            : True,   # Any user can get extra info for rooms/resources in a freebusy request 
+
             "AutoSchedule" : {
                 "Enabled"                         : True,   # Auto-scheduling will never occur if set to False
                 "Always"                          : False,  # Override augments setting and always auto-schedule