[CalendarServer-changes] [5891] CalendarServer/branches/new-store
source_changes at macosforge.org
source_changes at macosforge.org
Wed Jul 14 00:40:40 PDT 2010
Revision: 5891
http://trac.macosforge.org/projects/calendarserver/changeset/5891
Author: glyph at apple.com
Date: 2010-07-14 00:40:35 -0700 (Wed, 14 Jul 2010)
Log Message:
-----------
catch up to trunk (no conflicts)
Modified Paths:
--------------
CalendarServer/branches/new-store/conf/auth/augments.dtd
CalendarServer/branches/new-store/conf/carddavd-apple.plist
CalendarServer/branches/new-store/lib-patches/vobject/vobject.icalendar.patch
CalendarServer/branches/new-store/support/Makefile.Apple
CalendarServer/branches/new-store/support/build.sh
CalendarServer/branches/new-store/twext/internet/sendfdport.py
CalendarServer/branches/new-store/twistedcaldav/directory/appleopendirectory.py
CalendarServer/branches/new-store/twistedcaldav/directory/augment.py
CalendarServer/branches/new-store/twistedcaldav/directory/test/augments-test-default.xml
CalendarServer/branches/new-store/twistedcaldav/directory/test/test_augment.py
CalendarServer/branches/new-store/twistedcaldav/directory/test/test_buildquery.py
CalendarServer/branches/new-store/twistedcaldav/directory/test/test_opendirectory.py
CalendarServer/branches/new-store/twistedcaldav/scheduling/scheduler.py
CalendarServer/branches/new-store/twistedcaldav/stdconfig.py
CalendarServer/branches/new-store/twistedcaldav/test/test_options.py
CalendarServer/branches/new-store/twistedcaldav/upgrade.py
Added Paths:
-----------
CalendarServer/branches/new-store/bin/calendarserver_migrate_resources
CalendarServer/branches/new-store/calendarserver/tools/resources.py
CalendarServer/branches/new-store/calendarserver/tools/test/test_resources.py
CalendarServer/branches/new-store/contrib/tools/monitoranalysis.py
CalendarServer/branches/new-store/contrib/tools/monitorsplit.py
CalendarServer/branches/new-store/doc/calendarserver_migrate_resources.8
Property Changed:
----------------
CalendarServer/branches/new-store/
Property changes on: CalendarServer/branches/new-store
___________________________________________________________________
Modified: svn:mergeinfo
- /CalendarServer/branches/config-separation:4379-4443
/CalendarServer/branches/egg-info-351:4589-4625
/CalendarServer/branches/users/cdaboo/cached-subscription-calendars-5692:5693-5702
/CalendarServer/branches/users/cdaboo/directory-cache-on-demand-3627:3628-3644
/CalendarServer/branches/users/cdaboo/more-sharing-5591:5592-5601
/CalendarServer/branches/users/cdaboo/partition-4464:4465-4957
/CalendarServer/branches/users/cdaboo/relative-config-paths-5070:5071-5105
/CalendarServer/branches/users/cdaboo/shared-calendars-5187:5188-5440
/CalendarServer/branches/users/glyph/contacts-server-merge:4971-5080
/CalendarServer/branches/users/glyph/sendfdport:5388-5424
/CalendarServer/branches/users/glyph/use-system-twisted:5084-5149
/CalendarServer/branches/users/sagen/locations-resources:5032-5051
/CalendarServer/branches/users/sagen/locations-resources-2:5052-5061
/CalendarServer/branches/users/sagen/resource-delegates-4038:4040-4067
/CalendarServer/branches/users/sagen/resource-delegates-4066:4068-4075
/CalendarServer/branches/users/sagen/resources-2:5084-5093
/CalendarServer/branches/users/wsanchez/transations:5515-5593
/CalendarServer/trunk:5594-5803
+ /CalendarServer/branches/config-separation:4379-4443
/CalendarServer/branches/egg-info-351:4589-4625
/CalendarServer/branches/users/cdaboo/cached-subscription-calendars-5692:5693-5702
/CalendarServer/branches/users/cdaboo/directory-cache-on-demand-3627:3628-3644
/CalendarServer/branches/users/cdaboo/more-sharing-5591:5592-5601
/CalendarServer/branches/users/cdaboo/partition-4464:4465-4957
/CalendarServer/branches/users/cdaboo/relative-config-paths-5070:5071-5105
/CalendarServer/branches/users/cdaboo/shared-calendars-5187:5188-5440
/CalendarServer/branches/users/glyph/contacts-server-merge:4971-5080
/CalendarServer/branches/users/glyph/sendfdport:5388-5424
/CalendarServer/branches/users/glyph/use-system-twisted:5084-5149
/CalendarServer/branches/users/sagen/locations-resources:5032-5051
/CalendarServer/branches/users/sagen/locations-resources-2:5052-5061
/CalendarServer/branches/users/sagen/resource-delegates-4038:4040-4067
/CalendarServer/branches/users/sagen/resource-delegates-4066:4068-4075
/CalendarServer/branches/users/sagen/resources-2:5084-5093
/CalendarServer/branches/users/wsanchez/transations:5515-5593
/CalendarServer/trunk:5594-5890
Copied: CalendarServer/branches/new-store/bin/calendarserver_migrate_resources (from rev 5890, CalendarServer/trunk/bin/calendarserver_migrate_resources)
===================================================================
--- CalendarServer/branches/new-store/bin/calendarserver_migrate_resources (rev 0)
+++ CalendarServer/branches/new-store/bin/calendarserver_migrate_resources 2010-07-14 07:40:35 UTC (rev 5891)
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+
+##
+# Copyright (c) 2010 Apple Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+
+import sys
+
+#PYTHONPATH
+
+if __name__ == "__main__":
+ if "PYTHONPATH" in globals():
+ sys.path.insert(0, PYTHONPATH)
+ else:
+ from os.path import dirname, abspath, join
+ from subprocess import Popen, PIPE
+
+ home = dirname(dirname(abspath(__file__)))
+ run = join(home, "run")
+
+ child = Popen((run, "-p"), stdout=PIPE)
+ path, stderr = child.communicate()
+
+ path = path.rstrip("\n")
+
+ if child.wait() == 0:
+ sys.path[0:0] = path.split(":")
+
+ sys.argv[1:1] = ["-f", join(home, "conf", "caldavd-dev.plist")]
+
+ from calendarserver.tools.resources import main
+ main()
Copied: CalendarServer/branches/new-store/calendarserver/tools/resources.py (from rev 5890, CalendarServer/trunk/calendarserver/tools/resources.py)
===================================================================
--- CalendarServer/branches/new-store/calendarserver/tools/resources.py (rev 0)
+++ CalendarServer/branches/new-store/calendarserver/tools/resources.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -0,0 +1,215 @@
+#!/usr/bin/env python
+
+##
+# Copyright (c) 2006-2010 Apple Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+
+from calendarserver.tools.principals import updateRecord
+from calendarserver.tools.util import loadConfig, getDirectory, setupMemcached, setupNotifications, checkDirectory
+from getopt import getopt, GetoptError
+from grp import getgrnam
+from pwd import getpwnam
+from twext.python.log import StandardIOObserver
+from twext.python.log import clearLogLevels
+from twisted.internet import reactor
+from twisted.internet.defer import inlineCallbacks
+from twisted.python.util import switchUID
+from twistedcaldav.config import config, ConfigurationError
+from twistedcaldav.directory.appleopendirectory import OpenDirectoryService
+from twistedcaldav.directory.directory import DirectoryService, DirectoryError
+from twistedcaldav.directory.xmlfile import XMLDirectoryService
+import opendirectory, dsattributes
+import os
+import sys
+
+__all__ = [ "migrateResources", ]
+
+def usage():
+
+ name = os.path.basename(sys.argv[0])
+ print "usage: %s [options] " % (name,)
+ print ""
+ print " Migrates resources and locations from OD to Calendar Server"
+ print ""
+ print "options:"
+ print " -h --help: print this help and exit"
+ print " -f --config <path>: Specify caldavd.plist configuration path"
+ print " -v --verbose: print debugging information"
+ print ""
+
+ sys.exit(0)
+
+def abort(msg, status=1):
+ sys.stdout.write("%s\n" % (msg,))
+ try:
+ reactor.stop()
+ except RuntimeError:
+ pass
+ sys.exit(status)
+
+def main():
+ try:
+ (optargs, args) = getopt(
+ sys.argv[1:], "hf:v", [
+ "help",
+ "config=",
+ "verbose",
+ ],
+ )
+ except GetoptError, e:
+ usage(e)
+
+ #
+ # Get configuration
+ #
+ configFileName = None
+ verbose = False
+
+ for opt, arg in optargs:
+ if opt in ("-h", "--help"):
+ usage()
+
+ elif opt in ("-v", "--verbose"):
+ verbose = True
+
+ elif opt in ("-f", "--config"):
+ configFileName = arg
+
+ else:
+ raise NotImplementedError(opt)
+
+ #
+ # Get configuration
+ #
+ try:
+ loadConfig(configFileName)
+
+ # Do this first, because modifying the config object will cause
+ # some logging activity at whatever log level the plist says
+ clearLogLevels()
+
+ config.DefaultLogLevel = "info" if verbose else "error"
+
+ #
+ # Send logging output to stdout
+ #
+ observer = StandardIOObserver()
+ observer.start()
+
+ # Create the DataRoot directory before shedding privileges
+ if config.DataRoot.startswith(config.ServerRoot + os.sep):
+ checkDirectory(
+ config.DataRoot,
+ "Data root",
+ access=os.W_OK,
+ create=(0750, config.UserName, config.GroupName),
+ )
+
+ # Shed privileges
+ if config.UserName and config.GroupName and os.getuid() == 0:
+ uid = getpwnam(config.UserName).pw_uid
+ gid = getgrnam(config.GroupName).gr_gid
+ switchUID(uid, uid, gid)
+
+ os.umask(config.umask)
+
+ try:
+ config.directory = getDirectory()
+ except DirectoryError, e:
+ abort(e)
+ setupMemcached(config)
+ setupNotifications(config)
+ except ConfigurationError, e:
+ abort(e)
+
+ # Find the opendirectory service
+ userService = config.directory.serviceForRecordType("users")
+ resourceService = config.directory.serviceForRecordType("resources")
+ if (not isinstance(userService, OpenDirectoryService) or
+ not isinstance(resourceService, XMLDirectoryService)):
+ abort("This script only migrates resources and locations from OpenDirectory to XML; this calendar server does not have such a configuration.")
+
+ #
+ # Start the reactor
+ #
+ reactor.callLater(0, migrate, userService, resourceService, verbose=verbose)
+ reactor.run()
+
+
+
+@inlineCallbacks
+def migrate(sourceService, resourceService, verbose=False):
+ """
+ Simply a wrapper around migrateResources in order to stop the reactor
+ """
+
+ try:
+ yield migrateResources(sourceService, resourceService, verbose=verbose)
+ finally:
+ reactor.stop()
+
+
+def queryForType(sourceService, recordType, verbose=False):
+ """
+ Queries OD for all records of the specified record type
+ """
+
+ attrs = [
+ dsattributes.kDS1AttrGeneratedUID,
+ dsattributes.kDS1AttrDistinguishedName,
+ # NEED THIS? dsattributes.kDSNAttrServicesLocator,
+ ]
+
+ if verbose:
+ print "Querying for all %s records" % (recordType,)
+
+ results = opendirectory.listAllRecordsWithAttributes_list(
+ sourceService.directory,
+ recordType,
+ attrs,
+ )
+
+ if verbose:
+ print "Found %d records" % (len(results),)
+
+ return results
+
+
+@inlineCallbacks
+def migrateResources(sourceService, destService, queryMethod=queryForType,
+ verbose=False):
+
+ for recordTypeOD, recordType in (
+ (dsattributes.kDSStdRecordTypeResources, DirectoryService.recordType_resources),
+ (dsattributes.kDSStdRecordTypePlaces, DirectoryService.recordType_locations),
+ ):
+ data = queryMethod(sourceService, recordTypeOD, verbose=verbose)
+ for recordName, val in data:
+ guid = val.get(dsattributes.kDS1AttrGeneratedUID, None)
+ fullName = val.get(dsattributes.kDS1AttrDistinguishedName, None)
+ if guid and fullName:
+ if not recordName:
+ recordName = guid
+ record = destService.recordWithGUID(guid)
+ if record is None:
+ if verbose:
+ print "Migrating %s (%s)" % (fullName, recordType)
+ yield updateRecord(True, destService, recordType,
+ guid=guid, shortNames=[recordName], fullName=fullName,
+ autoSchedule="true")
+
+
+if __name__ == "__main__":
+ main()
Copied: CalendarServer/branches/new-store/calendarserver/tools/test/test_resources.py (from rev 5890, CalendarServer/trunk/calendarserver/tools/test/test_resources.py)
===================================================================
--- CalendarServer/branches/new-store/calendarserver/tools/test/test_resources.py (rev 0)
+++ CalendarServer/branches/new-store/calendarserver/tools/test/test_resources.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -0,0 +1,157 @@
+##
+# Copyright (c) 2005-2010 Apple Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+
+from calendarserver.tools.resources import migrateResources
+from twisted.internet.defer import inlineCallbacks, succeed
+from twistedcaldav.directory import augment
+from twistedcaldav.directory.directory import DirectoryService
+from twistedcaldav.test.util import TestCase
+import dsattributes
+
+
+strGUID = dsattributes.kDS1AttrGeneratedUID
+strName = dsattributes.kDS1AttrDistinguishedName
+
+
+class StubDirectoryRecord(object):
+
+ def __init__(self, recordType, guid=None, shortNames=None, fullName=None):
+ self.recordType = recordType
+ self.guid = guid
+ self.shortNames = shortNames
+ self.fullName = fullName
+
+
+class StubDirectoryService(object):
+
+ def __init__(self):
+ self.records = {}
+
+ def recordWithGUID(self, guid):
+ return None
+
+ def createRecord(self, recordType, guid=None, shortNames=None,
+ fullName=None):
+ record = StubDirectoryRecord(recordType, guid=guid,
+ shortNames=shortNames, fullName=fullName)
+ self.records[guid] = record
+ return record
+
+ def updateRecord(self, recordType, guid=None, shortNames=None,
+ fullName=None):
+ pass
+
+
+class StubAugmentRecord(object):
+
+ def __init__(self, guid=None):
+ self.guid = guid
+ self.autoSchedule = True
+
+
+class StubAugmentService(object):
+
+ records = {}
+
+ @classmethod
+ def getAugmentRecord(cls, guid):
+ if not cls.records.has_key(guid):
+ record = StubAugmentRecord(guid=guid)
+ cls.records[guid] = record
+ return succeed(cls.records[guid])
+
+ @classmethod
+ def addAugmentRecords(cls, records):
+ for record in records:
+ cls.records[record.guid] = record
+ return succeed(True)
+
+
+class MigrateResourcesTestCase(TestCase):
+
+ @inlineCallbacks
+ def test_migrateResources(self):
+
+ data = {
+ dsattributes.kDSStdRecordTypeResources :
+ [
+ ['projector1', {
+ strGUID : '6C99E240-E915-4012-82FA-99E0F638D7EF',
+ strName : 'Projector 1'
+ }],
+ ['projector2', {
+ strGUID : '7C99E240-E915-4012-82FA-99E0F638D7EF',
+ strName : 'Projector 2'
+ }],
+ ],
+ dsattributes.kDSStdRecordTypePlaces :
+ [
+ ['office1', {
+ strGUID : '8C99E240-E915-4012-82FA-99E0F638D7EF',
+ strName : 'Office 1'
+ }],
+ ],
+ }
+
+ def queryMethod(sourceService, recordType, verbose=False):
+ return data[recordType]
+
+ self.patch(augment, "AugmentService", StubAugmentService)
+ directoryService = StubDirectoryService()
+ yield migrateResources(None, directoryService, queryMethod=queryMethod)
+ for guid, recordType in (
+ ('6C99E240-E915-4012-82FA-99E0F638D7EF', DirectoryService.recordType_resources),
+ ('7C99E240-E915-4012-82FA-99E0F638D7EF', DirectoryService.recordType_resources),
+ ('8C99E240-E915-4012-82FA-99E0F638D7EF', DirectoryService.recordType_locations),
+ ):
+ self.assertTrue(guid in directoryService.records)
+ record = directoryService.records[guid]
+ self.assertEquals(record.recordType, recordType)
+
+ self.assertTrue(guid in StubAugmentService.records)
+
+
+ #
+ # Add more to OD and re-migrate
+ #
+
+ data[dsattributes.kDSStdRecordTypeResources].append(
+ ['projector3', {
+ strGUID : '9C99E240-E915-4012-82FA-99E0F638D7EF',
+ strName : 'Projector 3'
+ }]
+ )
+ data[dsattributes.kDSStdRecordTypePlaces].append(
+ ['office2', {
+ strGUID : 'AC99E240-E915-4012-82FA-99E0F638D7EF',
+ strName : 'Office 2'
+ }]
+ )
+
+ yield migrateResources(None, directoryService, queryMethod=queryMethod)
+
+ for guid, recordType in (
+ ('6C99E240-E915-4012-82FA-99E0F638D7EF', DirectoryService.recordType_resources),
+ ('7C99E240-E915-4012-82FA-99E0F638D7EF', DirectoryService.recordType_resources),
+ ('9C99E240-E915-4012-82FA-99E0F638D7EF', DirectoryService.recordType_resources),
+ ('8C99E240-E915-4012-82FA-99E0F638D7EF', DirectoryService.recordType_locations),
+ ('AC99E240-E915-4012-82FA-99E0F638D7EF', DirectoryService.recordType_locations),
+ ):
+ self.assertTrue(guid in directoryService.records)
+ record = directoryService.records[guid]
+ self.assertEquals(record.recordType, recordType)
+
+ self.assertTrue(guid in StubAugmentService.records)
Modified: CalendarServer/branches/new-store/conf/auth/augments.dtd
===================================================================
--- CalendarServer/branches/new-store/conf/auth/augments.dtd 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/conf/auth/augments.dtd 2010-07-14 07:40:35 UTC (rev 5891)
@@ -16,7 +16,7 @@
<!ELEMENT augments (record*) >
- <!ELEMENT record (guid, enable, hosted-at?, enable-calendar?, enable-addressbook?, auto-schedule?)>
+ <!ELEMENT record (uid, enable, hosted-at?, enable-calendar?, enable-addressbook?, auto-schedule?)>
<!ATTLIST record repeat CDATA "1">
<!ELEMENT uid (#PCDATA)>
Modified: CalendarServer/branches/new-store/conf/carddavd-apple.plist
===================================================================
--- CalendarServer/branches/new-store/conf/carddavd-apple.plist 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/conf/carddavd-apple.plist 2010-07-14 07:40:35 UTC (rev 5891)
@@ -546,7 +546,7 @@
<true/>
<key>userNode</key>
<!-- Search for user records in this directory service node -->
- <string>/Search</string>
+ <string>/Search/Contacts</string>
<!-- query in directory service local node -->
<key>queryDSLocal</key>
<false/>
Copied: CalendarServer/branches/new-store/contrib/tools/monitoranalysis.py (from rev 5890, CalendarServer/trunk/contrib/tools/monitoranalysis.py)
===================================================================
--- CalendarServer/branches/new-store/contrib/tools/monitoranalysis.py (rev 0)
+++ CalendarServer/branches/new-store/contrib/tools/monitoranalysis.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -0,0 +1,317 @@
+#!/usr/bin/env python
+##
+# Copyright (c) 2010 Apple Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+
+import matplotlib.pyplot as plt
+import getopt
+import sys
+import os
+import datetime
+
+dataset = []
+initialDate = None
+
+def analyze(fpath, noweekends, startDate=None, endDate=None, title=None):
+
+ print "Analyzing data for %s" % (fpath,)
+ data = []
+ firstDate = None
+ global initialDate
+ with open(fpath) as f:
+ for line in f:
+ try:
+ if line.startswith("2010/0"):
+
+ date = line[:10]
+ if startDate and date < startDate or endDate and date > endDate:
+ continue
+
+ if noweekends:
+ dt = datetime.date(int(date[0:4]), int(date[5:7]), int(date[8:10]))
+ if dt.weekday() > 4:
+ continue
+
+ digits = line[11:13]
+ if digits in ("05", "06"):
+ for _ignore in range(3):
+ f.next()
+ continue
+ dtstamp = line[:19]
+
+ if firstDate is None:
+ firstDate = date.replace("/", "")
+ if initialDate is None:
+ initialDate = firstDate
+
+ if "Listenq" in line:
+ lqnon = line[len("2010/05/12 22:27:24 Listenq (ssl+non): "):].split("+", 1)[1]
+ else:
+ lqnon = line[len("2010/01/05 19:47:23 Listen queue: "):]
+
+ lqnon = int(lqnon.split(" ", 1)[0])
+
+ line = f.next()
+ cpu = int(line[len("CPU idle %: "):].split(" ", 1)[0])
+
+ line = f.next()
+ reqs = int(float(line.split(" ", 1)[0]))
+
+ line = f.next()
+ resp = line[len("Response time: average "):].split(" ", 1)[0]
+ resp = int(float(resp)/10.0) * 10
+
+ if reqs <= 80:
+ data.append((dtstamp, reqs, resp, lqnon, cpu))
+ #print "%s %d %d %d %d" % (dtstamp, reqs, resp, lqnon, cpu)
+ except StopIteration:
+ break
+
+ if not title:
+ if startDate and endDate:
+ title = "Between %s and %s" % (startDate, endDate,)
+ elif startDate:
+ title = "Since %s" % (startDate,)
+ elif endDate:
+ title = "Up to %s" % (endDate,)
+ else:
+ title = "Start at %s" % (firstDate,)
+
+ dataset.append((title, data,))
+
+ print "Stored %d data points" % (len(data),)
+
+def plotListenQBands(data, first, last, xlim, ylim):
+
+ x1 = []
+ y1 = []
+ x2 = []
+ y2 = []
+ x3 = []
+ y3 = []
+ for datetime, reqs, resp, lq, cpu in data:
+ if lq == 0:
+ x1.append(reqs)
+ y1.append(resp)
+ elif lq < 50:
+ x2.append(reqs)
+ y2.append(resp)
+ else:
+ x3.append(reqs)
+ y3.append(resp)
+
+ plt.plot(x1, y1, "b+", x2, y2, "g+", x3, y3, "y+")
+
+ if first:
+ plt.legend(('ListenQ at zero', 'ListenQ < 50', 'ListenQ >= 50'),
+ 'upper right', shadow=True, fancybox=True)
+ if last:
+ plt.xlabel("Requests/second")
+ plt.ylabel("Av. Response Time (ms)")
+ plt.xlim(0, xlim)
+ plt.ylim(0, ylim)
+
+def plotCPUBands(data, first, last, xlim, ylim):
+
+ x = [[], [], [], []]
+ y = [[], [], [], []]
+ for datetime, reqs, resp, lq, cpu in data:
+ if cpu > 75:
+ x[0].append(reqs)
+ y[0].append(resp)
+ elif cpu > 50:
+ x[1].append(reqs)
+ y[1].append(resp)
+ elif cpu > 25:
+ x[2].append(reqs)
+ y[2].append(resp)
+ else:
+ x[3].append(reqs)
+ y[3].append(resp)
+
+ plt.plot(
+ x[0], y[0], "b+",
+ x[1], y[1], "g+",
+ x[2], y[2], "y+",
+ x[3], y[3], "m+",
+ )
+
+ if first:
+ plt.legend(('CPU < 1/4', 'CPU < 1/2', 'CPU < 3/4', "CPU High"),
+ 'upper right', shadow=True, fancybox=True)
+ if last:
+ plt.xlabel("Requests/second")
+ plt.ylabel("Av. Response Time (ms)")
+ plt.xlim(0, xlim)
+ plt.ylim(0, ylim)
+
+def plot(figure, noshow, nosave, pngDir, xlim, ylim):
+
+ print "Plotting data"
+
+ plt.figure(figure, figsize=(16, 5 * len(dataset)))
+
+ nplots = len(dataset)
+ subplot = nplots*100 + 20
+
+ for ctr, item in enumerate(dataset):
+
+ title, data = item
+ if not title:
+ title = "#%d" % (ctr+1,)
+
+ plt.subplot(subplot + 2*ctr + 1)
+ plotListenQBands(data, first=(ctr == 0), last=(ctr+1 == len(dataset)), xlim=xlim, ylim=ylim)
+ plt.title("ListenQ %s" % (title,))
+
+ plt.subplot(subplot + 2*ctr + 2)
+ plotCPUBands(data, first=(ctr == 0), last=(ctr+1 == len(dataset)), xlim=xlim, ylim=ylim)
+ plt.title("CPU %s" % (title,))
+
+def argPath(path):
+ fpath = os.path.expanduser(path)
+ if not fpath.startswith("/"):
+ fpath = os.path.join(pwd, fpath)
+ return fpath
+
+def expandDate(date):
+ return "%s/%s/%s" % (date[0:4], date[4:6], date[6:8],)
+
+def usage(error_msg=None):
+ if error_msg:
+ print error_msg
+
+ print """Usage: monitoranalysis [options] [FILE+]
+Options:
+ -h Print this help and exit
+ -d Directory to save PNGs to
+ -s Directory to scan for data instead of FILEs
+ --no-weekends Ignore data for Saturday and Sunday
+ --no-show Do not show plots on screen
+ --no-save Do not save plots to file
+ --xlim x-axis limit [80]
+ --ylim y-axim limit [4000]
+
+Arguments:
+ FILE File names for the requests.log to analyze. A date
+ range can be specified by append a comma, then a
+ dash seperated pair of YYYYMMDD dates, e.g.:
+ ~/request.log,20100614-20100619. Multiple
+ ranges can be specified for multiple plots.
+
+Description:
+This utility will analyze the output of the request monitor tool and
+generate some pretty plots of data.
+"""
+
+ if error_msg:
+ raise ValueError(error_msg)
+ else:
+ sys.exit(0)
+
+if __name__ == "__main__":
+
+ pngDir = None
+ scanDir = None
+ noweekends = False
+ noshow = False
+ nosave = False
+ xlim = 80
+ ylim = 4000
+ options, args = getopt.getopt(sys.argv[1:], "hd:s:", ["no-weekends", "no-show", "no-save", "xlim=", "ylim="])
+
+ for option, value in options:
+ if option == "-h":
+ usage()
+ elif option == "-d":
+ pngDir = os.path.expanduser(value)
+ elif option == "-s":
+ scanDir = os.path.expanduser(value)
+ elif option == "--no-show":
+ noshow = True
+ elif option == "--no-save":
+ nosave = True
+ elif option == "--no-weekends":
+ noweekends = True
+ elif option == "--xlim":
+ xlim = int(value)
+ elif option == "--ylim":
+ ylim = int(value)
+ else:
+ usage("Unrecognized option: %s" % (option,))
+
+ if pngDir is None and scanDir:
+ pngDir = scanDir
+
+ if not nosave and not os.path.isdir(pngDir):
+ usage("Must have a valid -d path for saving images")
+
+ # Process arguments
+ if len(args) == 0 and scanDir is None:
+ usage("Must have arguments")
+ elif scanDir and len(args) != 0:
+ usage("No arguments allowed when scanning a directory")
+
+ pwd = os.getcwd()
+
+ if scanDir:
+ fnames = os.listdir(scanDir)
+ count = 1
+ for name in fnames:
+ if name.startswith("request.log"):
+ print "Found file: %s" % (os.path.join(scanDir, name),)
+ trailer = name[len("request.log"):]
+ if trailer.startswith("."):
+ trailer = trailer[1:]
+ initialDate = None
+ dataset = []
+ analyze(os.path.join(scanDir, name), noweekends)
+ plot(count, noshow, nosave, pngDir, xlim, ylim)
+ if not nosave:
+ plt.savefig(os.path.expanduser(os.path.join(pngDir, "Monitor-%s" % (trailer,))))
+ count += 1
+
+ if not noshow:
+ plt.show()
+ else:
+ for arg in args:
+ if "," in arg:
+ items = arg.split(",")
+ arg = items[0]
+ start = []
+ end = []
+ for daterange in items[1:]:
+ splits = daterange.split("-")
+ if len(splits) == 1:
+ start.append(expandDate(splits[0]))
+ end.append(None)
+ elif len(splits) == 2:
+ start.append(expandDate(splits[0]))
+ end.append(expandDate(splits[1]))
+ else:
+ start.append(None)
+ end.append(None)
+ else:
+ start = (None,)
+ end = (None,)
+
+ for i in range(len(start)):
+ analyze(argPath(arg), noweekends, start[i], end[i])
+
+ plot(1, noshow, nosave, pngDir, xlim, ylim)
+ if not nosave:
+ plt.savefig(os.path.expanduser(os.path.join(pngDir, "Monitor-%s" % (initialDate,))))
+ if not noshow:
+ plt.show()
Copied: CalendarServer/branches/new-store/contrib/tools/monitorsplit.py (from rev 5890, CalendarServer/trunk/contrib/tools/monitorsplit.py)
===================================================================
--- CalendarServer/branches/new-store/contrib/tools/monitorsplit.py (rev 0)
+++ CalendarServer/branches/new-store/contrib/tools/monitorsplit.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+##
+# Copyright (c) 2010 Apple Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##
+
+import getopt
+import sys
+import os
+from gzip import GzipFile
+import datetime
+
+outputFile = None
+fileCount = 0
+lastWeek = None
+
+def split(fpath, outputDir):
+
+ global outputFile, fileCount, lastWeek
+
+ print "Splitting data for %s" % (fpath,)
+ f = GzipFile(fpath) if fpath.endswith(".gz") else open(fpath)
+ for line in f:
+ if line.startswith("2010/0"):
+ date = line[:10]
+ date = date.replace("/", "")
+ hours = line[11:13]
+
+ dt = datetime.date(int(date[0:4]), int(date[4:6]), int(date[6:8]))
+
+ currentWeek = dt.isocalendar()[1]
+ if dt.weekday() == 0 and hours <= "06":
+ currentWeek -= 1
+ if lastWeek != currentWeek:
+ if outputFile:
+ outputFile.close()
+ outputFile = open(os.path.join(outputDir, "request.log.%s" % (date,)), "w")
+ fileCount += 1
+ lastWeek = currentWeek
+ print "Changed to week of %s" % (date,)
+
+ output = ["-----\n"]
+ output.append(line)
+ try:
+ output.append(f.next())
+ output.append(f.next())
+ output.append(f.next())
+ except StopIteration:
+ break
+ outputFile.write("".join(output))
+ f.close()
+
+def argPath(path):
+ fpath = os.path.expanduser(path)
+ if not fpath.startswith("/"):
+ fpath = os.path.join(pwd, fpath)
+ return fpath
+
+def expandDate(date):
+ return "%s/%s/%s" % (date[0:4], date[4:6], date[6:8],)
+
+def usage(error_msg=None):
+ if error_msg:
+ print error_msg
+
+ print """Usage: monitoranalysis [options] FILE+
+Options:
+ -h Print this help and exit
+ -d Directory to store split files in
+
+Arguments:
+ FILE File names for the requests.log to analyze. A date
+ range can be specified by append a comma, then a
+ dash seperated pair of YYYYMMDD dates, e.g.:
+ ~/request.log,20100614-20100619. Multiple
+ ranges can be specified for multiple plots.
+
+Description:
+This utility will analyze the output of the request monitor tool and
+generate some pretty plots of data.
+"""
+
+ if error_msg:
+ raise ValueError(error_msg)
+ else:
+ sys.exit(0)
+
+if __name__ == "__main__":
+
+ outputDir = None
+
+ options, args = getopt.getopt(sys.argv[1:], "hd:", [])
+
+ for option, value in options:
+ if option == "-h":
+ usage()
+ elif option == "-d":
+ outputDir = argPath(value)
+ else:
+ usage("Unrecognized option: %s" % (option,))
+
+ if not outputDir or not os.path.isdir(outputDir):
+ usage("Must specify a valid output directory.")
+
+ # Process arguments
+ if len(args) == 0:
+ usage("Must have arguments")
+
+ pwd = os.getcwd()
+
+ for arg in args:
+ split(argPath(arg), outputDir)
+
+ print "Created %d files" % (fileCount,)
Copied: CalendarServer/branches/new-store/doc/calendarserver_migrate_resources.8 (from rev 5890, CalendarServer/trunk/doc/calendarserver_migrate_resources.8)
===================================================================
--- CalendarServer/branches/new-store/doc/calendarserver_migrate_resources.8 (rev 0)
+++ CalendarServer/branches/new-store/doc/calendarserver_migrate_resources.8 2010-07-14 07:40:35 UTC (rev 5891)
@@ -0,0 +1,51 @@
+.\"
+.\" Copyright (c) 2006-2010 Apple Inc. All rights reserved.
+.\"
+.\" Licensed under the Apache License, Version 2.0 (the "License");
+.\" you may not use this file except in compliance with the License.
+.\" You may obtain a copy of the License at
+.\"
+.\" http://www.apache.org/licenses/LICENSE-2.0
+.\"
+.\" Unless required by applicable law or agreed to in writing, software
+.\" distributed under the License is distributed on an "AS IS" BASIS,
+.\" WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+.\" See the License for the specific language governing permissions and
+.\" limitations under the License.
+.\"
+.\" The following requests are required for all man pages.
+.Dd June 17, 2009
+.Dt CALENDARSERVER_MIGRATE_RESOURCES 8
+.Os
+.Sh NAME
+.Nm calendarserver_migrate_resources
+.Nd Darwin Calendar Server resource and location migration utility
+.Sh SYNOPSIS
+.Nm
+.Op Fl -config Ar file
+.Op Fl -verbose
+.Op Fl -help
+.Sh DESCRIPTION
+.Nm
+is a tool for migrating resource and location records from OpenDirectory into the calendar server's internal directory.
+.Pp
+.Nm
+should be run as a user with the same priviledges as the Calendar
+Server itself, as it needs to read and write data that belongs to the
+server.
+.Sh OPTIONS
+.Bl -tag -width flag
+.It Fl h, -help
+Display usage information
+.It Fl f, -config Ar FILE
+Use the Calendar Server configuration specified in the given file. Defaults to /etc/caldavd/caldavd.plist.
+.It Fl v, -verbose
+Print progress information including the names of resources and locations being migrated.
+.El
+.Sh FILES
+.Bl -tag -width flag
+.It /etc/caldavd/caldavd.plist
+The Calendar Server configuration file.
+.El
+.Sh SEE ALSO
+.Xr caldavd 8
Modified: CalendarServer/branches/new-store/lib-patches/vobject/vobject.icalendar.patch
===================================================================
--- CalendarServer/branches/new-store/lib-patches/vobject/vobject.icalendar.patch 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/lib-patches/vobject/vobject.icalendar.patch 2010-07-14 07:40:35 UTC (rev 5891)
@@ -2,7 +2,7 @@
===================================================================
--- vobject/icalendar.py (revision 219)
+++ vobject/icalendar.py (working copy)
-@@ -1622,11 +1622,19 @@
+@@ -1622,11 +1622,15 @@
# DQUOTE included to work around iCal's penchant for backslash escaping it,
# although it isn't actually supposed to be escaped according to rfc2445 TEXT
@@ -11,11 +11,7 @@
+# COLON included to work around Address Book's escaping it in vCard data,
+# despite the fact that it is similarly not supposed to be escaped.
+
-+# Isn't it a bad idea to be lenient with \" ?
-+if True:
-+ escapableCharList = '\\;,Nn'
-+else:
-+ escapableCharList = '\\;:,Nn"'
++escapableCharList = '\\;:,Nn'
+
def stringToTextValues(s, listSeparator=',', charList=None, strict=False):
"""Returns list of strings."""
@@ -24,7 +20,7 @@
if charList is None:
charList = escapableCharList
-@@ -1675,9 +1683,10 @@
+@@ -1675,9 +1679,10 @@
else:
current.append(char)
else:
Modified: CalendarServer/branches/new-store/support/Makefile.Apple
===================================================================
--- CalendarServer/branches/new-store/support/Makefile.Apple 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/support/Makefile.Apple 2010-07-14 07:40:35 UTC (rev 5891)
@@ -88,6 +88,7 @@
$(_v) $(INSTALL_FILE) "$(Sources)/doc/carddavd.8" "$(DSTROOT)$(MANDIR)/man8"
$(_v) $(INSTALL_FILE) "$(Sources)/doc/calendarserver_export.8" "$(DSTROOT)$(MANDIR)/man8"
$(_v) $(INSTALL_FILE) "$(Sources)/doc/calendarserver_manage_principals.8" "$(DSTROOT)$(MANDIR)/man8"
+ $(_v) $(INSTALL_FILE) "$(Sources)/doc/calendarserver_migrate_resources.8" "$(DSTROOT)$(MANDIR)/man8"
$(_v) $(INSTALL_FILE) "$(Sources)/doc/calendarserver_command_gateway.8" "$(DSTROOT)$(MANDIR)/man8"
$(_v) $(INSTALL_FILE) "$(Sources)/doc/calendarserver_purge_events.8" "$(DSTROOT)$(MANDIR)/man8"
$(_v) gzip -9 -f "$(DSTROOT)$(MANDIR)/man8/"*.[0-9]
Modified: CalendarServer/branches/new-store/support/build.sh
===================================================================
--- CalendarServer/branches/new-store/support/build.sh 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/support/build.sh 2010-07-14 07:40:35 UTC (rev 5891)
@@ -158,6 +158,16 @@
www_get () {
if ! "${do_get}"; then return 0; fi;
+ local md5="";
+
+ OPTIND=1;
+ while getopts "m:" option; do
+ case "${option}" in
+ 'm') md5="${OPTARG}"; ;;
+ esac;
+ done;
+ shift $((${OPTIND} - 1));
+
local name="$1"; shift;
local path="$1"; shift;
local url="$1"; shift;
@@ -187,6 +197,15 @@
curl -L "${url}" -o "${cache_file}";
fi;
+ if [ -n "${md5}" ]; then
+ echo "Checking MD5 sum for ${name}...";
+ local sum="$(md5 "${cache_file}" | perl -pe 's|^.*([0-9a-f]{32}).*$|\1|')";
+ if [ "${md5}" != "${sum}" ]; then
+ echo "ERROR: MD5 sum for cache file ${cache_file} ${sum} != ${md5}. Corrupt file?";
+ exit 1;
+ fi;
+ fi;
+
echo "Unpacking ${name} from cache...";
get () { cat "${cache_file}"; }
else
@@ -334,9 +353,10 @@
local revision="0"; # Revision (if svn)
local get_type="www"; # Protocol to use
local version=""; # Minimum version required
+ local f_hash=""; # Checksum
OPTIND=1;
- while getopts "ofier:v:" option; do
+ while getopts "ofier:v:m:" option; do
case "${option}" in
'o') optional="true"; ;;
'f') override="true"; ;;
@@ -344,6 +364,7 @@
'e') skip_egg="true"; ;;
'r') get_type="svn"; revision="${OPTARG}"; ;;
'v') version="-v ${OPTARG}"; ;;
+ 'm') f_hash="-m ${OPTARG}"; ;;
esac;
done;
shift $((${OPTIND} - 1));
@@ -360,7 +381,7 @@
echo "";
fi;
if "${override}" || ! py_have_module ${version} "${module}"; then
- "${get_type}_get" "${name}" "${srcdir}" "${get_uri}" "${revision}"
+ "${get_type}_get" ${f_hash} "${name}" "${srcdir}" "${get_uri}" "${revision}"
if "${inplace}"; then
if "${do_setup}" && "${override}" && ! "${skip_egg}"; then
echo;
@@ -514,9 +535,9 @@
"http://pypi.python.org/packages/source/s/select26/select26-0.1a3.tar.gz";
fi;
- py_dependency -v 4.0 \
+ py_dependency -v 4.0 -m 1aca50e59ff4cc56abe9452a9a49c5ff -o \
"PyGreSQL" "pgdb" "PyGreSQL-4.0" \
- "ftp://ftp.pygresql.org/pub/distrib/PyGreSQL.tgz";
+ "http://pypi.python.org/packages/source/P/PyGreSQL/PyGreSQL-4.0.tar.gz";
py_dependency -v 10 -r 28657 \
"Twisted" "twisted" "Twisted" \
Modified: CalendarServer/branches/new-store/twext/internet/sendfdport.py
===================================================================
--- CalendarServer/branches/new-store/twext/internet/sendfdport.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twext/internet/sendfdport.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -274,7 +274,10 @@
# should this be on the transportFactory's side of things?
close(fd) # fromfd() calls dup()
- peeraddr = skt.getpeername()
+ try:
+ peeraddr = skt.getpeername()
+ except SocketError:
+ peeraddr = ('0.0.0.0', 0)
protocol = self.protocolFactory.buildProtocol(peeraddr)
transport = self.transportFactory(skt, description, protocol)
protocol.makeConnection(transport)
Modified: CalendarServer/branches/new-store/twistedcaldav/directory/appleopendirectory.py
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/directory/appleopendirectory.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/directory/appleopendirectory.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -392,7 +392,7 @@
return ()
def recordsMatchingFields(self, fields, operand="or", recordType=None,
- lookupMethod=opendirectory.queryRecordsWithAttributes_list):
+ lookupMethod=opendirectory.queryRecordsWithAttribute_list):
# Note that OD applies case-sensitivity globally across the entire
# query, not per expression, so the current code uses whatever is
@@ -407,9 +407,6 @@
dsattributes.kDSNAttrMetaNodeLocation)
recordShortNames = self._uniqueTupleFromAttribute(
value.get(dsattributes.kDSNAttrRecordName))
- if (recordNodeName == "/Local/Default" and not
- (config.Scheduling.iMIP.Username in recordShortNames)):
- continue
recordGUID = value.get(dsattributes.kDS1AttrGeneratedUID)
@@ -469,40 +466,58 @@
pass
def multiQuery(directory, queries, attrs, operand):
- results = []
+ byGUID = { }
+ sets = []
for query, recordTypes in queries.iteritems():
- if not query:
- continue
+ ODField, value, caseless, matchType = query
+ if matchType == "starts-with":
+ comparison = dsattributes.eDSStartsWith
+ elif matchType == "contains":
+ comparison = dsattributes.eDSContains
+ else:
+ comparison = dsattributes.eDSExact
- expressions = []
- for ODField, value, caseless, matchType in query:
- if matchType == "starts-with":
- comparison = dsattributes.eDSStartsWith
- elif matchType == "contains":
- comparison = dsattributes.eDSContains
- else:
- comparison = dsattributes.eDSExact
- expressions.append(dsquery.match(ODField, value, comparison))
+ self.log_debug("Calling OD: Types %s, Field %s, Value %s, Match %s, Caseless %s" %
+ (recordTypes, ODField, value, matchType, caseless))
- complexExpression = dsquery.expression(operand, expressions).generate()
+ queryResults = lookupMethod(
+ directory,
+ ODField,
+ value,
+ comparison,
+ caseless,
+ recordTypes,
+ attrs,
+ )
- self.log_debug("Calling OD: Types %s, Operand %s, Caseless %s, %s" %
- (recordTypes, operand, caseless, complexExpression))
+ if operand == dsquery.expression.OR:
+ for recordName, data in queryResults:
+ guid = data.get(dsattributes.kDS1AttrGeneratedUID, None)
+ if guid:
+ byGUID[guid] = (recordName, data)
+ else: # AND
+ newSet = set()
+ for recordName, data in queryResults:
+ guid = data.get(dsattributes.kDS1AttrGeneratedUID, None)
+ if guid:
+ byGUID[guid] = (recordName, data)
+ newSet.add(guid)
- results.extend(
- lookupMethod(
- directory,
- complexExpression,
- caseless,
- recordTypes,
- attrs,
- )
- )
+ sets.append(newSet)
- return results
+ if operand == dsquery.expression.OR:
+ return byGUID.values()
+ else:
+ results = []
+ for guid in set.intersection(*sets):
+ recordName, data = byGUID.get(guid, None)
+ if data is not None:
+ results.append((data[dsattributes.kDSNAttrRecordName], data))
+ return results
+
operand = (dsquery.expression.OR if operand == "or"
else dsquery.expression.AND)
@@ -636,11 +651,6 @@
recordEmailAddresses = self._setFromAttribute(value.get(dsattributes.kDSNAttrEMailAddress), lower=True)
recordNodeName = value.get(dsattributes.kDSNAttrMetaNodeLocation)
- if recordNodeName == "/Local/Default" and not (config.Scheduling.iMIP.Username in recordShortNames):
- self.log_info("Local record (%s)%s is not eligible for calendaring."
- % (recordType, recordShortName))
- continue
-
if not recordType:
self.log_debug("Record (unknown)%s in node %s has no recordType; ignoring."
% (recordShortName, recordNodeName))
@@ -766,22 +776,19 @@
results (either none, or all records).
"""
- fieldLists = {}
+ queries = {}
for recordType in recordTypes:
- fieldLists[recordType] = []
for field, value, caseless, matchType in fields:
if field in mapping:
if recordType in mapping[field]['appliesTo']:
ODField = mapping[field]['odField']
- fieldLists[recordType].append((ODField, value, caseless, matchType))
+ key = (ODField, value, caseless, matchType)
+ queries.setdefault(key, []).append(recordType)
- queries = {}
- for recordType, fieldList in fieldLists.iteritems():
- key = tuple(fieldList)
- queries.setdefault(key, []).append(recordType)
return queries
+
class OpenDirectoryRecord(CachingDirectoryRecord):
"""
OpenDirectory implementation of L{IDirectoryRecord}.
Modified: CalendarServer/branches/new-store/twistedcaldav/directory/augment.py
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/directory/augment.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/directory/augment.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -63,7 +63,8 @@
"""
def __init__(self):
- pass
+
+ self.cachedRecords = {}
@inlineCallbacks
def getAugmentRecord(self, uid):
@@ -77,26 +78,29 @@
"""
result = (yield self._lookupAugmentRecord(uid))
- if result is None:
- if not hasattr(self, "_defaultRecord"):
- self._defaultRecord = (yield self._lookupAugmentRecord("Default"))
- if self._defaultRecord is None:
- # No default was specified in the db, so generate one
- self._defaultRecord = AugmentRecord(
- "Default",
- enabled=True,
- enabledForCalendaring=True,
- enabledForAddressBooks=True,
- )
+ if result is not None:
+ returnValue(result)
- result = copy.deepcopy(self._defaultRecord)
- result.uid = uid
+ # Try wildcard/default matches next
+ for lookup in ("%s*" % (uid[0:2],), "%s*" % (uid[0],), "Default"):
+ result = (yield self._cachedAugmentRecord(lookup))
+ if result is not None:
+ result = copy.deepcopy(result)
+ result.uid = uid
+ result.clonedFromDefault = True
+ returnValue(result)
- # Mark default-cloned augment records as such so
- # DirectoryRecord.addAugmentInformation( ) can avoid unneccesary
- # error messages:
- result.clonedFromDefault = True
-
+ # No default was specified in the db, so generate one
+ result = AugmentRecord(
+ "Default",
+ enabled=True,
+ enabledForCalendaring=True,
+ enabledForAddressBooks=True,
+ )
+ self.cachedRecords["Default"] = result
+ result = copy.deepcopy(result)
+ result.uid = uid
+ result.clonedFromDefault = True
returnValue(result)
@inlineCallbacks
@@ -121,6 +125,22 @@
raise NotImplementedError("Child class must define this.")
+ @inlineCallbacks
+ def _cachedAugmentRecord(self, uid):
+ """
+ Get an AugmentRecord for the specified UID from the cache.
+
+ @param uid: directory UID to lookup
+ @type uid: C{str}
+
+ @return: L{Deferred}
+ """
+
+ if not uid in self.cachedRecords:
+ result = (yield self._lookupAugmentRecord(uid))
+ self.cachedRecords[uid] = result
+ returnValue(self.cachedRecords[uid])
+
def addAugmentRecords(self, records):
"""
Add an AugmentRecord to the DB.
@@ -152,6 +172,7 @@
@return: L{Deferred}
"""
+ self.cachedRecords.clear()
return succeed(None)
def clean(self):
@@ -173,6 +194,7 @@
def __init__(self, xmlFiles, cacheTimeout=30):
+ super(AugmentXMLDB, self).__init__()
self.xmlFiles = [fullServerPath(config.DataRoot, path) for path in xmlFiles]
self.cacheTimeout = cacheTimeout * 60 # Value is mins we want secs
self.lastCached = 0
@@ -378,6 +400,7 @@
"""
Refresh any cached data.
"""
+ super(AugmentXMLDB, self).refresh()
try:
self.db = self._parseXML()
except RuntimeError:
@@ -431,6 +454,7 @@
def __init__(self, dbID, dbapiName, dbapiArgs, **kwargs):
+ AugmentDB.__init__(self)
self.cachedPartitions = {}
self.cachedHostedAt = {}
Modified: CalendarServer/branches/new-store/twistedcaldav/directory/test/augments-test-default.xml
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/directory/test/augments-test-default.xml 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/directory/test/augments-test-default.xml 2010-07-14 07:40:35 UTC (rev 5891)
@@ -27,6 +27,30 @@
<hosted-at>00001</hosted-at>
</record>
<record>
+ <uid>AA*</uid>
+ <enable>true</enable>
+ <hosted-at>00001</hosted-at>
+ </record>
+ <record>
+ <uid>AB*</uid>
+ <enable>false</enable>
+ </record>
+ <record>
+ <uid>B*</uid>
+ <enable>true</enable>
+ <hosted-at>00002</hosted-at>
+ <enable-calendar>true</enable-calendar>
+ <enable-addressbook>true</enable-addressbook>
+ </record>
+ <record>
+ <uid>C*</uid>
+ <enable>true</enable>
+ <hosted-at>00003</hosted-at>
+ <enable-calendar>true</enable-calendar>
+ <enable-addressbook>true</enable-addressbook>
+ <auto-schedule>true</auto-schedule>
+ </record>
+ <record>
<uid>D11F03A0-97EA-48AF-9A6C-FAC7F3975766</uid>
<enable>true</enable>
</record>
@@ -60,7 +84,6 @@
<uid>6A73326A-F781-47E7-A9F8-AF47364D4152</uid>
<enable>true</enable>
<hosted-at>00002</hosted-at>
- <enable>true</enable>
<enable-calendar>true</enable-calendar>
<enable-addressbook>true</enable-addressbook>
<auto-schedule>true</auto-schedule>
Modified: CalendarServer/branches/new-store/twistedcaldav/directory/test/test_augment.py
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/directory/test/test_augment.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/directory/test/test_augment.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -36,8 +36,19 @@
{"uid":"6A73326A-F781-47E7-A9F8-AF47364D4152", "enabled":True, "hostedAt":"00002", "enabledForCalendaring":True, "enabledForAddressBooks":True, "autoSchedule":True},
)
-testRecordDefault = {"uid":"A4318887-F2C7-4A70-9056-B88CC8DB26F1", "enabled":True, "hostedAt":"00001", "enabledForCalendaring":True, "enabledForAddressBooks":True, "autoSchedule":False}
+testRecordWildcardDefault = (
+ {"uid":"A4318887-F2C7-4A70-9056-B88CC8DB26F1", "enabled":True, "hostedAt":"00001", "enabledForCalendaring":True, "enabledForAddressBooks":True, "autoSchedule":False},
+ {"uid":"AA5F935F-3358-4510-A649-B391D63279F2", "enabled":True, "hostedAt":"00001", "enabledForCalendaring":False, "enabledForAddressBooks":False, "autoSchedule":False},
+ {"uid":"ABF1A83B-1A29-4E04-BDC3-A6A66ECF27CA", "enabled":False, "hostedAt":"", "enabledForCalendaring":False, "enabledForAddressBooks":False, "autoSchedule":False},
+ {"uid":"BC22A734-5E41-4FB7-B5C1-51DC0656DC2F", "enabled":True, "hostedAt":"00002", "enabledForCalendaring":True, "enabledForAddressBooks":True, "autoSchedule":False},
+ {"uid":"C6DEEBB1-E14A-47F2-98BA-7E3BB4353E3A", "enabled":True, "hostedAt":"00003", "enabledForCalendaring":True, "enabledForAddressBooks":True, "autoSchedule":True },
+ {"uid":"AA859321-2C72-4974-ADCF-0CBA0C76F95D", "enabled":True, "hostedAt":"00001", "enabledForCalendaring":False, "enabledForAddressBooks":False, "autoSchedule":False},
+ {"uid":"AB7C488B-9ED2-4265-881C-7E2E38A63584", "enabled":False, "hostedAt":"", "enabledForCalendaring":False, "enabledForAddressBooks":False, "autoSchedule":False},
+ {"uid":"BB0C0DA1-0545-45F6-8D08-917C554D93A4", "enabled":True, "hostedAt":"00002", "enabledForCalendaring":True, "enabledForAddressBooks":True, "autoSchedule":False},
+ {"uid":"CCD30AD3-582F-4682-8B65-2EDE92C5656E", "enabled":True, "hostedAt":"00003", "enabledForCalendaring":True, "enabledForAddressBooks":True, "autoSchedule":True },
+)
+
testAddRecords = (
{"uid":"D11F03A0-97EA-48AF-9A6C-FAC7F3975767", "enabled":True, "hostedAt":"", "enabledForCalendaring":False, "enabledForAddressBooks":False, "autoSchedule":False},
)
@@ -53,110 +64,27 @@
def _checkRecord(self, db, items):
record = (yield db.getAugmentRecord(items["uid"]))
- self.assertTrue(record is not None)
+ self.assertTrue(record is not None, "Failed record uid: %s" % (items["uid"],))
for k,v in items.iteritems():
- self.assertEqual(getattr(record, k), v)
+ self.assertEqual(getattr(record, k), v, "Failed record uid: %s, attribute: %s" % (items["uid"], k, ))
@inlineCallbacks
def _checkRecordExists(self, db, uid):
record = (yield db.getAugmentRecord(uid))
- self.assertTrue(record is not None)
+ self.assertTrue(record is not None, "Failed record uid: %s" % (uid,))
-class AugmentXMLTests(AugmentTests):
+class AugmentTestsMixin(object):
- @inlineCallbacks
- def test_read(self):
-
- db = AugmentXMLDB((xmlFile,))
+ def _db(self, dbpath=None):
+ raise NotImplementedError
- for item in testRecords:
- yield self._checkRecord(db, item)
-
- # Verify that a default record is returned, even if not specified
- # in the DB
- yield self._checkRecordExists(db, "D11F03A0-97EA-48AF-9A6C-FAC7F3975767")
-
@inlineCallbacks
- def test_read_default(self):
-
- db = AugmentXMLDB((xmlFileDefault,))
-
- for item in testRecords:
- yield self._checkRecord(db, item)
-
- yield self._checkRecord(db, testRecordDefault)
-
- def test_parseErrors(self):
-
- db = {}
- self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO(""), db)
- self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO("""<?xml version="1.0" encoding="utf-8"?>
-<accounts>
- <foo/>
-</accounts>
-"""), db)
- self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO("""<?xml version="1.0" encoding="utf-8"?>
-<augments>
- <foo/>
-</augments>
-"""), db)
- self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO("""<?xml version="1.0" encoding="utf-8"?>
-<augments>
- <record>
- <enable>true</enable>
- </record>
-</augments>
-"""), db)
- self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO("""<?xml version="1.0" encoding="utf-8"?>
- <record>
- <uid>admin</uid>
- <enable>true</enable>
- <foo/>
- </record>
-"""), db)
-
- @inlineCallbacks
- def test_add_modify(self):
-
- # Duplicate file as we will change it
- newxmlfile = FilePath(self.mktemp())
- FilePath(xmlFile).copyTo(newxmlfile)
-
- db = AugmentXMLDB((newxmlfile.path,))
-
- for item in testRecords:
- yield self._checkRecord(db, item)
-
- newrecord = AugmentRecord(
- **testAddRecords[0]
- )
- yield db.addAugmentRecords((newrecord,))
-
- newdb = AugmentXMLDB((newxmlfile.path,))
-
- for item in testRecords:
- yield self._checkRecord(newdb, item)
- yield self._checkRecord(newdb, testAddRecords[0])
-
- newrecord = AugmentRecord(
- **testModifyRecords[0]
- )
- yield db.addAugmentRecords((newrecord,))
-
- newdb = AugmentXMLDB((newxmlfile.path,))
-
- for item in testRecords:
- yield self._checkRecord(newdb, item)
- yield self._checkRecord(newdb, testModifyRecords[0])
-
-class AugmentSqliteTests(AugmentTests):
-
- @inlineCallbacks
def test_read(self):
- db = AugmentSqliteDB(os.path.abspath(self.mktemp()))
+ dbpath = os.path.abspath(self.mktemp())
+ db = self._db(dbpath)
dbxml = AugmentXMLDB((xmlFile,))
yield db.addAugmentRecords(dbxml.db.values())
@@ -171,7 +99,8 @@
@inlineCallbacks
def test_read_default(self):
- db = AugmentSqliteDB(os.path.abspath(self.mktemp()))
+ dbpath = os.path.abspath(self.mktemp())
+ db = self._db(dbpath)
dbxml = AugmentXMLDB((xmlFileDefault,))
yield db.addAugmentRecords(dbxml.db.values())
@@ -179,13 +108,18 @@
for item in testRecords:
yield self._checkRecord(db, item)
- yield self._checkRecord(db, testRecordDefault)
+ for item in testRecordWildcardDefault:
+ yield self._checkRecord(db, item)
+ # Do a second time to test caching
+ for item in testRecordWildcardDefault:
+ yield self._checkRecord(db, item)
+
@inlineCallbacks
def test_add_modify(self):
dbpath = os.path.abspath(self.mktemp())
- db = AugmentSqliteDB(dbpath)
+ db = self._db(dbpath)
dbxml = AugmentXMLDB((xmlFile,))
yield db.addAugmentRecords(dbxml.db.values())
@@ -202,7 +136,7 @@
)
yield db.addAugmentRecords((newrecord,))
- newdb = AugmentSqliteDB(dbpath)
+ newdb = self._db(dbpath)
for item in testRecords:
yield self._checkRecord(newdb, item)
@@ -213,23 +147,19 @@
)
yield db.addAugmentRecords((newrecord,))
- newdb = AugmentSqliteDB(dbpath)
+ newdb = self._db(dbpath)
for item in testRecords:
yield self._checkRecord(newdb, item)
yield self._checkRecord(newdb, testModifyRecords[0])
-class AugmentPostgreSQLTests(AugmentTests):
+class AugmentXMLTests(AugmentTests):
@inlineCallbacks
def test_read(self):
- db = AugmentPostgreSQLDB("localhost", "augments")
- yield db.clean()
+ db = AugmentXMLDB((xmlFile,))
- dbxml = AugmentXMLDB((xmlFile,))
- yield db.addAugmentRecords(dbxml.db.values())
-
for item in testRecords:
yield self._checkRecord(db, item)
@@ -240,39 +170,61 @@
@inlineCallbacks
def test_read_default(self):
- db = AugmentPostgreSQLDB("localhost", "augments")
- yield db.clean()
+ db = AugmentXMLDB((xmlFileDefault,))
- dbxml = AugmentXMLDB((xmlFileDefault,))
- yield db.addAugmentRecords(dbxml.db.values())
-
for item in testRecords:
yield self._checkRecord(db, item)
- yield self._checkRecord(db, testRecordDefault)
+ for item in testRecordWildcardDefault:
+ yield self._checkRecord(db, item)
+ def test_parseErrors(self):
+
+ db = {}
+ self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO(""), db)
+ self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO("""<?xml version="1.0" encoding="utf-8"?>
+<accounts>
+ <foo/>
+</accounts>
+"""), db)
+ self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO("""<?xml version="1.0" encoding="utf-8"?>
+<augments>
+ <foo/>
+</augments>
+"""), db)
+ self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO("""<?xml version="1.0" encoding="utf-8"?>
+<augments>
+ <record>
+ <enable>true</enable>
+ </record>
+</augments>
+"""), db)
+ self.assertRaises(RuntimeError, XMLAugmentsParser, cStringIO.StringIO("""<?xml version="1.0" encoding="utf-8"?>
+ <record>
+ <uid>admin</uid>
+ <enable>true</enable>
+ <foo/>
+ </record>
+"""), db)
+
@inlineCallbacks
def test_add_modify(self):
- db = AugmentPostgreSQLDB("localhost", "augments")
- yield db.clean()
+ # Duplicate file as we will change it
+ newxmlfile = FilePath(self.mktemp())
+ FilePath(xmlFile).copyTo(newxmlfile)
+
+ db = AugmentXMLDB((newxmlfile.path,))
- dbxml = AugmentXMLDB((xmlFile,))
- yield db.addAugmentRecords(dbxml.db.values())
-
for item in testRecords:
yield self._checkRecord(db, item)
- # Verify that a default record is returned, even if not specified
- # in the DB
- yield self._checkRecordExists(db, "D11F03A0-97EA-48AF-9A6C-FAC7F3975767")
-
newrecord = AugmentRecord(
**testAddRecords[0]
)
yield db.addAugmentRecords((newrecord,))
- newdb = AugmentPostgreSQLDB("localhost", "augments")
+ newdb = AugmentXMLDB((newxmlfile.path,))
for item in testRecords:
yield self._checkRecord(newdb, item)
@@ -283,12 +235,22 @@
)
yield db.addAugmentRecords((newrecord,))
- newdb = AugmentPostgreSQLDB("localhost", "augments")
+ newdb = AugmentXMLDB((newxmlfile.path,))
for item in testRecords:
yield self._checkRecord(newdb, item)
yield self._checkRecord(newdb, testModifyRecords[0])
+class AugmentSqliteTests(AugmentTests, AugmentTestsMixin):
+
+ def _db(self, dbpath=None):
+ return AugmentSqliteDB(dbpath if dbpath else os.path.abspath(self.mktemp()))
+
+class AugmentPostgreSQLTests(AugmentTests, AugmentTestsMixin):
+
+ def _db(self, dbpath=None):
+ return AugmentPostgreSQLDB("localhost", "augments")
+
try:
import pgdb
except ImportError:
Modified: CalendarServer/branches/new-store/twistedcaldav/directory/test/test_buildquery.py
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/directory/test/test_buildquery.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/directory/test/test_buildquery.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -31,10 +31,8 @@
OpenDirectoryService._ODFields
),
{
- (
- ('dsAttrTypeStandard:FirstName', 'morgen', True, 'starts-with'),
- ('dsAttrTypeStandard:LastName', 'sagen', True, 'starts-with')
- ): ['dsRecTypeStandard:Users']
+ ('dsAttrTypeStandard:FirstName', 'morgen', True, 'starts-with') : [dsattributes.kDSStdRecordTypeUsers],
+ ('dsAttrTypeStandard:LastName', 'sagen', True, 'starts-with') : [dsattributes.kDSStdRecordTypeUsers],
}
)
self.assertEquals(
@@ -49,10 +47,8 @@
OpenDirectoryService._ODFields
),
{
- (
- ('dsAttrTypeStandard:FirstName', 'morgen', True, 'starts-with'),
- ('dsAttrTypeStandard:EMailAddress', 'morgen', True, 'contains'),
- ): ['dsRecTypeStandard:Users'],
+ ('dsAttrTypeStandard:FirstName', 'morgen', True, 'starts-with') : [dsattributes.kDSStdRecordTypeUsers],
+ ('dsAttrTypeStandard:EMailAddress', 'morgen', True, 'contains') : [dsattributes.kDSStdRecordTypeUsers],
}
)
self.assertEquals(
@@ -69,10 +65,8 @@
OpenDirectoryService._ODFields
),
{
- (
- ('dsAttrTypeStandard:RealName', 'morgen', True, 'starts-with'),
- ('dsAttrTypeStandard:EMailAddress', 'morgen', True, 'contains'),
- ): ['dsRecTypeStandard:Groups'],
+ ('dsAttrTypeStandard:RealName', 'morgen', True, 'starts-with') : [dsattributes.kDSStdRecordTypeGroups],
+ ('dsAttrTypeStandard:EMailAddress', 'morgen', True, 'contains') : [dsattributes.kDSStdRecordTypeGroups],
}
)
self.assertEquals(
@@ -90,15 +84,22 @@
OpenDirectoryService._ODFields
),
{
+ ('dsAttrTypeStandard:RealName', 'morgen', True, 'starts-with') : [dsattributes.kDSStdRecordTypeUsers, dsattributes.kDSStdRecordTypeGroups],
+ ('dsAttrTypeStandard:EMailAddress', 'morgen', True, 'contains') : [dsattributes.kDSStdRecordTypeUsers, dsattributes.kDSStdRecordTypeGroups],
+ ('dsAttrTypeStandard:FirstName', 'morgen', True, 'starts-with') : [dsattributes.kDSStdRecordTypeUsers],
+ ('dsAttrTypeStandard:LastName', 'morgen', True, 'starts-with') : [dsattributes.kDSStdRecordTypeUsers],
+ }
+ )
+ self.assertEquals(
+ buildQueries(
+ [
+ dsattributes.kDSStdRecordTypeGroups,
+ ],
(
- ('dsAttrTypeStandard:RealName', 'morgen', True, 'starts-with'),
- ('dsAttrTypeStandard:EMailAddress', 'morgen', True, 'contains')
- ): ['dsRecTypeStandard:Groups'],
- (
- ('dsAttrTypeStandard:FirstName', 'morgen', True, 'starts-with'),
- ('dsAttrTypeStandard:LastName', 'morgen', True, 'starts-with'),
- ('dsAttrTypeStandard:RealName', 'morgen', True, 'starts-with'),
- ('dsAttrTypeStandard:EMailAddress', 'morgen', True, 'contains')
- ): ['dsRecTypeStandard:Users']
+ ("firstName", "morgen", True, "starts-with"),
+ ),
+ OpenDirectoryService._ODFields
+ ),
+ {
}
)
Modified: CalendarServer/branches/new-store/twistedcaldav/directory/test/test_opendirectory.py
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/directory/test/test_opendirectory.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/directory/test/test_opendirectory.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -194,7 +194,7 @@
self.assertFalse(self.service().recordWithGUID("1234567890"))
def test_queryDirectoryLocalUsers(self):
- """ Test for lookup on local users, ensuring they don't get
+ """ Test for lookup on local users, ensuring they do get
faulted in """
def lookupMethod(obj, attr, value, matchType, casei, recordType, attributes, count=0):
@@ -222,7 +222,7 @@
recordTypes = [DirectoryService.recordType_users, DirectoryService.recordType_groups]
self.service().queryDirectory(recordTypes, self.service().INDEX_TYPE_GUID, "1234567890", lookupMethod=lookupMethod)
self.service().queryDirectory(recordTypes, self.service().INDEX_TYPE_GUID, "987654321", lookupMethod=lookupMethod)
- self.assertFalse(self.service().recordWithGUID("1234567890"))
+ self.assertTrue(self.service().recordWithGUID("1234567890"))
self.assertTrue(self.service().recordWithGUID("987654321"))
def test_queryDirectoryEmailAddresses(self):
@@ -243,35 +243,110 @@
@inlineCallbacks
def test_recordsMatchingFields(self):
- def lookupMethod(obj, compound, casei, recordType, attributes, count=0):
- if dsattributes.kDSStdRecordTypeUsers in recordType:
- return [
- ('morgen',
+
+ def lookupMethod(obj, attribute, value, matchType, caseless,
+ recordTypes, attributes):
+
+ data = {
+ dsattributes.kDSStdRecordTypeUsers : (
{
- 'dsAttrTypeStandard:RecordType': 'dsRecTypeStandard:Users',
- 'dsAttrTypeStandard:AppleMetaNodeLocation': '/LDAPv3/127.0.0.1',
- 'dsAttrTypeStandard:RecordName': ['morgen', 'Morgen Sagen'],
- 'dsAttrTypeStandard:FirstName': 'Morgen',
- 'dsAttrTypeStandard:GeneratedUID': '83479230-821E-11DE-B6B0-DBB02C6D659D',
- 'dsAttrTypeStandard:LastName': 'Sagen',
- 'dsAttrTypeStandard:EMailAddress': 'morgen at example.com',
- 'dsAttrTypeStandard:RealName': 'Morgen Sagen'
- }),
- ('morehouse',
+ dsattributes.kDS1AttrDistinguishedName : "Morgen Sagen",
+ dsattributes.kDSNAttrRecordName : "morgen",
+ dsattributes.kDS1AttrFirstName : "Morgen",
+ dsattributes.kDS1AttrLastName : "Sagen",
+ dsattributes.kDSNAttrEMailAddress : "morgen at example.com",
+ dsattributes.kDSNAttrMetaNodeLocation : "/LDAPv3/127.0.0.1",
+ dsattributes.kDS1AttrGeneratedUID : "83479230-821E-11DE-B6B0-DBB02C6D659D",
+ dsattributes.kDSNAttrRecordType : dsattributes.kDSStdRecordTypeUsers,
+ },
{
- 'dsAttrTypeStandard:RecordType': 'dsRecTypeStandard:Users',
- 'dsAttrTypeStandard:AppleMetaNodeLocation': '/LDAPv3/127.0.0.1',
- 'dsAttrTypeStandard:RecordName': ['morehouse', 'Joe Morehouse'],
- 'dsAttrTypeStandard:FirstName': 'Joe',
- 'dsAttrTypeStandard:GeneratedUID': '98342930-90DC-11DE-A842-A29601FB13E8',
- 'dsAttrTypeStandard:LastName': 'Morehouse',
- 'dsAttrTypeStandard:EMailAddress': 'morehouse at example.com',
- 'dsAttrTypeStandard:RealName': 'Joe Morehouse'
- }),
- ]
- else:
- return []
+ dsattributes.kDS1AttrDistinguishedName : "Morgan Sagan",
+ dsattributes.kDSNAttrRecordName : "morgan",
+ dsattributes.kDS1AttrFirstName : "Morgan",
+ dsattributes.kDS1AttrLastName : "Sagan",
+ dsattributes.kDSNAttrEMailAddress : "morgan at example.com",
+ dsattributes.kDSNAttrMetaNodeLocation : "/LDAPv3/127.0.0.1",
+ dsattributes.kDS1AttrGeneratedUID : "93479230-821E-11DE-B6B0-DBB02C6D659D",
+ dsattributes.kDSNAttrRecordType : dsattributes.kDSStdRecordTypeUsers,
+ },
+ {
+ dsattributes.kDS1AttrDistinguishedName : "Shari Sagen",
+ dsattributes.kDSNAttrRecordName : "shari",
+ dsattributes.kDS1AttrFirstName : "Shari",
+ dsattributes.kDS1AttrLastName : "Sagen",
+ dsattributes.kDSNAttrEMailAddress : "shari at example.com",
+ dsattributes.kDSNAttrMetaNodeLocation : "/LDAPv3/127.0.0.1",
+ dsattributes.kDS1AttrGeneratedUID : "A3479230-821E-11DE-B6B0-DBB02C6D659D",
+ dsattributes.kDSNAttrRecordType : dsattributes.kDSStdRecordTypeUsers,
+ },
+ {
+ dsattributes.kDS1AttrDistinguishedName : "Local Morgen",
+ dsattributes.kDSNAttrRecordName : "localmorgen",
+ dsattributes.kDS1AttrFirstName : "Local",
+ dsattributes.kDS1AttrLastName : "Morgen",
+ dsattributes.kDSNAttrEMailAddress : "localmorgen at example.com",
+ dsattributes.kDSNAttrMetaNodeLocation : "/Local/Default",
+ dsattributes.kDS1AttrGeneratedUID : "B3479230-821E-11DE-B6B0-DBB02C6D659D",
+ dsattributes.kDSNAttrRecordType : dsattributes.kDSStdRecordTypeUsers,
+ },
+ ),
+ dsattributes.kDSStdRecordTypeGroups : (
+ {
+ dsattributes.kDS1AttrDistinguishedName : "Test Group",
+ dsattributes.kDSNAttrRecordName : "testgroup",
+ dsattributes.kDS1AttrFirstName : None,
+ dsattributes.kDS1AttrLastName : None,
+ dsattributes.kDSNAttrEMailAddress : None,
+ dsattributes.kDSNAttrMetaNodeLocation : "/LDAPv3/127.0.0.1",
+ dsattributes.kDS1AttrGeneratedUID : "C3479230-821E-11DE-B6B0-DBB02C6D659D",
+ dsattributes.kDSNAttrRecordType : dsattributes.kDSStdRecordTypeGroups,
+ },
+ {
+ dsattributes.kDS1AttrDistinguishedName : "Morgen's Group",
+ dsattributes.kDSNAttrRecordName : "morgensgroup",
+ dsattributes.kDS1AttrFirstName : None,
+ dsattributes.kDS1AttrLastName : None,
+ dsattributes.kDSNAttrEMailAddress : None,
+ dsattributes.kDSNAttrMetaNodeLocation : "/LDAPv3/127.0.0.1",
+ dsattributes.kDS1AttrGeneratedUID : "D3479230-821E-11DE-B6B0-DBB02C6D659D",
+ dsattributes.kDSNAttrRecordType : dsattributes.kDSStdRecordTypeGroups,
+ },
+ ),
+ }
+ def attributeMatches(fieldValue, value, caseless, matchType):
+ if fieldValue is None:
+ return False
+ if caseless:
+ fieldValue = fieldValue.lower()
+ value = value.lower()
+ if matchType == dsattributes.eDSStartsWith:
+ if fieldValue.startswith(value):
+ return True
+ elif matchType == dsattributes.eDSContains:
+ try:
+ fieldValue.index(value)
+ return True
+ except ValueError:
+ pass
+ else: # exact
+ if fieldValue == value:
+ return True
+ return False
+
+ results = []
+ for recordType in recordTypes:
+ for row in data[recordType]:
+ if attributeMatches(row[attribute], value, caseless,
+ matchType):
+ results.append((row[dsattributes.kDSNAttrRecordName], row))
+
+ return results
+
+
+ #
+ # OR
+ #
fields = [
('fullName', 'mor', True, u'starts-with'),
('emailAddresses', 'mor', True, u'starts-with'),
@@ -279,14 +354,73 @@
('lastName', 'mor', True, u'starts-with')
]
+ # any record type
results = (yield self.service().recordsMatchingFields(fields,
lookupMethod=lookupMethod))
results = list(results)
- self.assertEquals(len(results), 2)
+ self.assertEquals(len(results), 4)
for record in results:
self.assertTrue(isinstance(record, OpenDirectoryRecord))
+ # just users
+ results = (yield self.service().recordsMatchingFields(fields,
+ recordType="users",
+ lookupMethod=lookupMethod))
+ results = list(results)
+ self.assertEquals(len(results), 3)
+ # just groups
+ results = (yield self.service().recordsMatchingFields(fields,
+ recordType="groups",
+ lookupMethod=lookupMethod))
+ results = list(results)
+ self.assertEquals(len(results), 1)
+
+
+ #
+ # AND
+ #
+ fields = [
+ ('firstName', 'morgen', True, u'equals'),
+ ('lastName', 'age', True, u'contains')
+ ]
+ results = (yield self.service().recordsMatchingFields(fields,
+ operand="and", lookupMethod=lookupMethod))
+ results = list(results)
+ self.assertEquals(len(results), 1)
+
+ #
+ # case sensitivity
+ #
+ fields = [
+ ('firstName', 'morgen', False, u'equals'),
+ ]
+ results = (yield self.service().recordsMatchingFields(fields,
+ lookupMethod=lookupMethod))
+ results = list(results)
+ self.assertEquals(len(results), 0)
+
+ fields = [
+ ('firstName', 'morgen', True, u'equals'),
+ ]
+ results = (yield self.service().recordsMatchingFields(fields,
+ lookupMethod=lookupMethod))
+ results = list(results)
+ self.assertEquals(len(results), 1)
+
+ #
+ # no matches
+ #
+ fields = [
+ ('firstName', 'xyzzy', True, u'starts-with'),
+ ('lastName', 'plugh', True, u'contains')
+ ]
+ results = (yield self.service().recordsMatchingFields(fields,
+ operand="and", lookupMethod=lookupMethod))
+ results = list(results)
+ self.assertEquals(len(results), 0)
+
+
class OpenDirectorySubset (OpenDirectory):
"""
Test the recordTypes subset feature of Apple OpenDirectoryService.
Modified: CalendarServer/branches/new-store/twistedcaldav/scheduling/scheduler.py
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/scheduling/scheduler.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/scheduling/scheduler.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -397,9 +397,9 @@
for ctr, recipient in enumerate(self.recipients):
# Check for freebusy limit
- if freebusy and ctr >= config.Scheduling.Options.LimitFreeBusyAttendees:
+ if freebusy and config.Scheduling.Options.LimitFreeBusyAttendees and ctr >= config.Scheduling.Options.LimitFreeBusyAttendees:
err = HTTPError(ErrorResponse(responsecode.NOT_FOUND, (caldav_namespace, "recipient-limit")))
- responses.add(recipient.cuaddr, Failure(exc_value=err), reqstatus=iTIPRequestStatus.NO_USER_SUPPORT)
+ responses.add(recipient.cuaddr, Failure(exc_value=err), reqstatus=iTIPRequestStatus.SERVICE_UNAVAILABLE)
continue
if self.fakeTheResult:
@@ -641,7 +641,7 @@
if self.calendar.propertyValue("METHOD") in ("COUNTER", "DECLINECOUNTER"):
return
- # Anything else is not allowed. However, for compatIbility we will optionally
+ # Anything else is not allowed. However, for compatibility we will optionally
# return a success response for all attendees.
if config.Scheduling.CalDAV.OldDraftCompatibility:
self.fakeTheResult = True
Modified: CalendarServer/branches/new-store/twistedcaldav/stdconfig.py
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/stdconfig.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/stdconfig.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -98,7 +98,7 @@
"queryPeopleRecords": True,
"peopleNode": "/Search/Contacts",
"queryUserRecords": True,
- "userNode": "/Search",
+ "userNode": "/Search/Contacts",
"maxDSQueryRecords":150,
"queryDSLocal": False,
"ignoreSystemRecords": True,
@@ -945,25 +945,30 @@
partitions.clear()
def _updateCompliance(configDict):
- if configDict.Scheduling.CalDAV.OldDraftCompatibility:
- compliance = caldavxml.caldav_full_compliance
+
+
+ if configDict.EnableCalDAV:
+ if configDict.Scheduling.CalDAV.OldDraftCompatibility:
+ compliance = caldavxml.caldav_full_compliance
+ else:
+ compliance = caldavxml.caldav_implicit_compliance
+ if configDict.EnableProxyPrincipals:
+ compliance += customxml.calendarserver_proxy_compliance
+ if configDict.EnablePrivateEvents:
+ compliance += customxml.calendarserver_private_events_compliance
+ if configDict.Scheduling.CalDAV.EnablePrivateComments:
+ compliance += customxml.calendarserver_private_comments_compliance
+ if config.Sharing.Enabled:
+ compliance += customxml.calendarserver_sharing_compliance
else:
- compliance = caldavxml.caldav_implicit_compliance
+ compliance = ()
- if configDict.EnableProxyPrincipals:
- compliance += customxml.calendarserver_proxy_compliance
- if configDict.EnablePrivateEvents:
- compliance += customxml.calendarserver_private_events_compliance
- if configDict.Scheduling.CalDAV.EnablePrivateComments:
- compliance += customxml.calendarserver_private_comments_compliance
if configDict.EnableCardDAV:
compliance += carddavxml.carddav_compliance
+ # Principal property search is always enabled
compliance += customxml.calendarserver_principal_property_search_compliance
- if config.Sharing.Enabled:
- compliance += customxml.calendarserver_sharing_compliance
-
configDict.CalDAVComplianceClasses = compliance
Modified: CalendarServer/branches/new-store/twistedcaldav/test/test_options.py
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/test/test_options.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/test/test_options.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -109,3 +109,37 @@
request = SimpleRequest(self.site, "OPTIONS", "/")
return self.send(request, do_test)
+
+ def test_dav_header_caldav_disabled(self):
+ """
+ DAV header does not advertise CalDAV
+ """
+ def do_test(response):
+ response = IResponse(response)
+
+ dav = response.headers.getHeader("dav")
+ if not dav: self.fail("no DAV header: %s" % (response.headers,))
+ self.assertIn("1", dav, "no DAV level 1 header")
+ self.assertNotIn("calendar-access", dav, "DAV calendar-access header")
+ self.patch(config, "EnableCalDAV", False)
+ request = SimpleRequest(self.site, "OPTIONS", "/")
+
+ return self.send(request, do_test)
+
+ def test_dav_header_carddav_disabled(self):
+ """
+ DAV header does not advertise CardDAV
+ """
+ def do_test(response):
+ response = IResponse(response)
+
+ dav = response.headers.getHeader("dav")
+ if not dav: self.fail("no DAV header: %s" % (response.headers,))
+ self.assertIn("1", dav, "no DAV level 1 header")
+ self.assertNotIn("addressbook", dav, "DAV addressbook header")
+
+ self.patch(config, "EnableCardDAV", False)
+ request = SimpleRequest(self.site, "OPTIONS", "/")
+
+ return self.send(request, do_test)
+
Modified: CalendarServer/branches/new-store/twistedcaldav/upgrade.py
===================================================================
--- CalendarServer/branches/new-store/twistedcaldav/upgrade.py 2010-07-14 05:33:29 UTC (rev 5890)
+++ CalendarServer/branches/new-store/twistedcaldav/upgrade.py 2010-07-14 07:40:35 UTC (rev 5891)
@@ -301,6 +301,11 @@
if os.path.exists(dbPath):
os.chown(dbPath, uid, gid)
+ journalPath = "%s-journal" % (dbPath,)
+ if os.path.exists(journalPath):
+ os.chown(journalPath, uid, gid)
+
+
def createTaskServiceDirectory(config, uid, gid):
taskDir = os.path.join(config.DataRoot, "tasks")
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.macosforge.org/pipermail/calendarserver-changes/attachments/20100714/6fe83571/attachment-0001.html>
More information about the calendarserver-changes
mailing list