[CalendarServer-changes] [11200] CalendarServer/trunk
source_changes at macosforge.org
source_changes at macosforge.org
Thu May 16 08:40:19 PDT 2013
Revision: 11200
http://trac.calendarserver.org/changeset/11200
Author: cdaboo at apple.com
Date: 2013-05-16 08:40:19 -0700 (Thu, 16 May 2013)
Log Message:
-----------
Just whitespace.
Modified Paths:
--------------
CalendarServer/trunk/contrib/performance/_event_change.py
CalendarServer/trunk/contrib/performance/_event_create.py
CalendarServer/trunk/contrib/performance/benchlib.py
CalendarServer/trunk/contrib/performance/benchmark.py
CalendarServer/trunk/contrib/performance/benchmarks/bounded_recurrence.py
CalendarServer/trunk/contrib/performance/benchmarks/bounded_recurrence_autoaccept.py
CalendarServer/trunk/contrib/performance/benchmarks/event_autoaccept.py
CalendarServer/trunk/contrib/performance/benchmarks/event_change_date.py
CalendarServer/trunk/contrib/performance/benchmarks/event_change_summary.py
CalendarServer/trunk/contrib/performance/benchmarks/event_delete.py
CalendarServer/trunk/contrib/performance/benchmarks/event_delete_attendee.py
CalendarServer/trunk/contrib/performance/benchmarks/find_events.py
CalendarServer/trunk/contrib/performance/benchmarks/unbounded_recurrence.py
CalendarServer/trunk/contrib/performance/benchmarks/unbounded_recurrence_autoaccept.py
CalendarServer/trunk/contrib/performance/benchmarks/vfreebusy.py
CalendarServer/trunk/contrib/performance/compare.py
CalendarServer/trunk/contrib/performance/eventkitframework.py
CalendarServer/trunk/contrib/performance/graph.py
CalendarServer/trunk/contrib/performance/httpauth.py
CalendarServer/trunk/contrib/performance/loadtest/ampsim.py
CalendarServer/trunk/contrib/performance/loadtest/ical.py
CalendarServer/trunk/contrib/performance/loadtest/logger.py
CalendarServer/trunk/contrib/performance/loadtest/population.py
CalendarServer/trunk/contrib/performance/loadtest/profiles.py
CalendarServer/trunk/contrib/performance/loadtest/sim.py
CalendarServer/trunk/contrib/performance/loadtest/subscribe.py
CalendarServer/trunk/contrib/performance/loadtest/test_ical.py
CalendarServer/trunk/contrib/performance/loadtest/test_population.py
CalendarServer/trunk/contrib/performance/loadtest/test_profiles.py
CalendarServer/trunk/contrib/performance/loadtest/test_sim.py
CalendarServer/trunk/contrib/performance/loadtest/test_trafficlogger.py
CalendarServer/trunk/contrib/performance/loadtest/test_webadmin.py
CalendarServer/trunk/contrib/performance/loadtest/trafficlogger.py
CalendarServer/trunk/contrib/performance/loadtest/webadmin.py
CalendarServer/trunk/contrib/performance/massupload.py
CalendarServer/trunk/contrib/performance/report.py
CalendarServer/trunk/contrib/performance/setbackend.py
CalendarServer/trunk/contrib/performance/sqlusage/requests/httpTests.py
CalendarServer/trunk/contrib/performance/sqlusage/requests/invite.py
CalendarServer/trunk/contrib/performance/sqlusage/requests/multiget.py
CalendarServer/trunk/contrib/performance/sqlusage/requests/propfind.py
CalendarServer/trunk/contrib/performance/sqlusage/requests/put.py
CalendarServer/trunk/contrib/performance/sqlusage/requests/query.py
CalendarServer/trunk/contrib/performance/sqlusage/requests/sync.py
CalendarServer/trunk/contrib/performance/sqlusage/sqlusage.py
CalendarServer/trunk/contrib/performance/sqlwatch.py
CalendarServer/trunk/contrib/performance/stackedbar.py
CalendarServer/trunk/contrib/performance/stats.py
CalendarServer/trunk/contrib/performance/test_benchmark.py
CalendarServer/trunk/contrib/performance/test_httpauth.py
CalendarServer/trunk/contrib/performance/test_stats.py
CalendarServer/trunk/contrib/performance/upload.py
CalendarServer/trunk/contrib/tools/anonymous_log.py
CalendarServer/trunk/contrib/tools/dtraceanalyze.py
CalendarServer/trunk/contrib/tools/fakecalendardata.py
CalendarServer/trunk/contrib/tools/monitoranalysis.py
CalendarServer/trunk/contrib/tools/monitorsplit.py
CalendarServer/trunk/contrib/tools/netstatus.py
CalendarServer/trunk/contrib/tools/pg_stats_analysis.py
CalendarServer/trunk/contrib/tools/protocolanalysis.py
CalendarServer/trunk/contrib/tools/sqldata_from_path.py
CalendarServer/trunk/contrib/tools/tables.py
CalendarServer/trunk/contrib/tools/test_protocolanalysis.py
CalendarServer/trunk/twistedcaldav/client/__init__.py
CalendarServer/trunk/twistedcaldav/client/pool.py
CalendarServer/trunk/twistedcaldav/client/reverseproxy.py
CalendarServer/trunk/twistedcaldav/client/test/test_reverseproxy.py
CalendarServer/trunk/twistedcaldav/datafilters/addressdata.py
CalendarServer/trunk/twistedcaldav/datafilters/calendardata.py
CalendarServer/trunk/twistedcaldav/datafilters/filter.py
CalendarServer/trunk/twistedcaldav/datafilters/privateevents.py
CalendarServer/trunk/twistedcaldav/datafilters/test/test_calendardata.py
CalendarServer/trunk/twistedcaldav/datafilters/test/test_privateevents.py
CalendarServer/trunk/twistedcaldav/method/get.py
CalendarServer/trunk/twistedcaldav/method/mkcalendar.py
CalendarServer/trunk/twistedcaldav/method/report.py
CalendarServer/trunk/twistedcaldav/method/report_addressbook_query.py
CalendarServer/trunk/twistedcaldav/method/report_calendar_query.py
CalendarServer/trunk/twistedcaldav/method/report_freebusy.py
CalendarServer/trunk/twistedcaldav/method/report_multiget_common.py
CalendarServer/trunk/twistedcaldav/query/addressbookquery.py
CalendarServer/trunk/twistedcaldav/query/addressbookqueryfilter.py
CalendarServer/trunk/twistedcaldav/query/calendarquery.py
CalendarServer/trunk/twistedcaldav/query/calendarqueryfilter.py
CalendarServer/trunk/twistedcaldav/query/expression.py
CalendarServer/trunk/twistedcaldav/query/sqlgenerator.py
CalendarServer/trunk/twistedcaldav/query/test/test_addressbookquery.py
CalendarServer/trunk/twistedcaldav/query/test/test_calendarquery.py
CalendarServer/trunk/twistedcaldav/query/test/test_expression.py
CalendarServer/trunk/twistedcaldav/query/test/test_queryfilter.py
CalendarServer/trunk/twistedcaldav/test/data/makelargecalendars.py
CalendarServer/trunk/twistedcaldav/test/data/makelargefbset.py
CalendarServer/trunk/twistedcaldav/test/test_accounting.py
CalendarServer/trunk/twistedcaldav/test/test_cache.py
CalendarServer/trunk/twistedcaldav/test/test_caldavxml.py
CalendarServer/trunk/twistedcaldav/test/test_config.py
CalendarServer/trunk/twistedcaldav/test/test_customxml.py
CalendarServer/trunk/twistedcaldav/test/test_database.py
CalendarServer/trunk/twistedcaldav/test/test_dateops.py
CalendarServer/trunk/twistedcaldav/test/test_freebusyquery.py
CalendarServer/trunk/twistedcaldav/test/test_kerberos.py
CalendarServer/trunk/twistedcaldav/test/test_localization.py
CalendarServer/trunk/twistedcaldav/test/test_memcachelock.py
CalendarServer/trunk/twistedcaldav/test/test_memcachepool.py
CalendarServer/trunk/twistedcaldav/test/test_memcacheprops.py
CalendarServer/trunk/twistedcaldav/test/test_memcacher.py
CalendarServer/trunk/twistedcaldav/test/test_options.py
CalendarServer/trunk/twistedcaldav/test/test_sql.py
CalendarServer/trunk/twistedcaldav/test/test_xml.py
CalendarServer/trunk/twistedcaldav/test/test_xmlutil.py
Modified: CalendarServer/trunk/contrib/performance/_event_change.py
===================================================================
--- CalendarServer/trunk/contrib/performance/_event_change.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/_event_change.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -85,7 +85,6 @@
for (event, url) in events:
yield agent.request('PUT', url, headers, StringProducer(event))
-
# Sample changing the event according to the replacer.
samples = yield sample(
dtrace, samples,
Modified: CalendarServer/trunk/contrib/performance/_event_create.py
===================================================================
--- CalendarServer/trunk/contrib/performance/_event_create.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/_event_create.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -93,15 +93,18 @@
return ''.join(filter(str.isalnum, d.isoformat()))
+
def makeOrganizer(sequence):
return organizer % {'SEQUENCE': sequence}
+
def makeAttendees(count):
return [
attendee % {'SEQUENCE': n} for n in range(2, count + 2)]
+
def makeVCalendar(uid, start, end, recurrence, organizerSequence, attendees):
if recurrence is None:
rrule = ""
@@ -121,12 +124,13 @@
return cal.replace("\n", "\r\n")
+
def makeEvent(i, organizerSequence, attendeeCount):
base = datetime(2010, 7, 30, 11, 15, 00)
interval = timedelta(0, 5)
duration = timedelta(0, 3)
return makeVCalendar(
- uuid4(),
+ uuid4(),
base + i * interval,
base + i * interval + duration,
None,
@@ -134,6 +138,7 @@
makeAttendees(attendeeCount))
+
@inlineCallbacks
def measure(calendar, organizerSequence, events, host, port, dtrace, samples):
"""
@@ -161,7 +166,7 @@
# Sample it a bunch of times
samples = yield sample(
- dtrace, samples,
+ dtrace, samples,
agent, ((method, uri % (i,), headers, StringProducer(body))
for (i, body)
in events).next,
Modified: CalendarServer/trunk/contrib/performance/benchlib.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchlib.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchlib.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -102,10 +102,9 @@
@param deferreds: A sequence of Deferreds to wait on.
"""
-
-
+
@inlineCallbacks
def sample(dtrace, sampleTime, agent, paramgen, responseCode, concurrency=1):
urlopen = Duration('HTTP')
@@ -160,12 +159,12 @@
start = time()
requests = []
- for i in range(concurrency):
+ for _ignore_i in range(concurrency):
requests.append(once())
while requests:
try:
- result, index = yield DeferredList(requests, fireOnOneCallback=True, fireOnOneErrback=True)
+ _ignore_result, index = yield DeferredList(requests, fireOnOneCallback=True, fireOnOneErrback=True)
except FirstError, e:
e.subFailure.raiseException()
@@ -187,7 +186,7 @@
# will demolish inlineCallbacks.
if len(requests) == concurrency - 1:
msg('exhausted parameter generator')
-
+
msg('stopping dtrace')
leftOver = yield dtrace.stop()
msg('dtrace stopped')
@@ -197,6 +196,7 @@
returnValue(data)
+
def select(statistics, benchmark, parameter, statistic):
for stat, samples in statistics[benchmark][int(parameter)].iteritems():
if stat.name == statistic:
@@ -204,6 +204,7 @@
raise ValueError("Unknown statistic %r" % (statistic,))
+
def load_stats(statfiles):
data = []
for fname in statfiles:
Modified: CalendarServer/trunk/contrib/performance/benchmark.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmark.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmark.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -14,26 +14,23 @@
# limitations under the License.
##
from __future__ import print_function
-
-import sys, os, plistlib
+from contrib.performance.stats import SQLDuration, Bytes
+from datetime import datetime
from os.path import dirname
-
+from pickle import dump
from signal import SIGINT
-from pickle import dump
-
-from datetime import datetime
-
-from twisted.python.filepath import FilePath
-from twisted.python.usage import UsageError, Options, portCoerce
+from twisted.internet import reactor
+from twisted.internet.defer import Deferred, inlineCallbacks, gatherResults
from twisted.internet.protocol import ProcessProtocol
from twisted.protocols.basic import LineReceiver
-from twisted.internet.defer import (
- Deferred, inlineCallbacks, gatherResults)
-from twisted.internet import reactor
+from twisted.python.filepath import FilePath
from twisted.python.log import msg
from twisted.python.modules import getModule
+from twisted.python.usage import UsageError, Options, portCoerce
+import sys
+import os
+import plistlib
-from contrib.performance.stats import SQLDuration, Bytes
class DTraceBug(Exception):
@@ -55,7 +52,7 @@
self._out = ''
self._err = ''
-
+
def mark(self):
return self.parser.mark()
@@ -88,10 +85,12 @@
self.started.errback(RuntimeError("Exited too soon: %r/%r" % (self._out, self._err)))
+
def masterPID(directory):
return int(directory.child('caldavd.pid').getContent())
+
def instancePIDs(directory):
pids = []
for pidfile in directory.children():
@@ -102,6 +101,7 @@
return pids
+
class _DTraceParser(LineReceiver):
delimiter = '\n\1'
@@ -185,6 +185,7 @@
self.collector._write.append(int(rest))
+
class DTraceCollector(object):
def __init__(self, script, pids):
self._dScript = script
@@ -299,7 +300,7 @@
def mark(self):
marks = []
- for (process, protocol) in self.dtraces.itervalues():
+ for (_ignore_process, protocol) in self.dtraces.itervalues():
marks.append(protocol.mark())
d = gatherResults(marks)
d.addCallback(lambda ign: self.stats())
@@ -309,11 +310,10 @@
except OSError:
pass
return d
-
def stop(self):
- for (process, protocol) in self.dtraces.itervalues():
+ for (process, _ignore_protocol) in self.dtraces.itervalues():
process.signalProcess(SIGINT)
d = gatherResults(self.finished)
d.addCallback(lambda ign: self.stats())
@@ -347,6 +347,7 @@
fObj.close()
+
def logsCoerce(directory):
path = FilePath(directory)
if not path.isdir():
@@ -354,6 +355,7 @@
return path
+
class BenchmarkOptions(Options):
optParameters = [
('host', 'h', 'localhost',
@@ -396,9 +398,9 @@
if index >= count:
raise UsageError("host-index must be less than hosts-count")
benchmarks = [
- benchmark
- for (i, benchmark)
- in enumerate(benchmarks)
+ benchmark
+ for (i, benchmark)
+ in enumerate(benchmarks)
if i % self['hosts-count'] == self['host-index']]
return benchmarks
@@ -440,6 +442,7 @@
raise ValueError("Unknown benchmark: %r" % (name,))
+
def main():
from twisted.python.log import startLogging, err
Modified: CalendarServer/trunk/contrib/performance/benchmarks/bounded_recurrence.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/bounded_recurrence.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/bounded_recurrence.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -40,6 +40,7 @@
makeAttendees(attendeeCount))
+
def measure(host, port, dtrace, attendeeCount, samples):
calendar = "bounded-recurrence"
organizerSequence = 1
Modified: CalendarServer/trunk/contrib/performance/benchmarks/bounded_recurrence_autoaccept.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/bounded_recurrence_autoaccept.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/bounded_recurrence_autoaccept.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -43,6 +43,7 @@
uuid4(), start, end, rrule, organizerSequence, attendees)
+
def measure(host, port, dtrace, attendeeCount, samples):
calendar = "bounded-recurrence-autoaccept"
organizerSequence = 1
Modified: CalendarServer/trunk/contrib/performance/benchmarks/event_autoaccept.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/event_autoaccept.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/event_autoaccept.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -36,7 +36,7 @@
'ATTENDEE;CN="Resource 01";CUTYPE=INDIVIDUAL;PARTSTAT=NEEDS-ACTION;RSVP=T\n'
' RUE;SCHEDULE-STATUS="1.2":urn:uuid:resource01\n')
return makeVCalendar(
- uuid4(),
+ uuid4(),
base + i * interval,
base + i * interval + duration,
None,
@@ -44,6 +44,7 @@
attendees)
+
def measure(host, port, dtrace, attendeeCount, samples):
calendar = "event-autoaccept-creation-benchmark"
organizerSequence = 1
Modified: CalendarServer/trunk/contrib/performance/benchmarks/event_change_date.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/event_change_date.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/event_change_date.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -38,6 +38,7 @@
return event[:colon + 1] + new.strftime(TIME_FORMAT) + event[eol:]
+
def replaceTimestamp(event, i):
offset = datetime.timedelta(hours=i)
return _increment(
@@ -45,6 +46,7 @@
'DTEND', offset)
+
def measure(host, port, dtrace, attendeeCount, samples):
return _event_change.measure(
host, port, dtrace, attendeeCount, samples, "change-date",
Modified: CalendarServer/trunk/contrib/performance/benchmarks/event_change_summary.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/event_change_summary.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/event_change_summary.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -21,6 +21,7 @@
return event.replace(SUMMARY, 'Replacement summary %d' % (i,))
+
def measure(host, port, dtrace, attendeeCount, samples):
return _measure(
host, port, dtrace, attendeeCount, samples, "change-summary",
Modified: CalendarServer/trunk/contrib/performance/benchmarks/event_delete.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/event_delete.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/event_delete.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -74,4 +74,3 @@
agent, (('DELETE', url) for url in urls).next,
NO_CONTENT)
returnValue(samples)
-
Modified: CalendarServer/trunk/contrib/performance/benchmarks/event_delete_attendee.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/event_delete_attendee.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/event_delete_attendee.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -21,7 +21,7 @@
"""
Add C{i} new attendees to the given event.
"""
- for n in range(attendeeCount):
+ for _ignore_n in range(attendeeCount):
# Find the beginning of an ATTENDEE line
attendee = event.find('ATTENDEE')
# And the end of it
Modified: CalendarServer/trunk/contrib/performance/benchmarks/find_events.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/find_events.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/find_events.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -52,9 +52,10 @@
StringProducer(event))
worker = worker()
return gatherResults([
- cooperate(worker).whenDone() for i in range(3)])
+ cooperate(worker).whenDone() for _ignore_i in range(3)])
+
@inlineCallbacks
def measure(host, port, dtrace, numEvents, samples):
user = password = "user11"
Modified: CalendarServer/trunk/contrib/performance/benchmarks/unbounded_recurrence.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/unbounded_recurrence.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/unbounded_recurrence.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -38,6 +38,7 @@
makeAttendees(attendeeCount))
+
def measure(host, port, dtrace, attendeeCount, samples):
calendar = "unbounded-recurrence"
organizerSequence = 1
@@ -48,4 +49,3 @@
return _measure(
calendar, organizerSequence, events,
host, port, dtrace, samples)
-
Modified: CalendarServer/trunk/contrib/performance/benchmarks/unbounded_recurrence_autoaccept.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/unbounded_recurrence_autoaccept.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/unbounded_recurrence_autoaccept.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -41,6 +41,7 @@
uuid4(), start, end, "RRULE:FREQ=WEEKLY", organizerSequence, attendees)
+
def measure(host, port, dtrace, attendeeCount, samples):
calendar = "unbounded-recurrence-autoaccept"
organizerSequence = 1
@@ -51,4 +52,3 @@
return _measure(
calendar, organizerSequence, events,
host, port, dtrace, samples)
-
Modified: CalendarServer/trunk/contrib/performance/benchmarks/vfreebusy.py
===================================================================
--- CalendarServer/trunk/contrib/performance/benchmarks/vfreebusy.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/benchmarks/vfreebusy.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -82,11 +82,13 @@
return ''.join(filter(str.isalnum, d.isoformat()))
+
def makeEvent(i):
# Backwards compat interface, don't delete it for a little while.
return makeEventNear(datetime(2010, 7, 30, 11, 15, 00), i)
+
def makeEventNear(base, i):
s = """\
BEGIN:VEVENT
@@ -112,10 +114,13 @@
}
return data.replace("\n", "\r\n")
+
+
def makeEvents(base, n):
return [makeEventNear(base, i) for i in range(n)]
+
@inlineCallbacks
def measure(host, port, dtrace, events, samples):
user = password = "user01"
@@ -146,7 +151,7 @@
"content-type": ["text/calendar"],
"originator": ["mailto:%s at example.com" % (user,)],
"recipient": ["urn:uuid:%s, urn:uuid:user02" % (user,)]})
-
+
vfb = VFREEBUSY % {
"attendees": "".join([
"ATTENDEE:urn:uuid:%s\n" % (user,),
@@ -161,4 +166,3 @@
agent, lambda: (method, uri, headers, body),
OK)
returnValue(samples)
-
Modified: CalendarServer/trunk/contrib/performance/compare.py
===================================================================
--- CalendarServer/trunk/contrib/performance/compare.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/compare.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -44,6 +44,7 @@
return (t, p)
+
def trim(sequence, amount):
sequence.sort()
n = len(sequence)
@@ -57,8 +58,9 @@
return sequence
+
def main():
- [(stat, first), (stat, second)] = load_stats(sys.argv[1:])
+ [(_ignore_stat, first), (_ignore_stat, second)] = load_stats(sys.argv[1:])
# Attempt to increase robustness by dropping the outlying 10% of values.
first = trim(first, 0.1)
Modified: CalendarServer/trunk/contrib/performance/eventkitframework.py
===================================================================
--- CalendarServer/trunk/contrib/performance/eventkitframework.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/eventkitframework.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -5,4 +5,3 @@
frameworkPath=_objc.pathForFramework(
"/System/Library/Frameworks/EventKit.framework"),
globals=globals())
-
Modified: CalendarServer/trunk/contrib/performance/graph.py
===================================================================
--- CalendarServer/trunk/contrib/performance/graph.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/graph.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -25,7 +25,7 @@
fig = pyplot.figure()
ax = fig.add_subplot(111)
- data = [samples for (stat, samples) in load_stats(sys.argv[1:])]
+ data = [samples for (_ignore_stat, samples) in load_stats(sys.argv[1:])]
bars = []
color = iter('rgbcmy').next
Modified: CalendarServer/trunk/contrib/performance/httpauth.py
===================================================================
--- CalendarServer/trunk/contrib/performance/httpauth.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/httpauth.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -13,16 +13,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
##
-from __future__ import print_function
-import urlparse, urllib2
-
+from __future__ import print_function
+from caldavclientlibrary.protocol.http.authentication.digest import Digest
from twisted.python.log import msg
-from twisted.web.http_headers import Headers
from twisted.web.http import UNAUTHORIZED
+from twisted.web.http_headers import Headers
+import urlparse
+import urllib2
-from caldavclientlibrary.protocol.http.authentication.digest import Digest
-
class BasicChallenge(object):
def __init__(self, realm):
# The sim has no real reason to differentiate between credentials by
@@ -74,7 +73,7 @@
BigSigh.url = uri
digest.addHeaders(authorization, BigSigh())
- return {'authorization': [value for (name, value) in authorization]}
+ return {'authorization': [value for (_ignore_name, value) in authorization]}
Modified: CalendarServer/trunk/contrib/performance/loadtest/ampsim.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/ampsim.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/ampsim.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -56,7 +56,7 @@
from twext.enterprise.adbapi2 import Pickle
-from contrib.performance.loadtest.sim import _DirectoryRecord, LoadSimulator
+from contrib.performance.loadtest.sim import _DirectoryRecord, LoadSimulator
class Configure(Command):
"""
@@ -187,4 +187,3 @@
#self.output.write(pformat(event)+"\n")
msg(**event)
return {}
-
Modified: CalendarServer/trunk/contrib/performance/loadtest/ical.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/ical.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/ical.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -35,14 +35,14 @@
from twext.internet.adaptendpoint import connect
from twext.internet.gaiendpoint import GAIEndpoint
-from twisted.internet.defer import Deferred, inlineCallbacks, returnValue,\
+from twisted.internet.defer import Deferred, inlineCallbacks, returnValue, \
succeed
from twisted.internet.task import LoopingCall
from twisted.python.filepath import FilePath
from twisted.python.log import addObserver, err, msg
from twisted.python.util import FancyEqMixin
from twisted.web.client import Agent, ContentDecoderAgent, GzipDecoder
-from twisted.web.http import OK, MULTI_STATUS, CREATED, NO_CONTENT, PRECONDITION_FAILED, MOVED_PERMANENTLY,\
+from twisted.web.http import OK, MULTI_STATUS, CREATED, NO_CONTENT, PRECONDITION_FAILED, MOVED_PERMANENTLY, \
FORBIDDEN, FOUND
from twisted.web.http_headers import Headers
@@ -79,12 +79,14 @@
self.response = response
+
class MissingCalendarHome(Exception):
"""
Raised when the calendar home for a user is 404
"""
+
class XMPPPush(object, FancyEqMixin):
"""
This represents an XMPP PubSub location where push notifications for
@@ -131,11 +133,12 @@
else:
return None
+
def serialize(self):
"""
Create a dict of the data so we can serialize as JSON.
"""
-
+
result = {}
for attr in ("url", "etag", "scheduleTag", "uid",):
result[attr] = getattr(self, attr)
@@ -147,7 +150,7 @@
"""
Convert dict (deserialized from JSON) into an L{Event}.
"""
-
+
event = Event(serializeLocation, None, None)
for attr in ("url", "etag", "scheduleTag", "uid",):
setattr(event, attr, u2str(data[attr]))
@@ -194,6 +197,7 @@
os.remove(path)
+
class Calendar(object):
def __init__(self, resourceType, componentTypes, name, url, changeToken):
self.resourceType = resourceType
@@ -208,7 +212,7 @@
"""
Create a dict of the data so we can serialize as JSON.
"""
-
+
result = {}
for attr in ("resourceType", "name", "url", "changeToken"):
result[attr] = getattr(self, attr)
@@ -222,12 +226,12 @@
"""
Convert dict (deserialized from JSON) into an L{Calendar}.
"""
-
+
calendar = Calendar(None, None, None, None, None)
for attr in ("resourceType", "name", "url", "changeToken"):
setattr(calendar, attr, u2str(data[attr]))
calendar.componentTypes = set(map(u2str, data["componentTypes"]))
-
+
for event in data["events"]:
url = urljoin(calendar.url, event)
if url in events:
@@ -248,7 +252,7 @@
_events = None # Cache of events keyed by href
_calendars = None # Cache of calendars keyed by href
started = False # Whether or not startup() has been executed
- _client_type = None # Type of this client used in logging
+ _client_type = None # Type of this client used in logging
_client_id = None # Unique id for the client itself
@@ -308,6 +312,7 @@
raise NotImplementedError("%r does not implement changeEventAttendee" % (self.__class__,))
+
class _PubSubClientFactory(PubSubClientFactory):
"""
Factory for XMPP pubsub functionality.
@@ -316,12 +321,15 @@
PubSubClientFactory.__init__(self, *args, **kwargs)
self._client = client
+
def initFailed(self, reason):
print('XMPP initialization failed', reason)
+
def authFailed(self, reason):
print('XMPP Authentication failed', reason)
+
def handleMessageEventItems(self, iq):
item = iq.firstChildElement().firstChildElement()
if item:
@@ -345,7 +353,7 @@
# The default interval, used if none is specified in external
# configuration.
CALENDAR_HOME_POLL_INTERVAL = 15 * 60
-
+
# The maximum number of resources to retrieve in a single multiget
MULTIGET_BATCH_SIZE = 200
@@ -399,7 +407,7 @@
ampPushHost=None,
ampPushPort=62311,
):
-
+
self._client_id = str(uuid4())
self.reactor = reactor
@@ -540,7 +548,6 @@
else:
return parser.getResults()
-
_CALENDAR_TYPES = set([
caldavxml.calendar,
caldavxml.schedule_inbox,
@@ -562,7 +569,7 @@
StringProducer(body),
method_label=method_label,
)
-
+
body = yield readBody(response)
result = self._parseMultiStatus(body) if response.code == MULTI_STATUS else None
@@ -619,17 +626,17 @@
"""
Issue a PROPFIND on the /.well-known/caldav/ URL
"""
-
+
location = "/.well-known/caldav/"
response, result = yield self._propfind(
location,
self._STARTUP_WELL_KNOWN,
- allowedStatus=(MULTI_STATUS, MOVED_PERMANENTLY, FOUND, ),
+ allowedStatus=(MULTI_STATUS, MOVED_PERMANENTLY, FOUND,),
method_label="PROPFIND{well-known}",
)
-
+
# Follow any redirect
- if response.code in (MOVED_PERMANENTLY, FOUND, ):
+ if response.code in (MOVED_PERMANENTLY, FOUND,):
location = response.headers.getRawHeaders("location")[0]
location = urlsplit(location)[2]
response, result = yield self._propfind(
@@ -638,10 +645,10 @@
allowedStatus=(MULTI_STATUS),
method_label="PROPFIND{well-known}",
)
-
+
returnValue(result[location])
-
+
@inlineCallbacks
def _principalPropfindInitial(self, user):
"""
@@ -794,6 +801,7 @@
else:
return self._updateCalendar_PROPFIND(calendar, newToken)
+
@inlineCallbacks
def _updateCalendar_PROPFIND(self, calendar, newToken):
"""
@@ -837,8 +845,8 @@
calendar.url,
self._POLL_CALENDAR_SYNC_REPORT % {'sync-token': calendar.changeToken},
depth='1',
- allowedStatus=(MULTI_STATUS, FORBIDDEN,),
- otherTokens = True,
+ allowedStatus=(MULTI_STATUS, FORBIDDEN,),
+ otherTokens=True,
method_label="REPORT{sync}" if calendar.changeToken else "REPORT{sync-init}",
)
if result is None:
@@ -848,14 +856,13 @@
calendar.url,
self._POLL_CALENDAR_SYNC_REPORT % {'sync-token': ''},
depth='1',
- otherTokens = True,
+ otherTokens=True,
method_label="REPORT{sync}" if calendar.changeToken else "REPORT{sync-init}",
)
else:
raise IncorrectResponseCode((MULTI_STATUS,), None)
-
+
result, others = result
-
changed = []
for responseHref in result:
@@ -872,7 +879,7 @@
if result[responseHref].getStatus() / 100 == 2:
if responseHref not in self._events:
self._setEvent(responseHref, Event(self.serializeLocation(), responseHref, None))
-
+
event = self._events[responseHref]
if event.etag != etag:
changed.append(responseHref)
@@ -887,7 +894,7 @@
remove_hrefs = old_hrefs - set(changed)
for href in remove_hrefs:
self._removeEvent(href)
-
+
# Now update calendar to the new token taken from the report
for node in others:
if node.tag == davxml.sync_token:
@@ -902,7 +909,7 @@
Given a multistatus for an entire collection, sync the reported items
against the cached items.
"""
-
+
# Detect changes and new items
all_hrefs = []
changed_hrefs = []
@@ -918,20 +925,20 @@
if responseHref not in self._events:
self._setEvent(responseHref, Event(self.serializeLocation(), responseHref, None))
-
+
event = self._events[responseHref]
if event.etag != etag:
changed_hrefs.append(responseHref)
-
+
# Retrieve changes
yield self._updateChangedEvents(calendar, changed_hrefs)
-
+
# Detect removed items and purge them
remove_hrefs = old_hrefs - set(all_hrefs)
for href in remove_hrefs:
self._removeEvent(href)
-
+
@inlineCallbacks
def _updateChangedEvents(self, calendar, changed):
"""
@@ -942,7 +949,7 @@
while changed:
batchedHrefs = changed[:self.MULTIGET_BATCH_SIZE]
changed = changed[self.MULTIGET_BATCH_SIZE:]
-
+
multistatus = yield self._eventReport(calendar.url, batchedHrefs)
for responseHref in batchedHrefs:
try:
@@ -969,7 +976,7 @@
event.component = Component.fromString(body)
self.catalog["eventChanged"].issue(href)
-
+
def _eventReport(self, calendar, events):
# Next do a REPORT on events that might have information
# we don't know about.
@@ -1010,6 +1017,7 @@
pass
returnValue(result)
+
@inlineCallbacks
def _poll(self, calendarHomeSet, firstTime):
if calendarHomeSet in self._checking:
@@ -1017,7 +1025,7 @@
self._checking.add(calendarHomeSet)
calendars, results = yield self._calendarHomePropfind(calendarHomeSet)
-
+
# First time operations
if firstTime:
yield self._pollFirstTime1(results[calendarHomeSet], calendars)
@@ -1043,9 +1051,10 @@
# One time delegate expansion
if firstTime:
yield self._pollFirstTime2()
-
+
returnValue(True)
+
@inlineCallbacks
def _pollFirstTime1(self, homeNode, calendars):
# Detect sync report if needed
@@ -1091,7 +1100,7 @@
)
returnValue(result)
-
+
@inlineCallbacks
def _notificationChangesPropfind(self, notificationURL):
_ignore_response, result = yield self._propfind(
@@ -1102,7 +1111,7 @@
)
returnValue(result)
-
+
@inlineCallbacks
def _principalExpand(self, principalURL):
result = yield self._report(
@@ -1133,12 +1142,12 @@
msg(
type="operation",
phase="start",
- user=self.record.uid,
+ user=self.record.uid,
client_type=self.title,
client_id=self._client_id,
label=label,
)
-
+
try:
result = yield deferred
except IncorrectResponseCode:
@@ -1150,7 +1159,7 @@
raise
else:
success = True
-
+
after = self.reactor.seconds()
msg(
type="operation",
@@ -1186,6 +1195,7 @@
self._pushFactories.append(factory)
connect(GAIEndpoint(self.reactor, host, port), factory)
+
def _receivedPush(self, inboundID, dataChangedTimestamp):
for href, id in self.ampPushKeys.iteritems():
if inboundID == id:
@@ -1250,7 +1260,7 @@
"""
Called before connections are closed, giving a chance to clean up
"""
-
+
self.serialize()
return self._unsubscribePubSub()
@@ -1261,21 +1271,22 @@
"""
if self.serializePath is None or not os.path.isdir(self.serializePath):
return None
-
+
key = "%s-%s" % (self.record.uid, self.title.replace(" ", "_"))
path = os.path.join(self.serializePath, key)
if not os.path.exists(path):
os.mkdir(path)
elif not os.path.isdir(path):
return None
-
+
return path
-
+
+
def serialize(self):
"""
Write current state to disk.
"""
-
+
path = self.serializeLocation()
if path is None:
return
@@ -1283,26 +1294,26 @@
# Create dict for all the data we need to store
data = {
"principalURL": self.principalURL,
- "calendars": [calendar.serialize() for calendar in sorted(self._calendars.values(), key=lambda x:x.name)],
- "events": [event.serialize() for event in sorted(self._events.values(), key=lambda x:x.url)],
+ "calendars": [calendar.serialize() for calendar in sorted(self._calendars.values(), key=lambda x:x.name)],
+ "events": [event.serialize() for event in sorted(self._events.values(), key=lambda x:x.url)],
}
# Write JSON data
json.dump(data, open(os.path.join(path, "index.json"), "w"), indent=2)
-
+
def deserialize(self):
"""
Read state from disk.
"""
-
+
self._calendars = {}
self._events = {}
path = self.serializeLocation()
if path is None:
return
-
+
# Parse JSON data for calendars
try:
data = json.load(open(os.path.join(path, "index.json")))
@@ -1318,8 +1329,8 @@
for calendar in data["calendars"]:
calendar = Calendar.deserialize(calendar, self._events)
self._calendars[calendar.url] = calendar
-
+
def _makeSelfAttendee(self):
attendee = Property(
name=u'ATTENDEE',
@@ -1371,7 +1382,7 @@
label_suffix = "large"
if len(attendees) > 75:
label_suffix = "huge"
-
+
# At last, upload the new event definition
response = yield self._request(
(NO_CONTENT, PRECONDITION_FAILED,),
@@ -1405,7 +1416,7 @@
email = email[7:]
elif attendee.hasParameter('EMAIL'):
email = attendee.parameterValue('EMAIL').encode("utf-8")
-
+
# First try to discover some names to supply to the
# auto-completion
yield self._report(
@@ -1419,7 +1430,7 @@
depth=None,
method_label="REPORT{psearch}",
)
-
+
# Now learn about the attendee's availability
yield self.requestAvailability(
component.mainComponent().getStartDateUTC(),
@@ -1453,7 +1464,7 @@
label_suffix = "large"
if len(attendees) > 75:
label_suffix = "huge"
-
+
response = yield self._request(
okCodes,
'PUT',
@@ -1472,7 +1483,7 @@
Issue a DELETE for the given URL and remove local state
associated with that event.
"""
-
+
self._removeEvent(href)
response = yield self._request(
@@ -1516,14 +1527,14 @@
Add an event that is an invite - i.e., has attendees. We will do attendee lookups and freebusy
checks on each attendee to simulate what happens when an organizer creates a new invite.
"""
-
+
# Do lookup and free busy of each attendee (not self)
attendees = list(component.mainComponent().properties('ATTENDEE'))
for attendee in attendees:
if attendee.value() in (self.uuid, self.email):
continue
yield self._attendeeAutoComplete(component, attendee)
-
+
# Now do a normal PUT
yield self.addEvent(href, component, invite=True)
@@ -1653,7 +1664,7 @@
# configuration. This is also the actual value used by Snow
# Leopard iCal.
CALENDAR_HOME_POLL_INTERVAL = 15 * 60
-
+
# The maximum number of resources to retrieve in a single multiget
MULTIGET_BATCH_SIZE = 200
@@ -1695,7 +1706,7 @@
# Try to read data from disk - if it succeeds self.principalURL will be set
self.deserialize()
-
+
if self.principalURL is None:
# PROPFIND principal path to retrieve actual principal-URL
response = yield self._principalPropfindInitial(self.record.uid)
@@ -1707,6 +1718,7 @@
returnValue(principal)
+
class OS_X_10_7(BaseAppleClient):
"""
Implementation of the OS X 10.7 iCal network behavior.
@@ -1720,7 +1732,7 @@
# configuration. This is also the actual value used by Snow
# Leopard iCal.
CALENDAR_HOME_POLL_INTERVAL = 15 * 60
-
+
# The maximum number of resources to retrieve in a single multiget
MULTIGET_BATCH_SIZE = 50
@@ -1764,7 +1776,7 @@
Default is to add User-Agent, sub-classes should override to add other
client specific things, Accept etc.
"""
-
+
super(OS_X_10_7, self)._addDefaultHeaders(headers)
headers.setRawHeaders('Accept', ['*/*'])
headers.setRawHeaders('Accept-Language', ['en-us'])
@@ -1777,7 +1789,7 @@
# Try to read data from disk - if it succeeds self.principalURL will be set
self.deserialize()
-
+
if self.principalURL is None:
# PROPFIND well-known with redirect
response = yield self._startupPropfindWellKnown()
@@ -1811,7 +1823,7 @@
# configuration. This is also the actual value used by Snow
# Leopard iCal.
CALENDAR_HOME_POLL_INTERVAL = 15 * 60
-
+
# The maximum number of resources to retrieve in a single multiget
MULTIGET_BATCH_SIZE = 50
@@ -1849,7 +1861,7 @@
Default is to add User-Agent, sub-classes should override to add other
client specific things, Accept etc.
"""
-
+
super(iOS_5, self)._addDefaultHeaders(headers)
headers.setRawHeaders('Accept', ['*/*'])
headers.setRawHeaders('Accept-Language', ['en-us'])
@@ -1909,7 +1921,7 @@
now.offsetMonth(-1) # 1 month back default
result = yield self._report(
calendar.url,
- self._POLL_CALENDAR_VEVENT_TR_QUERY % {"start-date":now.getText()},
+ self._POLL_CALENDAR_VEVENT_TR_QUERY % {"start-date": now.getText()},
depth='1',
method_label="REPORT{vevent}",
)
@@ -1948,7 +1960,7 @@
# Try to read data from disk - if it succeeds self.principalURL will be set
self.deserialize()
-
+
if self.principalURL is None:
# PROPFIND well-known with redirect
response = yield self._startupPropfindWellKnown()
@@ -1968,6 +1980,7 @@
returnValue(principal)
+
class RequestLogger(object):
format = u"%(user)s request %(code)s%(success)s[%(duration)5.2f s] %(method)8s %(url)s"
success = u"\N{CHECK MARK}"
@@ -1982,7 +1995,7 @@
code=event['code'],
duration=event['duration'],
)
-
+
if event['success']:
formatArgs['success'] = self.success
else:
@@ -1998,7 +2011,7 @@
return []
-
+
def main():
from urllib2 import HTTPDigestAuthHandler
from twisted.internet import reactor
@@ -2013,7 +2026,7 @@
from sim import _DirectoryRecord
client = OS_X_10_6(
- reactor, 'http://127.0.0.1:8008/',
+ reactor, 'http://127.0.0.1:8008/',
_DirectoryRecord(
u'user01', u'user01', u'User 01', u'user01 at example.org'),
auth)
Modified: CalendarServer/trunk/contrib/performance/loadtest/logger.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/logger.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/logger.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -54,7 +54,7 @@
# Determine PASS/FAIL
failure = False
count = len(data)
-
+
if failed * 100.0 / count > self._fail_cut_off:
failure = True
Modified: CalendarServer/trunk/contrib/performance/loadtest/population.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/population.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/population.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -149,7 +149,7 @@
"""
Generate individuals such as might be randomly selected from a
population with the given parameters.
-
+
@type parameters: L{PopulationParameters}
@rtype: generator of L{ClientType} instances
"""
@@ -176,6 +176,7 @@
TimezoneCache.create()
+
def getUserRecord(self, index):
return self._records[index]
@@ -201,7 +202,7 @@
uri=self.server,
user=user.encode('utf-8'),
passwd=record.password.encode('utf-8'))
- return user, {"basic": authBasic, "digest": authDigest,}
+ return user, {"basic": authBasic, "digest": authDigest, }
def stop(self):
@@ -223,7 +224,7 @@
def add(self, numClients, clientsPerUser):
for _ignore_n in range(numClients):
number = self._nextUserNumber()
-
+
for _ignore_peruser in range(clientsPerUser):
clientType = self._pop.next()
if (number % self.workerCount) != self.workerIndex:
@@ -231,9 +232,9 @@
# we have to skip all but every Nth request (since every node
# runs the same arrival policy).
continue
-
+
_ignore_user, auth = self._createUser(number)
-
+
reactor = loggedReactor(self.reactor)
client = clientType.new(
reactor,
@@ -246,7 +247,7 @@
self.clients.append(client)
d = client.run()
d.addErrback(self._clientFailure, reactor)
-
+
for profileType in clientType.profileTypes:
profile = profileType(reactor, self, client, number)
if profile.enabled:
@@ -334,6 +335,7 @@
self._failures = collections.defaultdict(int)
self._simFailures = collections.defaultdict(int)
+
def eventReceived(self, event):
self._times.append(event['duration'])
if len(self._times) == 200:
@@ -352,6 +354,7 @@
self._simFailures[event] += 1
+
class ReportStatistics(StatisticsBase, SummarizingMixin):
"""
@@ -363,21 +366,21 @@
# the response time thresholds to display together with failing % count threshold
_thresholds_default = {
- "requests":{
- "limits": [ 0.1, 0.5, 1.0, 3.0, 5.0, 10.0, 30.0],
- "thresholds":{
- "default":[ 100.0, 100.0, 100.0, 5.0, 1.0, 0.5, 0.0],
+ "requests": {
+ "limits": [0.1, 0.5, 1.0, 3.0, 5.0, 10.0, 30.0],
+ "thresholds": {
+ "default": [100.0, 100.0, 100.0, 5.0, 1.0, 0.5, 0.0],
}
}
}
- _fail_cut_off = 1.0 # % of total count at which failed requests will cause a failure
+ _fail_cut_off = 1.0 # % of total count at which failed requests will cause a failure
_fields_init = [
('request', -25, '%-25s'),
('count', 8, '%8s'),
('failed', 8, '%8s'),
]
-
+
_fields_extend = [
('mean', 8, '%8.4f'),
('median', 8, '%8.4f'),
@@ -393,7 +396,7 @@
self._failed_sim = collections.defaultdict(int)
self._startTime = datetime.now()
- # Load parameters from config
+ # Load parameters from config
if "thresholdsPath" in params:
jsondata = json.load(open(params["thresholdsPath"]))
elif "thresholds" in params:
@@ -404,7 +407,7 @@
for ctr, item in enumerate(self._thresholds):
for k, v in jsondata["requests"]["thresholds"].items():
item[1][k] = v[ctr]
-
+
self._fields = self._fields_init[:]
for threshold, _ignore_fail_at in self._thresholds:
self._fields.append(('>%g sec' % (threshold,), 10, '%10s'))
@@ -447,7 +450,7 @@
def printMiscellaneous(self, output, items):
maxColumnWidth = str(len(max(items.iterkeys(), key=len)))
- fmt = "%"+maxColumnWidth+"s : %-s\n"
+ fmt = "%" + maxColumnWidth + "s : %-s\n"
for k in sorted(items.iterkeys()):
output.write(fmt % (k.title(), items[k],))
@@ -469,7 +472,7 @@
'Users': self.countUsers(),
'Clients': self.countClients(),
'Start time': self._startTime.strftime('%m/%d %H:%M:%S'),
- 'Run time': "%02d:%02d:%02d" % (runHours,runMinutes,runSeconds),
+ 'Run time': "%02d:%02d:%02d" % (runHours, runMinutes, runSeconds),
'CPU Time': "user %-5.2f sys %-5.2f total %02d:%02d:%02d" % (cpuUser, cpuSys, cpuHours, cpuMinutes, cpuSeconds,)
}
if self.countClientFailures() > 0:
@@ -513,7 +516,7 @@
checks = [
(failures, self._fail_cut_off, self._FAILED_REASON),
]
-
+
for ctr, item in enumerate(self._thresholds):
threshold, fail_at = item
fail_at = fail_at.get(method, fail_at["default"])
Modified: CalendarServer/trunk/contrib/performance/loadtest/profiles.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/profiles.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/profiles.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -22,7 +22,8 @@
from __future__ import division
import json
-import sys, random
+import random
+import sys
from uuid import uuid4
from caldavclientlibrary.protocol.caldav.definitions import caldavxml
@@ -65,9 +66,9 @@
def _calendarsOfType(self, calendarType, componentType):
return [
- cal
- for cal
- in self._client._calendars.itervalues()
+ cal
+ for cal
+ in self._client._calendars.itervalues()
if cal.resourceType == calendarType and componentType in cal.componentTypes]
@@ -143,14 +144,17 @@
"""
Indicates no new attendees can be invited to a particular event.
"""
+ pass
+
def loopWithDistribution(reactor, distribution, function):
result = Deferred()
def repeat(ignored):
reactor.callLater(distribution.sample(), iterate)
+
def iterate():
d = function()
if d is not None:
@@ -231,10 +235,10 @@
otherwise a L{Deferred} which fires when the attendee
change has been made.
"""
-
+
if not self._client.started:
return succeed(None)
-
+
# Find calendars which are eligible for invites
calendars = self._calendarsOfType(caldavxml.calendar, "VEVENT")
@@ -301,7 +305,7 @@
END:VCALENDAR
""".replace("\n", "\r\n"))
-
+
def setParameters(
self,
enabled=True,
@@ -387,7 +391,7 @@
if not self._client.started:
return succeed(None)
-
+
# Find calendars which are eligible for invites
calendars = self._calendarsOfType(caldavxml.calendar, "VEVENT")
@@ -409,7 +413,7 @@
vevent.replaceProperty(Property("DTSTART", dtstart))
vevent.replaceProperty(Property("DTEND", dtend))
vevent.replaceProperty(Property("UID", uid))
-
+
rrule = self._recurrenceDistribution.sample()
if rrule is not None:
vevent.addProperty(Property(None, None, None, pycalendar=rrule))
@@ -429,6 +433,8 @@
d = self._client.addInvite(href, vcalendar)
return self._newOperation("invite", d)
+
+
class Accepter(ProfileBase):
"""
A Calendar user who accepts invitations to events. As well as accepting requests, this
@@ -466,6 +472,7 @@
else:
return
+
def calendarEventChanged(self, calendar, href):
if href in self._accepting:
return
@@ -554,6 +561,7 @@
passthrough = passthrough.value.response
return passthrough
+
def _handleCancel(self, href):
uid = self._client._events[href].getUID()
@@ -629,7 +637,7 @@
def _addEvent(self):
if not self._client.started:
return succeed(None)
-
+
calendars = self._calendarsOfType(caldavxml.calendar, "VEVENT")
while calendars:
@@ -648,7 +656,7 @@
vevent.replaceProperty(Property("DTSTART", dtstart))
vevent.replaceProperty(Property("DTEND", dtend))
vevent.replaceProperty(Property("UID", uid))
-
+
rrule = self._recurrenceDistribution.sample()
if rrule is not None:
vevent.addProperty(Property(None, None, None, pycalendar=rrule))
@@ -658,6 +666,7 @@
return self._newOperation("create", d)
+
class Tasker(ProfileBase):
"""
A Calendar user who creates new tasks.
@@ -697,7 +706,7 @@
def _addTask(self):
if not self._client.started:
return succeed(None)
-
+
calendars = self._calendarsOfType(caldavxml.calendar, "VTODO")
while calendars:
@@ -720,6 +729,7 @@
return self._newOperation("create", d)
+
class OperationLogger(SummarizingMixin):
"""
Profiles will initiate operations which may span multiple requests. Start
@@ -736,22 +746,22 @@
# the response time thresholds to display together with failing % count threshold
_thresholds_default = {
- "operations":{
- "limits": [ 0.1, 0.5, 1.0, 3.0, 5.0, 10.0, 30.0],
- "thresholds":{
- "default":[ 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0],
+ "operations": {
+ "limits": [0.1, 0.5, 1.0, 3.0, 5.0, 10.0, 30.0],
+ "thresholds": {
+ "default": [100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0],
}
}
}
- _lag_cut_off = 1.0 # Maximum allowed median scheduling latency, seconds
- _fail_cut_off = 1.0 # % of total count at which failed requests will cause a failure
+ _lag_cut_off = 1.0 # Maximum allowed median scheduling latency, seconds
+ _fail_cut_off = 1.0 # % of total count at which failed requests will cause a failure
_fields_init = [
('operation', -25, '%-25s'),
('count', 8, '%8s'),
('failed', 8, '%8s'),
]
-
+
_fields_extend = [
('mean', 8, '%8.4f'),
('median', 8, '%8.4f'),
@@ -766,8 +776,8 @@
if outfile is None:
outfile = sys.stdout
self._outfile = outfile
-
- # Load parameters from config
+
+ # Load parameters from config
if "thresholdsPath" in params:
jsondata = json.load(open(params["thresholdsPath"]))
elif "thresholds" in params:
@@ -778,7 +788,7 @@
for ctr, item in enumerate(self._thresholds):
for k, v in jsondata["operations"]["thresholds"].items():
item[1][k] = v[ctr]
-
+
self._fields = self._fields_init[:]
for threshold, _ignore_fail_at in self._thresholds:
self._fields.append(('>%g sec' % (threshold,), 10, '%10s'))
@@ -790,6 +800,7 @@
if "failCutoff" in params:
self._fail_cut_off = params["failCutoff"]
+
def observe(self, event):
if event.get("type") == "operation":
event = event.copy()
@@ -809,6 +820,7 @@
dataset = self._perOperationLags.setdefault(event[u'label'], [])
dataset.append(lag)
+
def _summarizeData(self, operation, data):
avglag = mean(self._perOperationLags.get(operation, [0.0])) * 1000.0
data = SummarizingMixin._summarizeData(self, operation, data)
Modified: CalendarServer/trunk/contrib/performance/loadtest/sim.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/sim.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/sim.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -55,6 +55,7 @@
self.email = email
+
def generateRecords(count, uidPattern="user%d", passwordPattern="user%d",
namePattern="User %d", emailPattern="user%d at example.com"):
for i in xrange(count):
@@ -66,6 +67,7 @@
yield _DirectoryRecord(uid, password, name, email)
+
def recordsFromCSVFile(path):
if path:
pathObj = FilePath(path)
@@ -99,9 +101,11 @@
def __init__(self, reactor):
self._reactor = reactor
+
def __getattr__(self, name):
return getattr(self._reactor, name)
+
def callLater(self, delay, function, *args, **kwargs):
expected = self._reactor.seconds() + delay
def modifyContext():
@@ -110,6 +114,7 @@
return self._reactor.callLater(delay, modifyContext)
+
class SimOptions(Options):
"""
Command line configuration options for the load simulator.
@@ -172,7 +177,7 @@
try:
self.config = readPlist(configFile)
except ExpatError, e:
- raise UsageError("--config %s: %s" % (self['config'].path, e))
+ raise UsageError("--config %s: %s" % (self['config'].path, e))
finally:
configFile.close()
@@ -254,7 +259,6 @@
if 'principalPathTemplate' in config:
principalPathTemplate = config['principalPathTemplate']
-
if 'clientDataSerialization' in config:
if config['clientDataSerialization']['Enabled']:
serializationPath = config['clientDataSerialization']['Path']
@@ -268,7 +272,7 @@
if 'arrival' in config:
arrival = Arrival(
- namedAny(config['arrival']['factory']),
+ namedAny(config['arrival']['factory']),
config['arrival']['params'])
else:
arrival = Arrival(
@@ -422,6 +426,7 @@
return self.ms.stopService()
+
def attachService(reactor, loadsim, service):
"""
Attach a given L{IService} provider to the given L{IReactorCore}; cause it
@@ -524,6 +529,7 @@
output.write('\n*** PASS\n')
+
class ProcessProtocolBridge(ProcessProtocol):
def __init__(self, spawner, proto):
Modified: CalendarServer/trunk/contrib/performance/loadtest/subscribe.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/subscribe.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/subscribe.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -12,8 +12,8 @@
def issue(self, issue):
self.subscriber(issue)
-
+
class Periodical(object):
def __init__(self):
self.subscriptions = []
Modified: CalendarServer/trunk/contrib/performance/loadtest/test_ical.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/test_ical.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/test_ical.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -1272,6 +1272,7 @@
self.client._parseMultiStatus(CALENDAR_HOME_PROPFIND_RESPONSE_XMPP_MISSING), home)
self.assertEqual({}, self.client.xmpp)
+
def test_changeEventAttendee(self):
"""
OS_X_10_6.changeEventAttendee removes one attendee from an
@@ -1377,13 +1378,13 @@
consumer = MemoryConsumer()
yield body.startProducing(consumer)
-
+
response = MemoryResponse(
('HTTP', '1', '1'), MULTI_STATUS, "MultiStatus", Headers({}),
StringProducer("<?xml version='1.0' encoding='UTF-8'?><multistatus xmlns='DAV:' />"))
-
+
returnValue(response)
-
+
@inlineCallbacks
def _testPost(*args, **kwargs):
expectedResponseCode, method, url, headers, body = args
@@ -1396,19 +1397,19 @@
consumer = MemoryConsumer()
yield body.startProducing(consumer)
self.assertNotEqual(consumer.value().find(kwargs["attendee"]), -1)
-
+
response = MemoryResponse(
('HTTP', '1', '1'), OK, "OK", Headers({}),
StringProducer(""))
-
+
returnValue(response)
-
+
def _testPost02(*args, **kwargs):
return _testPost(*args, attendee="ATTENDEE:mailto:user02 at example.com", **kwargs)
-
+
def _testPost03(*args, **kwargs):
return _testPost(*args, attendee="ATTENDEE:mailto:user03 at example.com", **kwargs)
-
+
@inlineCallbacks
def _testPut(*args, **kwargs):
expectedResponseCode, method, url, headers, body = args
@@ -1423,21 +1424,22 @@
self.assertEqual(
Component.fromString(consumer.value()),
Component.fromString(EVENT_INVITE))
-
+
response = MemoryResponse(
('HTTP', '1', '1'), CREATED, "Created", Headers({}),
StringProducer(""))
-
+
returnValue(response)
-
- requests = [_testReport, _testPost02, _testReport, _testPost03, _testPut,]
-
+
+ requests = [_testReport, _testPost02, _testReport, _testPost03, _testPut, ]
+
def _requestHandler(*args, **kwargs):
handler = requests.pop(0)
return handler(*args, **kwargs)
self.client._request = _requestHandler
yield self.client.addInvite('/mumble/frotz.ics', vcalendar)
+
def test_deleteEvent(self):
"""
L{OS_X_10_6.deleteEvent} DELETEs the event at the relative
@@ -1528,56 +1530,56 @@
self.client.serialize()
self.assertTrue(os.path.exists(clientPath))
self.assertTrue(os.path.exists(indexPath))
- self.assertEqual(open(indexPath).read(), """{
+ self.assertEqual(open(indexPath).read().replace(" \n", "\n"), """{
"calendars": [
{
- "changeToken": "123",
- "name": "calendar",
- "resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
+ "changeToken": "123",
+ "name": "calendar",
+ "resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
"componentTypes": [
"VEVENT"
- ],
- "url": "/home/calendar/",
+ ],
+ "url": "/home/calendar/",
"events": [
"1.ics"
]
- },
+ },
{
- "changeToken": "789",
- "name": "calendar",
- "resourceType": "{urn:ietf:params:xml:ns:caldav}schedule-inbox",
+ "changeToken": "789",
+ "name": "calendar",
+ "resourceType": "{urn:ietf:params:xml:ns:caldav}schedule-inbox",
"componentTypes": [
- "VEVENT",
+ "VEVENT",
"VTODO"
- ],
- "url": "/home/inbox/",
+ ],
+ "url": "/home/inbox/",
"events": [
"i1.ics"
]
- },
+ },
{
- "changeToken": "456",
- "name": "tasks",
- "resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
+ "changeToken": "456",
+ "name": "tasks",
+ "resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
"componentTypes": [
"VTODO"
- ],
- "url": "/home/tasks/",
+ ],
+ "url": "/home/tasks/",
"events": []
}
- ],
- "principalURL": null,
+ ],
+ "principalURL": null,
"events": [
{
- "url": "/home/calendar/1.ics",
- "scheduleTag": null,
- "etag": "123.123",
+ "url": "/home/calendar/1.ics",
+ "scheduleTag": null,
+ "etag": "123.123",
"uid": "004f8e41-b071-4b30-bb3b-6aada4adcc10"
- },
+ },
{
- "url": "/home/inbox/i1.ics",
- "scheduleTag": null,
- "etag": "123.123",
+ "url": "/home/inbox/i1.ics",
+ "scheduleTag": null,
+ "etag": "123.123",
"uid": "00a79cad-857b-418e-a54a-340b5686d747"
}
]
@@ -1633,53 +1635,53 @@
open(indexPath, "w").write("""{
"calendars": [
{
- "changeToken": "321",
- "name": "calendar",
- "resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
+ "changeToken": "321",
+ "name": "calendar",
+ "resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
"componentTypes": [
"VEVENT"
- ],
- "url": "/home/calendar/",
+ ],
+ "url": "/home/calendar/",
"events": [
"2.ics"
]
- },
+ },
{
- "changeToken": "987",
- "name": "calendar",
- "resourceType": "{urn:ietf:params:xml:ns:caldav}schedule-inbox",
+ "changeToken": "987",
+ "name": "calendar",
+ "resourceType": "{urn:ietf:params:xml:ns:caldav}schedule-inbox",
"componentTypes": [
- "VEVENT",
+ "VEVENT",
"VTODO"
- ],
- "url": "/home/inbox/",
+ ],
+ "url": "/home/inbox/",
"events": [
"i2.ics"
]
- },
+ },
{
- "changeToken": "654",
- "name": "tasks",
- "resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
+ "changeToken": "654",
+ "name": "tasks",
+ "resourceType": "{urn:ietf:params:xml:ns:caldav}calendar",
"componentTypes": [
"VTODO"
- ],
- "url": "/home/tasks/",
+ ],
+ "url": "/home/tasks/",
"events": []
}
- ],
- "principalURL": null,
+ ],
+ "principalURL": null,
"events": [
{
- "url": "/home/calendar/2.ics",
- "scheduleTag": null,
- "etag": "321.321",
+ "url": "/home/calendar/2.ics",
+ "scheduleTag": null,
+ "etag": "321.321",
"uid": "004f8e41-b071-4b30-bb3b-6aada4adcc10"
- },
+ },
{
- "url": "/home/inbox/i2.ics",
- "scheduleTag": null,
- "etag": "987.987",
+ "url": "/home/inbox/i2.ics",
+ "scheduleTag": null,
+ "etag": "987.987",
"uid": "00a79cad-857b-418e-a54a-340b5686d747"
}
]
@@ -1716,6 +1718,7 @@
self.assertEqual(str(self.client._events["/home/inbox/i2.ics"].component), cal2)
+
class UpdateCalendarTests(OS_X_10_6Mixin, TestCase):
"""
Tests for L{OS_X_10_6._updateCalendar}.
@@ -1864,7 +1867,7 @@
MemoryResponse(
('HTTP', '1', '1'), MULTI_STATUS, "Multi-status", None,
StringProducer(self._CALENDAR_PROPFIND_RESPONSE_BODY)))
-
+
result, req = requests.pop(0)
expectedResponseCode, method, url, _ignore_headers, _ignore_body = req
self.assertEqual('REPORT', method)
@@ -1907,7 +1910,7 @@
MemoryResponse(
('HTTP', '1', '1'), MULTI_STATUS, "Multi-status", None,
StringProducer(self._CALENDAR_PROPFIND_RESPONSE_BODY)))
-
+
result, req = requests.pop(0)
expectedResponseCode, method, url, _ignore_headers, _ignore_body = req
self.assertEqual('REPORT', method)
@@ -1921,7 +1924,7 @@
self.assertTrue(self.client._events['/something/anotherthing.ics'].etag is not None)
self.assertTrue(self.client._events['/something/else.ics'].etag is None)
-
+
result, req = requests.pop(0)
expectedResponseCode, method, url, _ignore_headers, _ignore_body = req
self.assertEqual('REPORT', method)
@@ -1937,6 +1940,7 @@
self.assertTrue(self.client._events['/something/else.ics'].etag is not None)
+
class VFreeBusyTests(OS_X_10_6Mixin, TestCase):
"""
Tests for L{OS_X_10_6.requestAvailability}.
@@ -1996,7 +2000,7 @@
SUMMARY:Availability for urn:uuid:user05, urn:uuid:user10
END:VFREEBUSY
END:VCALENDAR
-""".replace('\n', '\r\n') % {'uid': uid, 'dtstamp': dtstamp},consumer.value())
+""".replace('\n', '\r\n') % {'uid': uid, 'dtstamp': dtstamp}, consumer.value())
finished.addCallback(cbFinished)
@@ -2008,4 +2012,3 @@
finished.addCallback(requested)
return d
-
Modified: CalendarServer/trunk/contrib/performance/loadtest/test_population.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/test_population.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/test_population.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -179,16 +179,16 @@
PUT(xxx-huge/large/medium/small} have different thresholds. Test that requests straddling
each of those are correctly determined to be failures or not.
"""
-
+
_thresholds = {
- "requests":{
- "limits": [ 0.1, 0.5, 1.0, 3.0, 5.0, 10.0, 30.0],
- "thresholds":{
- "default": [ 100.0, 100.0, 100.0, 5.0, 1.0, 0.5, 0.0],
- "PUT{organizer-small}": [ 100.0, 50.0, 25.0, 5.0, 1.0, 0.5, 0.0],
- "PUT{organizer-medium}":[ 100.0, 100.0, 50.0, 25.0, 5.0, 1.0, 0.5],
- "PUT{organizer-large}": [ 100.0, 100.0, 100.0, 50.0, 25.0, 5.0, 1.0],
- "PUT{organizer-huge}": [ 100.0, 100.0, 100.0, 100.0, 100.0, 50.0, 25.0],
+ "requests": {
+ "limits": [0.1, 0.5, 1.0, 3.0, 5.0, 10.0, 30.0],
+ "thresholds": {
+ "default": [100.0, 100.0, 100.0, 5.0, 1.0, 0.5, 0.0],
+ "PUT{organizer-small}": [100.0, 50.0, 25.0, 5.0, 1.0, 0.5, 0.0],
+ "PUT{organizer-medium}": [100.0, 100.0, 50.0, 25.0, 5.0, 1.0, 0.5],
+ "PUT{organizer-large}": [100.0, 100.0, 100.0, 50.0, 25.0, 5.0, 1.0],
+ "PUT{organizer-huge}": [100.0, 100.0, 100.0, 100.0, 100.0, 50.0, 25.0],
}
}
}
@@ -208,7 +208,7 @@
type='response', method='PUT{organizer-small}', success=True,
duration=0.2, user='user01', client_type="test", client_id="1234"))
self.assertEqual([], logger.failures())
-
+
# -small above 0.5 threshold
logger = ReportStatistics(thresholds=_thresholds)
logger.observe(dict(
@@ -227,7 +227,7 @@
["Greater than 50% PUT{organizer-small} exceeded 0.5 second response time"],
logger.failures()
)
-
+
# -medium below 0.5 threshold
logger = ReportStatistics(thresholds=_thresholds)
logger.observe(dict(
@@ -246,7 +246,7 @@
[],
logger.failures()
)
-
+
# -medium above 1.0 threshold
logger = ReportStatistics(thresholds=_thresholds)
logger.observe(dict(
@@ -265,7 +265,7 @@
["Greater than 50% PUT{organizer-medium} exceeded 1 second response time"],
logger.failures()
)
-
+
# -large below 1.0 threshold
logger = ReportStatistics(thresholds=_thresholds)
logger.observe(dict(
@@ -284,7 +284,7 @@
[],
logger.failures()
)
-
+
# -large above 3.0 threshold
logger = ReportStatistics(thresholds=_thresholds)
logger.observe(dict(
@@ -322,7 +322,7 @@
[],
logger.failures()
)
-
+
# -huge above 10.0 threshold
logger = ReportStatistics(thresholds=_thresholds)
logger.observe(dict(
Modified: CalendarServer/trunk/contrib/performance/loadtest/test_profiles.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/test_profiles.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/test_profiles.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -156,7 +156,7 @@
END:VCALENDAR
"""
-INBOX_REPLY = """\
+INBOX_REPLY = """\
BEGIN:VCALENDAR
METHOD:REPLY
VERSION:2.0
@@ -175,6 +175,7 @@
return _AnyRecord(index)
+
class _AnyRecord(object):
def __init__(self, index):
self.uid = u"user%02d" % (index,)
@@ -183,6 +184,7 @@
self.email = u"user%02d at example.com" % (index,)
+
class Deterministic(object):
def __init__(self, value=None):
self.value = value
@@ -243,17 +245,17 @@
"""
if self.serializePath is None or not os.path.isdir(self.serializePath):
return None
-
+
key = "%s-%s" % (self.record.uid, "StubClient")
path = os.path.join(self.serializePath, key)
if not os.path.exists(path):
os.mkdir(path)
elif not os.path.isdir(path):
return None
-
+
return path
-
+
def addEvent(self, href, vevent):
self._events[href] = Event(self.serializePath, href, None, vevent)
return succeed(None)
@@ -273,6 +275,7 @@
else:
return succeed(None)
+
def updateEvent(self, href):
self.rescheduled.remove(href)
return succeed(None)
@@ -363,10 +366,10 @@
userNumber = 13
client = StubClient(userNumber, self.mktemp())
- inviter = Inviter(None, self.sim, client, userNumber, **{"enabled":False})
+ inviter = Inviter(None, self.sim, client, userNumber, **{"enabled": False})
self.assertEqual(inviter.enabled, False)
- inviter = Inviter(None, self.sim, client, userNumber, **{"enabled":True})
+ inviter = Inviter(None, self.sim, client, userNumber, **{"enabled": True})
self.assertEqual(inviter.enabled, True)
@@ -437,7 +440,6 @@
self.assertEqual(attendees[0].parameterValue(paramname), paramvalue)
-
def test_doNotAddSelfToEvent(self):
"""
If the inviter randomly selects its own user to be added to
@@ -466,7 +468,6 @@
self.assertEqual(attendees[0].parameterValue(paramname), paramvalue)
-
def test_doNotAddExistingToEvent(self):
"""
If the inviter randomly selects a user which is already an
@@ -561,12 +562,13 @@
userNumber = 13
client = StubClient(userNumber, self.mktemp())
- inviter = RealisticInviter(None, self.sim, client, userNumber, **{"enabled":False})
+ inviter = RealisticInviter(None, self.sim, client, userNumber, **{"enabled": False})
self.assertEqual(inviter.enabled, False)
- inviter = RealisticInviter(None, self.sim, client, userNumber, **{"enabled":True})
+ inviter = RealisticInviter(None, self.sim, client, userNumber, **{"enabled": True})
self.assertEqual(inviter.enabled, True)
+
def test_doNotAddInviteToInbox(self):
"""
When the only calendar with any events is a schedule inbox, no
@@ -578,7 +580,7 @@
client = StubClient(userNumber, self.mktemp())
client._calendars.update({calendar.url: calendar})
- inviter = RealisticInviter(None, self.sim, client, userNumber, **{"enabled":False})
+ inviter = RealisticInviter(None, self.sim, client, userNumber, **{"enabled": False})
inviter._invite()
self.assertEquals(client._events, {})
@@ -616,13 +618,12 @@
inviter._invite()
self.assertEquals(len(client._events), 1)
attendees = tuple(client._events.values()[0].component.mainComponent().properties('ATTENDEE'))
- expected = set(("mailto:user%02d at example.com" % (userNumber,), "mailto:user%02d at example.com" % (userNumber + 1,),))
+ expected = set(("mailto:user%02d at example.com" % (userNumber,), "mailto:user%02d at example.com" % (userNumber + 1,),))
for attendee in attendees:
expected.remove(attendee.value())
self.assertEqual(len(expected), 0)
-
def test_doNotAddSelfToEvent(self):
"""
If the inviter randomly selects its own user to be added to
@@ -645,13 +646,12 @@
inviter._invite()
self.assertEquals(len(client._events), 1)
attendees = tuple(client._events.values()[0].component.mainComponent().properties('ATTENDEE'))
- expected = set(("mailto:user%02d at example.com" % (selfNumber,), "mailto:user%02d at example.com" % (otherNumber,),))
+ expected = set(("mailto:user%02d at example.com" % (selfNumber,), "mailto:user%02d at example.com" % (otherNumber,),))
for attendee in attendees:
expected.remove(attendee.value())
self.assertEqual(len(expected), 0)
-
def test_doNotAddExistingToEvent(self):
"""
If the inviter randomly selects a user which is already an
@@ -676,9 +676,9 @@
self.assertEquals(len(client._events), 1)
attendees = tuple(client._events.values()[0].component.mainComponent().properties('ATTENDEE'))
expected = set((
- "mailto:user%02d at example.com" % (selfNumber,),
- "mailto:user%02d at example.com" % (inviteeNumber,),
- "mailto:user%02d at example.com" % (anotherNumber,),
+ "mailto:user%02d at example.com" % (selfNumber,),
+ "mailto:user%02d at example.com" % (inviteeNumber,),
+ "mailto:user%02d at example.com" % (anotherNumber,),
))
for attendee in attendees:
expected.remove(attendee.value())
@@ -719,12 +719,13 @@
userNumber = 13
client = StubClient(userNumber, self.mktemp())
- accepter = Accepter(None, self.sim, client, userNumber, **{"enabled":False})
+ accepter = Accepter(None, self.sim, client, userNumber, **{"enabled": False})
self.assertEqual(accepter.enabled, False)
- accepter = Accepter(None, self.sim, client, userNumber, **{"enabled":True})
+ accepter = Accepter(None, self.sim, client, userNumber, **{"enabled": True})
self.assertEqual(accepter.enabled, True)
+
def test_ignoreEventOnUnknownCalendar(self):
"""
If an event on an unknown calendar changes, it is ignored.
@@ -812,7 +813,7 @@
inboxEvent = Event(client.serializeLocation(), inboxURL + u'4321.ics', None, vevent)
client._setEvent(inboxEvent.url, inboxEvent)
- accepter = Accepter(clock, self.sim, client, userNumber)
+ accepter = Accepter(clock, self.sim, client, userNumber)
accepter.eventChanged(inboxEvent.url)
clock.advance(3)
self.assertNotIn(inboxEvent.url, client._events)
@@ -841,7 +842,7 @@
Response(
('HTTP', 1, 1), PRECONDITION_FAILED,
'Precondition Failed', None, None)))
- accepter = Accepter(clock, self.sim, client, userNumber)
+ accepter = Accepter(clock, self.sim, client, userNumber)
accepter.eventChanged(inboxEvent.url)
clock.advance(3)
self.assertNotIn(inboxEvent.url, client._events)
@@ -967,8 +968,6 @@
-
-
class EventerTests(TestCase):
"""
Tests for loadtest.profiles.Eventer, a profile which adds new
@@ -983,12 +982,13 @@
userNumber = 13
client = StubClient(userNumber, self.mktemp())
- eventer = Eventer(None, self.sim, client, None, **{"enabled":False})
+ eventer = Eventer(None, self.sim, client, None, **{"enabled": False})
self.assertEqual(eventer.enabled, False)
- eventer = Eventer(None, self.sim, client, None, **{"enabled":True})
+ eventer = Eventer(None, self.sim, client, None, **{"enabled": True})
self.assertEqual(eventer.enabled, True)
+
def test_doNotAddEventOnInbox(self):
"""
When the only calendar is a schedule inbox, no attempt is made
Modified: CalendarServer/trunk/contrib/performance/loadtest/test_sim.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/test_sim.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/test_sim.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -157,7 +157,7 @@
def stop(self):
return succeed(None)
-
+
class BrokenProfile(object):
def __init__(self, reactor, simulator, client, userNumber, runResult):
self._runResult = runResult
@@ -209,6 +209,7 @@
self._triggers.append((phase, event, thunk))
+
class Observer(object):
def __init__(self):
self.reported = False
@@ -227,6 +228,7 @@
return []
+
class NullArrival(object):
def run(self, sim):
pass
@@ -326,6 +328,7 @@
self.assertEqual(sim.records[98].commonName, 'User 99')
self.assertEqual(sim.records[98].email, 'user99 at example.com')
+
def test_generateRecordsDefaultPatterns(self):
"""
L{LoadSimulator.fromCommandLine} takes an account loader from the
@@ -353,6 +356,7 @@
self.assertEqual(sim.records[1].commonName, 'User 2')
self.assertEqual(sim.records[1].email, 'user2 at example.com')
+
def test_generateRecordsNonDefaultPatterns(self):
"""
L{LoadSimulator.fromCommandLine} takes an account loader from the
@@ -384,6 +388,7 @@
self.assertEqual(sim.records[2].commonName, 'Test User 003')
self.assertEqual(sim.records[2].email, 'USER003 at example2.com')
+
def test_specifyRuntime(self):
"""
L{LoadSimulator.fromCommandLine} recognizes the I{--runtime} option to
@@ -472,7 +477,7 @@
"class": "contrib.performance.loadtest.profiles.Eventer"}],
"weight": 3,
}]}))
-
+
sim = LoadSimulator.fromCommandLine(['--config', config.path])
expectedParameters = PopulationParameters()
expectedParameters.addClient(
@@ -481,7 +486,7 @@
"eventStartDistribution": NormalDistribution(123, 456)})]))
self.assertEquals(sim.parameters, expectedParameters)
-
+
def test_requireClient(self):
"""
At least one client is required, so if a configuration with an
@@ -504,7 +509,7 @@
"""
config = FilePath(self.mktemp())
config.setContent(writePlistToString({
- "observers": [{"type":"contrib.performance.loadtest.population.SimpleStatistics", "params":{},},]
+ "observers": [{"type":"contrib.performance.loadtest.population.SimpleStatistics", "params":{}, }, ]
}))
sim = LoadSimulator.fromCommandLine(['--config', config.path])
self.assertEquals(len(sim.observers), 1)
@@ -531,4 +536,3 @@
self.assertTrue(observers[0].reported)
self.assertEquals(
observers[0].events[0]['message'], (Reactor.message,))
-
Modified: CalendarServer/trunk/contrib/performance/loadtest/test_trafficlogger.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/test_trafficlogger.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/test_trafficlogger.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -34,6 +34,7 @@
pass
+
class Probe(object):
implements(IProbe)
@@ -42,11 +43,13 @@
def __init__(self, result=None):
self._result = result
+
def probe(self):
self._probed = True
return self._result
+
class TrafficLoggingReactorTests(TestCase):
"""
Tests for L{loggedReactor}.
@@ -152,7 +155,7 @@
self.wrapped.protocol = Discard
self.factory = _TrafficLoggingFactory(self.wrapped)
-
+
def test_receivedBytesLogged(self):
"""
When bytes are delivered through a protocol created by
Modified: CalendarServer/trunk/contrib/performance/loadtest/test_webadmin.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/test_webadmin.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/test_webadmin.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -24,33 +24,33 @@
"""
class FakeReporter(object):
-
+
def generateReport(self, output):
output.write("FakeReporter")
class FakeReactor(object):
-
+
def __init__(self):
self.running = True
-
+
def stop(self):
self.running = False
class FakeLoadSim(object):
-
+
def __init__(self):
self.reactor = WebAdminTests.FakeReactor()
self.reporter = WebAdminTests.FakeReporter()
self.running = True
-
+
def stop(self):
self.running = False
-
+
class FakeRequest(object):
-
+
def __init__(self, **kwargs):
self.args = kwargs
@@ -59,23 +59,24 @@
"""
Test render_GET
"""
-
+
loadsim = WebAdminTests.FakeLoadSim()
resource = LoadSimAdminResource(loadsim)
-
+
response = resource.render_GET(WebAdminTests.FakeRequest())
self.assertTrue(response.startswith("<html>"))
self.assertTrue(response.find(resource.token) != -1)
-
+
+
def test_resourcePOST_Stop(self):
"""
Test render_POST when Stop button is clicked
"""
-
+
loadsim = WebAdminTests.FakeLoadSim()
resource = LoadSimAdminResource(loadsim)
self.assertTrue(loadsim.reactor.running)
-
+
response = resource.render_POST(WebAdminTests.FakeRequest(
token=(resource.token,),
stop=None,
@@ -84,16 +85,17 @@
self.assertTrue(response.find(resource.token) == -1)
self.assertTrue(response.find("FakeReporter") != -1)
self.assertFalse(loadsim.running)
-
+
+
def test_resourcePOST_Stop_BadToken(self):
"""
Test render_POST when Stop button is clicked but token is wrong
"""
-
+
loadsim = WebAdminTests.FakeLoadSim()
resource = LoadSimAdminResource(loadsim)
self.assertTrue(loadsim.reactor.running)
-
+
response = resource.render_POST(WebAdminTests.FakeRequest(
token=("xyz",),
stop=None,
@@ -102,16 +104,17 @@
self.assertTrue(response.find(resource.token) != -1)
self.assertTrue(response.find("FakeReporter") == -1)
self.assertTrue(loadsim.running)
-
+
+
def test_resourcePOST_Results(self):
"""
Test render_POST when Results button is clicked
"""
-
+
loadsim = WebAdminTests.FakeLoadSim()
resource = LoadSimAdminResource(loadsim)
self.assertTrue(loadsim.reactor.running)
-
+
response = resource.render_POST(WebAdminTests.FakeRequest(
token=(resource.token,),
results=None,
@@ -120,16 +123,17 @@
self.assertTrue(response.find(resource.token) != -1)
self.assertTrue(response.find("FakeReporter") != -1)
self.assertTrue(loadsim.running)
-
+
+
def test_resourcePOST_Results_BadToken(self):
"""
Test render_POST when Results button is clicked and token is wrong
"""
-
+
loadsim = WebAdminTests.FakeLoadSim()
resource = LoadSimAdminResource(loadsim)
self.assertTrue(loadsim.reactor.running)
-
+
response = resource.render_POST(WebAdminTests.FakeRequest(
token=("xyz",),
results=None,
Modified: CalendarServer/trunk/contrib/performance/loadtest/trafficlogger.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/trafficlogger.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/trafficlogger.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -53,6 +53,7 @@
return reactor
+
class _TCPTrafficLoggingReactor(proxyForInterface(IReactorTCP, '_reactor')):
"""
A mixin for a reactor wrapper which defines C{connectTCP} so as to cause
@@ -84,6 +85,7 @@
host, port, wrapper, *args, **kwargs)
+
class _TrafficLoggingFactory(WrappingFactory):
"""
A wrapping factory which applies L{TrafficLoggingProtocolWrapper}.
Modified: CalendarServer/trunk/contrib/performance/loadtest/webadmin.py
===================================================================
--- CalendarServer/trunk/contrib/performance/loadtest/webadmin.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/loadtest/webadmin.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -81,9 +81,11 @@
self.loadsim = loadsim
self.token = str(uuid.uuid4())
+
def render_GET(self, request):
return self._renderReport()
+
def render_POST(self, request):
html = self.HEAD + self.BODY
if 'token' not in request.args or request.args['token'][0] != self.token:
@@ -96,6 +98,7 @@
return self._renderReport()
return html % (self.token,)
+
def _renderReport(self, stopped=False):
report = StringIO.StringIO()
before = clock()
Modified: CalendarServer/trunk/contrib/performance/massupload.py
===================================================================
--- CalendarServer/trunk/contrib/performance/massupload.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/massupload.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -13,17 +13,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
##
+
from __future__ import print_function
-
-import sys, pickle
-
+from benchlib import select
from twisted.internet import reactor
from twisted.internet.task import coiterate
+from twisted.python.log import err
from twisted.python.usage import UsageError
-from twisted.python.log import err
-
-from benchlib import select
from upload import UploadOptions, upload
+import sys
+import pickle
class MassUploadOptions(UploadOptions):
optParameters = [
@@ -37,6 +36,7 @@
UploadOptions.parseArgs(self)
+
def main():
options = MassUploadOptions()
try:
@@ -53,6 +53,7 @@
else:
benchmarks = options['benchmarks'].split()
+
def go():
for benchmark in benchmarks:
for param in raw[benchmark].keys():
@@ -61,13 +62,13 @@
raw, benchmark, param, statistic)
samples = stat.squash(samples)
yield upload(
- reactor,
+ reactor,
options['url'], options['project'],
options['revision'], options['revision-date'],
benchmark, param, statistic,
options['backend'], options['environment'],
samples)
-
+
# This is somewhat hard-coded to the currently
# collected stats.
if statistic == 'SQL':
@@ -75,14 +76,13 @@
raw, benchmark, param, 'execute')
samples = stat.squash(samples, 'count')
yield upload(
- reactor,
+ reactor,
options['url'], options['project'],
options['revision'], options['revision-date'],
benchmark, param, statistic + 'count',
options['backend'], options['environment'],
samples)
-
d = coiterate(go())
d.addErrback(err, "Mass upload failed")
reactor.callWhenRunning(d.addCallback, lambda ign: reactor.stop())
Modified: CalendarServer/trunk/contrib/performance/report.py
===================================================================
--- CalendarServer/trunk/contrib/performance/report.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/report.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -13,12 +13,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
##
+
from __future__ import print_function
+from benchlib import select
+import sys
+import pickle
-import sys, pickle
-from benchlib import select
-
def main():
if len(sys.argv) < 5:
print('Usage: %s <datafile> <benchmark name> <parameter value> <metric> [command]' % (sys.argv[0],))
@@ -31,4 +32,3 @@
print('\t' + '\n\t'.join(stat.commands))
else:
print(getattr(stat, sys.argv[5])(samples, *sys.argv[6:]))
-
Modified: CalendarServer/trunk/contrib/performance/setbackend.py
===================================================================
--- CalendarServer/trunk/contrib/performance/setbackend.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/setbackend.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -39,6 +39,7 @@
conf.write(sys.stdout)
+
def replace(elements, key, value):
found = False
for ele in elements:
Modified: CalendarServer/trunk/contrib/performance/sqlusage/requests/httpTests.py
===================================================================
--- CalendarServer/trunk/contrib/performance/sqlusage/requests/httpTests.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/sqlusage/requests/httpTests.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -24,12 +24,13 @@
"""
class SQLResults(object):
-
+
def __init__(self, count, rows, timing):
self.count = count
self.rows = rows
self.timing = timing
-
+
+
def __init__(self, label, sessions, logFilePath):
"""
@param label: label used to identify the test
@@ -40,11 +41,12 @@
self.logFilePath = logFilePath
self.result = None
+
def execute(self):
"""
Execute the HTTP request and read the results.
"""
-
+
self.prepare()
self.clearLog()
self.doRequest()
@@ -52,36 +54,40 @@
self.cleanup()
return self.result
+
def prepare(self):
"""
Do some setup prior to the real request.
"""
pass
+
def clearLog(self):
"""
Clear the server's SQL log file.
"""
open(self.logFilePath, "w").write("")
+
def doRequest(self):
"""
Execute the actual HTTP request. Sub-classes override.
"""
raise NotImplementedError
+
def collectResults(self):
"""
Parse the server log file to extract the details we need.
"""
-
+
def extractInt(line):
pos = line.find(": ")
- return int(line[pos+2:])
+ return int(line[pos + 2:])
def extractFloat(line):
pos = line.find(": ")
- return float(line[pos+2:])
+ return float(line[pos + 2:])
data = open(self.logFilePath).read()
lines = data.splitlines()
@@ -90,6 +96,7 @@
timing = extractFloat(lines[6])
self.result = HTTPTestBase.SQLResults(count, rows, timing)
+
def cleanup(self):
"""
Do some cleanup after the real request.
Modified: CalendarServer/trunk/contrib/performance/sqlusage/requests/invite.py
===================================================================
--- CalendarServer/trunk/contrib/performance/sqlusage/requests/invite.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/sqlusage/requests/invite.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -71,6 +71,7 @@
href = joinURL(self.sessions[1].calendarHref, "organizer.ics")
self.sessions[1].writeData(URL(path=href), ICAL % (now.getYear() + 1,), "text/calendar")
+
def cleanup(self):
"""
Do some cleanup after the real request.
@@ -89,4 +90,3 @@
for href in results.keys():
if href != self.sessions[0].inboxHref:
self.sessions[0].deleteResource(URL(path=href))
-
Modified: CalendarServer/trunk/contrib/performance/sqlusage/requests/multiget.py
===================================================================
--- CalendarServer/trunk/contrib/performance/sqlusage/requests/multiget.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/sqlusage/requests/multiget.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -29,12 +29,13 @@
def __init__(self, label, sessions, logFilePath, count):
super(MultigetTest, self).__init__(label, sessions, logFilePath)
self.count = count
-
+
+
def doRequest(self):
"""
Execute the actual HTTP request.
"""
- hrefs = [joinURL(self.sessions[0].calendarHref, "%d.ics" % (i+1,)) for i in range(self.count)]
+ hrefs = [joinURL(self.sessions[0].calendarHref, "%d.ics" % (i + 1,)) for i in range(self.count)]
props = (
davxml.getetag,
caldavxml.calendar_data,
@@ -45,10 +46,10 @@
request = Multiget(self.sessions[0], self.sessions[0].calendarHref, hrefs, props)
result = ResponseDataString()
request.setOutput(result)
-
+
# Process it
self.sessions[0].runSession(request)
-
+
# If its a 207 we want to parse the XML
if request.getStatusCode() == statuscodes.MultiStatus:
pass
Modified: CalendarServer/trunk/contrib/performance/sqlusage/requests/propfind.py
===================================================================
--- CalendarServer/trunk/contrib/performance/sqlusage/requests/propfind.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/sqlusage/requests/propfind.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -15,7 +15,7 @@
##
from caldavclientlibrary.protocol.http.data.string import ResponseDataString
-from caldavclientlibrary.protocol.webdav.definitions import davxml, statuscodes,\
+from caldavclientlibrary.protocol.webdav.definitions import davxml, statuscodes, \
headers
from caldavclientlibrary.protocol.webdav.propfind import PropFind
from contrib.performance.sqlusage.requests.httpTests import HTTPTestBase
@@ -28,7 +28,8 @@
def __init__(self, label, sessions, logFilePath, depth=1):
super(PropfindTest, self).__init__(label, sessions, logFilePath)
self.depth = headers.Depth1 if depth == 1 else headers.Depth0
-
+
+
def doRequest(self):
"""
Execute the actual HTTP request.
@@ -42,10 +43,10 @@
request = PropFind(self.sessions[0], self.sessions[0].calendarHref, self.depth, props)
result = ResponseDataString()
request.setOutput(result)
-
+
# Process it
self.sessions[0].runSession(request)
-
+
# If its a 207 we want to parse the XML
if request.getStatusCode() == statuscodes.MultiStatus:
pass
Modified: CalendarServer/trunk/contrib/performance/sqlusage/requests/put.py
===================================================================
--- CalendarServer/trunk/contrib/performance/sqlusage/requests/put.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/sqlusage/requests/put.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -66,6 +66,7 @@
href = joinURL(self.sessions[0].calendarHref, "put.ics")
self.sessions[0].writeData(URL(path=href), ICAL % (now.getYear() + 1,), "text/calendar")
+
def cleanup(self):
"""
Do some cleanup after the real request.
Modified: CalendarServer/trunk/contrib/performance/sqlusage/requests/query.py
===================================================================
--- CalendarServer/trunk/contrib/performance/sqlusage/requests/query.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/sqlusage/requests/query.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -63,7 +63,8 @@
def __init__(self, label, sessions, logFilePath, count):
super(QueryTest, self).__init__(label, sessions, logFilePath)
self.count = count
-
+
+
def prepare(self):
"""
Do some setup prior to the real request.
@@ -74,9 +75,10 @@
self.end = self.start.duplicate()
self.end.offsetHours(1)
for i in range(self.count):
- href = joinURL(self.sessions[0].calendarHref, "tr-query-%d.ics" % (i+1,))
- self.sessions[0].writeData(URL(path=href), ICAL % (self.start.getText(), i+1,), "text/calendar")
+ href = joinURL(self.sessions[0].calendarHref, "tr-query-%d.ics" % (i + 1,))
+ self.sessions[0].writeData(URL(path=href), ICAL % (self.start.getText(), i + 1,), "text/calendar")
+
def doRequest(self):
"""
Execute the actual HTTP request.
@@ -90,21 +92,22 @@
request = QueryVEVENTTimeRange(self.sessions[0], self.sessions[0].calendarHref, self.start.getText(), self.end.getText(), props)
result = ResponseDataString()
request.setOutput(result)
-
+
# Process it
self.sessions[0].runSession(request)
-
+
# If its a 207 we want to parse the XML
if request.getStatusCode() == statuscodes.MultiStatus:
pass
else:
raise RuntimeError("Query request failed: %s" % (request.getStatusCode(),))
+
def cleanup(self):
"""
Do some cleanup after the real request.
"""
# Remove created resources
for i in range(self.count):
- href = joinURL(self.sessions[0].calendarHref, "tr-query-%d.ics" % (i+1,))
+ href = joinURL(self.sessions[0].calendarHref, "tr-query-%d.ics" % (i + 1,))
self.sessions[0].deleteResource(URL(path=href))
Modified: CalendarServer/trunk/contrib/performance/sqlusage/requests/sync.py
===================================================================
--- CalendarServer/trunk/contrib/performance/sqlusage/requests/sync.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/sqlusage/requests/sync.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -63,7 +63,8 @@
self.full = full
self.count = count
self.synctoken = ""
-
+
+
def prepare(self):
"""
Do some setup prior to the real request.
@@ -72,13 +73,14 @@
# Get current sync token
results, _ignore_bad = self.sessions[0].getProperties(URL(path=self.sessions[0].calendarHref), (davxml.sync_token,))
self.synctoken = results[davxml.sync_token]
-
+
# Add resources to create required number of changes
now = PyCalendarDateTime.getNowUTC()
for i in range(self.count):
- href = joinURL(self.sessions[0].calendarHref, "sync-collection-%d.ics" % (i+1,))
- self.sessions[0].writeData(URL(path=href), ICAL % (now.getYear() + 1, i+1,), "text/calendar")
+ href = joinURL(self.sessions[0].calendarHref, "sync-collection-%d.ics" % (i + 1,))
+ self.sessions[0].writeData(URL(path=href), ICAL % (now.getYear() + 1, i + 1,), "text/calendar")
+
def doRequest(self):
"""
Execute the actual HTTP request.
@@ -91,6 +93,7 @@
# Run sync collection
self.sessions[0].syncCollection(URL(path=self.sessions[0].calendarHref), self.synctoken, props)
+
def cleanup(self):
"""
Do some cleanup after the real request.
@@ -98,5 +101,5 @@
if not self.full:
# Remove created resources
for i in range(self.count):
- href = joinURL(self.sessions[0].calendarHref, "sync-collection-%d.ics" % (i+1,))
+ href = joinURL(self.sessions[0].calendarHref, "sync-collection-%d.ics" % (i + 1,))
self.sessions[0].deleteResource(URL(path=href))
Modified: CalendarServer/trunk/contrib/performance/sqlusage/sqlusage.py
===================================================================
--- CalendarServer/trunk/contrib/performance/sqlusage/sqlusage.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/sqlusage/sqlusage.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -38,7 +38,7 @@
count the total number of SQL statements per request, the total number of rows
returned per request and the total SQL execution time per request. Each series
will be repeated against a varying calendar size so the variation in SQL use
-with calendar size can be plotted.
+with calendar size can be plotted.
"""
EVENT_COUNTS = (0, 1, 5, 10, 50, 100, 500, 1000, 5000)
@@ -77,17 +77,18 @@
""".replace("\n", "\r\n")
class SQLUsageSession(CalDAVSession):
-
+
def __init__(self, server, port=None, ssl=False, user="", pswd="", principal=None, root=None, logging=False):
super(SQLUsageSession, self).__init__(server, port, ssl, user, pswd, principal, root, logging)
self.homeHref = "/calendars/users/%s/" % (self.user,)
self.calendarHref = "/calendars/users/%s/calendar/" % (self.user,)
self.inboxHref = "/calendars/users/%s/inbox/" % (self.user,)
-
+
+
class SQLUsage(object):
-
+
def __init__(self, server, port, users, pswds, logFilePath):
self.server = server
self.port = port
@@ -98,8 +99,9 @@
self.results = {}
self.currentCount = 0
+
def runLoop(self, counts):
-
+
# Make the sessions
sessions = [
SQLUsageSession(self.server, self.port, user=user, pswd=pswd, root="/")
@@ -125,7 +127,7 @@
for session in sessions:
session.getPropertiesOnHierarchy(URL(path=session.homeHref), props)
session.getPropertiesOnHierarchy(URL(path=session.calendarHref), props)
-
+
# Now loop over sets of events
for count in counts:
print("Testing count = %d" % (count,))
@@ -135,16 +137,18 @@
print(" Test = %s" % (request.label,))
result[request.label] = request.execute()
self.results[count] = result
-
+
+
def report(self):
-
+
self._printReport("SQL Statement Count", "count", "%d")
self._printReport("SQL Rows Returned", "rows", "%d")
self._printReport("SQL Time", "timing", "%.1f")
-
+
+
def _printReport(self, title, attr, colFormat):
table = tables.Table()
-
+
print(title)
headers = ["Events"] + self.requestLabels
table.addHeader(headers)
@@ -158,11 +162,12 @@
table.printTable(os=os)
print(os.getvalue())
print("")
-
+
+
def ensureEvents(self, session, calendarhref, n):
"""
Make sure the required number of events are present in the calendar.
-
+
@param n: number of events
@type n: C{int}
"""
@@ -171,9 +176,11 @@
index = self.currentCount + i + 1
href = joinURL(calendarhref, "%d.ics" % (index,))
session.writeData(URL(path=href), ICAL % (now.getYear() + 1, index,), "text/calendar")
-
+
self.currentCount = n
+
+
def usage(error_msg=None):
if error_msg:
print(error_msg)
@@ -200,7 +207,7 @@
sys.exit(0)
if __name__ == '__main__':
-
+
server = "localhost"
port = 8008
users = ("user01", "user02",)
@@ -208,7 +215,7 @@
file = "sqlstats.logs"
counts = EVENT_COUNTS
- options, args = getopt.getopt(sys.argv[1:], "h", ["server=", "port=", "user=", "pswd=", "counts=",])
+ options, args = getopt.getopt(sys.argv[1:], "h", ["server=", "port=", "user=", "pswd=", "counts=", ])
for option, value in options:
if option == "-h":
Modified: CalendarServer/trunk/contrib/performance/sqlwatch.py
===================================================================
--- CalendarServer/trunk/contrib/performance/sqlwatch.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/sqlwatch.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -13,18 +13,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
##
+
from __future__ import print_function
-
-import sys, signal, time
-
+from benchmark import DTraceCollector, instancePIDs
+from twisted.internet import reactor
+from twisted.internet.defer import Deferred, inlineCallbacks
+from twisted.python.failure import Failure
from twisted.python.log import err
-from twisted.python.failure import Failure
-from twisted.internet.defer import Deferred, inlineCallbacks
-from twisted.internet import reactor
+import sys
+import signal
+import time
-from benchmark import DTraceCollector, instancePIDs
-
class Stop(Exception):
pass
@@ -48,6 +48,7 @@
return d
+
@inlineCallbacks
def collect(directory):
while True:
@@ -69,6 +70,7 @@
print('Stopped')
+
def main():
from twisted.python.failure import startDebugMode
startDebugMode()
Modified: CalendarServer/trunk/contrib/performance/stackedbar.py
===================================================================
--- CalendarServer/trunk/contrib/performance/stackedbar.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/stackedbar.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -23,16 +23,16 @@
N = 5
-menMeans = (20, 35, 30, 35, 27)
+menMeans = (20, 35, 30, 35, 27)
womenMeans = (25, 32, 34, 20, 25)
-menStd = (2, 3, 4, 1, 2)
-womenStd = (3, 5, 2, 3, 3)
-otherMeans = (15, 30, 25, 40, 35)
+menStd = (2, 3, 4, 1, 2)
+womenStd = (3, 5, 2, 3, 3)
+otherMeans = (15, 30, 25, 40, 35)
ind = np.arange(N) # the x locations for the groups
width = 0.35 # the width of the bars: can also be len(x) sequence
-p1 = plt.bar(ind, menMeans, width, color='r', yerr=womenStd)
+p1 = plt.bar(ind, menMeans, width, color='r', yerr=womenStd)
p2 = plt.bar(ind, womenMeans, width, color='y',
bottom=menMeans, yerr=menStd)
p3 = plt.bar(ind, otherMeans, width, color='g',
@@ -40,8 +40,8 @@
plt.ylabel('Scores')
plt.title('Scores by group and gender')
-plt.xticks(ind+width/2., ('G1', 'G2', 'G3', 'G4', 'G5') )
-plt.yticks(np.arange(0,81,10))
-plt.legend( (p1[0], p2[0], p3[0]), ('Men', 'Women', 'Other') )
+plt.xticks(ind + width / 2., ('G1', 'G2', 'G3', 'G4', 'G5'))
+plt.yticks(np.arange(0, 81, 10))
+plt.legend((p1[0], p2[0], p3[0]), ('Men', 'Women', 'Other'))
plt.show()
Modified: CalendarServer/trunk/contrib/performance/stats.py
===================================================================
--- CalendarServer/trunk/contrib/performance/stats.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/stats.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -13,21 +13,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
##
+
from __future__ import print_function
-
-import random, time
-
-from zope.interface import Interface, implements
-
-from twisted.python.util import FancyEqMixin
-
-import sqlparse
+from math import log, sqrt
from pycalendar.datetime import PyCalendarDateTime
from pycalendar.duration import PyCalendarDuration
+from pycalendar.property import PyCalendarProperty
from pycalendar.timezone import PyCalendarTimezone
-from pycalendar.property import PyCalendarProperty
-from math import log, sqrt
+from twisted.python.util import FancyEqMixin
+from zope.interface import Interface, implements
+import random
+import time
+import sqlparse
+
NANO = 1000000000.0
@@ -35,20 +34,24 @@
return sum(samples) / len(samples)
+
def median(samples):
return sorted(samples)[len(samples) / 2]
+
def residuals(samples, from_):
return [from_ - s for s in samples]
+
def stddev(samples):
m = mean(samples)
variance = sum([datum ** 2 for datum in residuals(samples, m)]) / len(samples)
return variance ** 0.5
+
def mad(samples):
"""
Return the median absolute deviation of the given data set.
@@ -58,6 +61,7 @@
return median(res)
+
class _Statistic(object):
commands = ['summarize']
@@ -118,6 +122,7 @@
return True
return False
+
def _substitute(self, expression, replacement):
try:
expression.tokens
@@ -171,7 +176,7 @@
times.append(total / NANO * 1000)
return ''.join([
'%d: %s\n' % (count, statement)
- for (statement, count)
+ for (statement, count)
in statements.iteritems()]) + _Statistic.summarize(self, times)
@@ -203,8 +208,9 @@
for (sql, _ignore_interval) in data:
statements.append(self.normalize(sql))
return '\n'.join(statements) + '\n'
-
+
+
class Bytes(_Statistic):
def squash(self, samples):
return [sum(bytes) for bytes in samples]
@@ -214,6 +220,7 @@
return _Statistic.summarize(self, self.squash(samples))
+
def quantize(data):
"""
Given some continuous data, quantize it into appropriately sized
@@ -224,14 +231,16 @@
return []
+
class IPopulation(Interface):
def sample(): #@NoSelf
pass
+
class UniformDiscreteDistribution(object, FancyEqMixin):
"""
-
+
"""
implements(IPopulation)
@@ -264,7 +273,7 @@
compareAttributes = ['_mu', '_sigma', '_maximum']
def __init__(self, mu=None, sigma=None, mean=None, mode=None, median=None, maximum=None):
-
+
if mu is not None and sigma is not None:
scale = 1.0
elif not (mu is None and sigma is None):
@@ -285,7 +294,7 @@
sigma = sqrt(log(mean) - log(mode) / 2.0)
else:
raise ValueError("When using mode one of median or mean must be defined")
-
+
self._mu = mu
self._sigma = sigma
self._scale = scale
@@ -304,6 +313,7 @@
return result
+
class FixedDistribution(object, FancyEqMixin):
"""
"""
@@ -319,6 +329,7 @@
return self._value
+
class NearFutureDistribution(object, FancyEqMixin):
compareAttributes = ['_offset']
@@ -420,32 +431,34 @@
offset.setDuration(offset.getTotalSeconds() - (end - start).getTotalSeconds())
beginning = end
+
+
class RecurrenceDistribution(object, FancyEqMixin):
compareAttributes = ["_allowRecurrence", "_weights"]
_model_rrules = {
- "none": None,
- "daily": "RRULE:FREQ=DAILY",
- "weekly": "RRULE:FREQ=WEEKLY",
- "monthly": "RRULE:FREQ=MONTHLY",
- "yearly": "RRULE:FREQ=YEARLY",
- "dailylimit": "RRULE:FREQ=DAILY;COUNT=14",
+ "none": None,
+ "daily": "RRULE:FREQ=DAILY",
+ "weekly": "RRULE:FREQ=WEEKLY",
+ "monthly": "RRULE:FREQ=MONTHLY",
+ "yearly": "RRULE:FREQ=YEARLY",
+ "dailylimit": "RRULE:FREQ=DAILY;COUNT=14",
"weeklylimit": "RRULE:FREQ=WEEKLY;COUNT=4",
- "workdays": "RRULE:FREQ=DAILY;BYDAY=MO,TU,WE,TH,FR"
- }
+ "workdays": "RRULE:FREQ=DAILY;BYDAY=MO,TU,WE,TH,FR"
+ }
def __init__(self, allowRecurrence, weights={}):
self._allowRecurrence = allowRecurrence
self._rrules = []
if self._allowRecurrence:
- for rrule, count in sorted(weights.items(), key=lambda x:x[0]):
+ for rrule, count in sorted(weights.items(), key=lambda x: x[0]):
for _ignore in range(count):
self._rrules.append(self._model_rrules[rrule])
- self._helperDistribution = UniformIntegerDistribution(0, len(self._rrules)-1)
+ self._helperDistribution = UniformIntegerDistribution(0, len(self._rrules) - 1)
def sample(self):
-
+
if self._allowRecurrence:
index = self._helperDistribution.sample()
rrule = self._rrules[index]
@@ -453,11 +466,11 @@
prop = PyCalendarProperty()
prop.parse(rrule)
return prop
-
+
return None
if __name__ == '__main__':
-
+
from collections import defaultdict
mu = 1.5
sigma = 1.22
@@ -468,11 +481,10 @@
if s > 300:
continue
result[s] += 1
-
+
total = 0
- for k, v in sorted(result.items(), key=lambda x:x[0]):
- print("%d\t%.5f" % (k, float(v)/result[1]))
+ for k, v in sorted(result.items(), key=lambda x: x[0]):
+ print("%d\t%.5f" % (k, float(v) / result[1]))
total += k * v
-
+
print("Average: %.2f" % (float(total) / sum(result.values()),))
-
Modified: CalendarServer/trunk/contrib/performance/test_benchmark.py
===================================================================
--- CalendarServer/trunk/contrib/performance/test_benchmark.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/test_benchmark.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -152,7 +152,7 @@
str(exc),
"host-index must be less than hosts-count")
-
+
def test_hostIndexAndCount(self):
"""
If I{--hosts-count} and I{--host-index} are supplied, of the
Modified: CalendarServer/trunk/contrib/performance/test_httpauth.py
===================================================================
--- CalendarServer/trunk/contrib/performance/test_httpauth.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/test_httpauth.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -34,7 +34,7 @@
)
for hdrvalue in headers:
-
+
hdlr = AuthHandlerAgent(None, None)
ch = hdlr._parse(hdrvalue)
self.assertTrue(ch is not None)
Modified: CalendarServer/trunk/contrib/performance/test_stats.py
===================================================================
--- CalendarServer/trunk/contrib/performance/test_stats.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/test_stats.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -75,7 +75,8 @@
self.assertRaises(ValueError, LogNormalDistribution, mode=1)
self.assertRaises(ValueError, LogNormalDistribution, mean=1)
self.assertRaises(ValueError, LogNormalDistribution, median=1)
-
+
+
def test_uniformdiscrete(self):
population = [1, 5, 6, 9]
counts = dict.fromkeys(population, 0)
@@ -89,7 +90,7 @@
tzname = "US/Eastern"
dist = WorkDistribution(["mon", "wed", "thu", "sat"], 10, 20, tzname)
dist._helperDistribution = UniformDiscreteDistribution([35 * 60 * 60 + 30 * 60])
- dist.now = lambda tzname=None: PyCalendarDateTime(2011, 5, 29, 18, 5, 36, tzid=tzname)
+ dist.now = lambda tzname = None: PyCalendarDateTime(2011, 5, 29, 18, 5, 36, tzid=tzname)
value = dist.sample()
self.assertEqual(
# Move past three workdays - monday, wednesday, thursday - using 30
@@ -117,7 +118,7 @@
value = dist.sample()
self.assertTrue(value is None)
- dist = RecurrenceDistribution(True, {"daily":1, "none":2, "weekly":1})
+ dist = RecurrenceDistribution(True, {"daily": 1, "none": 2, "weekly": 1})
dist._helperDistribution = UniformDiscreteDistribution([0, 3, 2, 1, 0], randomize=False)
value = dist.sample()
self.assertTrue(value is not None)
@@ -130,6 +131,7 @@
value = dist.sample()
self.assertTrue(value is not None)
+
def test_uniform(self):
dist = UniformIntegerDistribution(-5, 10)
for _ignore_i in range(100):
@@ -171,6 +173,7 @@
the standard deviation of the sample, that bucket is split in
half so each bucket has one value.
"""
+ pass
def xtest_alpha(self):
Modified: CalendarServer/trunk/contrib/performance/upload.py
===================================================================
--- CalendarServer/trunk/contrib/performance/upload.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/performance/upload.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -87,6 +87,7 @@
return d
+
def upload(reactor, url, project, revision, revision_date, benchmark, param, statistic, backend, environment, samples):
d = _upload(
reactor,
@@ -106,6 +107,7 @@
return d
+
def main():
options = UploadOptions()
try:
Modified: CalendarServer/trunk/contrib/tools/anonymous_log.py
===================================================================
--- CalendarServer/trunk/contrib/tools/anonymous_log.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/anonymous_log.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -23,9 +23,9 @@
import traceback
class CalendarServerLogAnalyzer(object):
-
+
def __init__(self):
-
+
self.userCtr = 1
self.users = {}
@@ -35,49 +35,50 @@
self.resourceCtr = 1
self.resources = {}
+
def anonymizeLogFile(self, logFilePath):
-
+
fpath = os.path.expanduser(logFilePath)
if fpath.endswith(".gz"):
f = GzipFile(fpath)
else:
f = open(fpath)
-
+
try:
for line in f:
-
+
if not line.startswith("Log"):
line = self.anonymizeLine(line)
print(line, end="")
-
+
except Exception, e:
print("Exception: %s for %s" % (e, line,))
raise
+
def anonymizeLine(self, line):
-
startPos = line.find("- ")
endPos = line.find(" [")
- userid = line[startPos+2:endPos]
-
+ userid = line[startPos + 2:endPos]
+
if userid != "-":
if userid not in self.users:
self.users[userid] = "user%05d" % (self.userCtr,)
self.userCtr += 1
- line = line[:startPos+2] + self.users[userid] + line[endPos:]
+ line = line[:startPos + 2] + self.users[userid] + line[endPos:]
endPos = line.find(" [")
-
+
startPos = endPos + 1
-
+
startPos = line.find(']', startPos + 21) + 3
endPos = line.find(' ', startPos)
if line[startPos] != '?':
-
+
startPos = endPos + 1
endPos = line.find(" HTTP/", startPos)
uri = line[startPos:endPos]
-
+
splits = uri.split("/")
if len(splits) >= 4:
if splits[1] in ("calendars", "principals"):
@@ -93,7 +94,7 @@
self.resources[splits[4]] = "resource%d" % (self.resourceCtr,)
self.resourceCtr += 1
splits[4] = self.resources[splits[4]]
-
+
if len(splits) > 5:
for x in range(5, len(splits)):
if splits[x]:
@@ -101,12 +102,13 @@
self.resources[splits[x]] = "resource%d%s" % (self.resourceCtr, os.path.splitext(splits[x])[1])
self.resourceCtr += 1
splits[x] = self.resources[splits[x]]
-
-
+
line = line[:startPos] + "/".join(splits) + line[endPos:]
-
+
return line
+
+
def usage(error_msg=None):
if error_msg:
print(error_msg)
Modified: CalendarServer/trunk/contrib/tools/dtraceanalyze.py
===================================================================
--- CalendarServer/trunk/contrib/tools/dtraceanalyze.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/dtraceanalyze.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -27,9 +27,9 @@
import tables
class Dtrace(object):
-
+
class DtraceLine(object):
-
+
prefix_maps = {
"/usr/share/caldavd/lib/python/": "{caldavd}/",
"/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.6": "{Python}",
@@ -41,19 +41,19 @@
}
contains_maps = {
"/CalendarServer": "{caldavd}",
- "/Twisted": "{Twisted}",
- "/pycalendar": "{pycalendar}",
+ "/Twisted": "{Twisted}",
+ "/pycalendar": "{pycalendar}",
}
def __init__(self, line, lineno):
-
+
self.entering = True
self.function_name = ""
self.file_location = ""
self.parent = None
self.children = []
self.lineno = lineno
-
+
re_matched = re.match("(..) ([^ ]+) \(([^\)]+)\)", line)
if re_matched is None:
print(line)
@@ -64,7 +64,7 @@
self.entering = True
else:
raise ValueError("Invalid start of line at %d" % (lineno,))
-
+
self.function_name = results[1]
self.file_location = results[2]
for key, value in Dtrace.DtraceLine.prefix_maps.iteritems():
@@ -75,13 +75,13 @@
for key, value in Dtrace.DtraceLine.contains_maps.iteritems():
found1 = self.file_location.find(key)
if found1 != -1:
- found2 = self.file_location[found1+1:].find('/')
+ found2 = self.file_location[found1 + 1:].find('/')
if found2 != -1:
- self.file_location = value + self.file_location[found1+found2+1:]
+ self.file_location = value + self.file_location[found1 + found2 + 1:]
else:
self.file_location = value
break
-
+
def __repr__(self):
return "%s (%s)" % self.getKey()
@@ -106,7 +106,7 @@
return self.file_location[0:self.file_location.rfind(':')]
def prettyPrint(self, indent, indents, sout):
-
+
indenter = ""
for level in indents:
if level > 0:
@@ -120,8 +120,9 @@
def stackName(self):
return self.function_name, self.filePath()
+
class DtraceStack(object):
-
+
def __init__(self, lines, no_collapse):
self.start_indent = 0
self.stack = []
@@ -129,9 +130,9 @@
self.call_into = {}
self.processLines(lines, no_collapse)
-
+
def processLines(self, lines, no_collapse):
-
+
new_lines = []
last_line = None
for line in lines:
@@ -174,14 +175,15 @@
while backstack and indent and stackName != backstack[-1]:
indent -= 1
backstack.pop()
- if backstack: backstack.pop()
+ if backstack:
+ backstack.pop()
if indent < 0:
print("help")
current_line = current_line.parent if current_line else None
min_indent = min(min_indent, indent)
for block in blocks:
- self.stack.extend(block)
+ self.stack.extend(block)
if min_indent < 0:
self.start_indent = -min_indent
else:
@@ -190,10 +192,10 @@
self.generateCallInfo()
def generateCallInfo(self):
-
+
for _ignore, line in self.stack:
key = line.getKey()
-
+
if line.parent:
parent_key = line.parent.getKey()
parent_calls = self.called_by.setdefault(key, {}).get(parent_key, 0)
@@ -210,7 +212,7 @@
maxctr = len(self.stack) - 1
for indent, line in self.stack:
current_indent = self.start_indent + indent
- next_indent = (self.start_indent + self.stack[ctr+1][0]) if ctr < maxctr else 10000
+ next_indent = (self.start_indent + self.stack[ctr + 1][0]) if ctr < maxctr else 10000
if len(indents) == current_indent:
pass
elif len(indents) < current_indent:
@@ -222,16 +224,18 @@
line.prettyPrint(self.start_indent + indent, indents, sout)
ctr += 1
+
def __init__(self, filepath):
-
+
self.filepath = filepath
self.calltimes = collections.defaultdict(lambda: [0, 0, 0])
self.exclusiveTotal = 0
+
def analyze(self, do_stack, no_collapse):
-
+
print("Parsing dtrace output.")
-
+
# Parse the trace lines first and look for the start of the call times
lines = []
traces = True
@@ -253,17 +257,18 @@
self.parseCallTimeLine(line, index)
self.printTraceDetails(lines, do_stack, no_collapse)
-
+
for ctr, title in enumerate(("Sorted by Count", "Sorted by Exclusive", "Sorted by Inclusive",)):
print(title)
self.printCallTimeTotals(ctr)
+
def printTraceDetails(self, lines, do_stack, no_collapse):
print("Found %d lines" % (len(lines),))
print("============================")
print("")
-
+
self.stack = Dtrace.DtraceStack(lines, no_collapse)
if do_stack:
with file("stacked.txt", "w") as f:
@@ -285,7 +290,7 @@
stats[key] = counts
else:
last_exit = line.getPartialKey()
-
+
print("Function Call Counts")
print("")
table = tables.Table()
@@ -327,8 +332,9 @@
table.printTable()
print("")
+
def parseCallTimeLine(self, line, index):
-
+
file, type, name, value = line.split()
if file in ("-", "FILE"):
return
@@ -336,22 +342,23 @@
self.calltimes[(file, name)][index] = int(value)
if index == 1:
self.exclusiveTotal += int(value)
-
+
+
def printCallTimeTotals(self, sortIndex):
-
+
table = tables.Table()
-
+
table.setDefaultColumnFormats((
- tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
+ tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
))
-
+
table.addHeader(("File", "Name", "Count", "Inclusive", "Exclusive", "Children",))
- for key, value in sorted(self.calltimes.items(), key=lambda x:x[1][sortIndex], reverse=True):
+ for key, value in sorted(self.calltimes.items(), key=lambda x: x[1][sortIndex], reverse=True):
table.addRow((
key[0],
key[1],
@@ -369,10 +376,12 @@
self.exclusiveTotal,
"",
))
-
+
table.printTable()
print("")
+
+
def usage(error_msg=None):
if error_msg:
print(error_msg)
@@ -393,7 +402,7 @@
To use this do the following (where PID is the pid of the
Python process to monitor:
-
+
> sudo ./trace.d PID > results.txt
...
> ./dtraceanalyze.py results.txt
@@ -428,11 +437,11 @@
usage("Must have one argument")
else:
fname = args[0]
-
+
filepath = os.path.expanduser(fname)
if not os.path.exists(filepath):
usage("File '%s' does not exist" % (filepath,))
-
+
print("CalendarServer dtrace analysis tool tool")
print("=====================================")
print("")
@@ -444,7 +453,7 @@
print("Consecutive function calls will be removed.")
print("============================")
print("")
-
+
Dtrace(filepath).analyze(do_stack, no_collapse)
except Exception, e:
Modified: CalendarServer/trunk/contrib/tools/fakecalendardata.py
===================================================================
--- CalendarServer/trunk/contrib/tools/fakecalendardata.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/fakecalendardata.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -87,7 +87,7 @@
)
def makeVEVENT(recurring, atendees, date, hour, count):
-
+
subs = {
"UID": str(uuid.uuid4()),
"START" : "",
@@ -99,22 +99,26 @@
if recurring:
subs["RRULE"] = random.choice(rrules_template)
-
+
if attendees:
subs["ORGANIZER"] = organizer_template % {"SEQUENCE": 1}
for ctr in range(2, random.randint(2, 10)):
subs["ATTENDEES"] += attendee_template % {"SEQUENCE": ctr}
-
+
subs["START"] = "%04d%02d%02dT%02d0000" % (date.year, date.month, date.day, hour)
- return vevent_template % subs
+ return vevent_template % subs
+
+
def argPath(path):
fpath = os.path.expanduser(path)
if not fpath.startswith("/"):
fpath = os.path.join(pwd, fpath)
return fpath
+
+
def usage(error_msg=None):
if error_msg:
print(error_msg)
@@ -182,7 +186,7 @@
totalRecurring = (totalCount * percentRecurring) / 100
totalRecurringWithAttendees = (totalRecurring * percentWithAttendees) / 100
totalRecurringWithoutAttendees = totalRecurring - totalRecurringWithAttendees
-
+
totalNonRecurring = totalCount - totalRecurring
totalNonRecurringWithAttendees = (totalNonRecurring * percentWithAttendees) / 100
totalNonRecurringWithoutAttendees = totalNonRecurring - totalNonRecurringWithAttendees
@@ -196,9 +200,9 @@
totalYears = yearsPast + yearsFuture
totalDays = totalYears * 365
-
- startDate = datetime.date.today() - datetime.timedelta(days=yearsPast*365)
+ startDate = datetime.date.today() - datetime.timedelta(days=yearsPast * 365)
+
for i in range(len(eventTypes)):
eventTypes[i] += (
startDate + datetime.timedelta(days=random.randint(0, totalDays)),
@@ -208,6 +212,6 @@
vevents = []
for count, (recurring, attendees, date, hour) in enumerate(eventTypes):
#print(recurring, attendees, date, hour)
- vevents.append(makeVEVENT(recurring, attendees, date, hour, count+1))
+ vevents.append(makeVEVENT(recurring, attendees, date, hour, count + 1))
print(calendar_template % {"VEVENTS" : "".join(vevents)})
Modified: CalendarServer/trunk/contrib/tools/monitoranalysis.py
===================================================================
--- CalendarServer/trunk/contrib/tools/monitoranalysis.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/monitoranalysis.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -26,7 +26,7 @@
initialDate = None
def analyze(fpath, noweekends, startDate=None, endDate=None, title=None):
-
+
print("Analyzing data for %s" % (fpath,))
data = []
firstDate = None
@@ -35,11 +35,11 @@
for line in f:
try:
if line.startswith("2010/0"):
-
+
date = line[:10]
if startDate and date < startDate or endDate and date > endDate:
continue
-
+
if noweekends:
dt = datetime.date(int(date[0:4]), int(date[5:7]), int(date[8:10]))
if dt.weekday() > 4:
@@ -51,7 +51,7 @@
f.next()
continue
dtstamp = line[:19]
-
+
if firstDate is None:
firstDate = date.replace("/", "")
if initialDate is None:
@@ -61,27 +61,27 @@
lqnon = line[len("2010/05/12 22:27:24 Listenq (ssl+non): "):].split("+", 1)[1]
else:
lqnon = line[len("2010/01/05 19:47:23 Listen queue: "):]
-
+
lqnon = int(lqnon.split(" ", 1)[0])
-
+
line = f.next()
cpu = int(line[len("CPU idle %: "):].split(" ", 1)[0])
-
+
line = f.next()
if line.startswith("Memory"):
line = f.next()
reqs = int(float(line.split(" ", 1)[0]))
-
+
line = f.next()
resp = line[len("Response time: average "):].split(" ", 1)[0]
- resp = int(float(resp)/10.0) * 10
-
+ resp = int(float(resp) / 10.0) * 10
+
if reqs <= 80:
data.append((dtstamp, reqs, resp, lqnon, cpu))
#print("%s %d %d %d %d" % (dtstamp, reqs, resp, lqnon, cpu))
except StopIteration:
break
-
+
if not title:
if startDate and endDate:
title = "Between %s and %s" % (startDate, endDate,)
@@ -93,9 +93,11 @@
title = "Start at %s" % (firstDate,)
dataset.append((title, data,))
-
+
print("Stored %d data points" % (len(data),))
+
+
def plotListenQBands(data, first, last, xlim, ylim):
x1 = []
@@ -104,7 +106,7 @@
y2 = []
x3 = []
y3 = []
- for datetime, reqs, resp, lq, cpu in data:
+ for datetime, reqs, resp, lq, _ignore_cpu in data:
if lq == 0:
x1.append(reqs)
y1.append(resp)
@@ -114,9 +116,9 @@
else:
x3.append(reqs)
y3.append(resp)
-
+
plt.plot(x1, y1, "b+", x2, y2, "g+", x3, y3, "y+")
-
+
if first:
plt.legend(('ListenQ at zero', 'ListenQ < 50', 'ListenQ >= 50'),
'upper left', shadow=True, fancybox=True)
@@ -126,11 +128,13 @@
plt.xlim(0, xlim)
plt.ylim(0, ylim)
+
+
def plotCPUBands(data, first, last, xlim, ylim):
x = [[], [], [], []]
y = [[], [], [], []]
- for datetime, reqs, resp, lq, cpu in data:
+ for datetime, reqs, resp, _ignore_lq, cpu in data:
if cpu > 75:
x[0].append(reqs)
y[0].append(resp)
@@ -143,14 +147,14 @@
else:
x[3].append(reqs)
y[3].append(resp)
-
+
plt.plot(
x[0], y[0], "b+",
x[1], y[1], "g+",
x[2], y[2], "y+",
x[3], y[3], "m+",
)
-
+
if first:
plt.legend(('CPU < 1/4', 'CPU < 1/2', 'CPU < 3/4', "CPU High"),
'upper left', shadow=True, fancybox=True)
@@ -160,38 +164,46 @@
plt.xlim(0, xlim)
plt.ylim(0, ylim)
+
+
def plot(figure, noshow, nosave, pngDir, xlim, ylim):
-
+
print("Plotting data")
-
+
plt.figure(figure, figsize=(16, 5 * len(dataset)))
nplots = len(dataset)
- subplot = nplots*100 + 20
-
+ subplot = nplots * 100 + 20
+
for ctr, item in enumerate(dataset):
-
+
title, data = item
if not title:
- title = "#%d" % (ctr+1,)
+ title = "#%d" % (ctr + 1,)
- plt.subplot(subplot + 2*ctr + 1)
- plotListenQBands(data, first=(ctr == 0), last=(ctr+1 == len(dataset)), xlim=xlim, ylim=ylim)
+ plt.subplot(subplot + 2 * ctr + 1)
+ plotListenQBands(data, first=(ctr == 0), last=(ctr + 1 == len(dataset)), xlim=xlim, ylim=ylim)
plt.title("ListenQ %s" % (title,))
-
- plt.subplot(subplot + 2*ctr + 2)
- plotCPUBands(data, first=(ctr == 0), last=(ctr+1 == len(dataset)), xlim=xlim, ylim=ylim)
+
+ plt.subplot(subplot + 2 * ctr + 2)
+ plotCPUBands(data, first=(ctr == 0), last=(ctr + 1 == len(dataset)), xlim=xlim, ylim=ylim)
plt.title("CPU %s" % (title,))
+
+
def argPath(path):
fpath = os.path.expanduser(path)
if not fpath.startswith("/"):
fpath = os.path.join(pwd, fpath)
return fpath
+
+
def expandDate(date):
return "%s/%s/%s" % (date[0:4], date[4:6], date[6:8],)
+
+
def usage(error_msg=None):
if error_msg:
print(error_msg)
@@ -260,7 +272,7 @@
if not nosave and not os.path.isdir(pngDir):
usage("Must have a valid -d path for saving images")
-
+
# Process arguments
if len(args) == 0 and scanDir is None:
usage("Must have arguments")
@@ -285,7 +297,7 @@
if not nosave:
plt.savefig(os.path.expanduser(os.path.join(pngDir, "Monitor-%s" % (trailer,))))
count += 1
-
+
if not noshow:
plt.show()
else:
@@ -309,10 +321,10 @@
else:
start = (None,)
end = (None,)
-
+
for i in range(len(start)):
analyze(argPath(arg), noweekends, start[i], end[i])
-
+
plot(1, noshow, nosave, pngDir, xlim, ylim)
if not nosave:
plt.savefig(os.path.expanduser(os.path.join(pngDir, "Monitor-%s" % (initialDate,))))
Modified: CalendarServer/trunk/contrib/tools/monitorsplit.py
===================================================================
--- CalendarServer/trunk/contrib/tools/monitorsplit.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/monitorsplit.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -27,7 +27,7 @@
lastWeek = None
def split(fpath, outputDir):
-
+
global outputFile, fileCount, lastWeek
print("Splitting data for %s" % (fpath,))
@@ -37,9 +37,9 @@
date = line[:10]
date = date.replace("/", "")
hours = line[11:13]
-
+
dt = datetime.date(int(date[0:4]), int(date[4:6]), int(date[6:8]))
-
+
currentWeek = dt.isocalendar()[1]
if dt.weekday() == 0 and hours <= "06":
currentWeek -= 1
@@ -50,7 +50,7 @@
fileCount += 1
lastWeek = currentWeek
print("Changed to week of %s" % (date,))
-
+
output = ["-----\n"]
output.append(line)
try:
@@ -65,15 +65,21 @@
outputFile.write("".join(output))
f.close()
+
+
def argPath(path):
fpath = os.path.expanduser(path)
if not fpath.startswith("/"):
fpath = os.path.join(pwd, fpath)
return fpath
+
+
def expandDate(date):
return "%s/%s/%s" % (date[0:4], date[4:6], date[6:8],)
+
+
def usage(error_msg=None):
if error_msg:
print(error_msg)
@@ -125,5 +131,5 @@
for arg in args:
split(argPath(arg), outputDir)
-
+
print("Created %d files" % (fileCount,))
Modified: CalendarServer/trunk/contrib/tools/netstatus.py
===================================================================
--- CalendarServer/trunk/contrib/tools/netstatus.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/netstatus.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -38,27 +38,27 @@
)
if __name__ == '__main__':
-
+
pendingq = {}
while True:
timestamp = time.time()
output = commands.getoutput("netstat -n")
-
+
states = [(0, 0, 0)] * len(stateNames)
newqs = {}
for line in output.split("\n"):
-
+
if not line.startswith("tcp4 ") and not line.startswith("tcp "):
continue
splits = line.split()
if not splits[3].endswith("8443") and not splits[3].endswith("8008"):
continue
for ctr, items in enumerate(stateNames):
-
+
for item in items:
if item in line:
-
+
total, recvq, sendq = states[ctr]
total += 1
if splits[1] != "0":
@@ -69,7 +69,7 @@
newqs[splits[4]] = (splits[1], splits[2],)
states[ctr] = (total, recvq, sendq,)
break
-
+
oldqs = set(pendingq.keys())
for key in oldqs.difference(newqs.keys()):
del pendingq[key]
@@ -87,10 +87,10 @@
print("State Total RecvQ SendQ")
for ctr, items in enumerate(stateNames):
print("%11s %5d %5d %5d" % (items[0], states[ctr][0], states[ctr][1], states[ctr][2]))
-
+
print("")
print("Source IP Established (secs) RecvQ SendQ")
- for key, value in sorted(pendingq.iteritems(), key=lambda x:x[1]):
+ for key, value in sorted(pendingq.iteritems(), key=lambda x: x[1]):
startedat, recv, sendq = value
deltatime = timestamp - startedat
if deltatime > 0:
Modified: CalendarServer/trunk/contrib/tools/pg_stats_analysis.py
===================================================================
--- CalendarServer/trunk/contrib/tools/pg_stats_analysis.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/pg_stats_analysis.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -27,6 +27,8 @@
def safePercent(x, y, multiplier=100):
return ((multiplier * x) / y) if y else 0
+
+
def _is_literal(token):
if token.ttype in sqlparse.tokens.Literal:
return True
@@ -34,6 +36,8 @@
return True
return False
+
+
def _substitute(expression, replacement):
try:
expression.tokens
@@ -48,6 +52,8 @@
else:
_substitute(token, replacement)
+
+
def sqlnormalize(sql):
try:
statements = sqlparse.parse(sql)
@@ -58,13 +64,13 @@
_substitute(statements[0], qmark)
return sqlparse.format(statements[0].to_unicode().encode('ascii'))
-COLUMN_userid = 0
+COLUMN_userid = 0
COLUMN_dbid = 1
COLUMN_query = 2
COLUMN_calls = 3
COLUMN_total_time = 4
COLUMN_rows = 5
-COLUMN_shared_blks_hit = 6
+COLUMN_shared_blks_hit = 6
COLUMN_shared_blks_read = 7
COLUMN_shared_blks_written = 8
COLUMN_local_blks_hit = 9
@@ -74,7 +80,7 @@
COLUMN_temp_blks_written = 13
def sqlStatementsReport(entries):
-
+
dcount = collections.defaultdict(int)
dtime = collections.defaultdict(float)
drows = collections.defaultdict(int)
@@ -82,10 +88,10 @@
dcount[entry[COLUMN_query]] += int(entry[COLUMN_calls])
dtime[entry[COLUMN_query]] += float(entry[COLUMN_total_time])
drows[entry[COLUMN_query]] += int(entry[COLUMN_rows])
-
+
daverage = {}
for k in dcount.keys():
- daverage[k] = dtime[k]/dcount[k]
+ daverage[k] = dtime[k] / dcount[k]
counttotal = sum(dcount.values())
timetotal = sum(dtime.values())
@@ -99,7 +105,7 @@
table = tables.Table()
table.addHeader(("Statement", "Count", "Count %", "Total Time", "Total Time %", "Av. Time", "Av. Time %", "Av. rows",))
table.setDefaultColumnFormats((
- tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
+ tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.2f%%", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
@@ -108,9 +114,9 @@
tables.Table.ColumnFormat("%.2f%%", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
))
-
+
for key in sortedkeys:
-
+
keylines = textwrap.wrap(key, 72, subsequent_indent=" ")
table.addRow((
keylines[0],
@@ -120,9 +126,9 @@
safePercent(dtime[key], timetotal, 100.0),
daverage[key],
safePercent(daverage[key], averagetotal, 100.0),
- float(drows[key])/dcount[key],
+ float(drows[key]) / dcount[key],
))
-
+
for keyline in keylines[1:]:
table.addRow((
keyline,
@@ -138,15 +144,17 @@
print("Queries sorted by %s" % (sorttype,))
table.printTable()
print("")
-
+
+
+
def parseStats(logFilePath, donormlize=True, verbose=False):
-
+
fpath = os.path.expanduser(logFilePath)
if fpath.endswith(".gz"):
f = GzipFile(fpath)
else:
f = open(fpath)
-
+
# Punt past data
for line in f:
if line.startswith("---"):
@@ -160,14 +168,14 @@
line = f.next()
newbits = line.split("|")
bits[COLUMN_query] = bits[COLUMN_query][:-1] + newbits[COLUMN_query]
-
+
pos = bits[COLUMN_query].find("BEGIN:VCALENDAR")
if pos != -1:
bits[COLUMN_query] = bits[COLUMN_query][:pos]
-
+
if donormlize:
bits[COLUMN_query] = sqlnormalize(bits[COLUMN_query].strip())
-
+
if bits[COLUMN_query] not in (
"BEGIN",
"COMMIT",
@@ -179,12 +187,14 @@
print("%d entries" % (len(entries),))
#if float(bits[COLUMN_total_time]) > 1:
# print(bits[COLUMN_total_time], bits[COLUMN_query])
-
+
if verbose:
print("Read %d entries" % (len(entries,)))
-
+
sqlStatementsReport(entries)
-
+
+
+
def usage(error_msg=None):
if error_msg:
print(error_msg)
@@ -208,10 +218,10 @@
sys.exit(0)
if __name__ == '__main__':
-
+
normalize = True
verbose = False
- options, args = getopt.getopt(sys.argv[1:], "hv", ["no-normalize",])
+ options, args = getopt.getopt(sys.argv[1:], "hv", ["no-normalize", ])
for option, value in options:
if option == "-h":
Modified: CalendarServer/trunk/contrib/tools/protocolanalysis.py
===================================================================
--- CalendarServer/trunk/contrib/tools/protocolanalysis.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/protocolanalysis.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -42,55 +42,55 @@
)
requestSizeBuckets = (
- ( 1000, "(a):0-1KB"),
- ( 2500, "(b):1-2.5KB"),
- ( 5000, "(c):2.5-5KB"),
- ( 10000, "(d):5-10KB"),
- ( 100000, "(e):10-100KB"),
- ( 1000000, "(f):100KB-1MB"),
- ( 10000000, "(g):1-10MB"),
+ (1000, "(a):0-1KB"),
+ (2500, "(b):1-2.5KB"),
+ (5000, "(c):2.5-5KB"),
+ (10000, "(d):5-10KB"),
+ (100000, "(e):10-100KB"),
+ (1000000, "(f):100KB-1MB"),
+ (10000000, "(g):1-10MB"),
(100000000, "(h):10-100MB"),
- ( None, "(i):100+MB"),
+ (None, "(i):100+MB"),
)
responseSizeBuckets = (
- ( 1000, "(a):0-1KB"),
- ( 5000, "(b):1-2.5KB"),
- ( 5000, "(c):2.5-5KB"),
- ( 10000, "(d):5-10KB"),
- ( 100000, "(e):10-100KB"),
- ( 1000000, "(f):100KB-1MB"),
+ (1000, "(a):0-1KB"),
+ (5000, "(b):1-2.5KB"),
+ (5000, "(c):2.5-5KB"),
+ (10000, "(d):5-10KB"),
+ (100000, "(e):10-100KB"),
+ (1000000, "(f):100KB-1MB"),
(10000000, "(g):1-10MB"),
- ( None, "(h):10+MB"),
+ (None, "(h):10+MB"),
)
requestTimeBuckets = (
- ( 10, "(a):0-10ms"),
- ( 50, "(b):10-50ms"),
- ( 100, "(c):50-100ms"),
- ( 250, "(d):100-250ms"),
- ( 500, "(e):250-500ms"),
- ( 1000, "(f):500ms-1s"),
- ( 5000, "(g):1-5s"),
- ( 10000, "(h):5-10s"),
- ( 30000, "(i):10-30s"),
- ( 60000, "(j):30-60s"),
+ (10, "(a):0-10ms"),
+ (50, "(b):10-50ms"),
+ (100, "(c):50-100ms"),
+ (250, "(d):100-250ms"),
+ (500, "(e):250-500ms"),
+ (1000, "(f):500ms-1s"),
+ (5000, "(g):1-5s"),
+ (10000, "(h):5-10s"),
+ (30000, "(i):10-30s"),
+ (60000, "(j):30-60s"),
(120000, "(k):60-120s"),
- ( None, "(l):120s+"),
+ (None, "(l):120s+"),
)
userInteractionCountBuckets = (
- ( 0, "(a):0"),
- ( 1, "(b):1"),
- ( 2, "(c):2"),
- ( 3, "(d):3"),
- ( 4, "(e):4"),
- ( 5, "(f):5"),
- ( 10, "(g):6-10"),
- ( 15, "(h):11-15"),
- ( 20, "(i):16-20"),
- ( 30, "(j):21-30"),
- ( 50, "(k):31-50"),
+ (0, "(a):0"),
+ (1, "(b):1"),
+ (2, "(c):2"),
+ (3, "(d):3"),
+ (4, "(e):4"),
+ (5, "(f):5"),
+ (10, "(g):6-10"),
+ (15, "(h):11-15"),
+ (20, "(i):16-20"),
+ (30, "(j):21-30"),
+ (50, "(k):31-50"),
(None, "(l):51+"),
)
@@ -191,7 +191,7 @@
METHOD_401 = "Z 401s"
class CalendarServerLogAnalyzer(object):
-
+
"""
@ivar resolutionMinutes: The number of minutes long a statistics
bucket will be. For example, if this is C{5}, then all data
@@ -207,7 +207,7 @@
"""
class LogLine(object):
-
+
def __init__(self, ipaddr, userid, logDateTime, logTime, method, uri, status, reqbytes, referer, client, extended):
self.ipaddr = ipaddr
@@ -222,11 +222,12 @@
self.client = client
self.extended = extended
+
def __init__(
self,
startHour=None,
endHour=None,
- utcoffset = 0,
+ utcoffset=0,
resolutionMinutes=60,
filterByUser=None,
filterByClient=None,
@@ -242,50 +243,50 @@
self.filterByClient = filterByClient
self.ignoreNonHTTPMethods = ignoreNonHTTPMethods
self.separate401s = separate401s
-
+
self.startTime = datetime.datetime.now().replace(microsecond=0)
-
+
self.host = socket.getfqdn()
self.startLog = ""
self.endLog = ""
-
+
self.resolutionMinutes = resolutionMinutes
self.timeBucketCount = (24 * 60) / resolutionMinutes
self.loggedUTCOffset = None
- self.hourlyTotals = [[0, 0, 0, collections.defaultdict(int), 0.0,] for _ignore in xrange(self.timeBucketCount)]
-
- self.clientTotals = collections.defaultdict(lambda:[0, set(), set()])
+ self.hourlyTotals = [[0, 0, 0, collections.defaultdict(int), 0.0, ] for _ignore in xrange(self.timeBucketCount)]
+
+ self.clientTotals = collections.defaultdict(lambda: [0, set(), set()])
self.clientIDMap = {}
- self.clientByMethodCount = collections.defaultdict(lambda:collections.defaultdict(int))
+ self.clientByMethodCount = collections.defaultdict(lambda: collections.defaultdict(int))
self.clientIDByMethodCount = {}
- self.clientByMethodTotalTime = collections.defaultdict(lambda:collections.defaultdict(float))
- self.clientByMethodAveragedTime = collections.defaultdict(lambda:collections.defaultdict(float))
- self.statusByMethodCount = collections.defaultdict(lambda:collections.defaultdict(int))
-
- self.hourlyByMethodCount = collections.defaultdict(lambda:[0,] * self.timeBucketCount)
- self.hourlyByOKMethodCount = collections.defaultdict(lambda:[0,] * self.timeBucketCount)
- self.hourlyByMethodTime = collections.defaultdict(lambda:[0.0,] * self.timeBucketCount)
- self.hourlyByOKMethodTime = collections.defaultdict(lambda:[0.0,] * self.timeBucketCount)
- self.averagedHourlyByMethodTime = collections.defaultdict(lambda:[0.0,] * self.timeBucketCount)
- self.averagedHourlyByOKMethodTime = collections.defaultdict(lambda:[0.0,] * self.timeBucketCount)
- self.hourlyPropfindByResponseCount = collections.defaultdict(lambda:[0,] * self.timeBucketCount)
-
- self.hourlyByStatus = collections.defaultdict(lambda:[0,] * self.timeBucketCount)
-
- self.hourlyByRecipientCount = collections.defaultdict(lambda:[[0, 0] for _ignore in xrange(self.timeBucketCount)])
- self.averagedHourlyByRecipientCount = collections.defaultdict(lambda:[0,] * self.timeBucketCount)
-
- self.responseTimeVsQueueDepth = collections.defaultdict(lambda:[0, 0.0,])
+ self.clientByMethodTotalTime = collections.defaultdict(lambda: collections.defaultdict(float))
+ self.clientByMethodAveragedTime = collections.defaultdict(lambda: collections.defaultdict(float))
+ self.statusByMethodCount = collections.defaultdict(lambda: collections.defaultdict(int))
+
+ self.hourlyByMethodCount = collections.defaultdict(lambda: [0, ] * self.timeBucketCount)
+ self.hourlyByOKMethodCount = collections.defaultdict(lambda: [0, ] * self.timeBucketCount)
+ self.hourlyByMethodTime = collections.defaultdict(lambda: [0.0, ] * self.timeBucketCount)
+ self.hourlyByOKMethodTime = collections.defaultdict(lambda: [0.0, ] * self.timeBucketCount)
+ self.averagedHourlyByMethodTime = collections.defaultdict(lambda: [0.0, ] * self.timeBucketCount)
+ self.averagedHourlyByOKMethodTime = collections.defaultdict(lambda: [0.0, ] * self.timeBucketCount)
+ self.hourlyPropfindByResponseCount = collections.defaultdict(lambda: [0, ] * self.timeBucketCount)
+
+ self.hourlyByStatus = collections.defaultdict(lambda: [0, ] * self.timeBucketCount)
+
+ self.hourlyByRecipientCount = collections.defaultdict(lambda: [[0, 0] for _ignore in xrange(self.timeBucketCount)])
+ self.averagedHourlyByRecipientCount = collections.defaultdict(lambda: [0, ] * self.timeBucketCount)
+
+ self.responseTimeVsQueueDepth = collections.defaultdict(lambda: [0, 0.0, ])
self.averagedResponseTimeVsQueueDepth = collections.defaultdict(int)
self.instanceCount = collections.defaultdict(int)
- self.requestSizeByBucket = collections.defaultdict(lambda:[0,] * self.timeBucketCount)
- self.responseSizeByBucket = collections.defaultdict(lambda:[0,] * self.timeBucketCount)
+ self.requestSizeByBucket = collections.defaultdict(lambda: [0, ] * self.timeBucketCount)
+ self.responseSizeByBucket = collections.defaultdict(lambda: [0, ] * self.timeBucketCount)
self.responseCountByMethod = collections.defaultdict(lambda: [0, 0])
- self.requestTimeByBucket = collections.defaultdict(lambda:[0,] * self.timeBucketCount)
-
+ self.requestTimeByBucket = collections.defaultdict(lambda: [0, ] * self.timeBucketCount)
+
self.requestURI = collections.defaultdict(int)
self.userWeights = collections.defaultdict(int)
@@ -296,14 +297,15 @@
self.currentLine = None
self.linesRead = collections.defaultdict(int)
-
+
+
def analyzeLogFile(self, logFilePath):
fpath = os.path.expanduser(logFilePath)
if fpath.endswith(".gz"):
f = GzipFile(fpath)
else:
f = open(fpath)
-
+
effectiveStart = self.startHour if self.startHour is not None else 0
effectiveEnd = self.endHour if self.endHour is not None else 23
self.maxIndex = (effectiveEnd - effectiveStart + 1) * 60 / self.resolutionMinutes
@@ -316,21 +318,21 @@
self.linesRead[logFilePath] += 1
if line.startswith("Log"):
continue
-
+
try:
self.parseLine(line)
except:
print("Could not parse line:\n%s" % (line,))
continue
-
+
# Filter method
if self.ignoreNonHTTPMethods and not self.currentLine.method.startswith("REPORT(") and self.currentLine.method not in httpMethods:
self.currentLine.method = "???"
-
+
# Do hour ranges
logHour = int(self.currentLine.logTime[0:2])
logMinute = int(self.currentLine.logTime[3:5])
-
+
if self.adjustHour is None:
self.adjustHour = self.startHour if self.startHour is not None else logHour
hourFromStart = logHour - self.adjustHour
@@ -340,17 +342,17 @@
continue
elif self.endHour is not None and logHour > self.endHour:
continue
-
+
timeBucketIndex = (hourFromStart * 60 + logMinute) / self.resolutionMinutes
if not self.startLog:
self.startLog = self.currentLine.logDateTime
- self.endLog = self.currentLine.logDateTime
-
+ self.endLog = self.currentLine.logDateTime
+
# Filter on user id
if self.filterByUser and self.currentLine.userid != self.filterByUser:
continue
-
+
# Filter on client
adjustedClient = self.getClientAdjustedName()
if self.filterByClient and adjustedClient.find(self.filterByClient) == -1:
@@ -360,7 +362,7 @@
is503 = self.currentLine.method == "???"
isOK = self.currentLine.status / 100 == 2
adjustedMethod = self.getAdjustedMethodName()
-
+
instance = self.currentLine.extended.get("i", "")
responseTime = float(self.currentLine.extended.get("t", 0.0))
queueDepth = int(self.currentLine.extended.get("or", 0))
@@ -368,14 +370,14 @@
rcount = int(self.currentLine.extended.get("responses", -1))
if rcount == -1:
rcount = int(self.currentLine.extended.get("rcount", -1))
-
+
# Main summary
self.hourlyTotals[timeBucketIndex][0] += 1
self.hourlyTotals[timeBucketIndex][1] += 1 if is503 else 0
self.hourlyTotals[timeBucketIndex][2] += queueDepth
self.hourlyTotals[timeBucketIndex][3][instance] = max(self.hourlyTotals[timeBucketIndex][3][instance], queueDepth)
self.hourlyTotals[timeBucketIndex][4] += responseTime
-
+
# Client analysis
if not is503:
self.clientTotals[" TOTAL"][0] += 1
@@ -384,42 +386,42 @@
self.clientTotals[adjustedClient][0] += 1
self.clientTotals[adjustedClient][1].add(self.currentLine.userid)
self.clientTotals[adjustedClient][2].add(self.currentLine.ipaddr)
-
+
self.clientByMethodCount[" TOTAL"][" TOTAL"] += 1
self.clientByMethodCount[" TOTAL"][adjustedMethod] += 1
self.clientByMethodCount[adjustedClient][" TOTAL"] += 1
self.clientByMethodCount[adjustedClient][adjustedMethod] += 1
-
+
self.clientByMethodTotalTime[" TOTAL"][" TOTAL"] += responseTime
self.clientByMethodTotalTime[" TOTAL"][adjustedMethod] += responseTime
self.clientByMethodTotalTime[adjustedClient][" TOTAL"] += responseTime
self.clientByMethodTotalTime[adjustedClient][adjustedMethod] += responseTime
-
+
self.statusByMethodCount[" TOTAL"][" TOTAL"] += 1
self.statusByMethodCount[" TOTAL"][adjustedMethod] += 1
self.statusByMethodCount["2xx" if isOK else "%d" % (self.currentLine.status,)][" TOTAL"] += 1
self.statusByMethodCount["2xx" if isOK else "%d" % (self.currentLine.status,)][adjustedMethod] += 1
-
+
# Method counts, timing and status
self.hourlyByMethodCount[" TOTAL"][timeBucketIndex] += 1
self.hourlyByMethodCount[adjustedMethod][timeBucketIndex] += 1
self.hourlyByOKMethodCount[" TOTAL"][timeBucketIndex] += 1
self.hourlyByOKMethodCount[adjustedMethod if isOK else METHOD_401][timeBucketIndex] += 1
-
+
self.hourlyByMethodTime[" TOTAL"][timeBucketIndex] += responseTime
self.hourlyByMethodTime[adjustedMethod][timeBucketIndex] += responseTime
self.hourlyByOKMethodTime[" TOTAL"][timeBucketIndex] += responseTime
self.hourlyByOKMethodTime[adjustedMethod if isOK else METHOD_401][timeBucketIndex] += responseTime
-
+
self.hourlyByStatus[" TOTAL"][timeBucketIndex] += 1
self.hourlyByStatus[self.currentLine.status][timeBucketIndex] += 1
-
+
# Cache analysis
if adjustedMethod == METHOD_PROPFIND_CALENDAR and self.currentLine.status == 207:
responses = int(self.currentLine.extended.get("responses", 0))
self.hourlyPropfindByResponseCount[" TOTAL"][timeBucketIndex] += 1
self.hourlyPropfindByResponseCount[self.getCountBucket(responses, responseCountBuckets)][timeBucketIndex] += 1
-
+
# Scheduling analysis
if adjustedMethod == METHOD_POST_FREEBUSY:
recipients = int(self.currentLine.extended.get("recipients", 0)) + int(self.currentLine.extended.get("freebusy", 0))
@@ -437,7 +439,7 @@
self.hourlyByRecipientCount["iTIP Average"][timeBucketIndex][1] += recipients
self.hourlyByRecipientCount["iTIP Max."][timeBucketIndex][0] = max(self.hourlyByRecipientCount["iTIP Max."][timeBucketIndex][0], recipients)
elif adjustedMethod == METHOD_POST_ISCHEDULE_FREEBUSY:
- recipients = int(self.currentLine.extended.get("recipients", 0)) + int(self.currentLine.extended.get("freebusy", 0))
+ recipients = int(self.currentLine.extended.get("recipients", 0)) + int(self.currentLine.extended.get("freebusy", 0))
self.hourlyByRecipientCount["iFreebusy One Offs" if recipients == 1 else "iFreebusy Average"][timeBucketIndex][0] += 1
self.hourlyByRecipientCount["iFreebusy One Offs" if recipients == 1 else "iFreebusy Average"][timeBucketIndex][1] += recipients
self.hourlyByRecipientCount["iFreebusy Max."][timeBucketIndex][0] = max(self.hourlyByRecipientCount["iFreebusy Max."][timeBucketIndex][0], recipients)
@@ -446,11 +448,11 @@
self.hourlyByRecipientCount["iSchedule Average"][timeBucketIndex][0] += 1
self.hourlyByRecipientCount["iSchedule Average"][timeBucketIndex][1] += recipients
self.hourlyByRecipientCount["iSchedule Max."][timeBucketIndex][0] = max(self.hourlyByRecipientCount["iSchedule Max."][timeBucketIndex][0], recipients)
-
+
# Queue depth analysis
self.responseTimeVsQueueDepth[queueDepth][0] += 1
self.responseTimeVsQueueDepth[queueDepth][1] += responseTime
-
+
# Instance counts
self.instanceCount[instance] += 1
@@ -461,7 +463,7 @@
if adjustedMethod != METHOD_GET_DROPBOX:
self.responseSizeByBucket[" TOTAL"][timeBucketIndex] += 1
self.responseSizeByBucket[self.getCountBucket(self.currentLine.bytes, responseSizeBuckets)][timeBucketIndex] += 1
-
+
if rcount != -1:
self.responseCountByMethod[" TOTAL"][0] += rcount
self.responseCountByMethod[" TOTAL"][1] += 1
@@ -483,7 +485,7 @@
except Exception:
print("Failed to process line:\n%s" % (line,))
raise
-
+
# Average various items
self.averagedHourlyByMethodTime.clear()
for method, hours in self.hourlyByMethodTime.iteritems():
@@ -503,11 +505,11 @@
else:
newValue = hours[hour]
self.averagedHourlyByOKMethodTime[method][hour] = newValue
-
+
self.averagedResponseTimeVsQueueDepth.clear()
for k, v in self.responseTimeVsQueueDepth.iteritems():
- self.averagedResponseTimeVsQueueDepth[k] = (v[0], v[1] / v[0], )
-
+ self.averagedResponseTimeVsQueueDepth[k] = (v[0], v[1] / v[0],)
+
self.averagedHourlyByRecipientCount.clear()
for method, value in self.hourlyByRecipientCount.iteritems():
for hour in xrange(self.timeBucketCount):
@@ -516,7 +518,7 @@
else:
newValue = value[hour][0]
self.averagedHourlyByRecipientCount[method][hour] = newValue
-
+
averaged = collections.defaultdict(int)
for key, value in self.responseCountByMethod.iteritems():
averaged[key] = (value[0] / value[1]) if value[1] else 0
@@ -525,30 +527,31 @@
for client, data in self.clientByMethodTotalTime.iteritems():
for method, totaltime in data.iteritems():
count = self.clientByMethodCount[client][method]
- self.clientByMethodAveragedTime[client][method] = totaltime/count if count else 0
+ self.clientByMethodAveragedTime[client][method] = totaltime / count if count else 0
self.clientIDMap = {}
for ctr, client in enumerate(sorted(self.clientByMethodCount.keys())):
- self.clientIDMap[client] = "ID-%02d" % (ctr+1,)
+ self.clientIDMap[client] = "ID-%02d" % (ctr + 1,)
self.clientIDByMethodCount = {}
for client, data in self.clientByMethodCount.iteritems():
self.clientIDByMethodCount[self.clientIDMap[client]] = data
+
def parseLine(self, line):
-
+
startPos = line.find("- ")
endPos = line.find(" [")
-
- ipaddr = line[0:startPos-2]
- userid = line[startPos+2:endPos]
-
+
+ ipaddr = line[0:startPos - 2]
+ userid = line[startPos + 2:endPos]
+
startPos = endPos + 1
logDateTime = line[startPos + 1:startPos + 21]
logTime = line[startPos + 13:startPos + 21]
-
+
if self.loggedUTCOffset is None:
self.loggedUTCOffset = int(line[startPos + 22:startPos + 25])
-
+
startPos = line.find(']', startPos + 21) + 3
endPos = line.find(' ', startPos)
if line[startPos] == '?':
@@ -557,52 +560,53 @@
startPos += 5
else:
method = line[startPos:endPos]
-
+
startPos = endPos + 1
endPos = line.find(" HTTP/", startPos)
uri = line[startPos:endPos]
startPos = endPos + 11
-
- status = int(line[startPos:startPos+3])
-
+
+ status = int(line[startPos:startPos + 3])
+
startPos += 4
endPos = line.find(' ', startPos)
reqbytes = int(line[startPos:endPos])
-
+
startPos = endPos + 2
endPos = line.find('"', startPos)
# Handle "attacks" where double-quotes may appear in the string
if line[endPos + 1] != ' ':
- endPos = line.find('"', endPos+1)
+ endPos = line.find('"', endPos + 1)
referrer = line[startPos:endPos]
-
+
startPos = endPos + 3
endPos = line.find('"', startPos)
client = line[startPos:endPos]
-
+
startPos = endPos + 2
if line[startPos] == '[':
extended = {}
-
+
startPos += 1
endPos = line.find(' ', startPos)
extended["t"] = line[startPos:endPos]
-
+
startPos = endPos + 6
endPos = line.find(' ', startPos)
extended["i"] = line[startPos:endPos]
-
+
startPos = endPos + 1
endPos = line.find(']', startPos)
extended["or"] = line[startPos:endPos]
else:
items = line[startPos:].split()
extended = dict([item.split('=') for item in items if item.find("=") != -1])
-
+
self.currentLine = CalendarServerLogAnalyzer.LogLine(ipaddr, userid, logDateTime, logTime, method, uri, status, reqbytes, referrer, client, extended)
-
+
+
def getClientAdjustedName(self):
-
+
versionClients = (
"iCal/",
"iPhone/",
@@ -623,7 +627,7 @@
endex = len(self.currentLine.client)
name = self.currentLine.client[index:endex]
return name
-
+
index = self.currentLine.client.find("calendarclient")
if index != -1:
code = self.currentLine.client[14]
@@ -633,9 +637,9 @@
return "iPhone/3 sim"
else:
return "Simulator"
-
+
quickclients = (
- ("CardDAVPlugin/", "CardDAVPlugin"),
+ ("CardDAVPlugin/", "CardDAVPlugin"),
("Address%20Book/", "AddressBook"),
("AddressBook/", "AddressBook"),
("Mail/", "Mail"),
@@ -648,7 +652,8 @@
return result
return self.currentLine.client[:20]
-
+
+
def getAdjustedMethodName(self):
uribits = self.currentLine.uri.rstrip("/").split('/')[1:]
@@ -657,13 +662,13 @@
calendar_specials = ("dropbox", "notification", "freebusy", "outbox",)
adbk_specials = ("notification",)
-
+
if self.currentLine.method == "PROPFIND":
-
+
cached = "cached" in self.currentLine.extended
if uribits[0] == "calendars":
-
+
if len(uribits) == 3:
return METHOD_PROPFIND_CACHED_CALENDAR_HOME if cached else METHOD_PROPFIND_CALENDAR_HOME
elif len(uribits) > 3:
@@ -674,9 +679,9 @@
return METHOD_PROPFIND_INBOX
else:
return METHOD_PROPFIND_CALENDAR
-
+
elif uribits[0] == "addressbooks":
-
+
if len(uribits) == 3:
return METHOD_PROPFIND_CACHED_ADDRESSBOOK_HOME if cached else METHOD_PROPFIND_ADDRESSBOOK_HOME
elif len(uribits) > 3:
@@ -684,15 +689,15 @@
return "PROPFIND %s" % (uribits[3],)
elif len(uribits) == 4:
return METHOD_PROPFIND_ADDRESSBOOK
-
+
elif uribits[0] == "directory":
return METHOD_PROPFIND_DIRECTORY
-
+
elif uribits[0] == "principals":
return METHOD_PROPFIND_CACHED_PRINCIPALS if cached else METHOD_PROPFIND_PRINCIPALS
-
+
elif self.currentLine.method.startswith("REPORT"):
-
+
if "(" in self.currentLine.method:
report_type = self.currentLine.method.split("}" if "}" in self.currentLine.method else ":")[1][:-1]
if report_type == "addressbook-query":
@@ -717,18 +722,18 @@
"expand-property" : METHOD_REPORT_EXPAND_P,
}
return mappedNames.get(report_type, "REPORT %s" % (report_type,))
-
+
elif self.currentLine.method == "PROPPATCH":
-
+
if uribits[0] == "calendars":
return METHOD_PROPPATCH_CALENDAR
elif uribits[0] == "addressbooks":
return METHOD_PROPPATCH_ADDRESSBOOK
-
+
elif self.currentLine.method == "POST":
-
+
if uribits[0] == "calendars":
-
+
if len(uribits) == 3:
return METHOD_POST_CALENDAR_HOME
elif len(uribits) == 4:
@@ -747,9 +752,9 @@
pass
else:
return METHOD_POST_CALENDAR
-
+
elif uribits[0] == "addressbooks":
-
+
if len(uribits) == 3:
return METHOD_POST_ADDRESSBOOK_HOME
elif len(uribits) == 4:
@@ -763,15 +768,15 @@
return METHOD_POST_ISCHEDULE_FREEBUSY
else:
return METHOD_POST_ISCHEDULE
-
+
elif uribits[0].startswith("timezones"):
return METHOD_POST_TIMEZONES
-
+
elif uribits[0].startswith("apns"):
return METHOD_POST_APNS
-
+
elif self.currentLine.method == "PUT":
-
+
if uribits[0] == "calendars":
if len(uribits) > 3:
if uribits[3] in calendar_specials:
@@ -794,11 +799,11 @@
pass
else:
return METHOD_PUT_VCF
-
+
elif self.currentLine.method == "GET":
-
+
if uribits[0] == "calendars":
-
+
if len(uribits) == 3:
return METHOD_GET_CALENDAR_HOME
elif len(uribits) > 3:
@@ -810,9 +815,9 @@
return METHOD_GET_INBOX_ICS
else:
return METHOD_GET_ICS
-
+
elif uribits[0] == "addressbooks":
-
+
if len(uribits) == 3:
return METHOD_GET_ADDRESSBOOK_HOME
elif len(uribits) > 3:
@@ -827,9 +832,9 @@
return METHOD_GET_TIMEZONES
elif self.currentLine.method == "DELETE":
-
+
if uribits[0] == "calendars":
-
+
if len(uribits) == 3:
return METHOD_DELETE_CALENDAR_HOME
elif len(uribits) > 3:
@@ -841,9 +846,9 @@
return METHOD_DELETE_INBOX_ICS
else:
return METHOD_DELETE_ICS
-
+
elif uribits[0] == "addressbooks":
-
+
if len(uribits) == 3:
return METHOD_DELETE_ADDRESSBOOK_HOME
elif len(uribits) > 3:
@@ -855,9 +860,10 @@
return METHOD_DELETE_VCF
return self.currentLine.method
-
+
+
def getCountBucket(self, count, buckets):
-
+
for limit, key in buckets:
if limit is None:
break
@@ -901,7 +907,7 @@
weighting = {}
def userAnalysis(self, adjustedMethod):
-
+
if self.currentLine.userid == "-":
return
# try:
@@ -909,8 +915,7 @@
# except KeyError:
# self.userWeights[self.currentLine.userid] += 5
self.userWeights[self.currentLine.userid] += 1
-
-
+
responseTime = float(self.currentLine.extended.get("t", 0.0))
self.userCounts["%s:%s" % (self.currentLine.userid, self.getClientAdjustedName(),)] += 1
self.userResponseTimes["%s:%s" % (self.currentLine.userid, self.getClientAdjustedName(),)] += responseTime
@@ -942,90 +947,91 @@
def printAll(self, doTabs, summary):
self.printInfo(doTabs)
-
+
print("Load Analysis")
self.printHourlyTotals(doTabs, summary)
-
+
if not summary:
print("Client Analysis")
self.printClientTotals(doTabs)
-
+
print("Protocol Analysis Count")
self.printHourlyByXXXDetails(
self.hourlyByOKMethodCount if self.separate401s else self.hourlyByMethodCount,
doTabs,
)
-
+
print("Protocol Analysis Average Response Time (ms)")
self.printHourlyByXXXDetails(
self.averagedHourlyByOKMethodTime if self.separate401s else self.averagedHourlyByMethodTime,
doTabs,
showAverages=True,
)
-
+
print("Protocol Analysis Total Response Time (ms)")
self.printHourlyByXXXDetails(
self.hourlyByOKMethodTime if self.separate401s else self.hourlyByMethodTime,
doTabs,
showFloatPercent=True,
)
-
+
print("Status Code Analysis")
self.printHourlyByXXXDetails(self.hourlyByStatus, doTabs)
-
+
print("Protocol Analysis by Status")
self.printXXXMethodDetails(self.statusByMethodCount, doTabs, False)
-
+
print("Cache Analysis")
self.printHourlyCacheDetails(doTabs)
-
+
if len(self.hourlyPropfindByResponseCount):
print("PROPFIND Calendar response count distribution")
self.printHourlyByXXXDetails(self.hourlyPropfindByResponseCount, doTabs)
-
+
if len(self.averagedHourlyByRecipientCount):
print("Average Recipient Counts")
self.printHourlyByXXXDetails(self.averagedHourlyByRecipientCount, doTabs, showTotals=False)
-
+
print("Queue Depth vs Response Time")
self.printQueueDepthResponseTime(doTabs)
-
+
print("Instance Count Distribution")
self.printInstanceCount(doTabs)
-
+
print("Protocol Analysis by Client")
self.printXXXMethodDetails(self.clientIDByMethodCount, doTabs)
-
+
if len(self.requestSizeByBucket):
print("Request size distribution")
self.printHourlyByXXXDetails(self.requestSizeByBucket, doTabs)
-
+
if len(self.responseSizeByBucket):
print("Response size distribution (excluding GET Dropbox)")
self.printHourlyByXXXDetails(self.responseSizeByBucket, doTabs)
-
+
if len(self.averageResponseCountByMethod):
print("Average response count by method")
self.printResponseCounts(doTabs)
-
+
if len(self.requestTimeByBucket):
print("Response time distribution")
self.printHourlyByXXXDetails(self.requestTimeByBucket, doTabs)
-
+
print("URI Counts")
self.printURICounts(doTabs)
-
+
#print("User Interaction Counts")
#self.printUserInteractionCounts(doTabs)
-
+
print("User Weights (top 100)")
self.printUserWeights(doTabs)
-
+
#print("User Response times")
#self.printUserResponseTimes(doTabs)
+
def printInfo(self, doTabs):
-
+
table = tables.Table()
table.addRow(("Run on:", self.startTime.isoformat(' '),))
table.addRow(("Host:", self.host,))
@@ -1036,42 +1042,44 @@
if self.filterByClient:
table.addRow(("Filtered to client:", self.filterByClient,))
table.addRow(("Lines Analyzed:", sum(self.linesRead.values()),))
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
def getHourFromIndex(self, index):
-
+
if index >= self.maxIndex:
return None
totalminutes = index * self.resolutionMinutes
-
+
offsethour, minute = divmod(totalminutes, 60)
localhour = divmod(offsethour + self.adjustHour + self.utcoffset, 24)[1]
utchour = divmod(localhour - self.loggedUTCOffset - self.utcoffset, 24)[1]
-
+
# Clip to select hour range
return "%02d:%02d (%02d:%02d)" % (localhour, minute, utchour, minute,)
-
+
+
def printHourlyTotals(self, doTabs, summary):
-
+
table = tables.Table()
table.addHeader(
- ("Local (UTC)", "Total", "Av. Requests", "Av. Response", "Av. Queue",)
+ ("Local (UTC)", "Total", "Av. Requests", "Av. Response", "Av. Queue",)
)
table.addHeader(
- ("", "Requests", "Per Second", "Time(ms)", "Depth")
+ ("", "Requests", "Per Second", "Time(ms)", "Depth")
)
table.setDefaultColumnFormats(
(
- tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
+ tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.2f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
)
)
-
+
totalRequests = 0
totalDepth = 0
totalTime = 0.0
@@ -1093,7 +1101,7 @@
totalRequests += countRequests
totalDepth += countDepth
totalTime += countTime
-
+
table.addFooter(
(
"Total:",
@@ -1102,33 +1110,33 @@
safePercent(totalTime, totalRequests, 1.0),
safePercent(float(totalDepth), totalRequests, 1),
),
- columnFormats=
- (
- tables.Table.ColumnFormat("%s"),
+ columnFormats=(
+ tables.Table.ColumnFormat("%s"),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.2f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
)
)
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
def printClientTotals(self, doTabs):
-
+
table = tables.Table()
-
+
table.setDefaultColumnFormats((
- tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
+ tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
))
-
+
table.addHeader(("Client", "Total", "Unique", "Unique"))
- table.addHeader(( "", "", "Users", "IP addrs"))
- for title, clientData in sorted(self.clientTotals.iteritems(), key=lambda x:x[0].lower()):
+ table.addHeader(("", "", "Users", "IP addrs"))
+ for title, clientData in sorted(self.clientTotals.iteritems(), key=lambda x: x[0].lower()):
if title == " TOTAL":
continue
table.addRow((
@@ -1137,26 +1145,27 @@
"%d (%2d%%)" % (len(clientData[1]), safePercent(len(clientData[1]), len(self.clientTotals[" TOTAL"][1])),),
"%d (%2d%%)" % (len(clientData[2]), safePercent(len(clientData[2]), len(self.clientTotals[" TOTAL"][2])),),
))
-
+
table.addFooter((
"All",
"%d " % (self.clientTotals[" TOTAL"][0],),
"%d " % (len(self.clientTotals[" TOTAL"][1]),),
"%d " % (len(self.clientTotals[" TOTAL"][2]),),
))
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
def printHourlyByXXXDetails(self, hourlyByXXX, doTabs, showTotals=True, showAverages=False, showFloatPercent=False):
-
+
totals = [(0, 0,)] * len(hourlyByXXX)
table = tables.Table()
-
- headers = [["Local (UTC)",], ["",], ["",], ["",],]
+
+ headers = [["Local (UTC)", ], ["", ], ["", ], ["", ], ]
use_headers = [True, False, False, False]
- formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),]
- for k in sorted(hourlyByXXX.keys(), key=lambda x:str(x).lower()):
+ formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY), ]
+ for k in sorted(hourlyByXXX.keys(), key=lambda x: str(x).lower()):
if type(k) is str:
if k[0] == ' ':
k = k[1:]
@@ -1191,13 +1200,13 @@
if use_headers[3]:
table.addHeader(headers[3])
table.setDefaultColumnFormats(formats)
-
+
for ctr in xrange(self.timeBucketCount):
row = ["-"] * (len(hourlyByXXX) + 1)
row[0] = self.getHourFromIndex(ctr)
if row[0] is None:
continue
- for colctr, items in enumerate(sorted(hourlyByXXX.items(), key=lambda x:str(x[0]).lower())):
+ for colctr, items in enumerate(sorted(hourlyByXXX.items(), key=lambda x: str(x[0]).lower())):
_ignore, value = items
data = value[ctr]
if " TOTAL" in hourlyByXXX:
@@ -1219,7 +1228,7 @@
if data:
totals[colctr] = (totals[colctr][0] + data, totals[colctr][1] + 1,)
table.addRow(row)
-
+
if showTotals or showAverages:
row = ["-"] * (len(hourlyByXXX) + 1)
row[0] = "Average:" if showAverages else "Total:"
@@ -1234,17 +1243,18 @@
else:
row[colctr + 1] = "%.1f" % (data,)
table.addFooter(row)
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
def printHourlyCacheDetails(self, doTabs):
-
- totals = [0,] * 7
+
+ totals = [0, ] * 7
table = tables.Table()
-
+
header1 = ["Local (UTC)", "PROPFIND Calendar Home", "", "PROPFIND Address Book Home", "", "PROPFIND Principals", ""]
- header2 = ["", "Uncached", "Cached", "Uncached", "Cached", "Uncached", "Cached"]
+ header2 = ["", "Uncached", "Cached", "Uncached", "Cached", "Uncached", "Cached"]
formats = [
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
@@ -1254,8 +1264,8 @@
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
]
-
- table.addHeader(header1, columnFormats = [
+
+ table.addHeader(header1, columnFormats=[
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY, span=2),
None,
@@ -1264,9 +1274,9 @@
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY, span=2),
None,
])
-
+
table.addHeaderDivider(skipColumns=(0,))
- table.addHeader(header2, columnFormats = [
+ table.addHeader(header2, columnFormats=[
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
@@ -1276,7 +1286,7 @@
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
])
table.setDefaultColumnFormats(formats)
-
+
for ctr in xrange(self.timeBucketCount):
hour = self.getHourFromIndex(ctr)
if hour is None:
@@ -1284,38 +1294,37 @@
row = []
row.append(hour)
-
+
calHomeUncached = self.hourlyByOKMethodCount[METHOD_PROPFIND_CALENDAR_HOME][ctr]
calHomeCached = self.hourlyByOKMethodCount[METHOD_PROPFIND_CACHED_CALENDAR_HOME][ctr]
calHomeTotal = calHomeUncached + calHomeCached
-
+
adbkHomeUncached = self.hourlyByOKMethodCount[METHOD_PROPFIND_ADDRESSBOOK_HOME][ctr]
adbkHomeCached = self.hourlyByOKMethodCount[METHOD_PROPFIND_CACHED_ADDRESSBOOK_HOME][ctr]
adbkHomeTotal = adbkHomeUncached + adbkHomeCached
-
+
principalUncached = self.hourlyByOKMethodCount[METHOD_PROPFIND_PRINCIPALS][ctr]
principalCached = self.hourlyByOKMethodCount[METHOD_PROPFIND_CACHED_PRINCIPALS][ctr]
principalTotal = principalUncached + principalCached
-
-
+
row.append("%d (%2d%%)" % (calHomeUncached, safePercent(calHomeUncached, calHomeTotal),))
row.append("%d (%2d%%)" % (calHomeCached, safePercent(calHomeCached, calHomeTotal),))
-
+
row.append("%d (%2d%%)" % (adbkHomeUncached, safePercent(adbkHomeUncached, adbkHomeTotal),))
row.append("%d (%2d%%)" % (adbkHomeCached, safePercent(adbkHomeCached, adbkHomeTotal),))
-
+
row.append("%d (%2d%%)" % (principalUncached, safePercent(principalUncached, principalTotal),))
row.append("%d (%2d%%)" % (principalCached, safePercent(principalCached, principalTotal),))
-
+
totals[1] += calHomeUncached
totals[2] += calHomeCached
totals[3] += adbkHomeUncached
totals[4] += adbkHomeCached
totals[5] += principalUncached
totals[6] += principalCached
-
+
table.addRow(row)
-
+
row = []
row.append("Total:")
row.append("%d (%2d%%)" % (totals[1], safePercent(totals[1], totals[1] + totals[2]),))
@@ -1325,79 +1334,82 @@
row.append("%d (%2d%%)" % (totals[5], safePercent(totals[5], totals[5] + totals[6]),))
row.append("%d (%2d%%)" % (totals[6], safePercent(totals[6], totals[5] + totals[6]),))
table.addFooter(row)
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
def printQueueDepthResponseTime(self, doTabs):
-
+
table = tables.Table()
-
+
table.setDefaultColumnFormats((
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
))
-
+
table.addHeader(("Queue Depth", "Av. Response Time (ms)", "Number"))
- for k, v in sorted(self.averagedResponseTimeVsQueueDepth.iteritems(), key=lambda x:x[0]):
+ for k, v in sorted(self.averagedResponseTimeVsQueueDepth.iteritems(), key=lambda x: x[0]):
table.addRow((
"%d" % (k,),
"%.1f" % (v[1],),
"%d" % (v[0],),
))
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
def printXXXMethodDetails(self, data, doTabs, verticalTotals=True):
-
+
table = tables.Table()
-
- header = ["Method",]
- formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),]
- for k in sorted(data.keys(), key=lambda x:x.lower()):
+
+ header = ["Method", ]
+ formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY), ]
+ for k in sorted(data.keys(), key=lambda x: x.lower()):
header.append(k)
formats.append(tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY))
table.addHeader(header)
table.setDefaultColumnFormats(formats)
-
+
# Get full set of methods
methods = set()
for v in data.itervalues():
methods.update(v.keys())
-
+
for method in sorted(methods):
if method == " TOTAL":
continue
row = []
row.append(method)
- for k,v in sorted(data.iteritems(), key=lambda x:x[0].lower()):
+ for k, v in sorted(data.iteritems(), key=lambda x: x[0].lower()):
total = v[" TOTAL"] if verticalTotals else data[" TOTAL"][method]
row.append("%d (%2d%%)" % (v[method], safePercent(v[method], total),))
table.addRow(row)
-
+
row = []
row.append("Total:")
- for k,v in sorted(data.iteritems(), key=lambda x:x[0].lower()):
+ for k, v in sorted(data.iteritems(), key=lambda x: x[0].lower()):
row.append("%d" % (v[" TOTAL"],))
table.addFooter(row)
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
+
def printInstanceCount(self, doTabs):
-
+
total = sum(self.instanceCount.values())
-
+
table = tables.Table()
table.addHeader(("Instance ID", "Count", "%% Total",))
table.setDefaultColumnFormats((
- tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
+ tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.1f%%%%", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
))
-
+
# Top 100 only
def safeIntKey(v):
try:
@@ -1411,66 +1423,69 @@
value,
safePercent(value, total, 1000.0),
))
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
+
def printURICounts(self, doTabs):
-
+
total = sum(self.requestURI.values())
-
+
table = tables.Table()
table.addHeader(("Request URI", "Count", "%% Total",))
table.setDefaultColumnFormats((
- tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
+ tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.1f%%%%", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
))
-
+
# Top 100 only
- for key, value in sorted(self.requestURI.iteritems(), key=lambda x:x[1], reverse=True)[:100]:
+ for key, value in sorted(self.requestURI.iteritems(), key=lambda x: x[1], reverse=True)[:100]:
table.addRow((
key,
value,
safePercent(value, total, 1000.0),
))
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
+
def printUserWeights(self, doTabs):
-
+
total = sum(self.userWeights.values())
-
+
table = tables.Table()
table.addHeader(("User ID", "Weight", "%% Total",))
table.setDefaultColumnFormats((
- tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
+ tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.1f%%%%", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
))
-
+
# Top 100 only
- for key, value in sorted(self.userWeights.iteritems(), key=lambda x:x[1], reverse=True)[:100]:
+ for key, value in sorted(self.userWeights.iteritems(), key=lambda x: x[1], reverse=True)[:100]:
table.addRow((
key,
value,
safePercent(value, total, 1000.0),
))
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
+
def printResponseCounts(self, doTabs):
-
+
table = tables.Table()
table.addHeader(("Method", "Av. Response Count",))
table.setDefaultColumnFormats((
- tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
+ tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
))
-
- for method, value in sorted(self.averageResponseCountByMethod.iteritems(), key=lambda x:x[0]):
+
+ for method, value in sorted(self.averageResponseCountByMethod.iteritems(), key=lambda x: x[0]):
if method == " TOTAL":
continue
table.addRow((
@@ -1479,12 +1494,13 @@
))
table.addFooter(("Total:", self.averageResponseCountByMethod[" TOTAL"],))
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
+
def printUserResponseTimes(self, doTabs):
-
+
totalCount = 0
averages = {}
for user in self.userResponseTimes.keys():
@@ -1496,21 +1512,22 @@
table = tables.Table()
table.addHeader(("User ID/Client", "Av. Response (ms)", "%% Total",))
table.setDefaultColumnFormats((
- tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
+ tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),
tables.Table.ColumnFormat("%d", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
tables.Table.ColumnFormat("%.1f%%%%", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
))
-
- for key, value in sorted(averages.iteritems(), key=lambda x:x[1], reverse=True):
+
+ for key, value in sorted(averages.iteritems(), key=lambda x: x[1], reverse=True):
table.addRow((
key,
value,
safePercent(value, total, 1000.0),
))
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
+
def printUserInteractionCounts(self, doTabs):
table = tables.Table()
table.setDefaultColumnFormats((
@@ -1528,32 +1545,33 @@
print("")
+
class TablePrinter(object):
-
+
@classmethod
def printDictDictTable(cls, data, doTabs):
-
+
table = tables.Table()
-
- header = ["",]
- formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),]
- for k in sorted(data.keys(), key=lambda x:x.lower()):
+
+ header = ["", ]
+ formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY), ]
+ for k in sorted(data.keys(), key=lambda x: x.lower()):
header.append(k)
formats.append(tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.RIGHT_JUSTIFY))
table.addHeader(header)
table.setDefaultColumnFormats(formats)
-
+
# Get full set of row names
rowNames = set()
for v in data.itervalues():
rowNames.update(v.keys())
-
+
for rowName in sorted(rowNames):
if rowName == " TOTAL":
continue
row = []
row.append(rowName)
- for k,v in sorted(data.iteritems(), key=lambda x:x[0].lower()):
+ for k, v in sorted(data.iteritems(), key=lambda x: x[0].lower()):
value = v[rowName]
if type(value) is str:
row.append(value)
@@ -1568,11 +1586,11 @@
else:
row.append(fmt % (value,))
table.addRow(row)
-
+
if " TOTAL" in rowNames:
row = []
row.append("Total:")
- for k,v in sorted(data.iteritems(), key=lambda x:x[0].lower()):
+ for k, v in sorted(data.iteritems(), key=lambda x: x[0].lower()):
value = v[" TOTAL"]
if type(value) is str:
fmt = "%s"
@@ -1582,18 +1600,21 @@
fmt = "%d"
row.append(fmt % (value,))
table.addFooter(row)
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
+
class Differ(TablePrinter):
-
+
def __init__(self, analyzers):
-
+
self.analyzers = analyzers
-
+
+
def printAll(self, doTabs, summary):
-
+
self.printInfo(doTabs)
print("Load Analysis Differences")
@@ -1603,40 +1624,42 @@
if not summary:
print("Client Differences")
self.printClientTotals(doTabs)
-
+
print("Protocol Count Differences")
self.printMethodCountDetails(doTabs)
-
+
print("Average Response Time Differences")
self.printMethodTimingDetails("clientByMethodAveragedTime", doTabs)
-
+
print("Total Response Time Differences")
self.printMethodTimingDetails("clientByMethodTotalTime", doTabs)
-
+
print("Average Response Count Differences")
self.printResponseCountDetails(doTabs)
+
def printInfo(self, doTabs):
-
+
table = tables.Table()
table.addRow(("Run on:", self.analyzers[0].startTime.isoformat(' '),))
table.addRow(("Host:", self.analyzers[0].host,))
for ctr, analyzer in enumerate(self.analyzers):
- table.addRow(("Log Start #%d:" % (ctr+1,), analyzer.startLog,))
+ table.addRow(("Log Start #%d:" % (ctr + 1,), analyzer.startLog,))
if self.analyzers[0].filterByUser:
table.addRow(("Filtered to user:", self.analyzers[0].filterByUser,))
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
def printLoadAnalysisDetails(self, doTabs):
-
+
# First gather all the data
- byCategory = collections.defaultdict(lambda:collections.defaultdict(str))
+ byCategory = collections.defaultdict(lambda: collections.defaultdict(str))
firstData = []
lastData = []
for ctr, analyzer in enumerate(self.analyzers):
- title = "#%d %s" % (ctr+1, analyzer.startLog[0:11],)
+ title = "#%d %s" % (ctr + 1, analyzer.startLog[0:11],)
totalRequests = 0
totalTime = 0.0
@@ -1651,7 +1674,7 @@
byCategory[title]["#1 Total Requests"] = "%d" % (totalRequests,)
byCategory[title]["#2 Av. Response Time (ms)"] = "%.1f" % (safePercent(totalTime, totalRequests, 1.0),)
-
+
if ctr == 0:
firstData = (totalRequests, safePercent(totalTime, totalRequests, 1.0),)
lastData = (totalRequests, safePercent(totalTime, totalRequests, 1.0),)
@@ -1662,8 +1685,9 @@
self.printDictDictTable(byCategory, doTabs)
+
def printHourlyTotals(self, doTabs):
-
+
table = tables.Table()
hdr1 = [""]
hdr2 = ["Local (UTC)"]
@@ -1671,10 +1695,10 @@
fmt1 = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY)]
fmt23 = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY)]
for ctr, analyzer in enumerate(self.analyzers):
- title = "#%d %s" % (ctr+1, analyzer.startLog[0:11],)
+ title = "#%d %s" % (ctr + 1, analyzer.startLog[0:11],)
hdr1.extend([title, "", ""])
- hdr2.extend(["Total", "Av. Requests", "Av. Response"])
- hdr3.extend(["Requests", "Per Second", "Time(ms)"])
+ hdr2.extend(["Total", "Av. Requests", "Av. Response"])
+ hdr3.extend(["Requests", "Per Second", "Time(ms)"])
fmt1.extend([
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY, span=3),
None,
@@ -1687,8 +1711,8 @@
])
title = "Difference"
hdr1.extend([title, "", ""])
- hdr2.extend(["Total", "Av. Requests", "Av. Response"])
- hdr3.extend(["Requests", "Per Second", "Time(ms)"])
+ hdr2.extend(["Total", "Av. Requests", "Av. Response"])
+ hdr3.extend(["Requests", "Per Second", "Time(ms)"])
fmt1.extend([
tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY, span=3),
None,
@@ -1712,14 +1736,14 @@
tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
])
table.setDefaultColumnFormats(fmt)
-
+
totalRequests = [0] * len(self.analyzers)
totalTime = [0.0] * len(self.analyzers)
for ctr in xrange(self.analyzers[0].timeBucketCount):
hour = self.analyzers[0].getHourFromIndex(ctr)
if hour is None:
continue
-
+
diffRequests = None
diffRequestRate = None
diffTime = None
@@ -1727,7 +1751,7 @@
for ctr2, analyzer in enumerate(self.analyzers):
value = analyzer.hourlyTotals[ctr]
countRequests, _ignore503, _ignore_countDepth, _ignore_maxDepth, countTime = value
-
+
requestRate = (1.0 * countRequests) / analyzer.resolutionMinutes / 60
averageTime = safePercent(countTime, countRequests, 1.0)
row.extend([
@@ -1737,7 +1761,7 @@
])
totalRequests[ctr2] += countRequests
totalTime[ctr2] += countTime
-
+
diffRequests = countRequests if diffRequests is None else countRequests - diffRequests
diffRequestRate = requestRate if diffRequestRate is None else requestRate - diffRequestRate
diffTime = averageTime if diffTime is None else averageTime - diffTime
@@ -1748,7 +1772,7 @@
diffTime,
])
table.addRow(row)
-
+
ftr = ["Total:"]
diffRequests = None
diffRequestRate = None
@@ -1761,7 +1785,7 @@
requestRate,
averageTime,
])
-
+
diffRequests = totalRequests[ctr] if diffRequests is None else totalRequests[ctr] - diffRequests
diffRequestRate = requestRate if diffRequestRate is None else requestRate - diffRequestRate
diffTime = averageTime if diffTime is None else averageTime - diffTime
@@ -1780,21 +1804,22 @@
tables.Table.ColumnFormat("%.1f", tables.Table.ColumnFormat.RIGHT_JUSTIFY),
])
table.addFooter(ftr, columnFormats=fmt)
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
def printClientTotals(self, doTabs):
-
+
table = tables.Table()
-
- header1 = ["Client",]
- header2 = ["",]
- header1formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),]
- header2formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY),]
- formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY),]
+
+ header1 = ["Client", ]
+ header2 = ["", ]
+ header1formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY), ]
+ header2formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.CENTER_JUSTIFY), ]
+ formats = [tables.Table.ColumnFormat("%s", tables.Table.ColumnFormat.LEFT_JUSTIFY), ]
for ctr, analyzer in enumerate(self.analyzers):
- title = "#%d %s" % (ctr+1, analyzer.startLog[0:11],)
+ title = "#%d %s" % (ctr + 1, analyzer.startLog[0:11],)
header1.extend((title, ""))
header2.extend(("Total", "Unique",))
header1formats.extend((
@@ -1829,15 +1854,15 @@
table.addHeaderDivider(skipColumns=(0,))
table.addHeader(header2, columnFormats=header2formats)
table.setDefaultColumnFormats(formats)
-
+
allClients = set()
for analyzer in self.analyzers:
allClients.update(analyzer.clientTotals.keys())
- for title in sorted(allClients, key=lambda x:x.lower()):
+ for title in sorted(allClients, key=lambda x: x.lower()):
if title == " TOTAL":
continue
- row = [title,]
+ row = [title, ]
for analyzer in self.analyzers:
row.append("%d (%2d%%)" % (analyzer.clientTotals[title][0], safePercent(analyzer.clientTotals[title][0], analyzer.clientTotals[" TOTAL"][0]),))
row.append("%d (%2d%%)" % (len(analyzer.clientTotals[title][1]), safePercent(len(analyzer.clientTotals[title][1]), len(analyzer.clientTotals[" TOTAL"][1])),))
@@ -1850,8 +1875,8 @@
row.append("%+d (%+.1f%%)" % (lastUnique[0] - firstUnique[0], lastUnique[1] - firstUnique[1]))
table.addRow(row)
-
- footer = ["All",]
+
+ footer = ["All", ]
for analyzer in self.analyzers:
footer.append("%d " % (analyzer.clientTotals[" TOTAL"][0],))
footer.append("%d " % (len(analyzer.clientTotals[" TOTAL"][1]),))
@@ -1862,17 +1887,18 @@
footer.append("%+d (%+.1f%%)" % (lastTotal - firstTotal, safePercent(lastTotal - firstTotal, firstTotal, 100.0),))
footer.append("%+d (%+.1f%%)" % (lastUnique - firstUnique, safePercent(lastUnique - firstUnique, firstUnique, 100.0),))
table.addFooter(footer)
-
+
table.printTabDelimitedData() if doTabs else table.printTable()
print("")
-
+
+
def printMethodCountDetails(self, doTabs):
-
+
# First gather all the data
allMethods = set()
- byMethod = collections.defaultdict(lambda:collections.defaultdict(int))
+ byMethod = collections.defaultdict(lambda: collections.defaultdict(int))
for ctr, analyzer in enumerate(self.analyzers):
- title = "#%d %s" % (ctr+1, analyzer.startLog[0:11],)
+ title = "#%d %s" % (ctr + 1, analyzer.startLog[0:11],)
for method, value in analyzer.clientByMethodCount[" TOTAL"].iteritems():
byMethod[title][method] = value
allMethods.add(method)
@@ -1892,13 +1918,14 @@
self.printDictDictTable(byMethod, doTabs)
+
def printMethodTimingDetails(self, timingType, doTabs):
-
+
# First gather all the data
allMethods = set()
- byMethod = collections.defaultdict(lambda:collections.defaultdict(int))
+ byMethod = collections.defaultdict(lambda: collections.defaultdict(int))
for ctr, analyzer in enumerate(self.analyzers):
- title = "#%d %s" % (ctr+1, analyzer.startLog[0:11],)
+ title = "#%d %s" % (ctr + 1, analyzer.startLog[0:11],)
for method, value in getattr(analyzer, timingType)[" TOTAL"].iteritems():
byMethod[title][method] = value
allMethods.add(method)
@@ -1928,13 +1955,14 @@
self.printDictDictTable(byMethod, doTabs)
+
def printResponseCountDetails(self, doTabs):
-
+
# First gather all the data
allMethods = set()
- byMethod = collections.defaultdict(lambda:collections.defaultdict(int))
+ byMethod = collections.defaultdict(lambda: collections.defaultdict(int))
for ctr, analyzer in enumerate(self.analyzers):
- title = "#%d %s" % (ctr+1, analyzer.startLog[0:11],)
+ title = "#%d %s" % (ctr + 1, analyzer.startLog[0:11],)
for method, value in analyzer.averageResponseCountByMethod.iteritems():
byMethod[title][method] = value
allMethods.add(method)
@@ -1952,6 +1980,8 @@
self.printDictDictTable(byMethod, doTabs)
+
+
def usage(error_msg=None):
if error_msg:
print(error_msg)
@@ -2057,7 +2087,7 @@
print("Path does not exist: '%s'. Ignoring." % (arg,))
continue
logs.append(arg)
-
+
analyzers = []
for log in logs:
if diffMode or not analyzers:
@@ -2069,7 +2099,7 @@
Differ(analyzers).printAll(doTabDelimited, summary)
else:
analyzers[0].printAll(doTabDelimited, summary)
-
+
if repeat:
while True:
again = raw_input("Repeat analysis [y/n]:")
@@ -2080,7 +2110,7 @@
print("Analyzing: %s" % (arg,))
analyzers[0].analyzeLogFile(arg)
analyzers[0].printAll(doTabDelimited, summary)
-
+
except Exception, e:
print(traceback.print_exc())
sys.exit(str(e))
Modified: CalendarServer/trunk/contrib/tools/sqldata_from_path.py
===================================================================
--- CalendarServer/trunk/contrib/tools/sqldata_from_path.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/sqldata_from_path.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -45,20 +45,20 @@
sys.exit(0)
if __name__ == '__main__':
-
+
options, args = getopt.getopt(sys.argv[1:], "", [])
if options:
usage("No options allowed")
-
+
if len(args) != 1:
usage("One argument only must be provided.")
-
+
# Determine the type of path
segments = args[0].split("/")
if len(segments) not in (6, 8,):
usage("Must provide a path to a calendar or addressbook object resource.")
-
+
if segments[0] != "":
usage("Must provide a /calendars/... or /addressbooks/... path.")
if segments[1] not in ("calendars", "addressbooks",):
@@ -66,12 +66,11 @@
if segments[2] != "__uids__":
usage("Must provide a /.../__uids__/... path.")
-
datatype = segments[1]
uid = segments[5 if len(segments[3]) == 2 else 3]
collection = segments[6 if len(segments[3]) == 2 else 4]
resource = segments[7 if len(segments[3]) == 2 else 5]
-
+
sqlstrings = {
"calendars": {
"home_table" : "CALENDAR_HOME",
@@ -81,7 +80,7 @@
"bind_home_id" : "CALENDAR_HOME_RESOURCE_ID",
"bind_name" : "CALENDAR_RESOURCE_NAME",
"bind_id" : "CALENDAR_RESOURCE_ID",
-
+
"object_bind_id" : "CALENDAR_RESOURCE_ID",
"object_name" : "RESOURCE_NAME",
"object_data" : "ICALENDAR_TEXT",
@@ -95,13 +94,13 @@
"bind_home_id" : "ADDRESSBOOK_HOME_RESOURCE_ID",
"bind_name" : "ADDRESSBOOK_RESOURCE_NAME",
"bind_id" : "ADDRESSBOOK_RESOURCE_ID",
-
+
"object_bind_id" : "ADDRESSBOOK_RESOURCE_ID",
"object_name" : "RESOURCE_NAME",
"object_data" : "VCARD_TEXT",
},
}
-
+
sqlstrings[datatype]["uid"] = uid
sqlstrings[datatype]["collection"] = collection
sqlstrings[datatype]["resource"] = resource
@@ -113,4 +112,3 @@
select RESOURCE_ID from %(home_table)s where OWNER_UID = '%(uid)s'
)
);""" % sqlstrings[datatype])
-
Modified: CalendarServer/trunk/contrib/tools/tables.py
===================================================================
--- CalendarServer/trunk/contrib/tools/tables.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/tables.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -20,28 +20,29 @@
class Table(object):
"""
Class that allows pretty printing ascii tables.
-
+
The table supports multiline headers and footers, independent
- column formatting by row, alternative tab-delimited output.
+ column formatting by row, alternative tab-delimited output.
"""
-
+
class ColumnFormat(object):
"""
Defines the format string, justification and span for a column.
"""
-
+
LEFT_JUSTIFY = 0
RIGHT_JUSTIFY = 1
CENTER_JUSTIFY = 2
def __init__(self, strFormat="%s", justify=LEFT_JUSTIFY, span=1):
-
+
self.format = strFormat
self.justify = justify
self.span = span
+
def __init__(self, table=None):
-
+
self.headers = []
self.headerColumnFormats = []
self.rows = []
@@ -54,65 +55,76 @@
if table:
self.setData(table)
+
def setData(self, table):
-
+
self.hasTitles = True
self.headers.append(table[0])
self.rows = table[1:]
self._getMaxColumnCount()
+
def setDefaultColumnFormats(self, columnFormats):
-
+
self.defaultColumnFormats = columnFormats
+
def addDefaultColumnFormat(self, columnFormat):
-
+
self.defaultColumnFormats.append(columnFormat)
+
def setHeaders(self, rows, columnFormats=None):
-
+
self.headers = rows
- self.headerColumnFormats = columnFormats if columnFormats else [None,] * len(self.headers)
+ self.headerColumnFormats = columnFormats if columnFormats else [None, ] * len(self.headers)
self._getMaxColumnCount()
+
def addHeader(self, row, columnFormats=None):
-
+
self.headers.append(row)
self.headerColumnFormats.append(columnFormats)
self._getMaxColumnCount()
+
def addHeaderDivider(self, skipColumns=()):
-
+
self.headers.append((None, skipColumns,))
self.headerColumnFormats.append(None)
+
def setFooters(self, row, columnFormats=None):
-
+
self.footers = row
- self.footerColumnFormats = columnFormats if columnFormats else [None,] * len(self.footers)
+ self.footerColumnFormats = columnFormats if columnFormats else [None, ] * len(self.footers)
self._getMaxColumnCount()
+
def addFooter(self, row, columnFormats=None):
-
+
self.footers.append(row)
self.footerColumnFormats.append(columnFormats)
self._getMaxColumnCount()
+
def addRow(self, row=None, columnFormats=None):
-
+
self.rows.append(row)
if columnFormats:
self.columnFormatsByRow[len(self.rows) - 1] = columnFormats
self._getMaxColumnCount()
-
+
+
def addDivider(self, skipColumns=()):
-
+
self.rows.append((None, skipColumns,))
+
def printTable(self, os=stdout):
-
+
maxWidths = self._getMaxWidths()
-
+
self.printDivider(os, maxWidths, False)
if self.headers:
for header, format in zip(self.headers, self.headerColumnFormats):
@@ -125,9 +137,10 @@
for footer, format in zip(self.footers, self.footerColumnFormats):
self.printRow(os, footer, self._getFooterColumnFormat(format), maxWidths)
self.printDivider(os, maxWidths, False)
-
+
+
def printRow(self, os, row, format, maxWidths):
-
+
if row is None or type(row) is tuple and row[0] is None:
self.printDivider(os, maxWidths, skipColumns=row[1] if type(row) is tuple else ())
else:
@@ -148,8 +161,8 @@
t += " " + text + " |"
t += "\n"
os.write(t)
-
+
def printDivider(self, os, maxWidths, intermediate=True, double=False, skipColumns=()):
t = "|" if intermediate else "+"
for widthctr, width in enumerate(maxWidths):
@@ -162,8 +175,9 @@
t += "\n"
os.write(t)
+
def printTabDelimitedData(self, os=stdout, footer=True):
-
+
if self.headers:
titles = [""] * len(self.headers[0])
for row, header in enumerate(self.headers):
@@ -176,20 +190,22 @@
for footer in self.footers:
self.printTabDelimitedRow(os, footer, self._getFooterColumnFormat(self.footerColumnFormats[0]))
+
def printTabDelimitedRow(self, os, row, format):
-
+
if row is None:
row = [""] * self.columnCount
-
+
if len(row) != self.columnCount:
row = list(row)
row.extend([""] * (self.columnCount - len(row)))
textItems = [self._columnText(row, ctr, format) for ctr in xrange((len(row)))]
os.write("\t".join(textItems) + "\n")
-
+
+
def _getMaxColumnCount(self):
-
+
self.columnCount = 0
if self.headers:
for header in self.headers:
@@ -200,6 +216,7 @@
for footer in self.footers:
self.columnCount = max(self.columnCount, len(footer) if footer else 0)
+
def _getMaxWidths(self):
maxWidths = [0] * self.columnCount
@@ -207,55 +224,60 @@
if self.headers:
for header, format in zip(self.headers, self.headerColumnFormats):
self._updateMaxWidthsFromRow(header, self._getHeaderColumnFormat(format), maxWidths)
-
+
for ctr, row in enumerate(self.rows):
self._updateMaxWidthsFromRow(row, self._getColumnFormatForRow(ctr), maxWidths)
if self.footers:
for footer, format in zip(self.footers, self.footerColumnFormats):
self._updateMaxWidthsFromRow(footer, self._getFooterColumnFormat(format), maxWidths)
-
+
return maxWidths
+
def _updateMaxWidthsFromRow(self, row, format, maxWidths):
-
+
if row and (type(row) is not tuple or row[0] is not None):
ctr = 0
while ctr < len(row):
-
- text = self._columnText(row, ctr, format)
+
+ text = self._columnText(row, ctr, format)
startCtr = ctr
for _ignore_span in xrange(format[startCtr].span if format else 1):
maxWidths[ctr] = max(maxWidths[ctr], len(text) / (format[startCtr].span if format else 1))
ctr += 1
-
+
+
def _getHeaderColumnFormat(self, format):
-
+
if format:
return format
else:
justify = Table.ColumnFormat.CENTER_JUSTIFY if len(self.headers) == 1 else Table.ColumnFormat.LEFT_JUSTIFY
- return [Table.ColumnFormat(justify = justify)] * self.columnCount
+ return [Table.ColumnFormat(justify=justify)] * self.columnCount
+
def _getFooterColumnFormat(self, format):
-
+
if format:
return format
else:
return self.defaultColumnFormats
+
def _getColumnFormatForRow(self, ctr):
-
+
if ctr in self.columnFormatsByRow:
return self.columnFormatsByRow[ctr]
else:
return self.defaultColumnFormats
+
def _columnText(self, row, column, format, width=0):
-
+
if row is None or column >= len(row):
return ""
-
+
colData = row[column]
if colData is None:
colData = ""
Modified: CalendarServer/trunk/contrib/tools/test_protocolanalysis.py
===================================================================
--- CalendarServer/trunk/contrib/tools/test_protocolanalysis.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/contrib/tools/test_protocolanalysis.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -44,7 +44,7 @@
path.setContent(
# A user accessing his own calendar
format % dict(user="user01", other="user01") +
-
+
# A user accessing the calendar of one other person
format % dict(user="user02", other="user01") +
Modified: CalendarServer/trunk/twistedcaldav/client/__init__.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/client/__init__.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/client/__init__.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -26,4 +26,3 @@
"x-forwarded-host": (generateList, singleHeader),
"x-forwarded-server": (generateList, singleHeader),
})
-
Modified: CalendarServer/trunk/twistedcaldav/client/pool.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/client/pool.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/client/pool.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -57,6 +57,7 @@
self.onConnect = Deferred()
self.afterConnect = Deferred()
+
def clientConnectionLost(self, connector, reason):
"""
Notify the connectionPool that we've lost our connection.
@@ -70,6 +71,7 @@
# The reactor is stopping; don't reconnect
return
+
def clientConnectionFailed(self, connector, reason):
"""
Notify the connectionPool that we're unable to connect
@@ -81,6 +83,7 @@
self.reactor.callLater(0, self.afterConnect.errback, reason)
del self.afterConnect
+
def buildProtocol(self, addr):
self.instance = self.protocol()
self.reactor.callLater(0, self.onConnect.callback, self.instance)
@@ -125,7 +128,7 @@
@param reactor: An L{IReactorTCP} provider used to initiate new
connections.
"""
-
+
self._name = name
self._scheme = scheme
self._endpoint = endpoint
@@ -145,6 +148,7 @@
self._pendingConnects = 0
self._pendingRequests = []
+
def _isIdle(self):
return (
len(self._busyClients) == 0 and
@@ -152,6 +156,7 @@
self._pendingConnects == 0
)
+
def _shutdownCallback(self):
self.shutdown_requested = True
if self._isIdle():
@@ -159,6 +164,7 @@
self.shutdown_deferred = Deferred()
return self.shutdown_deferred
+
def _newClientConnection(self):
"""
Create a new client connection.
@@ -198,9 +204,10 @@
d = factory.onConnect
d.addCallbacks(_doneOK, _doneError)
-
+
return d
+
def _performRequestOnClient(self, client, request, *args, **kwargs):
"""
Perform the given request on the given client.
@@ -231,6 +238,7 @@
d.addCallbacks(_freeClientAfterRequest, _goneClientAfterError)
return d
+
@inlineCallbacks
def submitRequest(self, request, *args, **kwargs):
"""
@@ -260,12 +268,12 @@
response = (yield self._submitRequest(request, args, kwargs))
except (ConnectionLost, ConnectionDone, ConnectError), e:
- self.log_error("HTTP pooled client connection error (attempt: %d) - retrying: %s" % (ctr+1, e,))
+ self.log_error("HTTP pooled client connection error (attempt: %d) - retrying: %s" % (ctr + 1, e,))
continue
-
+
# TODO: find the proper cause of these assertions and fix
except (AssertionError,), e:
- self.log_error("HTTP pooled client connection assertion error (attempt: %d) - retrying: %s" % (ctr+1, e,))
+ self.log_error("HTTP pooled client connection assertion error (attempt: %d) - retrying: %s" % (ctr + 1, e,))
continue
else:
@@ -274,6 +282,7 @@
self.log_error("HTTP pooled client connection error - exhausted retry attempts.")
raise HTTPError(StatusResponse(responsecode.BAD_GATEWAY, "Could not connect to HTTP pooled client host."))
+
def _submitRequest(self, request, *args, **kwargs):
"""
Select an available client and perform the given request on it.
@@ -303,6 +312,7 @@
return d
+
def _logClientStats(self):
self.log_debug("Clients #free: %d, #busy: %d, "
"#pending: %d, #queued: %d" % (
@@ -311,6 +321,7 @@
self._pendingConnects,
len(self._pendingRequests)))
+
def clientGone(self, client):
"""
Notify that the given client is to be removed from the pool completely.
@@ -328,6 +339,7 @@
self._processPending()
+
def clientBusy(self, client):
"""
Notify that the given client is being used to complete a request.
@@ -343,6 +355,7 @@
self.log_debug("Busied client: %r" % (client,))
self._logClientStats()
+
def clientFree(self, client):
"""
Notify that the given client is free to handle more requests.
@@ -362,6 +375,7 @@
self._processPending()
+
def _processPending(self):
if len(self._pendingRequests) > 0:
d, request, args, kwargs = self._pendingRequests.pop(0)
@@ -374,6 +388,7 @@
_ign_d.addCallbacks(d.callback, d.errback)
+
def suggestMaxClients(self, maxClients):
"""
Suggest the maximum number of concurrently connected clients.
@@ -386,7 +401,7 @@
_clientPools = {} # Maps a host:port to a pool object
def installPools(hosts, maxClients=5, reactor=None):
-
+
for name, url in hosts:
installPool(
name,
Modified: CalendarServer/trunk/twistedcaldav/client/reverseproxy.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/client/reverseproxy.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/client/reverseproxy.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -38,46 +38,48 @@
def __init__(self, poolID, *args, **kwargs):
"""
-
+
@param poolID: idenitifier of the pool to use
@type poolID: C{str}
"""
-
+
self.poolID = poolID
- self._args = args
+ self._args = args
self._kwargs = kwargs
self.allowMultiHop = False
+
def isCollection(self):
return True
+
def exists(self):
return False
+
def renderHTTP(self, request):
"""
Do the reverse proxy request and return the response.
@param request: the incoming request that needs to be proxied.
@type request: L{Request}
-
+
@return: Deferred L{Response}
"""
-
+
self.logger.info("%s %s %s" % (request.method, request.uri, "HTTP/%s.%s" % request.clientproto))
# Check for multi-hop
if not self.allowMultiHop:
- x_server = request.headers.getHeader("x-forwarded-server")
+ x_server = request.headers.getHeader("x-forwarded-server")
if x_server:
for item in x_server:
if item.lower() == config.ServerHostName.lower():
raise HTTPError(StatusResponse(responsecode.BAD_GATEWAY, "Too many x-forwarded-server hops"))
-
-
+
clientPool = getHTTPClientPool(self.poolID)
proxyRequest = ClientRequest(request.method, request.uri, request.headers, request.stream)
-
+
# Need x-forwarded-(for|host|server) headers. First strip any existing ones out, then add ours
proxyRequest.headers.removeHeader("x-forwarded-host")
proxyRequest.headers.removeHeader("x-forwarded-for")
@@ -87,4 +89,3 @@
proxyRequest.headers.addRawHeader("x-forwarded-server", config.ServerHostName)
return clientPool.submitRequest(proxyRequest)
-
Modified: CalendarServer/trunk/twistedcaldav/client/test/test_reverseproxy.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/client/test/test_reverseproxy.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/client/test/test_reverseproxy.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -17,7 +17,7 @@
from twext.web2.client.http import ClientRequest
from twext.web2.http import HTTPError
from twext.web2.test.test_server import SimpleRequest
-from twistedcaldav.client.pool import _clientPools
+from twistedcaldav.client.pool import _clientPools
from twistedcaldav.client.reverseproxy import ReverseProxyResource
from twistedcaldav.config import config
import twistedcaldav.test.util
@@ -28,9 +28,9 @@
"""
def setUp(self):
-
+
class DummyPool(object):
-
+
def submitRequest(self, request):
return request
@@ -38,35 +38,41 @@
super(ReverseProxyNoLoop, self).setUp()
+
def test_No_Header(self):
proxy = ReverseProxyResource("pool")
request = SimpleRequest(proxy, "GET", "/")
self.assertIsInstance(proxy.renderHTTP(request), ClientRequest)
+
def test_Header_Other_Server(self):
proxy = ReverseProxyResource("pool")
request = SimpleRequest(proxy, "GET", "/")
request.headers.addRawHeader("x-forwarded-server", "foobar.example.com")
self.assertIsInstance(proxy.renderHTTP(request), ClientRequest)
+
def test_Header_Other_Servers(self):
proxy = ReverseProxyResource("pool")
request = SimpleRequest(proxy, "GET", "/")
request.headers.setHeader("x-forwarded-server", ("foobar.example.com", "bar.example.com",))
self.assertIsInstance(proxy.renderHTTP(request), ClientRequest)
+
def test_Header_Our_Server(self):
proxy = ReverseProxyResource("pool")
request = SimpleRequest(proxy, "GET", "/")
request.headers.addRawHeader("x-forwarded-server", config.ServerHostName)
self.assertRaises(HTTPError, proxy.renderHTTP, request)
+
def test_Header_Our_Server_Moxied(self):
proxy = ReverseProxyResource("pool")
request = SimpleRequest(proxy, "GET", "/")
request.headers.setHeader("x-forwarded-server", ("foobar.example.com", "bar.example.com", config.ServerHostName,))
self.assertRaises(HTTPError, proxy.renderHTTP, request)
+
def test_Header_Our_Server_Allowed(self):
proxy = ReverseProxyResource("pool")
proxy.allowMultiHop = True
Modified: CalendarServer/trunk/twistedcaldav/datafilters/addressdata.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/datafilters/addressdata.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/datafilters/addressdata.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -29,23 +29,24 @@
def __init__(self, addressdata):
"""
-
+
@param addressdata: the XML element describing how to filter
@type addressdata: L{AddressData}
"""
-
+
self.addressdata = addressdata
-
+
+
def filter(self, vcard):
"""
Filter the supplied vCard object using the request information.
@param vcard: vCard object
@type vcard: L{Component} or C{str}
-
+
@return: L{Component} for the filtered vcard data
"""
-
+
# Empty element: get all data
if not self.addressdata.children:
return vcard
@@ -56,9 +57,10 @@
# Filter data based on any provided CARDAV:prop element, or use all current data
if self.addressdata.properties:
vcard = self.propFilter(self.addressdata.properties, vcard)
-
+
return vcard
+
def propFilter(self, properties, vcard):
"""
Returns a vCard component object filtered according to the properties.
@@ -81,7 +83,7 @@
name = xml_property.property_name
for vcard_property in vcard.properties(name):
result.addProperty(vcard_property)
-
+
# add required properties
for requiredProperty in ('N', 'FN', 'VERSION'):
if not result.hasProperty(requiredProperty):
@@ -89,6 +91,7 @@
return result
+
def merge(self, vcardnew, vcardold):
"""
Address-data merging does not happen
Modified: CalendarServer/trunk/twistedcaldav/datafilters/calendardata.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/datafilters/calendardata.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/datafilters/calendardata.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -14,7 +14,7 @@
# limitations under the License.
##
-from twistedcaldav.caldavxml import LimitRecurrenceSet, Expand, AllComponents,\
+from twistedcaldav.caldavxml import LimitRecurrenceSet, Expand, AllComponents, \
AllProperties
from twistedcaldav.datafilters.filter import CalendarFilter
from twistedcaldav.dateops import clipPeriod
@@ -32,26 +32,27 @@
def __init__(self, calendardata, timezone=None):
"""
-
+
@param calendardata: the XML element describing how to filter
@type calendardata: L{CalendarData}
@param timezone: the VTIMEZONE to use for floating/all-day
@type timezone: L{Component}
"""
-
+
self.calendardata = calendardata
self.timezone = timezone
-
+
+
def filter(self, ical):
"""
Filter the supplied iCalendar object using the request information.
@param ical: iCalendar object
@type ical: L{Component} or C{str}
-
+
@return: L{Component} for the filtered calendar data
"""
-
+
# Empty element: get all data
if not self.calendardata.children:
return ical
@@ -62,7 +63,7 @@
# Process the calendar data based on expand and limit options
if self.calendardata.freebusy_set:
ical = self.limitFreeBusy(ical)
-
+
if self.calendardata.recurrence_set:
if isinstance(self.calendardata.recurrence_set, LimitRecurrenceSet):
ical = self.limitRecurrence(ical)
@@ -72,9 +73,10 @@
# Filter data based on any provided CALDAV:comp element, or use all current data
if self.calendardata.component is not None:
ical = self.compFilter(self.calendardata.component, ical)
-
+
return ical
+
def compFilter(self, comp, component):
"""
Returns a calendar component object containing the data in the given
@@ -116,6 +118,7 @@
return result
+
def expandRecurrence(self, calendar, timezone=None):
"""
Expand the recurrence set into individual items.
@@ -124,7 +127,8 @@
@return: the L{Component} for the result.
"""
return calendar.expand(self.calendardata.recurrence_set.start, self.calendardata.recurrence_set.end, timezone)
-
+
+
def limitRecurrence(self, calendar):
"""
Limit the set of overridden instances returned to only those
@@ -135,7 +139,8 @@
"""
raise NotImplementedError()
return calendar
-
+
+
def limitFreeBusy(self, calendar):
"""
Limit the range of any FREEBUSY properties in the calendar, returning
@@ -143,11 +148,11 @@
@param calendar: the L{Component} for the calendar to operate on.
@return: the L{Component} for the result.
"""
-
+
# First check for any VFREEBUSYs - can ignore limit if there are none
if calendar.mainType() != "VFREEBUSY":
return calendar
-
+
# Create duplicate calendar and filter FREEBUSY properties
calendar = calendar.duplicate()
for component in calendar.subcomponents():
@@ -165,6 +170,7 @@
component.removeProperty(property)
return calendar
+
def merge(self, icalnew, icalold):
"""
Calendar-data merging does not happen
Modified: CalendarServer/trunk/twistedcaldav/datafilters/filter.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/datafilters/filter.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/datafilters/filter.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -27,25 +27,26 @@
Abstract class that defines an iCalendar filter/merge object
"""
-
def __init__(self):
pass
-
+
+
def filter(self, ical):
"""
Filter the supplied iCalendar object using the request information.
@param ical: iCalendar object
@type ical: L{Component}
-
+
@return: L{Component} for the filtered calendar data
"""
raise NotImplementedError
-
+
+
def merge(self, icalnew, icalold):
"""
Merge the old iCalendar object into the new iCalendar data using the request information.
-
+
@param icalnew: new iCalendar object to merge data into
@type icalnew: L{Component}
@param icalold: old iCalendar data to merge data from
@@ -53,6 +54,7 @@
"""
raise NotImplementedError
+
def validCalendar(self, ical):
# If we were passed a string, parse it out as a Component
@@ -61,36 +63,39 @@
ical = iComponent.fromString(ical)
except ValueError:
raise ValueError("Not a calendar: %r" % (ical,))
-
+
if ical is None or ical.name() != "VCALENDAR":
raise ValueError("Not a calendar: %r" % (ical,))
-
+
return ical
+
+
class AddressFilter(object):
"""
Abstract class that defines a vCard filter/merge object
"""
-
def __init__(self):
pass
-
+
+
def filter(self, vcard):
"""
Filter the supplied vCard object using the request information.
@param vcard: iCalendar object
@type vcard: L{Component}
-
+
@return: L{Component} for the filtered vcard data
"""
raise NotImplementedError
-
+
+
def merge(self, vcardnew, vcardold):
"""
Merge the old vcard object into the new vcard data using the request information.
-
+
@param vcardnew: new vcard object to merge data into
@type vcardnew: L{Component}
@param vcardold: old vcard data to merge data from
@@ -98,6 +103,7 @@
"""
raise NotImplementedError
+
def validAddress(self, vcard):
# If we were passed a string, parse it out as a Component
@@ -106,8 +112,8 @@
vcard = vComponent.fromString(vcard)
except ValueError:
raise ValueError("Not a vcard: %r" % (vcard,))
-
+
if vcard is None or vcard.name() != "VCARD":
raise ValueError("Not a vcard: %r" % (vcard,))
-
+
return vcard
Modified: CalendarServer/trunk/twistedcaldav/datafilters/privateevents.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/datafilters/privateevents.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/datafilters/privateevents.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -16,7 +16,7 @@
from twext.web2 import responsecode
from twext.web2.http import HTTPError, StatusResponse
-from twistedcaldav.caldavxml import Property, CalendarData, CalendarComponent,\
+from twistedcaldav.caldavxml import Property, CalendarData, CalendarComponent, \
AllProperties, AllComponents
from twistedcaldav.datafilters.calendardata import CalendarDataFilter
from twistedcaldav.datafilters.filter import CalendarFilter
@@ -33,30 +33,31 @@
def __init__(self, accessRestriction, isowner):
"""
-
+
@param accessRestriction: one of the access levels in L{Component}
@type accessRestriction: C{str}
@param isowner: whether the current user is the owner of the data
@type isowner: C{bool}
"""
-
+
self.accessRestriction = accessRestriction
self.isowner = isowner
-
+
+
def filter(self, ical):
"""
Filter the supplied iCalendar object using the request information.
@param ical: iCalendar object
@type ical: L{Component} or C{str}
-
+
@return: L{Component} for the filtered calendar data
"""
-
+
if self.isowner or self.accessRestriction == Component.ACCESS_PUBLIC or not self.accessRestriction:
# No need to filter for the owner or public event
return ical
-
+
elif self.accessRestriction == Component.ACCESS_PRIVATE:
# We should never get here because ACCESS_PRIVATE is protected via an ACL
raise HTTPError(StatusResponse(responsecode.FORBIDDEN, "Access Denied"))
@@ -66,7 +67,7 @@
elif self.accessRestriction in (Component.ACCESS_CONFIDENTIAL, Component.ACCESS_RESTRICTED):
# Create a CALDAV:calendar-data element with the appropriate iCalendar Component/Property
# filter in place for the access restriction in use
-
+
extra_access = ()
if self.accessRestriction == Component.ACCESS_RESTRICTED:
extra_access = (
@@ -76,7 +77,7 @@
calendardata = CalendarData(
CalendarComponent(
-
+
# VCALENDAR properties
Property(name="PRODID"),
Property(name="VERSION"),
@@ -99,9 +100,9 @@
Property(name="EXRULE"),
Property(name="EXDATE"),
*extra_access,
- **{"name":"VEVENT"}
+ **{"name": "VEVENT"}
),
-
+
# VTODO
CalendarComponent(
Property(name="UID"),
@@ -118,9 +119,9 @@
Property(name="EXRULE"),
Property(name="EXDATE"),
*extra_access,
- **{"name":"VTODO"}
+ **{"name": "VTODO"}
),
-
+
# VJOURNAL
CalendarComponent(
Property(name="UID"),
@@ -135,9 +136,9 @@
Property(name="EXRULE"),
Property(name="EXDATE"),
*extra_access,
- **{"name":"VJOURNAL"}
+ **{"name": "VJOURNAL"}
),
-
+
# VFREEBUSY
CalendarComponent(
Property(name="UID"),
@@ -147,9 +148,9 @@
Property(name="DURATION"),
Property(name="FREEBUSY"),
*extra_access,
- **{"name":"VFREEBUSY"}
+ **{"name": "VFREEBUSY"}
),
-
+
# VTIMEZONE
CalendarComponent(
AllProperties(),
@@ -165,7 +166,8 @@
else:
# Unknown access restriction
raise HTTPError(StatusResponse(responsecode.FORBIDDEN, "Access Denied"))
-
+
+
def merge(self, icalnew, icalold):
"""
Private event merging does not happen
Modified: CalendarServer/trunk/twistedcaldav/datafilters/test/test_calendardata.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/datafilters/test/test_calendardata.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/datafilters/test/test_calendardata.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -16,14 +16,14 @@
import twistedcaldav.test.util
from twistedcaldav.datafilters.calendardata import CalendarDataFilter
-from twistedcaldav.caldavxml import CalendarData, CalendarComponent,\
+from twistedcaldav.caldavxml import CalendarData, CalendarComponent, \
AllComponents, AllProperties, Property
from twistedcaldav.ical import Component
class CalendarDataTest (twistedcaldav.test.util.TestCase):
def test_empty(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -37,13 +37,14 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
empty = CalendarData()
for item in (data, Component.fromString(data),):
self.assertEqual(str(CalendarDataFilter(empty).filter(item)), data)
+
def test_vcalendar_no_effect(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -57,7 +58,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
no_effect = CalendarData(
CalendarComponent(
name="VCALENDAR"
@@ -65,7 +66,7 @@
)
for item in (data, Component.fromString(data),):
self.assertEqual(str(CalendarDataFilter(no_effect).filter(item)), data)
-
+
no_effect = CalendarData(
CalendarComponent(
AllComponents(),
@@ -76,8 +77,9 @@
for item in (data, Component.fromString(data),):
self.assertEqual(str(CalendarDataFilter(no_effect).filter(item)), data)
+
def test_vcalendar_no_props(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -92,7 +94,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
result = """BEGIN:VCALENDAR
BEGIN:VEVENT
UID:12345-67890
@@ -104,7 +106,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
empty = CalendarData(
CalendarComponent(
AllComponents(),
@@ -114,8 +116,9 @@
for item in (data, Component.fromString(data),):
self.assertEqual(str(CalendarDataFilter(empty).filter(item)), result)
+
def test_vcalendar_no_comp(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -130,14 +133,14 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
result = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
X-WR-CALNAME:Help
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
empty = CalendarData(
CalendarComponent(
AllProperties(),
@@ -147,8 +150,9 @@
for item in (data, Component.fromString(data),):
self.assertEqual(str(CalendarDataFilter(empty).filter(item)), result)
+
def test_vevent_no_effect(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -162,7 +166,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
no_effect = CalendarData(
CalendarComponent(
CalendarComponent(
@@ -175,8 +179,9 @@
for item in (data, Component.fromString(data),):
self.assertEqual(str(CalendarDataFilter(no_effect).filter(item)), data)
+
def test_vevent_other_component(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -190,13 +195,13 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
result = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
other_component = CalendarData(
CalendarComponent(
CalendarComponent(
@@ -209,8 +214,9 @@
for item in (data, Component.fromString(data),):
self.assertEqual(str(CalendarDataFilter(other_component).filter(item)), result)
+
def test_vevent_no_props(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -229,7 +235,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
result = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -242,7 +248,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
empty = CalendarData(
CalendarComponent(
CalendarComponent(
@@ -253,14 +259,15 @@
name="VCALENDAR"
)
)
-
+
for item in (data, Component.fromString(data),):
filtered = str(CalendarDataFilter(empty).filter(item))
filtered = "".join([line for line in filtered.splitlines(True) if not line.startswith("UID:")])
self.assertEqual(filtered, result)
+
def test_vevent_no_comp(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -279,7 +286,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
result = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -293,7 +300,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
empty = CalendarData(
CalendarComponent(
CalendarComponent(
@@ -307,8 +314,9 @@
for item in (data, Component.fromString(data),):
self.assertEqual(str(CalendarDataFilter(empty).filter(item)), result)
+
def test_vevent_some_props(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -327,7 +335,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
result = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -343,7 +351,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
empty = CalendarData(
CalendarComponent(
CalendarComponent(
@@ -363,7 +371,6 @@
name="VCALENDAR"
)
)
-
+
for item in (data, Component.fromString(data),):
self.assertEqual(str(CalendarDataFilter(empty).filter(item)), result)
-
Modified: CalendarServer/trunk/twistedcaldav/datafilters/test/test_privateevents.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/datafilters/test/test_privateevents.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/datafilters/test/test_privateevents.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -22,7 +22,7 @@
class PrivateEventsTest (twistedcaldav.test.util.TestCase):
def test_public_default(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -36,13 +36,14 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
for item in (data, Component.fromString(data),):
self.assertEqual(str(PrivateEventFilter(Component.ACCESS_PUBLIC, True).filter(item)), data)
self.assertEqual(str(PrivateEventFilter(Component.ACCESS_PUBLIC, False).filter(item)), data)
+
def test_public_none(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -56,13 +57,14 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
for item in (data, Component.fromString(data),):
self.assertEqual(str(PrivateEventFilter(None, True).filter(item)), data)
self.assertEqual(str(PrivateEventFilter(None, False).filter(item)), data)
+
def test_public(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -77,13 +79,14 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
for item in (data, Component.fromString(data),):
self.assertEqual(str(PrivateEventFilter(Component.ACCESS_PUBLIC, True).filter(item)), data)
self.assertEqual(str(PrivateEventFilter(Component.ACCESS_PUBLIC, False).filter(item)), data)
+
def test_private(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -98,14 +101,15 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
for item in (data, Component.fromString(data),):
self.assertEqual(str(PrivateEventFilter(Component.ACCESS_PRIVATE, True).filter(item)), data)
pfilter = PrivateEventFilter(Component.ACCESS_PRIVATE, False)
self.assertRaises(HTTPError, pfilter.filter, item)
+
def test_confidential(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -123,7 +127,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
filtered = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -135,13 +139,14 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
for item in (data, Component.fromString(data),):
self.assertEqual(str(PrivateEventFilter(Component.ACCESS_CONFIDENTIAL, True).filter(item)), data)
self.assertEqual(str(PrivateEventFilter(Component.ACCESS_CONFIDENTIAL, False).filter(item)), filtered)
+
def test_restricted(self):
-
+
data = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -159,7 +164,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
filtered = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
@@ -173,7 +178,7 @@
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
-
+
for item in (data, Component.fromString(data),):
self.assertEqual(str(PrivateEventFilter(Component.ACCESS_RESTRICTED, True).filter(item)), data)
self.assertEqual(str(PrivateEventFilter(Component.ACCESS_RESTRICTED, False).filter(item)), filtered)
Modified: CalendarServer/trunk/twistedcaldav/method/get.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/get.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/method/get.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -34,7 +34,7 @@
from twistedcaldav.customxml import calendarserver_namespace
from twistedcaldav.datafilters.hiddeninstance import HiddenInstanceFilter
from twistedcaldav.datafilters.privateevents import PrivateEventFilter
-from twistedcaldav.resource import isPseudoCalendarCollectionResource,\
+from twistedcaldav.resource import isPseudoCalendarCollectionResource, \
CalDAVResource
@inlineCallbacks
@@ -43,7 +43,7 @@
if self.exists():
# Special sharing request on a calendar or address book
if self.isCalendarCollection() or self.isAddressBookCollection():
-
+
# Check for action=share
if request.args:
action = request.args.get("action", ("",))
@@ -54,51 +54,51 @@
"Invalid action parameter: %s" % (action,),
))
action = action[0]
-
+
dispatch = {
"share" : self.directShare,
}.get(action, None)
-
+
if dispatch is None:
raise HTTPError(ErrorResponse(
responsecode.BAD_REQUEST,
(calendarserver_namespace, "supported-action"),
"Action not supported: %s" % (action,),
))
-
+
response = (yield dispatch(request))
returnValue(response)
-
+
else:
# Look for calendar access restriction on existing resource.
parentURL = parentForURL(request.uri)
parent = (yield request.locateResource(parentURL))
if isPseudoCalendarCollectionResource(parent):
-
+
# Check authorization first
yield self.authorize(request, (davxml.Read(),))
-
+
caldata = (yield self.iCalendarForUser(request))
-
- # Filter any attendee hidden instances
+
+ # Filter any attendee hidden instances
caldata = HiddenInstanceFilter().filter(caldata)
if self.accessMode:
-
+
# Non DAV:owner's have limited access to the data
isowner = (yield self.isOwner(request))
-
+
# Now "filter" the resource calendar data
caldata = PrivateEventFilter(self.accessMode, isowner).filter(caldata)
response = Response()
response.stream = MemoryStream(caldata.getTextWithTimezones(includeTimezones=not config.EnableTimezonesByReference))
response.headers.setHeader("content-type", MimeType.fromString("text/calendar; charset=utf-8"))
-
+
# Add Schedule-Tag header if property is present
if self.scheduleTag:
response.headers.setHeader("Schedule-Tag", self.scheduleTag)
-
+
returnValue(response)
# Do normal GET behavior
Modified: CalendarServer/trunk/twistedcaldav/method/mkcalendar.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/mkcalendar.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/method/mkcalendar.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -51,7 +51,7 @@
yield parent.authorize(request, (davxml.Bind(),))
if self.exists():
- log.err("Attempt to create collection where resource exists: %s" % (self,))
+ log.err("Attempt to create collection where resource exists: %s" % (self,))
raise HTTPError(ErrorResponse(
responsecode.FORBIDDEN,
(davxml.dav_namespace, "resource-must-be-null"),
@@ -88,7 +88,7 @@
errors = PropertyStatusResponseQueue("PROPPATCH", request.uri, responsecode.NO_CONTENT)
got_an_error = False
-
+
if makecalendar.children:
# mkcalendar -> set -> prop -> property*
for property in makecalendar.children[0].children[0].children:
@@ -103,15 +103,14 @@
got_an_error = True
else:
errors.add(responsecode.OK, property)
-
+
if got_an_error:
# Force a transaction error and proper clean-up
errors.error()
raise HTTPError(MultiStatusResponse([errors.response()]))
-
+
# When calendar collections are single component only, default MKCALENDAR is VEVENT only
if not set_supported_component_set and config.RestrictCalendarsToOneComponentType:
yield self.setSupportedComponents(("VEVENT",))
-
returnValue(responsecode.CREATED)
Modified: CalendarServer/trunk/twistedcaldav/method/report.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/report.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/method/report.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -7,10 +7,10 @@
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
-#
+#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
-#
+#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
@@ -83,6 +83,7 @@
else:
request.submethod = name
+
def to_method(namespace, name):
if namespace:
s = "_".join((namespace, name))
@@ -102,7 +103,7 @@
try:
method = getattr(self, method_name)
-
+
# Also double-check via supported-reports property
reports = self.supportedReports()
test = lookupElement((namespace, name))
Modified: CalendarServer/trunk/twistedcaldav/method/report_addressbook_query.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/report_addressbook_query.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/method/report_addressbook_query.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -63,11 +63,11 @@
xmlfilter = addressbook_query.filter
filter = addressbookqueryfilter.Filter(xmlfilter)
- query = addressbook_query.props
+ query = addressbook_query.props
limit = addressbook_query.limit
assert query is not None
-
+
if query.qname() == ("DAV:", "allprop"):
propertiesForResource = report_common.allPropertiesForResource
generate_address_data = False
@@ -78,7 +78,7 @@
elif query.qname() == ("DAV:", "prop"):
propertiesForResource = report_common.propertyListForResource
-
+
# Verify that any address-data element matches what we can handle
result, message, generate_address_data = report_common.validPropertyListAddressDataTypeVersion(query)
if not result:
@@ -88,7 +88,7 @@
(carddav_namespace, "supported-address-data"),
"Invalid address-data",
))
-
+
else:
raise AssertionError("We shouldn't be here")
@@ -101,15 +101,16 @@
"Invalid filter element",
))
- matchcount = [0,]
- max_number_of_results = [config.MaxQueryWithDataResults if generate_address_data else None,]
- limited = [False,]
-
+ matchcount = [0, ]
+ max_number_of_results = [config.MaxQueryWithDataResults if generate_address_data else None, ]
+ limited = [False, ]
+
if limit:
clientLimit = int(str(limit.childOfType(NResults)))
if max_number_of_results[0] is None or clientLimit < max_number_of_results[0]:
max_number_of_results[0] = clientLimit
+
@inlineCallbacks
def doQuery(addrresource, uri):
"""
@@ -118,15 +119,15 @@
@param addrresource: the L{CalDAVResource} for an address book collection.
@param uri: the uri for the address book collecton resource.
"""
-
+
def checkMaxResults():
matchcount[0] += 1
if max_number_of_results[0] is not None and matchcount[0] > max_number_of_results[0]:
raise NumberOfMatchesWithinLimits(max_number_of_results[0])
-
-
+
+
@inlineCallbacks
- def queryAddressBookObjectResource(resource, uri, name, vcard, query_ok = False):
+ def queryAddressBookObjectResource(resource, uri, name, vcard, query_ok=False):
"""
Run a query on the specified vcard.
@param resource: the L{CalDAVResource} for the vcard.
@@ -134,7 +135,7 @@
@param name: the name of the resource.
@param vcard: the L{Component} vcard read from the resource.
"""
-
+
if query_ok or filter.match(vcard):
# Check size of results is within limit
checkMaxResults()
@@ -143,7 +144,7 @@
href = davxml.HRef.fromString(joinURL(uri, name))
else:
href = davxml.HRef.fromString(uri)
-
+
try:
yield report_common.responseForHref(request, responses, href, resource, propertiesForResource, query, vcard=vcard)
except ConcurrentModification:
@@ -153,21 +154,21 @@
# case, we ignore the now missing resource rather
# than raise an error for the entire report.
log.err("Missing resource during sync: %s" % (href,))
-
-
+
+
@inlineCallbacks
def queryDirectoryBackedAddressBook(directoryBackedAddressBook, addressBookFilter):
"""
"""
- records, limited[0] = (yield directoryBackedAddressBook.directory.vCardRecordsForAddressBookQuery( addressBookFilter, query, max_number_of_results[0] ))
+ records, limited[0] = (yield directoryBackedAddressBook.directory.vCardRecordsForAddressBookQuery(addressBookFilter, query, max_number_of_results[0]))
for vCardRecord in records:
-
+
# match against original filter
if filter.match((yield vCardRecord.vCard())):
-
+
# Check size of results is within limit
checkMaxResults()
-
+
try:
yield report_common.responseForHref(request, responses, vCardRecord.hRef(), vCardRecord, propertiesForResource, query, vcard=(yield vCardRecord.vCard()))
except ConcurrentModification:
@@ -177,70 +178,66 @@
# case, we ignore the now missing resource rather
# than raise an error for the entire report.
log.err("Missing resource during sync: %s" % (vCardRecord.hRef(),))
-
-
+
directoryAddressBookLock = None
- try:
+ try:
if addrresource.isDirectoryBackedAddressBookCollection() and addrresource.directory.cacheQuery:
-
+
directory = addrresource.directory
if directory.liveQuery:
# if liveQuery and cacheQuery, get vCards into the directory address book on disk
- directoryAddressBookLock, limited[0] = (yield directory.cacheVCardsForAddressBookQuery( filter, query, max_number_of_results[0] ) )
-
+ directoryAddressBookLock, limited[0] = (yield directory.cacheVCardsForAddressBookQuery(filter, query, max_number_of_results[0]))
+
elif directory.maxDSQueryRecords and directory.maxDSQueryRecords < max_number_of_results[0]:
max_number_of_results[0] = directory.maxDSQueryRecords
-
-
+
elif not addrresource.isAddressBookCollection():
-
+
#do UID lookup on last part of uri
resource_name = urllib.unquote(uri[uri.rfind("/") + 1:])
if resource_name.endswith(".vcf") and len(resource_name) > 4:
-
+
# see if parent is directory backed address book
- parent = (yield addrresource.locateParent( request, uri ) )
-
+ parent = (yield addrresource.locateParent(request, uri))
+
if parent.isDirectoryBackedAddressBookCollection() and parent.directory.cacheQuery:
-
+
directory = parent.directory
if directory.liveQuery:
- vCardFilter = carddavxml.Filter( *[carddavxml.PropertyFilter(
- carddavxml.TextMatch.fromString(resource_name[:-4]),
+ vCardFilter = carddavxml.Filter(*[carddavxml.PropertyFilter(
+ carddavxml.TextMatch.fromString(resource_name[:-4]),
name="UID", # attributes
- ),] )
+ ), ])
vCardFilter = addressbookqueryfilter.Filter(vCardFilter)
-
- directoryAddressBookLock, limited[0] = (yield directory.cacheVCardsForAddressBookQuery( vCardFilter, query, max_number_of_results[0] ) )
+ directoryAddressBookLock, limited[0] = (yield directory.cacheVCardsForAddressBookQuery(vCardFilter, query, max_number_of_results[0]))
+
elif directory.maxDSQueryRecords and directory.maxDSQueryRecords < max_number_of_results[0]:
max_number_of_results[0] = directory.maxDSQueryRecords
-
-
-
+
# Check whether supplied resource is an address book or an address book object resource
if addrresource.isAddressBookCollection():
-
+
if addrresource.isDirectoryBackedAddressBookCollection() and addrresource.directory.liveQuery and not addrresource.directory.cacheQuery:
- yield maybeDeferred( queryDirectoryBackedAddressBook, addrresource, filter )
-
+ yield maybeDeferred(queryDirectoryBackedAddressBook, addrresource, filter)
+
else:
# Do some optimisation of access control calculation by determining any inherited ACLs outside of
# the child resource loop and supply those to the checkPrivileges on each child.
filteredaces = (yield addrresource.inheritedACEsforChildren(request))
-
+
# Check for disabled access
if filteredaces is not None:
# See whether the filter is valid for an index only query
index_query_ok = addrresource.index().searchValid(filter)
-
+
# Get list of children that match the search and have read access
names = [name for name, ignore_uid in (yield addrresource.index().search(filter))] #@UnusedVariable
if not names:
return
-
+
# Now determine which valid resources are readable and which are not
ok_resources = []
yield addrresource.findChildrenFaster(
@@ -255,47 +252,46 @@
)
for child, child_uri in ok_resources:
child_uri_name = child_uri[child_uri.rfind("/") + 1:]
-
+
if generate_address_data or not index_query_ok:
vcard = yield child.vCard()
assert vcard is not None, "vCard %s is missing from address book collection %r" % (child_uri_name, self)
else:
vcard = None
-
- yield queryAddressBookObjectResource(child, uri, child_uri_name, vcard, query_ok = index_query_ok)
-
+
+ yield queryAddressBookObjectResource(child, uri, child_uri_name, vcard, query_ok=index_query_ok)
+
else:
-
- handled = False;
+
+ handled = False
resource_name = urllib.unquote(uri[uri.rfind("/") + 1:])
if resource_name.endswith(".vcf") and len(resource_name) > 4:
-
+
# see if parent is directory backed address book
- parent = (yield addrresource.locateParent( request, uri ) )
-
+ parent = (yield addrresource.locateParent(request, uri))
+
if parent.isDirectoryBackedAddressBookCollection() and parent.directory.liveQuery and not parent.directory.cacheQuery:
-
- vCardFilter = carddavxml.Filter( *[carddavxml.PropertyFilter(
- carddavxml.TextMatch.fromString(resource_name[:-4]),
+
+ vCardFilter = carddavxml.Filter(*[carddavxml.PropertyFilter(
+ carddavxml.TextMatch.fromString(resource_name[:-4]),
name="UID", # attributes
- ),] )
+ ), ])
vCardFilter = addressbookqueryfilter.Filter(vCardFilter)
-
- yield maybeDeferred( queryDirectoryBackedAddressBook, parent, vCardFilter )
+
+ yield maybeDeferred(queryDirectoryBackedAddressBook, parent, vCardFilter)
handled = True
if not handled:
vcard = yield addrresource.vCard()
yield queryAddressBookObjectResource(addrresource, uri, None, vcard)
-
+
if limited[0]:
raise NumberOfMatchesWithinLimits(matchcount[0])
finally:
if directoryAddressBookLock:
yield directoryAddressBookLock.release()
-
-
+
# Run report taking depth into account
try:
depth = request.headers.getHeader("depth", "0")
@@ -309,7 +305,7 @@
#davxml.ResponseDescription("Results limited by %s at %d" % resultsWereLimited),
davxml.ResponseDescription("Results limited to %d items" % e.maxLimit()),
))
-
+
if not hasattr(request, "extendedLogItems"):
request.extendedLogItems = {}
request.extendedLogItems["responses"] = len(responses)
Modified: CalendarServer/trunk/twistedcaldav/method/report_calendar_query.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/report_calendar_query.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/method/report_calendar_query.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -20,7 +20,7 @@
__all__ = ["report_urn_ietf_params_xml_ns_caldav_calendar_query"]
-from twisted.internet.defer import inlineCallbacks, returnValue,\
+from twisted.internet.defer import inlineCallbacks, returnValue, \
maybeDeferred
from twext.python.log import Logger
@@ -34,7 +34,7 @@
from twistedcaldav import caldavxml
from twistedcaldav.caldavxml import caldav_namespace, MaxInstances
from twistedcaldav.config import config
-from txdav.common.icommondatastore import IndexedSearchException,\
+from txdav.common.icommondatastore import IndexedSearchException, \
ConcurrentModification
from twistedcaldav.instance import TooManyInstancesError
from twistedcaldav.method import report_common
@@ -66,10 +66,10 @@
xmlfilter = calendar_query.filter
filter = calendarqueryfilter.Filter(xmlfilter)
- props = calendar_query.props
+ props = calendar_query.props
assert props is not None
-
+
# Get the original timezone provided in the query, if any, and validate it now
query_timezone = None
query_tz = calendar_query.timezone
@@ -95,7 +95,7 @@
elif props.qname() == ("DAV:", "prop"):
propertiesForResource = report_common.propertyListForResource
-
+
# Verify that any calendar-data element matches what we can handle
result, message, generate_calendar_data = report_common.validPropertyListCalendarDataTypeVersion(props)
if not result:
@@ -105,7 +105,7 @@
(caldav_namespace, "supported-calendar-data"),
"Invalid calendar-data",
))
-
+
else:
raise AssertionError("We shouldn't be here")
@@ -119,8 +119,8 @@
))
matchcount = [0]
- max_number_of_results = [config.MaxQueryWithDataResults if generate_calendar_data else None,]
-
+ max_number_of_results = [config.MaxQueryWithDataResults if generate_calendar_data else None, ]
+
@inlineCallbacks
def doQuery(calresource, uri):
"""
@@ -129,7 +129,7 @@
@param calresource: the L{CalDAVResource} for a calendar collection.
@param uri: the uri for the calendar collection resource.
"""
-
+
@inlineCallbacks
def queryCalendarObjectResource(resource, uri, name, calendar, timezone, query_ok=False, isowner=True):
"""
@@ -139,7 +139,7 @@
@param name: the name of the resource.
@param calendar: the L{Component} calendar read from the resource.
"""
-
+
# Handle private events access restrictions
if not isowner:
access = resource.accessMode
@@ -156,7 +156,7 @@
href = davxml.HRef.fromString(joinURL(uri, name))
else:
href = davxml.HRef.fromString(uri)
-
+
try:
yield report_common.responseForHref(request, responses, href, resource, propertiesForResource, props, isowner, calendar=calendar, timezone=timezone)
except ConcurrentModification:
@@ -166,7 +166,7 @@
# case, we ignore the now missing resource rather
# than raise an error for the entire report.
log.err("Missing resource during query: %s" % (href,))
-
+
# Check whether supplied resource is a calendar or a calendar object resource
if calresource.isPseudoCalendarCollection():
# Get the timezone property from the collection if one was not set in the query,
@@ -199,7 +199,7 @@
if not names:
returnValue(True)
-
+
# Now determine which valid resources are readable and which are not
ok_resources = []
yield calresource.findChildrenFaster(
@@ -212,17 +212,17 @@
(davxml.Read(),),
inherited_aces=filteredaces
)
-
+
for child, child_uri in ok_resources:
child_uri_name = child_uri[child_uri.rfind("/") + 1:]
-
+
if generate_calendar_data or not index_query_ok:
calendar = (yield child.iCalendarForUser(request))
assert calendar is not None, "Calendar %s is missing from calendar collection %r" % (child_uri_name, self)
else:
calendar = None
-
- yield queryCalendarObjectResource(child, uri, child_uri_name, calendar, timezone, query_ok = index_query_ok, isowner=isowner)
+
+ yield queryCalendarObjectResource(child, uri, child_uri_name, calendar, timezone, query_ok=index_query_ok, isowner=isowner)
else:
# Get the timezone property from the collection if one was not set in the query,
# and store in the query object for later use
@@ -276,7 +276,7 @@
caldavxml.MaxDateTime(),
"Time-range value too far in the future. Must be on or before %s." % (str(e.limit),)
))
-
+
if not hasattr(request, "extendedLogItems"):
request.extendedLogItems = {}
request.extendedLogItems["responses"] = len(responses)
Modified: CalendarServer/trunk/twistedcaldav/method/report_freebusy.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/report_freebusy.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/method/report_freebusy.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -57,9 +57,9 @@
# First list is BUSY, second BUSY-TENTATIVE, third BUSY-UNAVAILABLE
fbinfo = ([], [], [])
-
+
matchcount = [0]
-
+
def generateFreeBusyInfo(calresource, uri): #@UnusedVariable
"""
Run a free busy report on the specified calendar collection
@@ -67,7 +67,7 @@
@param calresource: the L{CalDAVResource} for a calendar collection.
@param uri: the uri for the calendar collecton resource.
"""
-
+
def _gotResult(result):
matchcount[0] = result
return True
@@ -99,10 +99,10 @@
caldavxml.MaxDateTime(),
"Time-range value too far in the future. Must be on or before %s." % (str(e.limit),)
))
-
+
# Now build a new calendar object with the free busy info we have
fbcalendar = report_common.buildFreeBusyResult(fbinfo, timerange)
-
+
response = Response()
response.stream = MemoryStream(str(fbcalendar))
response.headers.setHeader("content-type", MimeType.fromString("text/calendar; charset=utf-8"))
Modified: CalendarServer/trunk/twistedcaldav/method/report_multiget_common.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/method/report_multiget_common.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/method/report_multiget_common.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -38,7 +38,7 @@
from twistedcaldav.config import config
from twistedcaldav.method import report_common
from txdav.common.icommondatastore import ConcurrentModification
-from twistedcaldav.method.report_common import COLLECTION_TYPE_CALENDAR,\
+from twistedcaldav.method.report_common import COLLECTION_TYPE_CALENDAR, \
COLLECTION_TYPE_ADDRESSBOOK
from twistedcaldav.query import addressbookqueryfilter
@@ -53,7 +53,7 @@
# Make sure target resource is of the right type
if not self.isCollection():
parent = (yield self.locateParent(request, request.uri))
-
+
if collection_type == COLLECTION_TYPE_CALENDAR:
if not parent.isPseudoCalendarCollection():
log.err("calendar-multiget report is not allowed on a resource outside of a calendar collection %s" % (self,))
@@ -66,12 +66,12 @@
responses = []
propertyreq = multiget.property
- resources = multiget.resources
+ resources = multiget.resources
if not hasattr(request, "extendedLogItems"):
request.extendedLogItems = {}
request.extendedLogItems["rcount"] = len(resources)
-
+
hasData = False
if propertyreq.qname() == ("DAV:", "allprop"):
propertiesForResource = report_common.allPropertiesForResource
@@ -81,7 +81,7 @@
elif propertyreq.qname() == ("DAV:", "prop"):
propertiesForResource = report_common.propertyListForResource
-
+
if collection_type == COLLECTION_TYPE_CALENDAR:
# Verify that any calendar-data element matches what we can handle
result, message, hasData = report_common.validPropertyListCalendarDataTypeVersion(propertyreq)
@@ -113,14 +113,14 @@
"""
Three possibilities exist:
-
+
1. The request-uri is a calendar collection, in which case all the hrefs
MUST be one-level below that collection and must be calendar object resources.
-
+
2. The request-uri is a regular collection, in which case all the hrefs
MUST be children of that (at any depth) but MUST also be calendar object
resources (i.e. immediate child of a calendar collection).
-
+
3. The request-uri is a resource, in which case there MUST be
a single href equal to the request-uri, and MUST be a calendar
object resource.
@@ -133,11 +133,11 @@
# Do some optimisation of access control calculation by determining any inherited ACLs outside of
# the child resource loop and supply those to the checkPrivileges on each child.
filteredaces = (yield self.inheritedACEsforChildren(request))
-
+
# Check for disabled access
if filteredaces is None:
disabled = True
-
+
# Check private events access status
isowner = (yield self.isOwner(request))
@@ -147,7 +147,7 @@
# Do some optimisation of access control calculation by determining any inherited ACLs outside of
# the child resource loop and supply those to the checkPrivileges on each child.
filteredaces = (yield self.inheritedACEsforChildren(request))
-
+
# Check for disabled access
if filteredaces is None:
disabled = True
@@ -167,7 +167,7 @@
@inlineCallbacks
def doResponse():
-
+
# Special for addressbooks
if collection_type == COLLECTION_TYPE_ADDRESSBOOK:
if self.isDirectoryBackedAddressBookCollection() and self.directory.liveQuery:
@@ -185,7 +185,7 @@
valid_names.append(name)
if not valid_names:
returnValue(None)
-
+
# Now determine which valid resources are readable and which are not
ok_resources = []
bad_resources = []
@@ -234,12 +234,12 @@
# Indicate error for all missing resources
for href in missing_resources:
responses.append(davxml.StatusResponse(davxml.HRef.fromString(href), davxml.Status.fromResponseCode(responsecode.NOT_FOUND)))
-
+
@inlineCallbacks
def doDirectoryAddressBookResponse():
-
+
directoryAddressBookLock = None
- try:
+ try:
# Verify that requested resources are immediate children of the request-URI
# and get vCardFilters ;similar to "normal" case below but do not call getChild()
vCardFilters = []
@@ -250,22 +250,22 @@
if self._isChildURI(request, resource_uri) and resource_name.endswith(".vcf") and len(resource_name) > 4:
valid_hrefs.append(href)
vCardFilters.append(carddavxml.PropertyFilter(
- carddavxml.TextMatch.fromString(resource_name[:-4]),
+ carddavxml.TextMatch.fromString(resource_name[:-4]),
name="UID", # attributes
))
elif not self.directory.cacheQuery:
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.NOT_FOUND)))
-
- # exit if not valid
+
+ # exit if not valid
if not vCardFilters or not valid_hrefs:
- returnValue( None )
-
- addressBookFilter = carddavxml.Filter( *vCardFilters )
+ returnValue(None)
+
+ addressBookFilter = carddavxml.Filter(*vCardFilters)
addressBookFilter = addressbookqueryfilter.Filter(addressBookFilter)
if self.directory.cacheQuery:
# add vcards to directory address book and run "normal case" below
limit = config.DirectoryAddressBook.MaxQueryResults
- directoryAddressBookLock, limited = (yield self.directory.cacheVCardsForAddressBookQuery(addressBookFilter, propertyreq, limit) )
+ directoryAddressBookLock, limited = (yield self.directory.cacheVCardsForAddressBookQuery(addressBookFilter, propertyreq, limit))
if limited:
log.err("Too many results in multiget report: %d" % len(resources))
raise HTTPError(ErrorResponse(
@@ -276,7 +276,7 @@
else:
#get vCards and filter
limit = config.DirectoryAddressBook.MaxQueryResults
- vCardRecords, limited = (yield self.directory.vCardRecordsForAddressBookQuery( addressBookFilter, propertyreq, limit ))
+ vCardRecords, limited = (yield self.directory.vCardRecordsForAddressBookQuery(addressBookFilter, propertyreq, limit))
if limited:
log.err("Too many results in multiget report: %d" % len(resources))
raise HTTPError(ErrorResponse(
@@ -284,13 +284,13 @@
(dav_namespace, "number-of-matches-within-limits"),
"Too many results",
))
-
+
for href in valid_hrefs:
matchingRecord = None
for vCardRecord in vCardRecords:
if href == vCardRecord.hRef(): # might need to compare urls instead - also case sens ok?
matchingRecord = vCardRecord
- break;
+ break
if matchingRecord:
yield report_common.responseForHref(request, responses, href, matchingRecord, propertiesForResource, propertyreq, vcard=matchingRecord.vCard())
@@ -304,30 +304,30 @@
yield doResponse()
else:
for href in resources:
-
+
resource_uri = str(href)
-
+
# Do href checks
if requestURIis == "calendar":
pass
elif requestURIis == "addressbook":
pass
-
+
# TODO: we can optimize this one in a similar manner to the calendar case
elif requestURIis == "collection":
name = unquote(resource_uri[resource_uri.rfind("/") + 1:])
if not self._isChildURI(request, resource_uri, False):
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.NOT_FOUND)))
continue
-
+
child = (yield request.locateResource(resource_uri))
-
+
if not child or not child.exists():
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.NOT_FOUND)))
continue
-
+
parent = (yield child.locateParent(request, resource_uri))
-
+
if collection_type == COLLECTION_TYPE_CALENDAR:
if not parent.isCalendarCollection() or not (yield parent.index().resourceExists(name)):
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.FORBIDDEN)))
@@ -336,18 +336,18 @@
if not parent.isAddressBookCollection() or not (yield parent.index().resourceExists(name)):
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.FORBIDDEN)))
continue
-
+
# Check privileges on parent - must have at least DAV:read
try:
yield parent.checkPrivileges(request, (davxml.Read(),))
except AccessDeniedError:
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.FORBIDDEN)))
continue
-
+
# Cache the last parent's inherited aces for checkPrivileges optimization
if lastParent != parent:
lastParent = parent
-
+
# Do some optimisation of access control calculation by determining any inherited ACLs outside of
# the child resource loop and supply those to the checkPrivileges on each child.
filteredaces = (yield parent.inheritedACEsforChildren(request))
@@ -359,9 +359,9 @@
if (resource_uri != request.uri) or not self.exists():
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.NOT_FOUND)))
continue
-
+
parent = (yield self.locateParent(request, resource_uri))
-
+
if collection_type == COLLECTION_TYPE_CALENDAR:
if not parent.isPseudoCalendarCollection() or not (yield parent.index().resourceExists(name)):
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.FORBIDDEN)))
@@ -371,21 +371,21 @@
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.FORBIDDEN)))
continue
child = self
-
+
# Do some optimisation of access control calculation by determining any inherited ACLs outside of
# the child resource loop and supply those to the checkPrivileges on each child.
filteredaces = (yield parent.inheritedACEsforChildren(request))
# Check private events access status
isowner = (yield parent.isOwner(request))
-
+
# Check privileges - must have at least DAV:read
try:
yield child.checkPrivileges(request, (davxml.Read(),), inherited_aces=filteredaces)
except AccessDeniedError:
responses.append(davxml.StatusResponse(href, davxml.Status.fromResponseCode(responsecode.FORBIDDEN)))
continue
-
+
yield report_common.responseForHref(request, responses, href, child, propertiesForResource, propertyreq, isowner=isowner)
returnValue(MultiStatusResponse(responses))
Modified: CalendarServer/trunk/twistedcaldav/query/addressbookquery.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/addressbookquery.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/addressbookquery.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -38,34 +38,38 @@
@return: a L{baseExpression} for the expression tree.
"""
# Lets assume we have a valid filter from the outset.
-
+
# Top-level filter contains zero or more prop-filter element
if len(filter.children) > 0:
return propfilterListExpression(filter.children, fields)
else:
return expression.allExpression()
+
+
def propfilterListExpression(propfilters, fields):
"""
Create an expression for a list of prop-filter elements.
-
+
@param propfilters: the C{list} of L{ComponentFilter} elements.
@return: a L{baseExpression} for the expression tree.
"""
-
+
if len(propfilters) == 1:
return propfilterExpression(propfilters[0], fields)
else:
return expression.orExpression([propfilterExpression(c, fields) for c in propfilters])
+
+
def propfilterExpression(propfilter, fields):
"""
Create an expression for a single prop-filter element.
-
+
@param propfilter: the L{PropertyFilter} element.
@return: a L{baseExpression} for the expression tree.
"""
-
+
# Only handle UID right now
if propfilter.filter_name != "UID":
raise ValueError
@@ -74,7 +78,7 @@
if not propfilter.defined:
# Test for <<field>> != "*"
return expression.isExpression(fields["UID"], "", True)
-
+
# Handle embedded parameters/text-match
params = []
for filter in propfilter.filters:
@@ -103,6 +107,8 @@
else:
return None
+
+
def sqladdressbookquery(filter, addressbookid=None, generator=sqlgenerator.sqlgenerator):
"""
Convert the supplied addressbook-query into a partial SQL statement.
Modified: CalendarServer/trunk/twistedcaldav/query/addressbookqueryfilter.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/addressbookqueryfilter.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/addressbookqueryfilter.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -37,12 +37,16 @@
def __init__(self, xml_element):
self.xmlelement = xml_element
+
def match(self, item, access=None):
raise NotImplementedError
+
def valid(self, level=0):
raise NotImplementedError
+
+
class Filter(FilterBase):
"""
Determines which matching components are returned.
@@ -55,17 +59,18 @@
filter_test = xml_element.attributes.get("test", "anyof")
if filter_test not in ("anyof", "allof"):
raise ValueError("Test must be only one of anyof, allof")
-
+
self.filter_test = filter_test
self.children = [PropertyFilter(child) for child in xml_element.children]
+
def match(self, vcard):
"""
Returns True if the given address property matches this filter, False
otherwise. Empty element means always match.
"""
-
+
if len(self.children) > 0:
allof = self.filter_test == "allof"
for propfilter in self.children:
@@ -75,14 +80,15 @@
else:
return True
+
def valid(self):
"""
Indicate whether this filter element's structure is valid wrt vCard
data object model.
-
+
@return: True if valid, False otherwise
"""
-
+
# Test each property
for propfilter in self.children:
if not propfilter.valid():
@@ -90,6 +96,8 @@
else:
return True
+
+
class FilterChildBase(FilterBase):
"""
CardDAV filter element.
@@ -104,7 +112,7 @@
for child in xml_element.children:
qname = child.qname()
-
+
if qname in (
(carddav_namespace, "is-not-defined"),
):
@@ -122,7 +130,7 @@
if qualifier and isinstance(qualifier, IsNotDefined) and (len(filters) != 0):
raise ValueError("No other tests allowed when CardDAV:is-not-defined is present")
-
+
if xml_element.qname() == (carddav_namespace, "prop-filter"):
propfilter_test = xml_element.attributes.get("test", "anyof")
if propfilter_test not in ("anyof", "allof"):
@@ -138,17 +146,20 @@
self.filter_name = self.filter_name.encode("utf-8")
self.defined = not self.qualifier or not isinstance(qualifier, IsNotDefined)
+
def match(self, item):
"""
Returns True if the given address book item (either a property or parameter value)
matches this filter, False otherwise.
"""
-
+
# Always return True for the is-not-defined case as the result of this will
# be negated by the caller
- if not self.defined: return True
+ if not self.defined:
+ return True
- if self.qualifier and not self.qualifier.match(item): return False
+ if self.qualifier and not self.qualifier.match(item):
+ return False
if len(self.filters) > 0:
allof = self.propfilter_test == "allof"
@@ -159,6 +170,8 @@
else:
return True
+
+
class PropertyFilter (FilterChildBase):
"""
Limits a search to specific properties.
@@ -167,22 +180,26 @@
def _match(self, vcard):
# At least one property must match (or is-not-defined is set)
for property in vcard.properties():
- if property.name().upper() == self.filter_name.upper() and self.match(property): break
+ if property.name().upper() == self.filter_name.upper() and self.match(property):
+ break
else:
return not self.defined
return self.defined
+
def valid(self):
"""
Indicate whether this filter element's structure is valid wrt vCard
data object model.
-
+
@return: True if valid, False otherwise
"""
-
+
# No tests
return True
+
+
class ParameterFilter (FilterChildBase):
"""
Limits a search to specific parameters.
@@ -199,6 +216,8 @@
return result
+
+
class IsNotDefined (FilterBase):
"""
Specifies that the named iCalendar item does not exist.
@@ -211,6 +230,8 @@
# is-not-defined option.
return True
+
+
class TextMatch (FilterBase):
"""
Specifies a substring match on a property or parameter value.
@@ -246,13 +267,15 @@
else:
self.match_type = "contains"
+
def _match(self, item):
"""
Match the text for the item.
If the item is a property, then match the property value,
otherwise it may be a list of parameter values - try to match anyone of those
"""
- if item is None: return False
+ if item is None:
+ return False
if isinstance(item, Property):
values = [item.strvalue()]
@@ -261,14 +284,15 @@
test = unicode(self.text, "utf-8").lower()
+
def _textCompare(s):
# Currently ignores the collation and does caseless matching
s = s.lower()
-
+
if self.match_type == "equals":
return s == test
elif self.match_type == "contains":
- return s.find(test) != -1
+ return s.find(test) != -1
elif self.match_type == "starts-with":
return s.startswith(test)
elif self.match_type == "ends-with":
@@ -286,5 +310,5 @@
else:
if _textCompare(unicode(value, "utf-8")):
return not self.negate
-
+
return self.negate
Modified: CalendarServer/trunk/twistedcaldav/query/calendarquery.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/calendarquery.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/calendarquery.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -38,67 +38,71 @@
@param filter: the L{Filter} for the calendar-query to convert.
@return: a L{baseExpression} for the expression tree.
"""
-
+
# Lets assume we have a valid filter from the outset.
-
+
# Top-level filter contains exactly one comp-filter element
assert filter.child is not None
vcalfilter = filter.child
assert isinstance(vcalfilter, calendarqueryfilter.ComponentFilter)
assert vcalfilter.filter_name == "VCALENDAR"
-
+
if len(vcalfilter.filters) > 0:
# Determine logical expression grouping
logical = expression.andExpression if vcalfilter.filter_test == "allof" else expression.orExpression
-
+
# Only comp-filters are handled
for _ignore in [x for x in vcalfilter.filters if not isinstance(x, calendarqueryfilter.ComponentFilter)]:
raise ValueError
-
+
return compfilterListExpression(vcalfilter.filters, fields, logical)
else:
return expression.allExpression()
+
+
def compfilterListExpression(compfilters, fields, logical):
"""
Create an expression for a list of comp-filter elements.
-
+
@param compfilters: the C{list} of L{ComponentFilter} elements.
@return: a L{baseExpression} for the expression tree.
"""
-
+
if len(compfilters) == 1:
return compfilterExpression(compfilters[0], fields)
else:
return logical([compfilterExpression(c, fields) for c in compfilters])
+
+
def compfilterExpression(compfilter, fields):
"""
Create an expression for a single comp-filter element.
-
+
@param compfilter: the L{ComponentFilter} element.
@return: a L{baseExpression} for the expression tree.
"""
-
+
# Handle is-not-defined case
if not compfilter.defined:
# Test for TYPE != <<component-type name>>
return expression.isnotExpression(fields["TYPE"], compfilter.filter_name, True)
-
+
# Determine logical expression grouping
logical = expression.andExpression if compfilter.filter_test == "allof" else expression.orExpression
-
+
expressions = []
if isinstance(compfilter.filter_name, str):
expressions.append(expression.isExpression(fields["TYPE"], compfilter.filter_name, True))
else:
expressions.append(expression.inExpression(fields["TYPE"], compfilter.filter_name, True))
-
- # Handle time-range
+
+ # Handle time-range
if compfilter.qualifier and isinstance(compfilter.qualifier, calendarqueryfilter.TimeRange):
start, end, startfloat, endfloat = getTimerangeArguments(compfilter.qualifier)
expressions.append(expression.timerangeExpression(start, end, startfloat, endfloat))
-
+
# Handle properties - we can only do UID right now
props = []
for p in [x for x in compfilter.filters if isinstance(x, calendarqueryfilter.PropertyFilter)]:
@@ -109,7 +113,7 @@
propsExpression = props[0]
else:
propsExpression = None
-
+
# Handle embedded components - we do not right now as our Index does not handle them
comps = []
for _ignore in [x for x in compfilter.filters if isinstance(x, calendarqueryfilter.ComponentFilter)]:
@@ -132,14 +136,16 @@
# Now build return expression
return expression.andExpression(expressions)
+
+
def propfilterExpression(propfilter, fields):
"""
Create an expression for a single prop-filter element.
-
+
@param propfilter: the L{PropertyFilter} element.
@return: a L{baseExpression} for the expression tree.
"""
-
+
# Only handle UID right now
if propfilter.filter_name != "UID":
raise ValueError
@@ -148,14 +154,14 @@
if not propfilter.defined:
# Test for <<field>> != "*"
return expression.isExpression(fields["UID"], "", True)
-
+
# Determine logical expression grouping
logical = expression.andExpression if propfilter.filter_test == "allof" else expression.orExpression
-
+
# Handle time-range - we cannot do this with our Index right now
if propfilter.qualifier and isinstance(propfilter.qualifier, calendarqueryfilter.TimeRange):
raise ValueError
-
+
# Handle text-match
tm = None
if propfilter.qualifier and isinstance(propfilter.qualifier, calendarqueryfilter.TextMatch):
@@ -168,7 +174,7 @@
elif propfilter.qualifier.match_type == "ends-with":
tm = expression.notendswithExpression if propfilter.qualifier.negate else expression.endswithExpression
tm = tm(fields[propfilter.filter_name], propfilter.qualifier.text, propfilter.qualifier.caseless)
-
+
# Handle embedded parameters - we do not right now as our Index does not handle them
params = []
for _ignore in propfilter.filters:
@@ -190,19 +196,21 @@
else:
return None
+
+
def getTimerangeArguments(timerange):
"""
Get start/end and floating start/end (adjusted for timezone offset) values from the
supplied time-range test.
-
+
@param timerange: the L{TimeRange} used in the query.
@return: C{tuple} of C{str} for start, end, startfloat, endfloat
"""
-
+
# Start/end in UTC
start = timerange.start
end = timerange.end
-
+
# Get timezone
tzinfo = timerange.tzinfo
@@ -217,9 +225,11 @@
pyCalendarTodatetime(endfloat) if endfloat else None,
)
+
+
def sqlcalendarquery(filter, calendarid=None, userid=None, freebusy=False, generator=sqlgenerator.sqlgenerator):
"""
- Convert the supplied calendar-query into a oartial SQL statement.
+ Convert the supplied calendar-query into a partial SQL statement.
@param filter: the L{Filter} for the calendar-query to convert.
@return: a C{tuple} of (C{str}, C{list}), where the C{str} is the partial SQL statement,
Modified: CalendarServer/trunk/twistedcaldav/query/calendarqueryfilter.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/calendarqueryfilter.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/calendarqueryfilter.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -42,13 +42,16 @@
def __init__(self, xml_element):
self.xmlelement = xml_element
+
def match(self, item, access=None):
raise NotImplementedError
+
def valid(self, level=0):
raise NotImplementedError
+
class Filter(FilterBase):
"""
Determines which matching components are returned.
@@ -64,6 +67,7 @@
self.child = ComponentFilter(xml_element.children[0])
+
def match(self, component, access=None):
"""
Returns True if the given calendar component matches this filter, False
@@ -95,6 +99,7 @@
# <filter> contains exactly one <comp-filter>
return self.child.match(component, access)
+
def valid(self):
"""
Indicate whether this filter element's structure is valid wrt iCalendar
@@ -105,7 +110,8 @@
# Must have one child element for VCALENDAR
return self.child.valid(0)
-
+
+
def settimezone(self, tzelement):
"""
Set the default timezone to use with this query.
@@ -119,6 +125,7 @@
self.child.settzinfo(tz)
return tz
+
def getmaxtimerange(self):
"""
Get the date farthest into the future in any time-range elements
@@ -126,6 +133,7 @@
return self.child.getmaxtimerange(None, False)
+
def getmintimerange(self):
"""
Get the date farthest into the past in any time-range elements. That is either
@@ -135,6 +143,7 @@
return self.child.getmintimerange(None, False)
+
class FilterChildBase(FilterBase):
"""
CalDAV filter element.
@@ -189,6 +198,7 @@
raise ValueError("Test must be only one of anyof, allof")
self.filter_test = filter_test
+
def match(self, item, access=None):
"""
Returns True if the given calendar item (either a component, property or parameter value)
@@ -197,9 +207,11 @@
# Always return True for the is-not-defined case as the result of this will
# be negated by the caller
- if not self.defined: return True
+ if not self.defined:
+ return True
- if self.qualifier and not self.qualifier.match(item, access): return False
+ if self.qualifier and not self.qualifier.match(item, access):
+ return False
if len(self.filters) > 0:
allof = self.filter_test == "allof"
@@ -211,6 +223,7 @@
return True
+
class ComponentFilter (FilterChildBase):
"""
Limits a search to only the chosen component types.
@@ -226,9 +239,11 @@
# Always return True for the is-not-defined case as the result of this will
# be negated by the caller
- if not self.defined: return True
+ if not self.defined:
+ return True
- if self.qualifier and not self.qualifier.matchinstance(item, self.instances): return False
+ if self.qualifier and not self.qualifier.matchinstance(item, self.instances):
+ return False
if len(self.filters) > 0:
allof = self.filter_test == "allof"
@@ -239,6 +254,7 @@
else:
return True
+
def _match(self, component, access):
# At least one subcomponent must match (or is-not-defined is set)
for subcomponent in component.subcomponents():
@@ -249,14 +265,18 @@
# Try to match the component name
if isinstance(self.filter_name, str):
- if subcomponent.name() != self.filter_name: continue
+ if subcomponent.name() != self.filter_name:
+ continue
else:
- if subcomponent.name() not in self.filter_name: continue
- if self.match(subcomponent, access): break
+ if subcomponent.name() not in self.filter_name:
+ continue
+ if self.match(subcomponent, access):
+ break
else:
return not self.defined
return self.defined
+
def setInstances(self, instances):
"""
Give the list of instances to each comp-filter element.
@@ -266,11 +286,12 @@
for compfilter in [x for x in self.filters if isinstance(x, ComponentFilter)]:
compfilter.setInstances(instances)
+
def valid(self, level):
"""
Indicate whether this filter element's structure is valid wrt iCalendar
data object model.
-
+
@param level: the nesting level of this filter element, 0 being the top comp-filter.
@return: True if valid, False otherwise
"""
@@ -326,6 +347,7 @@
return True
+
def settzinfo(self, tzinfo):
"""
Set the default timezone to use with this query.
@@ -340,10 +362,11 @@
for x in self.filters:
x.settzinfo(tzinfo)
+
def getmaxtimerange(self, currentMaximum, currentIsStartTime):
"""
Get the date farthest into the future in any time-range elements
-
+
@param currentMaximum: current future value to compare with
@type currentMaximum: L{PyCalendarDateTime}
"""
@@ -363,6 +386,7 @@
return currentMaximum, currentIsStartTime
+
def getmintimerange(self, currentMinimum, currentIsEndTime):
"""
Get the date farthest into the past in any time-range elements. That is either
@@ -385,6 +409,7 @@
return currentMinimum, currentIsEndTime
+
class PropertyFilter (FilterChildBase):
"""
Limits a search to specific properties.
@@ -405,16 +430,18 @@
# Apply access restrictions, if any.
if allowedProperties is not None and property.name().upper() not in allowedProperties:
continue
- if property.name().upper() == self.filter_name.upper() and self.match(property, access): break
+ if property.name().upper() == self.filter_name.upper() and self.match(property, access):
+ break
else:
return not self.defined
return self.defined
+
def valid(self):
"""
Indicate whether this filter element's structure is valid wrt iCalendar
data object model.
-
+
@return: True if valid, False otherwise
"""
@@ -434,6 +461,7 @@
# No other tests
return True
+
def settzinfo(self, tzinfo):
"""
Set the default timezone to use with this query.
@@ -444,10 +472,11 @@
if isinstance(self.qualifier, TimeRange):
self.qualifier.settzinfo(tzinfo)
+
def getmaxtimerange(self, currentMaximum, currentIsStartTime):
"""
- Get the date furthest into the future in any time-range elements
-
+ Get the date farthest into the future in any time-range elements
+
@param currentMaximum: current future value to compare with
@type currentMaximum: L{PyCalendarDateTime}
"""
@@ -463,6 +492,7 @@
return currentMaximum, currentIsStartTime
+
def getmintimerange(self, currentMinimum, currentIsEndTime):
"""
Get the date farthest into the past in any time-range elements. That is either
@@ -481,6 +511,7 @@
return currentMinimum, currentIsEndTime
+
class ParameterFilter (FilterChildBase):
"""
Limits a search to specific parameters.
@@ -498,6 +529,7 @@
return result
+
class IsNotDefined (FilterBase):
"""
Specifies that the named iCalendar item does not exist.
@@ -511,6 +543,7 @@
return True
+
class TextMatch (FilterBase):
"""
Specifies a substring match on a property or parameter value.
@@ -551,13 +584,15 @@
else:
self.match_type = "contains"
+
def match(self, item, access):
"""
Match the text for the item.
If the item is a property, then match the property value,
otherwise it may be a list of parameter values - try to match anyone of those
"""
- if item is None: return False
+ if item is None:
+ return False
if isinstance(item, Property):
values = [item.strvalue()]
@@ -597,6 +632,7 @@
return self.negate
+
class TimeRange (FilterBase):
"""
Specifies a time for testing components against.
@@ -614,6 +650,7 @@
self.end = PyCalendarDateTime.parseText(xml_element.attributes["end"]) if "end" in xml_element.attributes else None
self.tzinfo = None
+
def settzinfo(self, tzinfo):
"""
Set the default timezone to use with this query.
@@ -623,10 +660,11 @@
# Give tzinfo to any TimeRange we have
self.tzinfo = tzinfo
+
def valid(self, level=0):
"""
Indicate whether the time-range is valid (must be date-time in UTC).
-
+
@return: True if valid, False otherwise
"""
@@ -646,6 +684,7 @@
# No other tests
return True
+
def match(self, property, access=None):
"""
NB This is only called when doing a time-range match on a property.
@@ -655,6 +694,7 @@
else:
return property.containsTimeRange(self.start, self.end, self.tzinfo)
+
def matchinstance(self, component, instances):
"""
Test whether this time-range element causes a match to the specified component
Modified: CalendarServer/trunk/twistedcaldav/query/expression.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/expression.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/expression.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -47,61 +47,67 @@
"""
The base class for all types of expression.
"""
-
-
+
def __init__(self):
pass
-
+
+
def multi(self):
"""
Indicate whether this expression is composed of multiple sub-expressions.
-
+
@return: C{True} if this expressions contains multiple sub-expressions,
C{False} otherwise.
"""
-
+
return False
-
+
+
def _collapsedExpression(self):
return self
+
def andWith(self, other):
if isinstance(other, andExpression):
return andExpression((self._collapsedExpression(),) + tuple(other.expressions))
else:
return andExpression((self._collapsedExpression(), other._collapsedExpression(),))
+
def orWith(self, other):
if isinstance(other, orExpression):
return orExpression((self._collapsedExpression(),) + tuple(other.expressions))
else:
return orExpression((self._collapsedExpression(), other._collapsedExpression(),))
+
+
class allExpression(baseExpression):
"""
Match everything.
"""
-
-
+
def __init__(self):
pass
-
+
+
+
class logicExpression(baseExpression):
"""
An expression representing a logical operation (boolean).
"""
-
-
+
def __init__(self, expressions):
self.expressions = expressions
+
def __str__(self):
"""
Generate a suitable text descriptor of this expression.
-
+
@return: a C{str} of the text for this expression.
"""
-
+
result = ""
for e in self.expressions:
if len(result) != 0:
@@ -110,223 +116,278 @@
if len(result):
result = "(" + result + ")"
return result
-
+
+
def multi(self):
"""
Indicate whether this expression is composed of multiple expressions.
-
+
@return: C{True} if this expressions contains multiple sub-expressions,
C{False} otherwise.
"""
-
+
return True
+
def _collapsedExpression(self):
if self.multi() and len(self.expressions) == 1:
return self.expressions[0]._collapsedExpression()
else:
return self
+
+
class notExpression(logicExpression):
"""
Logical NOT operation.
"""
-
+
def __init__(self, expression):
super(notExpression, self).__init__([expression])
+
def operator(self):
return "NOT"
+
def __str__(self):
result = self.operator() + " " + str(self.expressions[0])
return result
-
+
+
def multi(self):
"""
Indicate whether this expression is composed of multiple expressions.
-
+
@return: C{True} if this expressions contains multiple sub-expressions,
C{False} otherwise.
"""
-
+
return False
+
+
class andExpression(logicExpression):
"""
Logical AND operation.
"""
-
+
def __init__(self, expressions):
super(andExpression, self).__init__(expressions)
+
def operator(self):
return "AND"
+
def andWith(self, other):
self.expressions = tuple(self.expressions) + (other._collapsedExpression(),)
return self
+
+
class orExpression(logicExpression):
"""
Logical OR operation.
"""
-
+
def __init__(self, expressions):
super(orExpression, self).__init__(expressions)
+
def operator(self):
return "OR"
+
def orWith(self, other):
self.expressions = tuple(self.expressions) + (other._collapsedExpression(),)
return self
+
+
class timerangeExpression(baseExpression):
"""
CalDAV time-range comparison expression.
"""
-
+
def __init__(self, start, end, startfloat, endfloat):
self.start = start
self.end = end
self.startfloat = startfloat
self.endfloat = endfloat
+
def __str__(self):
return "timerange(" + str(self.start) + ", " + str(self.end) + ")"
+
+
class textcompareExpression(baseExpression):
"""
Base class for text comparison expressions.
"""
-
+
def __init__(self, field, text, caseless):
self.field = field
self.text = text
self.caseless = caseless
+
def __str__(self):
return self.operator() + "(" + self.field + ", " + self.text + ", " + str(self.caseless) + ")"
+
+
class containsExpression(textcompareExpression):
"""
Text CONTAINS (sub-string match) expression.
"""
-
+
def __init__(self, field, text, caseless):
super(containsExpression, self).__init__(field, text, caseless)
+
def operator(self):
return "contains"
+
+
class notcontainsExpression(textcompareExpression):
"""
Text NOT CONTAINS (sub-string match) expression.
"""
-
+
def __init__(self, field, text, caseless):
super(notcontainsExpression, self).__init__(field, text, caseless)
+
def operator(self):
return "does not contain"
+
+
class isExpression(textcompareExpression):
"""
Text IS (exact string match) expression.
"""
-
+
def __init__(self, field, text, caseless):
super(isExpression, self).__init__(field, text, caseless)
+
def operator(self):
return "is"
+
+
class isnotExpression(textcompareExpression):
"""
Text IS NOT (exact string match) expression.
"""
-
+
def __init__(self, field, text, caseless):
super(isnotExpression, self).__init__(field, text, caseless)
+
def operator(self):
return "is not"
+
+
class startswithExpression(textcompareExpression):
"""
Text STARTSWITH (sub-string match) expression.
"""
-
+
def __init__(self, field, text, caseless):
super(startswithExpression, self).__init__(field, text, caseless)
+
def operator(self):
return "starts with"
+
+
class notstartswithExpression(textcompareExpression):
"""
Text NOT STARTSWITH (sub-string match) expression.
"""
-
+
def __init__(self, field, text, caseless):
super(notstartswithExpression, self).__init__(field, text, caseless)
+
def operator(self):
return "does not start with"
+
+
class endswithExpression(textcompareExpression):
"""
Text STARTSWITH (sub-string match) expression.
"""
-
+
def __init__(self, field, text, caseless):
super(endswithExpression, self).__init__(field, text, caseless)
+
def operator(self):
return "ends with"
+
+
class notendswithExpression(textcompareExpression):
"""
Text NOT STARTSWITH (sub-string match) expression.
"""
-
+
def __init__(self, field, text, caseless):
super(notendswithExpression, self).__init__(field, text, caseless)
+
def operator(self):
return "does not end with"
+
+
class inExpression(textcompareExpression):
"""
Text IN (exact string match to one of the supplied items) expression.
"""
-
+
def __init__(self, field, text_list, caseless):
super(inExpression, self).__init__(field, text_list, caseless)
+
def operator(self):
return "in"
+
def __str__(self):
return self.operator() + "(" + self.field + ", " + str(self.text) + ", " + str(self.caseless) + ")"
+
+
class notinExpression(textcompareExpression):
"""
Text NOT IN (exact string match to none of the supplied items) expression.
"""
-
+
def __init__(self, field, text, caseless):
super(notinExpression, self).__init__(field, text, caseless)
+
def operator(self):
return "not in"
+
def __str__(self):
return self.operator() + "(" + self.field + ", " + str(self.text) + ", " + str(self.caseless) + ")"
+
+
if __name__ == "__main__":
-
+
e1 = isExpression("type", "vevent", False)
e2 = timerangeExpression("20060101T120000Z", "20060101T130000Z", "20060101T120000Z", "20060101T130000Z")
e3 = containsExpression("summary", "help", True)
@@ -337,4 +398,3 @@
print(e6)
e7 = notinExpression("type", ("vevent", "vtodo",), False)
print(e7)
-
Modified: CalendarServer/trunk/twistedcaldav/query/sqlgenerator.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/sqlgenerator.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/sqlgenerator.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -30,41 +30,41 @@
import cStringIO as StringIO
class sqlgenerator(object):
-
- FROM =" from "
- WHERE =" where "
- RESOURCEDB = "RESOURCE"
- TIMESPANDB = "TIMESPAN"
- TRANSPARENCYDB = "TRANSPARENCY"
- PERUSERDB = "PERUSER"
- NOTOP = "NOT "
- ANDOP = " AND "
- OROP = " OR "
- CONTAINSOP = " GLOB "
- NOTCONTAINSOP = " NOT GLOB "
- ISOP = " == "
- ISNOTOP = " != "
- STARTSWITHOP = " GLOB "
- NOTSTARTSWITHOP = " NOT GLOB "
- ENDSWITHOP = " GLOB "
- NOTENDSWITHOP = " NOT GLOB "
- INOP = " IN "
- NOTINOP = " NOT IN "
- FIELDS = {
+ FROM = " from "
+ WHERE = " where "
+ RESOURCEDB = "RESOURCE"
+ TIMESPANDB = "TIMESPAN"
+ TRANSPARENCYDB = "TRANSPARENCY"
+ PERUSERDB = "PERUSER"
+ NOTOP = "NOT "
+ ANDOP = " AND "
+ OROP = " OR "
+ CONTAINSOP = " GLOB "
+ NOTCONTAINSOP = " NOT GLOB "
+ ISOP = " == "
+ ISNOTOP = " != "
+ STARTSWITHOP = " GLOB "
+ NOTSTARTSWITHOP = " NOT GLOB "
+ ENDSWITHOP = " GLOB "
+ NOTENDSWITHOP = " NOT GLOB "
+ INOP = " IN "
+ NOTINOP = " NOT IN "
+
+ FIELDS = {
"TYPE": "RESOURCE.TYPE",
- "UID": "RESOURCE.UID",
+ "UID": "RESOURCE.UID",
}
- TIMESPANTEST = "((TIMESPAN.FLOAT == 'N' AND TIMESPAN.START < %s AND TIMESPAN.END > %s) OR (TIMESPAN.FLOAT == 'Y' AND TIMESPAN.START < %s AND TIMESPAN.END > %s))"
- TIMESPANTEST_NOEND = "((TIMESPAN.FLOAT == 'N' AND TIMESPAN.END > %s) OR (TIMESPAN.FLOAT == 'Y' AND TIMESPAN.END > %s))"
+ TIMESPANTEST = "((TIMESPAN.FLOAT == 'N' AND TIMESPAN.START < %s AND TIMESPAN.END > %s) OR (TIMESPAN.FLOAT == 'Y' AND TIMESPAN.START < %s AND TIMESPAN.END > %s))"
+ TIMESPANTEST_NOEND = "((TIMESPAN.FLOAT == 'N' AND TIMESPAN.END > %s) OR (TIMESPAN.FLOAT == 'Y' AND TIMESPAN.END > %s))"
TIMESPANTEST_NOSTART = "((TIMESPAN.FLOAT == 'N' AND TIMESPAN.START < %s) OR (TIMESPAN.FLOAT == 'Y' AND TIMESPAN.START < %s))"
TIMESPANTEST_TAIL_PIECE = " AND TIMESPAN.RESOURCEID == RESOURCE.RESOURCEID"
TIMESPANTEST_JOIN_ON_PIECE = "TIMESPAN.INSTANCEID == TRANSPARENCY.INSTANCEID AND TRANSPARENCY.PERUSERID == %s"
def __init__(self, expr, calendarid, userid, freebusy=False):
"""
-
+
@param expr: the query expression object model
@type expr: L{twistedcaldav.query.calendarqueryfilter.Filter}
@param calendarid: resource ID - not used for file-based per-calendar indexes
@@ -79,28 +79,29 @@
self.userid = userid if userid else ""
self.freebusy = freebusy
self.usedtimespan = False
-
+
+
def generate(self):
"""
Generate the actual SQL 'where ...' expression from the passed in expression tree.
-
+
@return: a C{tuple} of (C{str}, C{list}), where the C{str} is the partial SQL statement,
and the C{list} is the list of argument substitutions to use with the SQL API execute method.
"""
-
+
# Init state
self.sout = StringIO.StringIO()
self.arguments = []
self.substitutions = []
self.usedtimespan = False
-
+
# Generate ' where ...' partial statement
self.generateExpression(self.expression)
-
+
# Prefix with ' from ...' partial statement
select = self.FROM + self.RESOURCEDB
if self.usedtimespan:
-
+
# Free busy needs transparency join
if self.freebusy:
self.frontArgument(self.userid)
@@ -121,21 +122,22 @@
if self.calendarid:
self.setArgument(self.calendarid)
select += ")%s" % (self.TIMESPANTEST_TAIL_PIECE,)
-
+
select = select % tuple(self.substitutions)
return select, self.arguments
-
+
+
def generateExpression(self, expr):
"""
Generate an expression and all it's subexpressions.
-
+
@param expr: the L{baseExpression} derived class to write out.
@return: C{True} if the TIMESPAN table is used, C{False} otherwise.
"""
-
+
# Generate based on each type of expression we might encounter
-
+
# ALL
if isinstance(expr, expression.allExpression):
# Wipe out the ' where ...' clause so everything is matched
@@ -143,12 +145,12 @@
self.arguments = []
self.substitutions = []
self.usedtimespan = False
-
+
# NOT
elif isinstance(expr, expression.notExpression):
self.sout.write(self.NOTOP)
self.generateSubExpression(expr.expressions[0])
-
+
# AND
elif isinstance(expr, expression.andExpression):
first = True
@@ -158,7 +160,7 @@
else:
self.sout.write(self.ANDOP)
self.generateSubExpression(e)
-
+
# OR
elif isinstance(expr, expression.orExpression):
first = True
@@ -168,7 +170,7 @@
else:
self.sout.write(self.OROP)
self.generateSubExpression(e)
-
+
# time-range
elif isinstance(expr, expression.timerangeExpression):
if expr.start and expr.end:
@@ -185,58 +187,58 @@
self.setArgument(expr.end)
self.setArgument(expr.endfloat)
test = self.TIMESPANTEST_NOSTART
-
+
self.sout.write(test)
self.usedtimespan = True
-
+
# CONTAINS
elif isinstance(expr, expression.containsExpression):
self.sout.write(expr.field)
self.sout.write(self.CONTAINSOP)
self.addArgument(self.containsArgument(expr.text))
-
+
# NOT CONTAINS
elif isinstance(expr, expression.notcontainsExpression):
self.sout.write(expr.field)
self.sout.write(self.NOTCONTAINSOP)
self.addArgument(self.containsArgument(expr.text))
-
+
# IS
elif isinstance(expr, expression.isExpression):
self.sout.write(expr.field)
self.sout.write(self.ISOP)
self.addArgument(expr.text)
-
+
# IS NOT
elif isinstance(expr, expression.isnotExpression):
self.sout.write(expr.field)
self.sout.write(self.ISNOTOP)
self.addArgument(expr.text)
-
+
# STARTSWITH
elif isinstance(expr, expression.startswithExpression):
self.sout.write(expr.field)
self.sout.write(self.STARTSWITHOP)
self.addArgument(self.startswithArgument(expr.text))
-
+
# NOT STARTSWITH
elif isinstance(expr, expression.notstartswithExpression):
self.sout.write(expr.field)
self.sout.write(self.NOTSTARTSWITHOP)
self.addArgument(self.startswithArgument(expr.text))
-
+
# ENDSWITH
elif isinstance(expr, expression.endswithExpression):
self.sout.write(expr.field)
self.sout.write(self.ENDSWITHOP)
self.addArgument(self.endswithArgument(expr.text))
-
+
# NOT ENDSWITH
elif isinstance(expr, expression.notendswithExpression):
self.sout.write(expr.field)
self.sout.write(self.NOTENDSWITHOP)
self.addArgument(self.endswithArgument(expr.text))
-
+
# IN
elif isinstance(expr, expression.inExpression):
self.sout.write(expr.field)
@@ -247,7 +249,7 @@
self.sout.write(", ")
self.addArgument(item)
self.sout.write(")")
-
+
# NOT IN
elif isinstance(expr, expression.notinExpression):
self.sout.write(expr.field)
@@ -259,6 +261,7 @@
self.addArgument(item)
self.sout.write(")")
+
def generateSubExpression(self, expression):
"""
Generate an SQL expression possibly in parenthesis if its a compound expression.
@@ -266,57 +269,64 @@
@param expression: the L{baseExpression} to write out.
@return: C{True} if the TIMESPAN table is used, C{False} otherwise.
"""
-
+
if expression.multi():
self.sout.write("(")
self.generateExpression(expression)
if expression.multi():
self.sout.write(")")
-
+
+
def addArgument(self, arg):
"""
-
+
@param arg: the C{str} of the argument to add
"""
-
+
# Append argument to the list and add the appropriate substitution string to the output stream.
self.arguments.append(arg)
self.substitutions.append(":" + str(len(self.arguments)))
self.sout.write("%s")
-
+
+
def setArgument(self, arg):
"""
-
+
@param arg: the C{str} of the argument to add
@return: C{str} for argument substitution text
"""
-
+
# Append argument to the list and add the appropriate substitution string to the output stream.
self.arguments.append(arg)
self.substitutions.append(":" + str(len(self.arguments)))
+
def frontArgument(self, arg):
"""
-
+
@param arg: the C{str} of the argument to add
@return: C{str} for argument substitution text
"""
-
+
# Append argument to the list and add the appropriate substitution string to the output stream.
self.arguments.insert(0, arg)
self.substitutions.append(":" + str(len(self.arguments)))
+
def containsArgument(self, arg):
return "*%s*" % (arg,)
+
def startswithArgument(self, arg):
return "%s*" % (arg,)
+
def endswithArgument(self, arg):
return "*%s" % (arg,)
+
if __name__ == "__main__":
-
+
e1 = expression.isExpression("TYPE", "VEVENT", False)
e2 = expression.timerangeExpression("20060101T120000Z", "20060101T130000Z", "20060101T080000Z", "20060101T090000Z")
e3 = expression.notcontainsExpression("SUMMARY", "help", True)
Modified: CalendarServer/trunk/twistedcaldav/query/test/test_addressbookquery.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/test/test_addressbookquery.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/test/test_addressbookquery.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -34,7 +34,7 @@
)]
)
filter = addressbookqueryfilter.Filter(filter)
-
+
sql, args = sqladdressbookquery(filter)
self.assertTrue(sql.find("UID") != -1)
self.assertTrue("*Example*" in args)
Modified: CalendarServer/trunk/twistedcaldav/query/test/test_calendarquery.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/test/test_calendarquery.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/test/test_calendarquery.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -32,18 +32,19 @@
*[caldavxml.ComponentFilter(
**{"name":("VEVENT", "VFREEBUSY", "VAVAILABILITY")}
)],
- **{"name":"VCALENDAR"}
+ **{"name": "VCALENDAR"}
)
)
filter = calendarqueryfilter.Filter(filter)
filter.child.settzinfo(PyCalendarTimezone(tzid="America/New_York"))
-
+
sql, args = sqlcalendarquery(filter)
self.assertTrue(sql.find("RESOURCE") != -1)
self.assertTrue(sql.find("TIMESPAN") == -1)
self.assertTrue(sql.find("TRANSPARENCY") == -1)
self.assertTrue("VEVENT" in args)
+
def test_query_timerange(self):
"""
Basic query test - with time range
@@ -55,18 +56,19 @@
*[caldavxml.TimeRange(**{"start":"20060605T160000Z", "end":"20060605T170000Z"})],
**{"name":("VEVENT", "VFREEBUSY", "VAVAILABILITY")}
)],
- **{"name":"VCALENDAR"}
+ **{"name": "VCALENDAR"}
)
)
filter = calendarqueryfilter.Filter(filter)
filter.child.settzinfo(PyCalendarTimezone(tzid="America/New_York"))
-
+
sql, args = sqlcalendarquery(filter)
self.assertTrue(sql.find("RESOURCE") != -1)
self.assertTrue(sql.find("TIMESPAN") != -1)
self.assertTrue(sql.find("TRANSPARENCY") == -1)
self.assertTrue("VEVENT" in args)
+
def test_query_not_extended(self):
"""
Query test - two terms not anyof
@@ -82,12 +84,12 @@
**{"name":("VTODO")}
),
],
- **{"name":"VCALENDAR"}
+ **{"name": "VCALENDAR"}
)
)
filter = calendarqueryfilter.Filter(filter)
filter.child.settzinfo(PyCalendarTimezone(tzid="America/New_York"))
-
+
sql, args = sqlcalendarquery(filter)
self.assertTrue(sql.find("RESOURCE") != -1)
self.assertTrue(sql.find("TIMESPAN") == -1)
@@ -96,6 +98,7 @@
self.assertTrue("VEVENT" in args)
self.assertTrue("VTODO" in args)
+
def test_query_extended(self):
"""
Extended query test - two terms with anyof
@@ -105,19 +108,19 @@
caldavxml.ComponentFilter(
*[
caldavxml.ComponentFilter(
- *[caldavxml.TimeRange(**{"start":"20060605T160000Z",})],
+ *[caldavxml.TimeRange(**{"start":"20060605T160000Z", })],
**{"name":("VEVENT")}
),
caldavxml.ComponentFilter(
**{"name":("VTODO")}
),
],
- **{"name":"VCALENDAR", "test":"anyof"}
+ **{"name": "VCALENDAR", "test": "anyof"}
)
)
filter = calendarqueryfilter.Filter(filter)
filter.child.settzinfo(PyCalendarTimezone(tzid="America/New_York"))
-
+
sql, args = sqlcalendarquery(filter)
self.assertTrue(sql.find("RESOURCE") != -1)
self.assertTrue(sql.find("TIMESPAN") != -1)
@@ -125,4 +128,3 @@
self.assertTrue(sql.find(" OR ") != -1)
self.assertTrue("VEVENT" in args)
self.assertTrue("VTODO" in args)
-
Modified: CalendarServer/trunk/twistedcaldav/query/test/test_expression.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/test/test_expression.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/test/test_expression.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -88,10 +88,11 @@
"((is(A, 1, True) OR is(B, 2, True)) AND is(C, 3, True))"
),
)
-
+
for expr1, expr2, result in tests:
self.assertEqual(str(expr1.andWith(expr2)), result, msg="Failed on %s" % (result,))
+
def test_orWith(self):
tests = (
@@ -161,6 +162,6 @@
"(is(A, 1, True) OR is(B, 2, True) OR is(C, 3, True))"
),
)
-
+
for expr1, expr2, result in tests:
self.assertEqual(str(expr1.orWith(expr2)), result, msg="Failed on %s" % (result,))
Modified: CalendarServer/trunk/twistedcaldav/query/test/test_queryfilter.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/query/test/test_queryfilter.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/query/test/test_queryfilter.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -26,12 +26,13 @@
xml_element = caldavxml.Filter(
caldavxml.ComponentFilter(
- **{"name":"VCALENDAR"}
+ **{"name": "VCALENDAR"}
)
)
calendarqueryfilter.Filter(xml_element)
-
+
+
def test_simpleSummaryRangeQuery(self):
xml_element = caldavxml.Filter(
@@ -39,53 +40,56 @@
caldavxml.ComponentFilter(
caldavxml.PropertyFilter(
caldavxml.TextMatch.fromString("test"),
- **{"name":"SUMMARY",}
+ **{"name": "SUMMARY", }
),
- **{"name":"VEVENT"}
+ **{"name": "VEVENT"}
),
- **{"name":"VCALENDAR"}
+ **{"name": "VCALENDAR"}
)
)
calendarqueryfilter.Filter(xml_element)
-
+
+
def test_simpleTimeRangeQuery(self):
xml_element = caldavxml.Filter(
caldavxml.ComponentFilter(
caldavxml.ComponentFilter(
- caldavxml.TimeRange(**{"start":"20060605T160000Z", "end":"20060605T170000Z"}),
- **{"name":"VEVENT"}
+ caldavxml.TimeRange(**{"start": "20060605T160000Z", "end": "20060605T170000Z"}),
+ **{"name": "VEVENT"}
),
- **{"name":"VCALENDAR"}
+ **{"name": "VCALENDAR"}
)
)
calendarqueryfilter.Filter(xml_element)
-
+
+
def test_multipleTimeRangeQuery(self):
xml_element = caldavxml.Filter(
caldavxml.ComponentFilter(
caldavxml.ComponentFilter(
- caldavxml.TimeRange(**{"start":"20060605T160000Z", "end":"20060605T170000Z"}),
- **{"name":("VEVENT", "VFREEBUSY", "VAVAILABILITY")}
+ caldavxml.TimeRange(**{"start": "20060605T160000Z", "end": "20060605T170000Z"}),
+ **{"name": ("VEVENT", "VFREEBUSY", "VAVAILABILITY")}
),
- **{"name":"VCALENDAR"}
+ **{"name": "VCALENDAR"}
)
)
calendarqueryfilter.Filter(xml_element)
+
def test_queryWithTimezone(self):
xml_element = caldavxml.Filter(
caldavxml.ComponentFilter(
caldavxml.ComponentFilter(
- caldavxml.TimeRange(**{"start":"20060605T160000Z", "end":"20060605T170000Z"}),
- **{"name":"VEVENT"}
+ caldavxml.TimeRange(**{"start": "20060605T160000Z", "end": "20060605T170000Z"}),
+ **{"name": "VEVENT"}
),
- **{"name":"VCALENDAR"}
+ **{"name": "VCALENDAR"}
)
)
Modified: CalendarServer/trunk/twistedcaldav/test/data/makelargecalendars.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/data/makelargecalendars.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/data/makelargecalendars.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -40,20 +40,23 @@
print "Unrecognized option: %s" % (option,)
raise ValueError
-
- for ctr in (xrange(user_one, user_one + 1) if user_one else xrange(1, user_max + 1)):
+ for ctr in (xrange(user_one, user_one + 1) if user_one else xrange(1, user_max + 1)):
path = os.path.join(document_root, "calendars/__uids__/us/er/user%02d" % (ctr,))
-
- try: os.makedirs(path)
- except OSError: pass
-
- try: os.makedirs(path)
- except OSError: pass
-
+
+ try:
+ os.makedirs(path)
+ except OSError:
+ pass
+
+ try:
+ os.makedirs(path)
+ except OSError:
+ pass
+
for calendar in calendars:
if not os.path.isdir(os.path.join(path, calendar)):
print "Expanding %s to %s" % (calendar, path)
- cmd = "tar -C %r -zx -f %r" % (path,
- os.path.join(wd,
+ cmd = "tar -C %r -zx -f %r" % (path,
+ os.path.join(wd,
calendar + ".tgz"))
os.system(cmd)
Modified: CalendarServer/trunk/twistedcaldav/test/data/makelargefbset.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/data/makelargefbset.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/data/makelargefbset.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -34,15 +34,19 @@
print "Unrecognized option: %s" % (option,)
raise ValueError
- for ctr in xrange(1, user_max + 1):
+ for ctr in xrange(1, user_max + 1):
path = "calendars/users/user%02d" % (ctr,)
-
- try: os.makedirs(path)
- except OSError: pass
-
- try: os.makedirs(os.path.join(path, "calendar"))
- except OSError: pass
-
+
+ try:
+ os.makedirs(path)
+ except OSError:
+ pass
+
+ try:
+ os.makedirs(os.path.join(path, "calendar"))
+ except OSError:
+ pass
+
inboxname = os.path.join(path, "inbox")
attrs = xattr.xattr(inboxname)
attrs["WebDAV:{urn:ietf:params:xml:ns:caldav}calendar-free-busy-set"] = """<?xml version='1.0' encoding='UTF-8'?>
Modified: CalendarServer/trunk/twistedcaldav/test/test_accounting.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_accounting.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_accounting.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -29,64 +29,71 @@
def setUp(self):
super(AccountingITIP, self).setUp()
config.AccountingCategories.iTIP = True
- config.AccountingPrincipals = ["*",]
+ config.AccountingPrincipals = ["*", ]
os.mkdir(config.AccountingLogRoot)
+
class _Principal(object):
-
+
class _Record(object):
-
+
def __init__(self, guid):
self.guid = guid
-
+
def __init__(self, guid):
-
+
self.record = self._Record(guid)
+
def test_permissions_makedirs(self):
"""
Test permissions when creating accounting
"""
-
+
# Make log root non-writeable
os.chmod(config.AccountingLogRoot, stat.S_IRUSR)
-
+
emitAccounting("iTIP", self._Principal("1234-5678"), "bogus")
+
def test_file_instead_of_directory(self):
"""
Test permissions when creating accounting
"""
-
+
# Make log root a file
config.AccountingLogRoot = "other"
open(config.AccountingLogRoot, "w").close()
emitAccounting("iTIP", self._Principal("1234-5678"), "bogus")
+
+
class AccountingHTTP (twistedcaldav.test.util.TestCase):
def setUp(self):
-
+
super(AccountingHTTP, self).setUp()
config.AccountingCategories.HTTP = True
- config.AccountingPrincipals = ["*",]
+ config.AccountingPrincipals = ["*", ]
+
def test_channel_request(self):
"""
Test permissions when creating accounting
"""
-
+
# Make channel request object
channelRequest = HTTPLoggingChannelRequest(HTTPChannel())
self.assertTrue(channelRequest != None)
+
def test_logging(self):
"""
Test permissions when creating accounting
"""
-
+
class FakeRequest(object):
-
+
def handleContentChunk(self, data):
pass
def handleContentComplete(self):
@@ -101,6 +108,6 @@
channelRequest.lineReceived("Content-Length:5")
channelRequest.handleContentChunk("Bogus")
channelRequest.handleContentComplete()
- channelRequest.writeHeaders(200, http_headers.Headers({"Content-Type":http_headers.MimeType('text', 'plain'),"Content-Length":"4"}))
+ channelRequest.writeHeaders(200, http_headers.Headers({"Content-Type": http_headers.MimeType('text', 'plain'), "Content-Length": "4"}))
channelRequest.transport.write("Data")
channelRequest.finish()
Modified: CalendarServer/trunk/twistedcaldav/test/test_cache.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_cache.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_cache.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -40,11 +40,14 @@
setattr(self, '_called', called + 1)
return token
+
+
class StubDirectoryRecord(object):
-
+
def __init__(self, uid):
self.uid = uid
+
def cacheToken(self):
"""
Generate a token that can be uniquely used to identify the state of this record for use
@@ -55,24 +58,33 @@
self.uid,
))
+
+
class StubDirectory(object):
-
+
def recordWithShortName(self, recordType, recordName):
return StubDirectoryRecord(recordName)
+
+
class StubSiteResource(object):
def __init__(self):
self.directory = StubDirectory()
-
+
+
def getDirectory(self):
return self.directory
+
+
class StubSite(object):
-
+
def __init__(self):
self.resource = StubSiteResource()
+
+
class StubRequest(object):
resources = {}
@@ -87,7 +99,7 @@
self.body = body
self.stream = MemoryStream(body)
-
+
self.site = StubSite()
@@ -113,6 +125,7 @@
if record is not None:
self.record = record
+
def url(self):
return self._url
@@ -130,6 +143,7 @@
self.ccn,
MemcacheChangeNotifier)
+
def assertToken(self, expectedToken):
token = self.memcache._cache['cacheToken::memory:'][1]
self.assertEquals(token, expectedToken)
@@ -335,14 +349,12 @@
'/principals/__uids__/dreid/',
'/principals/__uids__/dreid/'))
-
d1.addCallback(self.assertResponse,
(expected_response.code,
expected_response.headers,
expected_response.body))
return d1
-
d = self.rc.cacheResponseForRequest(
StubRequest('PROPFIND',
'/principals/__uids__/dreid/',
@@ -403,17 +415,19 @@
'principalToken0',
StubDirectoryRecord('cdaboo').cacheToken(),
'uriToken0',
- {'/calendars/__uids__/cdaboo/calendars/':'childToken0'},
+ {'/calendars/__uids__/cdaboo/calendars/': 'childToken0'},
(self.expected_response[0],
dict(list(self.expected_response[1].getAllRawHeaders())),
self.expected_response[2]))))
self.memcacheStub = memcacheStub
+
def tearDown(self):
for call in self.memcacheStub._timeouts.itervalues():
call.cancel()
+
def test_givenURIsForKeys(self):
expected_response = (200, Headers({}), "Foobarbaz")
@@ -433,7 +447,7 @@
'principalToken0',
StubDirectoryRecord('cdaboo').cacheToken(),
'uriToken0',
- {'/calendars/__uids__/cdaboo/calendars/':'childToken0'},
+ {'/calendars/__uids__/cdaboo/calendars/': 'childToken0'},
(expected_response[0],
dict(list(expected_response[1].getAllRawHeaders())),
expected_response[2]))))
@@ -490,6 +504,7 @@
self.resource = TestCachingResource(StubResponse(200, {}, "foobar"))
self.responseCache = StubResponseCacheResource()
+
def test_DAVHeaderCached(self):
"""
Test that the DAV header set in renderHTTP is cached.
Modified: CalendarServer/trunk/twistedcaldav/test/test_caldavxml.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_caldavxml.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_caldavxml.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -21,32 +21,32 @@
def test_TimeRange(self):
-
+
self.assertRaises(ValueError, caldavxml.CalDAVTimeRangeElement)
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000Z")
self.assertTrue(tr.valid())
-
+
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000")
self.assertFalse(tr.valid())
-
+
tr = caldavxml.CalDAVTimeRangeElement(start="20110201")
self.assertFalse(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(end="20110201T120000Z")
self.assertTrue(tr.valid())
-
+
tr = caldavxml.CalDAVTimeRangeElement(end="20110201T120000")
self.assertFalse(tr.valid())
-
+
tr = caldavxml.CalDAVTimeRangeElement(end="20110201")
self.assertFalse(tr.valid())
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000Z", end="20110202T120000Z")
self.assertTrue(tr.valid())
-
+
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000Z", end="20110202T120000")
self.assertFalse(tr.valid())
-
+
tr = caldavxml.CalDAVTimeRangeElement(start="20110201T120000Z", end="20110202")
self.assertFalse(tr.valid())
Modified: CalendarServer/trunk/twistedcaldav/test/test_config.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_config.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_config.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -19,7 +19,7 @@
from twistedcaldav.config import config, ConfigDict, mergeData
from twistedcaldav.resource import CalDAVResource
-from twistedcaldav.stdconfig import DEFAULT_CONFIG, PListConfigProvider,\
+from twistedcaldav.stdconfig import DEFAULT_CONFIG, PListConfigProvider, \
RELATIVE_PATHS
from twistedcaldav.test.util import TestCase
import socket
@@ -70,6 +70,7 @@
testCase.assertEquals(config.ResponseCompression, False)
+
class ConfigTests(TestCase):
def setUp(self):
TestCase.setUp(self)
@@ -77,10 +78,12 @@
self.testConfig = self.mktemp()
open(self.testConfig, "w").write(testConfig)
+
def tearDown(self):
config.setDefaults(DEFAULT_CONFIG)
config.reset()
+
def testDefaults(self):
for key, value in DEFAULT_CONFIG.iteritems():
if key in ("ServerHostName", "Notifications", "MultiProcess",
@@ -103,6 +106,7 @@
% (key, getattr(config, key), value)
)
+
def testLoadConfig(self):
self.assertEquals(config.ResponseCompression, True)
@@ -110,6 +114,7 @@
self.assertEquals(config.ResponseCompression, False)
+
def testScoping(self):
self.assertEquals(config.ResponseCompression, True)
@@ -119,10 +124,12 @@
_testResponseCompression(self)
+
def _myUpdateHook(self, data, reloading=False):
# A stub hook to record the value of reloading=
self._reloadingValue = reloading
+
def testReloading(self):
self.assertEquals(config.HTTPPort, 0)
@@ -141,6 +148,7 @@
self.assertEquals(config.HTTPPort, 0)
+
def testUpdateAndReload(self):
self.assertEquals(config.HTTPPort, 0)
@@ -156,6 +164,7 @@
self.assertEquals(config.HTTPPort, 8008)
+
def testPreserveAcrossReload(self):
self.assertEquals(config.Scheduling.iMIP.Sending.Password, "")
self.assertEquals(config.Scheduling.iMIP.Receiving.Password, "")
@@ -172,6 +181,7 @@
self.assertEquals(config.Scheduling.iMIP.Sending.Password, "sending")
self.assertEquals(config.Scheduling.iMIP.Receiving.Password, "receiving")
+
def testSetAttr(self):
self.assertNotIn("BindAddresses", config.__dict__)
@@ -181,6 +191,7 @@
self.assertEquals(config.BindAddresses, ["127.0.0.1"])
+
def testDirty(self):
config.__dict__["_dirty"] = False
self.assertEquals(config.__dict__["_dirty"], False)
@@ -202,6 +213,7 @@
self.assertEquals(config.SSLPort, 8443)
+
def testMerge(self):
self.assertEquals(config.MultiProcess.StaggeredStartup.Enabled, False)
@@ -209,6 +221,7 @@
self.assertEquals(config.MultiProcess.StaggeredStartup.Enabled, False)
+
def testDirectoryService_noChange(self):
self.assertEquals(config.DirectoryService.type, "twistedcaldav.directory.xmlfile.XMLDirectoryService")
self.assertEquals(config.DirectoryService.params.xmlFile, "accounts.xml")
@@ -218,6 +231,7 @@
self.assertEquals(config.DirectoryService.type, "twistedcaldav.directory.xmlfile.XMLDirectoryService")
self.assertEquals(config.DirectoryService.params.xmlFile, "accounts.xml")
+
def testDirectoryService_sameType(self):
self.assertEquals(config.DirectoryService.type, "twistedcaldav.directory.xmlfile.XMLDirectoryService")
self.assertEquals(config.DirectoryService.params.xmlFile, "accounts.xml")
@@ -227,6 +241,7 @@
self.assertEquals(config.DirectoryService.type, "twistedcaldav.directory.xmlfile.XMLDirectoryService")
self.assertEquals(config.DirectoryService.params.xmlFile, "accounts.xml")
+
def testDirectoryService_newType(self):
self.assertEquals(config.DirectoryService.type, "twistedcaldav.directory.xmlfile.XMLDirectoryService")
self.assertEquals(config.DirectoryService.params.xmlFile, "accounts.xml")
@@ -237,6 +252,7 @@
self.assertNotIn("xmlFile", config.DirectoryService.params)
self.assertEquals(config.DirectoryService.params.node, "/Search")
+
def testDirectoryService_newParam(self):
self.assertEquals(config.DirectoryService.type, "twistedcaldav.directory.xmlfile.XMLDirectoryService")
self.assertEquals(config.DirectoryService.params.xmlFile, "accounts.xml")
@@ -246,6 +262,7 @@
self.assertEquals(config.DirectoryService.type, "twistedcaldav.directory.appleopendirectory.OpenDirectoryService")
self.assertEquals(config.DirectoryService.params.node, "/Search")
+
def testDirectoryService_unknownType(self):
self.assertEquals(config.DirectoryService.type, "twistedcaldav.directory.xmlfile.XMLDirectoryService")
self.assertEquals(config.DirectoryService.params.xmlFile, "accounts.xml")
@@ -274,11 +291,13 @@
config.updateDefaults({"SSLPort": 0})
+
def testMergeDefaults(self):
config.updateDefaults({"MultiProcess": {}})
self.assertEquals(config._provider.getDefaults().MultiProcess.StaggeredStartup.Enabled, False)
+
def testSetDefaults(self):
config.updateDefaults({"SSLPort": 8443})
@@ -288,20 +307,23 @@
self.assertEquals(config.SSLPort, 0)
+
def testCopiesDefaults(self):
config.updateDefaults({"Foo": "bar"})
self.assertNotIn("Foo", DEFAULT_CONFIG)
+
def testComplianceClasses(self):
resource = CalDAVResource()
-
+
config.EnableProxyPrincipals = True
self.assertTrue("calendar-proxy" in resource.davComplianceClasses())
-
+
config.EnableProxyPrincipals = False
self.assertTrue("calendar-proxy" not in resource.davComplianceClasses())
+
def test_logging(self):
"""
Logging module configures properly.
@@ -323,6 +345,7 @@
self.assertEquals(logLevelForNamespace(None), "warn")
self.assertEquals(logLevelForNamespace("some.namespace"), "warn")
+
def test_ConfigDict(self):
configDict = ConfigDict({
"a": "A",
@@ -372,6 +395,7 @@
configDict._x = "X"
self.assertEquals(configDict._x, "X")
+
def test_mergeData(self):
"""
Verify we don't lose keys which are present in the old but not
@@ -406,6 +430,7 @@
self.assertEquals(old.Scheduling.iMIP.Receiving.Server, "example.com")
self.assertEquals(old.Scheduling.iMIP.Sending.Username, "plugh")
+
def test_SimpleInclude(self):
testConfigMaster = """<?xml version="1.0" encoding="UTF-8"?>
@@ -435,7 +460,7 @@
<key>some.namespace</key>
<string>debug</string>
</dict>
-
+
<key>Includes</key>
<array>
<string>%s</string>
@@ -469,6 +494,7 @@
self.assertEquals(config.HTTPPort, 9008)
self.assertEquals(config.SSLPort, 8443)
+
def test_FQDNInclude(self):
testConfigMaster = """<?xml version="1.0" encoding="UTF-8"?>
@@ -498,7 +524,7 @@
<key>some.namespace</key>
<string>debug</string>
</dict>
-
+
<key>Includes</key>
<array>
<string>%s.$</string>
@@ -533,6 +559,7 @@
self.assertEquals(config.HTTPPort, 9008)
self.assertEquals(config.SSLPort, 8443)
+
def test_HostnameInclude(self):
testConfigMaster = """<?xml version="1.0" encoding="UTF-8"?>
@@ -562,7 +589,7 @@
<key>some.namespace</key>
<string>debug</string>
</dict>
-
+
<key>Includes</key>
<array>
<string>%s.#</string>
Modified: CalendarServer/trunk/twistedcaldav/test/test_customxml.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_customxml.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_customxml.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -22,8 +22,8 @@
def test_DTStamp(self):
-
+
dtstamp = customxml.DTStamp()
now = time.time()
- now_tm = time.gmtime( now )
+ now_tm = time.gmtime(now)
self.assertEqual(str(dtstamp)[:4], "%s" % (now_tm.tm_year,))
Modified: CalendarServer/trunk/twistedcaldav/test/test_database.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_database.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_database.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -26,9 +26,9 @@
"""
Test abstract SQL DB class
"""
-
+
class TestDB(ADBAPISqliteMixin, AbstractADBAPIDatabase):
-
+
def __init__(self, path, persistent=False, version="1"):
self.version = version
self.dbpath = path
@@ -39,19 +39,19 @@
@return: the schema version assigned to this index.
"""
return self.version
-
+
def _db_type(self):
"""
@return: the collection type assigned to this index.
"""
return "TESTTYPE"
-
+
def _db_init_data_tables(self):
"""
Initialise the underlying database tables.
@param q: a database cursor to use.
"""
-
+
#
# TESTTYPE table
#
@@ -67,8 +67,9 @@
def _db_remove_data_tables(self):
return self._db_execute("drop table TESTTYPE")
+
class TestDBRecreateUpgrade(TestDB):
-
+
class RecreateDBException(Exception):
pass
class UpgradeDBException(Exception):
@@ -80,8 +81,9 @@
def _db_recreate(self):
raise self.RecreateDBException()
+
class TestDBCreateIndexOnUpgrade(TestDB):
-
+
def __init__(self, path, persistent=False):
super(Database.TestDBCreateIndexOnUpgrade, self).__init__(path, persistent, version="2")
@@ -92,13 +94,15 @@
"""
)
+
class TestDBPauseInInit(TestDB):
-
+
def _db_init(self):
-
+
time.sleep(1)
super(Database.TestDBPauseInInit, self)._db_init()
+
@inlineCallbacks
def inlineCallbackRaises(self, exc, f, *args, **kwargs):
try:
@@ -110,6 +114,7 @@
else:
self.fail("%s not raised" % (exc,))
+
@inlineCallbacks
def test_connect(self):
"""
@@ -120,6 +125,7 @@
yield db.open()
self.assertTrue(db.initialized)
+
@inlineCallbacks
def test_connectFailure(self):
"""
@@ -127,7 +133,7 @@
"""
db = Database.TestDB(self.mktemp())
# Make _db_init fail
- db._db_init = lambda : 1/0
+ db._db_init = lambda : 1 / 0
self.assertFalse(db.initialized)
try:
yield db.open()
@@ -136,6 +142,7 @@
self.assertFalse(db.initialized)
self.assertEquals(db.pool, None)
+
@inlineCallbacks
def test_readwrite(self):
"""
@@ -148,6 +155,7 @@
items = (yield db.queryList("SELECT * from TESTTYPE"))
self.assertEqual(items, ("FOO",))
+
@inlineCallbacks
def test_close(self):
"""
@@ -159,13 +167,14 @@
db.close()
self.assertFalse(db.initialized)
db.close()
-
+
+
@inlineCallbacks
def test_version_upgrade_nonpersistent(self):
"""
Connect to database and create table
"""
-
+
db_file = self.mktemp()
db = Database.TestDB(db_file)
@@ -181,6 +190,7 @@
items = (yield db.query("SELECT * from TESTTYPE"))
self.assertEqual(items, ())
+
def test_version_upgrade_persistent(self):
"""
Connect to database and create table
@@ -205,6 +215,7 @@
items = (yield db.query("SELECT * from TESTTYPE"))
self.assertEqual(items, (("FOO", "BAR")))
+
def test_version_upgrade_persistent_add_index(self):
"""
Connect to database and create table
Modified: CalendarServer/trunk/twistedcaldav/test/test_dateops.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_dateops.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_dateops.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -18,8 +18,8 @@
from twisted.trial.unittest import SkipTest
from pycalendar.datetime import PyCalendarDateTime
-from twistedcaldav.dateops import parseSQLTimestampToPyCalendar,\
- parseSQLDateToPyCalendar, pyCalendarTodatetime,\
+from twistedcaldav.dateops import parseSQLTimestampToPyCalendar, \
+ parseSQLDateToPyCalendar, pyCalendarTodatetime, \
normalizeForExpand, normalizeForIndex, normalizeToUTC, timeRangesOverlap
import datetime
@@ -41,14 +41,14 @@
"""
Test that dateops.normalizeForIndex works correctly on all four types of date/time: date only, floating, UTC and local time.
"""
-
+
data = (
(PyCalendarDateTime(2012, 1, 1), PyCalendarDateTime(2012, 1, 1, 0, 0, 0)),
(PyCalendarDateTime(2012, 1, 1, 10, 0, 0), PyCalendarDateTime(2012, 1, 1, 10, 0, 0)),
(PyCalendarDateTime(2012, 1, 1, 11, 0, 0, tzid=PyCalendarTimezone(utc=True)), PyCalendarDateTime(2012, 1, 1, 11, 0, 0, tzid=PyCalendarTimezone(utc=True))),
(PyCalendarDateTime(2012, 1, 1, 12, 0, 0, tzid=PyCalendarTimezone(tzid="America/New_York")), PyCalendarDateTime(2012, 1, 1, 17, 0, 0, tzid=PyCalendarTimezone(utc=True))),
)
-
+
for value, result in data:
self.assertEqual(normalizeForIndex(value), result)
@@ -57,14 +57,14 @@
"""
Test that dateops.normalizeToUTC works correctly on all four types of date/time: date only, floating, UTC and local time.
"""
-
+
data = (
(PyCalendarDateTime(2012, 1, 1), PyCalendarDateTime(2012, 1, 1, 0, 0, 0, tzid=PyCalendarTimezone(utc=True))),
(PyCalendarDateTime(2012, 1, 1, 10, 0, 0), PyCalendarDateTime(2012, 1, 1, 10, 0, 0, tzid=PyCalendarTimezone(utc=True))),
(PyCalendarDateTime(2012, 1, 1, 11, 0, 0, tzid=PyCalendarTimezone(utc=True)), PyCalendarDateTime(2012, 1, 1, 11, 0, 0, tzid=PyCalendarTimezone(utc=True))),
(PyCalendarDateTime(2012, 1, 1, 12, 0, 0, tzid=PyCalendarTimezone(tzid="America/New_York")), PyCalendarDateTime(2012, 1, 1, 17, 0, 0, tzid=PyCalendarTimezone(utc=True))),
)
-
+
for value, result in data:
self.assertEqual(normalizeToUTC(value), result)
@@ -73,14 +73,14 @@
"""
Test that dateops.normalizeForExpand works correctly on all four types of date/time: date only, floating, UTC and local time.
"""
-
+
data = (
(PyCalendarDateTime(2012, 1, 1), PyCalendarDateTime(2012, 1, 1)),
(PyCalendarDateTime(2012, 1, 1, 10, 0, 0), PyCalendarDateTime(2012, 1, 1, 10, 0, 0)),
(PyCalendarDateTime(2012, 1, 1, 11, 0, 0, tzid=PyCalendarTimezone(utc=True)), PyCalendarDateTime(2012, 1, 1, 11, 0, 0, tzid=PyCalendarTimezone(utc=True))),
(PyCalendarDateTime(2012, 1, 1, 12, 0, 0, tzid=PyCalendarTimezone(tzid="America/New_York")), PyCalendarDateTime(2012, 1, 1, 17, 0, 0, tzid=PyCalendarTimezone(utc=True))),
)
-
+
for value, result in data:
self.assertEqual(normalizeForExpand(value), result)
@@ -88,17 +88,21 @@
def test_floatoffset(self):
raise SkipTest("test unimplemented")
+
def test_adjustFloatingToTimezone(self):
raise SkipTest("test unimplemented")
+
def test_compareDateTime(self):
raise SkipTest("test unimplemented")
+
def test_differenceDateTime(self):
raise SkipTest("test unimplemented")
+
def test_timeRangesOverlap(self):
-
+
data = (
# Timed
(
@@ -165,7 +169,7 @@
PyCalendarDateTime(2012, 1, 3, 0, 0, 0, tzid=PyCalendarTimezone(utc=True)),
True,
),
-
+
# All day
(
"All day: Start within, end within - overlap",
@@ -232,14 +236,15 @@
True,
),
)
-
+
for title, start1, end1, start2, end2, result in data:
self.assertEqual(timeRangesOverlap(start1, end1, start2, end2), result, msg="Failed: %s" % (title,))
-
+
def test_normalizePeriodList(self):
raise SkipTest("test unimplemented")
+
def test_clipPeriod(self):
raise SkipTest("test unimplemented")
@@ -269,11 +274,12 @@
for sqlStr, result in tests:
self.assertEqual(parseSQLTimestampToPyCalendar(sqlStr), result)
+
def test_parseSQLDateToPyCalendar(self):
"""
dateops.parseSQLDateToPyCalendar
"""
-
+
tests = (
("2012-04-04", PyCalendarDateTime(2012, 4, 4)),
("2012-12-31 00:00:00", PyCalendarDateTime(2012, 12, 31)),
@@ -282,6 +288,6 @@
for sqlStr, result in tests:
self.assertEqual(parseSQLDateToPyCalendar(sqlStr), result)
+
def test_datetimeMktime(self):
raise SkipTest("test unimplemented")
-
Modified: CalendarServer/trunk/twistedcaldav/test/test_freebusyquery.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_freebusyquery.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_freebusyquery.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -47,6 +47,7 @@
"""
raise SkipTest("test unimplemented")
+
def test_free_busy_recurring(self):
"""
Free-busy on recurring events.
@@ -54,6 +55,7 @@
"""
raise SkipTest("test unimplemented")
+
def test_free_busy_statustransp(self):
"""
SFree-busy on events with different STATUS/TRANSP property values.
@@ -61,6 +63,7 @@
"""
raise SkipTest("test unimplemented")
+
def test_free_busy_free_busy(self):
"""
Free-busy on free busy components.
@@ -68,8 +71,9 @@
"""
raise SkipTest("test unimplemented")
+
def simple_free_busy_query(self, cal_uri, start, end):
-
+
query_timerange = caldavxml.TimeRange(
start=start,
end=end,
@@ -107,4 +111,3 @@
got_calendar
)
returnValue(result)
-
Modified: CalendarServer/trunk/twistedcaldav/test/test_kerberos.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_kerberos.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_kerberos.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -32,6 +32,7 @@
def test_BasicKerberosCredentials(self):
authkerb.BasicKerberosCredentials("test", "test", "HTTP/example.com at EXAMPLE.COM", "EXAMPLE.COM")
+
@inlineCallbacks
def test_BasicKerberosCredentialFactory(self):
factory = authkerb.BasicKerberosCredentialFactory(principal="HTTP/server.example.com at EXAMPLE.COM")
@@ -41,6 +42,7 @@
self.assertTrue(challenge == expected_challenge,
msg="BasicKerberosCredentialFactory challenge %s != %s" % (challenge, expected_challenge))
+
def test_BasicKerberosCredentialFactoryInvalidPrincipal(self):
self.assertRaises(
ValueError,
@@ -48,9 +50,11 @@
principal="HTTP/server.example.com/EXAMPLE.COM"
)
+
def test_NegotiateCredentials(self):
authkerb.NegotiateCredentials("test at EXAMPLE.COM", "test")
+
@inlineCallbacks
def test_NegotiateCredentialFactory(self):
factory = authkerb.NegotiateCredentialFactory(principal="HTTP/server.example.com at EXAMPLE.COM")
@@ -70,11 +74,13 @@
else:
self.fail(msg="NegotiateCredentialFactory decode did not fail")
+
def test_NegotiateCredentialFactoryDifferentRealm(self):
factory = authkerb.NegotiateCredentialFactory(principal="HTTP/server.example.com at EXAMPLE.COM")
self.assertEquals(factory.realm, "EXAMPLE.COM")
self.assertEquals(factory.service, "HTTP at SERVER.EXAMPLE.COM")
+
def test_NegotiateCredentialFactoryInvalidPrincipal(self):
self.assertRaises(
ValueError,
Modified: CalendarServer/trunk/twistedcaldav/test/test_localization.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_localization.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_localization.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -61,30 +61,33 @@
self.assertEquals(_("All day"), "Allway ayday")
self.assertEquals(_("%(startTime)s to %(endTime)s") %
- { 'startTime' : 'a', 'endTime' : 'b' },
+ {'startTime': 'a', 'endTime': 'b'},
"a otay b"
)
+
def test_TimeFormattingAMPM(self):
with translationTo('en', localeDir=localeDir) as t:
- self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 0, 0, 0)), "12:00 AM")
- self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 12, 0, 0)), "12:00 PM")
+ self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 0, 0, 0)), "12:00 AM")
+ self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 12, 0, 0)), "12:00 PM")
self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 23, 59, 0)), "11:59 PM")
- self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 6, 5, 0)), "6:05 AM")
- self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 16, 5, 0)), "4:05 PM")
+ self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 6, 5, 0)), "6:05 AM")
+ self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 16, 5, 0)), "4:05 PM")
+
def test_TimeFormatting24Hour(self):
with translationTo('pig', localeDir=localeDir) as t:
- self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 0, 0, 0)), "00:00")
- self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 12, 0, 0)), "12:00")
+ self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 0, 0, 0)), "00:00")
+ self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 12, 0, 0)), "12:00")
self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 23, 59, 0)), "23:59")
- self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 6, 5, 0)), "06:05")
- self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 16, 5, 0)), "16:05")
+ self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 6, 5, 0)), "06:05")
+ self.assertEquals(t.dtTime(PyCalendarDateTime(2000, 1, 1, 16, 5, 0)), "16:05")
+
def test_CalendarFormatting(self):
with translationTo('en', localeDir=localeDir) as t:
@@ -159,9 +162,9 @@
self.assertEquals(t.time(comp),
(u'11:05 otay 12:15 (PDT)', u'2 aysday 1 ourhay 10 inutesmay'))
-
self.assertEquals(t.monthAbbreviation(1), "ANJAY")
+
def test_getLanguage(self):
"""
Test that getLanguage( ) examines config.
Modified: CalendarServer/trunk/twistedcaldav/test/test_memcachelock.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_memcachelock.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_memcachelock.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -21,10 +21,10 @@
"""
class FakedMemcacheLock(MemcacheLock):
-
+
def __init__(self, faked, namespace, locktoken, timeout=5.0, retry_interval=0.1, expire_time=0):
"""
-
+
@param namespace: a unique namespace for this lock's tokens
@type namespace: C{str}
@param locktoken: the name of the locktoken
@@ -36,14 +36,15 @@
@param expiryTime: the time in seconds for the lock to expire. Zero: no expiration.
@type expiryTime: C{float}
"""
-
+
super(MemCacheTestCase.FakedMemcacheLock, self).__init__(namespace, locktoken, timeout, retry_interval, expire_time)
self.faked = faked
def _getMemcacheProtocol(self):
-
+
return self.faked
-
+
+
def setUp(self):
"""
Create a memcache client, connect it to a string protocol, and make it
@@ -82,6 +83,7 @@
self.proto.dataReceived(recv)
return d
+
def test_get(self):
"""
L{MemCacheProtocol.get} should return a L{Deferred} which is
@@ -96,6 +98,7 @@
"bar"
)
+
def test_set(self):
"""
L{MemCacheProtocol.get} should return a L{Deferred} which is
@@ -110,6 +113,7 @@
True
)
+
@inlineCallbacks
def test_acquire(self):
"""
@@ -126,6 +130,7 @@
)
self.assertTrue(lock._hasLock)
+
@inlineCallbacks
def test_acquire_ok_timeout_0(self):
"""
@@ -142,6 +147,7 @@
)
self.assertTrue(lock._hasLock)
+
@inlineCallbacks
def test_acquire_fails_timeout_0(self):
"""
@@ -165,6 +171,7 @@
self.fail("No timeout exception thrown")
self.assertFalse(lock._hasLock)
+
@inlineCallbacks
def test_acquire_release(self):
"""
@@ -188,6 +195,7 @@
)
self.assertFalse(lock._hasLock)
+
@inlineCallbacks
def test_acquire_clean(self):
"""
@@ -209,6 +217,7 @@
True
)
+
@inlineCallbacks
def test_acquire_unicode(self):
"""
@@ -225,6 +234,7 @@
)
self.assertTrue(lock._hasLock)
+
@inlineCallbacks
def test_acquire_invalid_token1(self):
"""
@@ -232,7 +242,7 @@
called back with the value and the flag associated with the given key
if the server returns a successful result.
"""
-
+
try:
lock = MemCacheTestCase.FakedMemcacheLock(self.proto, "lock", 1)
yield lock.acquire()
Modified: CalendarServer/trunk/twistedcaldav/test/test_memcachepool.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_memcachepool.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_memcachepool.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -45,7 +45,6 @@
self.shutdown_requested = False
-
def clientFree(self, client):
"""
Record a C{'free'} call for C{client}.
@@ -79,7 +78,6 @@
A L{IConnector.connect} implementation that doesn't do anything.
"""
-
def stopConnecting(self):
"""
A L{IConnector.stopConnecting} that doesn't do anything.
@@ -104,10 +102,11 @@
return StubConnector()
- def addSystemEventTrigger(*args, **kwds):
+ def addSystemEventTrigger(self, *args, **kwds):
pass
+
class PooledMemCacheProtocolTests(TestCase):
"""
Tests for the L{PooledMemCacheProtocol}
@@ -127,6 +126,7 @@
return d
+
class MemCacheClientFactoryTests(TestCase):
"""
Tests for the L{MemCacheClientFactory}
@@ -149,6 +149,7 @@
self.factory.connectionPool = self.pool
self.protocol = self.factory.buildProtocol(None)
+
def test_clientConnectionFailedNotifiesPool(self):
"""
Test that L{MemCacheClientFactory.clientConnectionFailed} notifies
@@ -303,7 +304,7 @@
d = self.pool.performRequest('get', 'foo')
d.addCallback(results.append)
- args, kwargs = self.reactor.calls.pop()
+ args, _ignore_kwargs = self.reactor.calls.pop()
self.assertEquals(args[:2], (MC_ADDRESS.host, MC_ADDRESS.port))
@@ -374,12 +375,11 @@
p1 = InMemoryMemcacheProtocol()
p1.set('foo', 'baz')
-
self.pool.clientBusy(p)
self.pool.performRequest('get', 'foo')
- args, kwargs = self.reactor.calls.pop()
+ args, _ignore_kwargs = self.reactor.calls.pop()
self.assertEquals(args[:2], (MC_ADDRESS.host, MC_ADDRESS.port))
@@ -394,7 +394,7 @@
self.pool.performRequest('get', 'foo')
- args, kwargs = self.reactor.calls.pop()
+ args, _ignore_kwargs = self.reactor.calls.pop()
self.assertEquals(args[:2], (MC_ADDRESS.host, MC_ADDRESS.port))
Modified: CalendarServer/trunk/twistedcaldav/test/test_memcacheprops.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_memcacheprops.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_memcacheprops.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -45,43 +45,54 @@
for childName in childNames:
self.children[childName] = StubResource(self, path, childName)
+
def listChildren(self):
return self.children.iterkeys()
+
def getChild(self, childName):
return self.children[childName]
+
def propertyCollection(self):
if not hasattr(self, "_propertyCollection"):
self._propertyCollection = MemcachePropertyCollection(self)
return self._propertyCollection
+
class StubResource(object):
def __init__(self, parent, path, name):
self.parent = parent
self.fp = StubFP(os.path.join(path, name))
+
def deadProperties(self):
if not hasattr(self, "_dead_properties"):
self._dead_properties = self.parent.propertyCollection().propertyStoreForChild(self, InMemoryPropertyStore())
return self._dead_properties
+
+
class StubFP(object):
def __init__(self, path):
self.path = path
+
def child(self, childName):
class _Child(object):
def __init__(self, path):
self.path = path
return _Child(os.path.join(self.path, childName))
+
def basename(self):
return os.path.basename(self.path)
+
+
class StubProperty(object):
def __init__(self, ns, name, value=None):
@@ -89,13 +100,16 @@
self.name = name
self.value = value
+
def qname(self):
return self.ns, self.name
+
def __repr__(self):
return "%s = %s" % (encodeXMLName(self.ns, self.name), self.value)
+
class MemcachePropertyCollectionTestCase(TestCase):
"""
Test MemcacheProprtyCollection
@@ -104,6 +118,7 @@
def getColl(self):
return StubCollection("calendars", ["a", "b", "c"])
+
def test_setget(self):
child1 = self.getColl().getChild("a")
@@ -121,6 +136,7 @@
self.assertEquals(child1.deadProperties().get(("ns1:", "prop1")).value,
"val2")
+
def test_merge(self):
child1 = self.getColl().getChild("a")
child2 = self.getColl().getChild("a")
@@ -156,6 +172,7 @@
self.assertEquals(child1.deadProperties().get(("ns1:", "prop3")).value,
"val3")
+
def test_delete(self):
child1 = self.getColl().getChild("a")
child2 = self.getColl().getChild("a")
@@ -174,7 +191,7 @@
child1.deadProperties().delete(("ns1:", "prop1"))
self.assertRaises(HTTPError, child1.deadProperties().get, ("ns1:", "prop1"))
- self.assertFalse(child1.deadProperties().contains(("ns1:", "prop1")))
+ self.assertFalse(child1.deadProperties().contains(("ns1:", "prop1")))
self.assertEquals(child1.deadProperties().get(("ns1:", "prop2")).value,
"val0")
self.assertEquals(child1.deadProperties().get(("ns1:", "prop3")).value,
@@ -184,30 +201,32 @@
child2 = self.getColl().getChild("a")
# verify properties
- self.assertFalse(child2.deadProperties().contains(("ns1:", "prop1")))
+ self.assertFalse(child2.deadProperties().contains(("ns1:", "prop1")))
self.assertEquals(child2.deadProperties().get(("ns1:", "prop2")).value,
"val0")
self.assertEquals(child2.deadProperties().get(("ns1:", "prop3")).value,
"val0")
+
def test_setget_uids(self):
for uid in (None, "123", "456"):
child1 = self.getColl().getChild("a")
child1.deadProperties().set(StubProperty("ns1:", "prop1", value="val1%s" % (uid if uid else "",)), uid=uid)
-
+
child2 = self.getColl().getChild("a")
self.assertEquals(child2.deadProperties().get(("ns1:", "prop1"), uid=uid).value,
"val1%s" % (uid if uid else "",))
-
+
child2.deadProperties().set(StubProperty("ns1:", "prop1", value="val2%s" % (uid if uid else "",)), uid=uid)
-
+
# force memcache to be consulted (once per collection per request)
child1 = self.getColl().getChild("a")
-
+
self.assertEquals(child1.deadProperties().get(("ns1:", "prop1"), uid=uid).value,
"val2%s" % (uid if uid else "",))
+
def test_merge_uids(self):
for uid in (None, "123", "456"):
@@ -216,20 +235,20 @@
child1.deadProperties().set(StubProperty("ns1:", "prop1", value="val0%s" % (uid if uid else "",)), uid=uid)
child1.deadProperties().set(StubProperty("ns1:", "prop2", value="val0%s" % (uid if uid else "",)), uid=uid)
child1.deadProperties().set(StubProperty("ns1:", "prop3", value="val0%s" % (uid if uid else "",)), uid=uid)
-
+
self.assertEquals(child2.deadProperties().get(("ns1:", "prop1"), uid=uid).value,
"val0%s" % (uid if uid else "",))
self.assertEquals(child1.deadProperties().get(("ns1:", "prop2"), uid=uid).value,
"val0%s" % (uid if uid else "",))
self.assertEquals(child1.deadProperties().get(("ns1:", "prop3"), uid=uid).value,
"val0%s" % (uid if uid else "",))
-
+
child2.deadProperties().set(StubProperty("ns1:", "prop1", value="val1%s" % (uid if uid else "",)), uid=uid)
child1.deadProperties().set(StubProperty("ns1:", "prop3", value="val3%s" % (uid if uid else "",)), uid=uid)
-
+
# force memcache to be consulted (once per collection per request)
child2 = self.getColl().getChild("a")
-
+
# verify properties
self.assertEquals(child2.deadProperties().get(("ns1:", "prop1"), uid=uid).value,
"val1%s" % (uid if uid else "",))
@@ -237,7 +256,7 @@
"val0%s" % (uid if uid else "",))
self.assertEquals(child2.deadProperties().get(("ns1:", "prop3"), uid=uid).value,
"val3%s" % (uid if uid else "",))
-
+
self.assertEquals(child1.deadProperties().get(("ns1:", "prop1"), uid=uid).value,
"val1%s" % (uid if uid else "",))
self.assertEquals(child1.deadProperties().get(("ns1:", "prop2"), uid=uid).value,
@@ -245,6 +264,7 @@
self.assertEquals(child1.deadProperties().get(("ns1:", "prop3"), uid=uid).value,
"val3%s" % (uid if uid else "",))
+
def test_delete_uids(self):
for uid in (None, "123", "456"):
@@ -253,29 +273,29 @@
child1.deadProperties().set(StubProperty("ns1:", "prop1", value="val0%s" % (uid if uid else "",)), uid=uid)
child1.deadProperties().set(StubProperty("ns1:", "prop2", value="val0%s" % (uid if uid else "",)), uid=uid)
child1.deadProperties().set(StubProperty("ns1:", "prop3", value="val0%s" % (uid if uid else "",)), uid=uid)
-
+
self.assertEquals(child2.deadProperties().get(("ns1:", "prop1"), uid=uid).value,
"val0%s" % (uid if uid else "",))
self.assertEquals(child1.deadProperties().get(("ns1:", "prop2"), uid=uid).value,
"val0%s" % (uid if uid else "",))
self.assertEquals(child1.deadProperties().get(("ns1:", "prop3"), uid=uid).value,
"val0%s" % (uid if uid else "",))
-
+
child2.deadProperties().set(StubProperty("ns1:", "prop1", value="val1%s" % (uid if uid else "",)), uid=uid)
child1.deadProperties().delete(("ns1:", "prop1"), uid=uid)
self.assertRaises(HTTPError, child1.deadProperties().get, ("ns1:", "prop1"), uid=uid)
-
- self.assertFalse(child1.deadProperties().contains(("ns1:", "prop1"), uid=uid))
+
+ self.assertFalse(child1.deadProperties().contains(("ns1:", "prop1"), uid=uid))
self.assertEquals(child1.deadProperties().get(("ns1:", "prop2"), uid=uid).value,
"val0%s" % (uid if uid else "",))
self.assertEquals(child1.deadProperties().get(("ns1:", "prop3"), uid=uid).value,
"val0%s" % (uid if uid else "",))
-
+
# force memcache to be consulted (once per collection per request)
child2 = self.getColl().getChild("a")
-
+
# verify properties
- self.assertFalse(child2.deadProperties().contains(("ns1:", "prop1"), uid=uid))
+ self.assertFalse(child2.deadProperties().contains(("ns1:", "prop1"), uid=uid))
self.assertEquals(child2.deadProperties().get(("ns1:", "prop2"), uid=uid).value,
"val0%s" % (uid if uid else "",))
self.assertEquals(child2.deadProperties().get(("ns1:", "prop3"), uid=uid).value,
@@ -288,6 +308,7 @@
for key, value in values.iteritems():
self.results[key] = value
+
def test_splitSetMulti(self):
self.callCount = 0
@@ -328,6 +349,7 @@
result[key] = self.expected[key]
return result
+
def test_splitGetsMulti(self):
self.callCount = 0
@@ -343,6 +365,7 @@
self.assertEquals(self.callCount, 3)
self.assertEquals(self.expected, result)
+
def test_splitGetsMultiWithChunksize(self):
self.callCount = 0
Modified: CalendarServer/trunk/twistedcaldav/test/test_memcacher.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_memcacher.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_memcacher.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -23,7 +23,7 @@
config.ProcessType = processType
cacher = Memcacher("testing")
-
+
result = yield cacher.set("akey", "avalue")
self.assertTrue(result)
@@ -33,6 +33,7 @@
else:
self.assertEquals("avalue", result)
+
@inlineCallbacks
def test_missingget(self):
@@ -40,10 +41,11 @@
config.ProcessType = processType
cacher = Memcacher("testing")
-
+
result = yield cacher.get("akey")
self.assertEquals(None, result)
+
@inlineCallbacks
def test_delete(self):
@@ -51,22 +53,23 @@
config.ProcessType = processType
cacher = Memcacher("testing")
-
+
result = yield cacher.set("akey", "avalue")
self.assertTrue(result)
-
+
result = yield cacher.get("akey")
if isinstance(cacher._memcacheProtocol, Memcacher.nullCacher):
self.assertEquals(None, result)
else:
self.assertEquals("avalue", result)
-
+
result = yield cacher.delete("akey")
self.assertTrue(result)
-
+
result = yield cacher.get("akey")
self.assertEquals(None, result)
+
@inlineCallbacks
def test_all_pickled(self):
@@ -74,22 +77,23 @@
config.ProcessType = processType
cacher = Memcacher("testing", pickle=True)
-
- result = yield cacher.set("akey", ["1", "2", "3",])
+
+ result = yield cacher.set("akey", ["1", "2", "3", ])
self.assertTrue(result)
-
+
result = yield cacher.get("akey")
if isinstance(cacher._memcacheProtocol, Memcacher.nullCacher):
self.assertEquals(None, result)
else:
- self.assertEquals(["1", "2", "3",], result)
-
+ self.assertEquals(["1", "2", "3", ], result)
+
result = yield cacher.delete("akey")
self.assertTrue(result)
-
+
result = yield cacher.get("akey")
self.assertEquals(None, result)
+
@inlineCallbacks
def test_all_noinvalidation(self):
@@ -97,35 +101,37 @@
config.ProcessType = processType
cacher = Memcacher("testing", no_invalidation=True)
-
- result = yield cacher.set("akey", ["1", "2", "3",])
+
+ result = yield cacher.set("akey", ["1", "2", "3", ])
self.assertTrue(result)
-
+
result = yield cacher.get("akey")
- self.assertEquals(["1", "2", "3",], result)
-
+ self.assertEquals(["1", "2", "3", ], result)
+
result = yield cacher.delete("akey")
self.assertTrue(result)
-
+
result = yield cacher.get("akey")
self.assertEquals(None, result)
+
def test_keynormalization(self):
for processType in ("Single", "Combined",):
config.ProcessType = processType
cacher = Memcacher("testing")
-
+
self.assertTrue(len(cacher._normalizeKey("A" * 100)) <= 250)
self.assertTrue(len(cacher._normalizeKey("A" * 512)) <= 250)
-
+
key = cacher._normalizeKey(" \n\t\r" * 20)
self.assertTrue(" " not in key)
self.assertTrue("\n" not in key)
self.assertTrue("\t" not in key)
self.assertTrue("\r" not in key)
+
@inlineCallbacks
def test_expiration(self):
@@ -149,6 +155,7 @@
result = yield cacher.get("akey")
self.assertEquals(None, result)
+
@inlineCallbacks
def test_checkAndSet(self):
Modified: CalendarServer/trunk/twistedcaldav/test/test_options.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_options.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_options.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -32,7 +32,8 @@
response = IResponse(response)
dav = response.headers.getHeader("dav")
- if not dav: self.fail("no DAV header: %s" % (response.headers,))
+ if not dav:
+ self.fail("no DAV header: %s" % (response.headers,))
self.assertIn("1", dav, "no DAV level 1 header")
self.assertIn("access-control", dav, "no DAV access-control header")
self.assertIn("calendar-access", dav, "no DAV calendar-access header")
@@ -45,6 +46,7 @@
return self.send(request, do_test)
+
def test_dav_header_implicit_caldav(self):
"""
DAV header advertises CalDAV
@@ -53,7 +55,8 @@
response = IResponse(response)
dav = response.headers.getHeader("dav")
- if not dav: self.fail("no DAV header: %s" % (response.headers,))
+ if not dav:
+ self.fail("no DAV header: %s" % (response.headers,))
self.assertIn("1", dav, "no DAV level 1 header")
self.assertIn("access-control", dav, "no DAV access-control header")
self.assertIn("calendar-access", dav, "no DAV calendar-access header")
@@ -65,6 +68,7 @@
return self.send(request, do_test)
+
def test_allow_header_caldav(self):
"""
Allow header advertises MKCALENDAR
@@ -73,13 +77,15 @@
response = IResponse(response)
allow = response.headers.getHeader("allow")
- if not allow: self.fail("no Allow header: %s" % (response.headers,))
+ if not allow:
+ self.fail("no Allow header: %s" % (response.headers,))
self.assertNotIn("MKCALENDAR", allow, "no MKCALENDAR support")
request = SimpleRequest(self.site, "OPTIONS", "/")
return self.send(request, do_test)
+
def test_allow_header_acl(self):
"""
Allow header advertises ACL
@@ -88,13 +94,15 @@
response = IResponse(response)
allow = response.headers.getHeader("allow")
- if not allow: self.fail("no Allow header: %s" % (response.headers,))
+ if not allow:
+ self.fail("no Allow header: %s" % (response.headers,))
self.assertIn("ACL", allow, "no ACL support")
request = SimpleRequest(self.site, "OPTIONS", "/")
return self.send(request, do_test)
+
def test_allow_header_deltav(self):
"""
Allow header advertises REPORT
@@ -103,13 +111,15 @@
response = IResponse(response)
allow = response.headers.getHeader("allow")
- if not allow: self.fail("no Allow header: %s" % (response.headers,))
+ if not allow:
+ self.fail("no Allow header: %s" % (response.headers,))
self.assertIn("REPORT", allow, "no REPORT support")
request = SimpleRequest(self.site, "OPTIONS", "/")
return self.send(request, do_test)
+
def test_dav_header_caldav_disabled(self):
"""
DAV header does not advertise CalDAV
@@ -118,7 +128,8 @@
response = IResponse(response)
dav = response.headers.getHeader("dav")
- if not dav: self.fail("no DAV header: %s" % (response.headers,))
+ if not dav:
+ self.fail("no DAV header: %s" % (response.headers,))
self.assertIn("1", dav, "no DAV level 1 header")
self.assertNotIn("calendar-access", dav, "DAV calendar-access header")
self.patch(config, "EnableCalDAV", False)
@@ -126,6 +137,7 @@
return self.send(request, do_test)
+
def test_dav_header_carddav_disabled(self):
"""
DAV header does not advertise CardDAV
@@ -134,7 +146,8 @@
response = IResponse(response)
dav = response.headers.getHeader("dav")
- if not dav: self.fail("no DAV header: %s" % (response.headers,))
+ if not dav:
+ self.fail("no DAV header: %s" % (response.headers,))
self.assertIn("1", dav, "no DAV level 1 header")
self.assertNotIn("addressbook", dav, "DAV addressbook header")
@@ -142,4 +155,3 @@
request = SimpleRequest(self.site, "OPTIONS", "/")
return self.send(request, do_test)
-
Modified: CalendarServer/trunk/twistedcaldav/test/test_sql.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_sql.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_sql.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -26,9 +26,9 @@
"""
Test abstract SQL DB class
"""
-
+
class TestDB(AbstractSQLDatabase):
-
+
def __init__(self, path, persistent=False, autocommit=False, version="1"):
self.version = version
super(SQL.TestDB, self).__init__(path, persistent, autocommit=autocommit)
@@ -38,19 +38,19 @@
@return: the schema version assigned to this index.
"""
return self.version
-
+
def _db_type(self):
"""
@return: the collection type assigned to this index.
"""
return "TESTTYPE"
-
+
def _db_init_data_tables(self, q):
"""
Initialise the underlying database tables.
@param q: a database cursor to use.
"""
-
+
#
# TESTTYPE table
#
@@ -63,8 +63,9 @@
"""
)
+
class TestDBRecreateUpgrade(TestDB):
-
+
class RecreateDBException(Exception):
pass
class UpgradeDBException(Exception):
@@ -76,8 +77,9 @@
def _db_recreate(self, do_commit=True):
raise self.RecreateDBException()
+
class TestDBCreateIndexOnUpgrade(TestDB):
-
+
def __init__(self, path, persistent=False, autocommit=False):
super(SQL.TestDBCreateIndexOnUpgrade, self).__init__(path, persistent, autocommit=autocommit, version="2")
@@ -88,13 +90,15 @@
"""
)
+
class TestDBPauseInInit(TestDB):
-
+
def _db_init(self, db_filename, q):
-
+
time.sleep(1)
super(SQL.TestDBPauseInInit, self)._db_init(db_filename, q)
+
def test_connect(self):
"""
Connect to database and create table
@@ -104,6 +108,7 @@
self.assertTrue(db._db() is not None)
self.assertTrue(db._db_connection is not None)
+
def test_connect_autocommit(self):
"""
Connect to database and create table
@@ -113,6 +118,7 @@
self.assertTrue(db._db() is not None)
self.assertTrue(db._db_connection is not None)
+
def test_readwrite(self):
"""
Add a record, search for it
@@ -124,6 +130,7 @@
items = [i for i in q.fetchall()]
self.assertEqual(items, [("FOO", "BAR")])
+
def test_readwrite_autocommit(self):
"""
Add a record, search for it
@@ -134,6 +141,7 @@
items = [i for i in q.fetchall()]
self.assertEqual(items, [("FOO", "BAR")])
+
def test_readwrite_cursor(self):
"""
Add a record, search for it
@@ -143,6 +151,7 @@
items = db._db_execute("SELECT * from TESTTYPE")
self.assertEqual(items, [("FOO", "BAR")])
+
def test_readwrite_cursor_autocommit(self):
"""
Add a record, search for it
@@ -152,6 +161,7 @@
items = db._db_execute("SELECT * from TESTTYPE")
self.assertEqual(items, [("FOO", "BAR")])
+
def test_readwrite_rollback(self):
"""
Add a record, search for it
@@ -162,6 +172,7 @@
items = db._db_execute("SELECT * from TESTTYPE")
self.assertEqual(items, [])
+
def test_close(self):
"""
Close database
@@ -172,12 +183,13 @@
db._db_close()
self.assertFalse(hasattr(db, "_db_connection"))
db._db_close()
-
+
+
def test_duplicate_create(self):
dbname = self.mktemp()
-
+
class DBThread(Thread):
-
+
def run(self):
try:
db = SQL.TestDBPauseInInit(dbname)
@@ -195,6 +207,7 @@
self.assertTrue(t1.result)
self.assertTrue(t2.result)
+
def test_version_upgrade_nonpersistent(self):
"""
Connect to database and create table
@@ -212,6 +225,7 @@
items = db._db_execute("SELECT * from TESTTYPE")
self.assertEqual(items, [])
+
def test_version_upgrade_persistent(self):
"""
Connect to database and create table
@@ -236,6 +250,7 @@
items = db._db_execute("SELECT * from TESTTYPE")
self.assertEqual(items, [("FOO", "BAR")])
+
def test_version_upgrade_persistent_add_index(self):
"""
Connect to database and create table
Modified: CalendarServer/trunk/twistedcaldav/test/test_xml.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_xml.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_xml.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -20,7 +20,7 @@
from twistedcaldav.ical import Component
from twistedcaldav.query import calendarqueryfilter
import twistedcaldav.test.util
-from twistedcaldav.caldavxml import ComponentFilter, PropertyFilter, TextMatch,\
+from twistedcaldav.caldavxml import ComponentFilter, PropertyFilter, TextMatch, \
Filter, TimeRange
class XML (twistedcaldav.test.util.TestCase):
@@ -41,8 +41,10 @@
("VEVENT", True),
("VTODO", False),
):
- if has: no = "no "
- else: no = ""
+ if has:
+ no = "no "
+ else:
+ no = ""
if has != calendarqueryfilter.ComponentFilter(
ComponentFilter(
@@ -54,6 +56,7 @@
).match(self.calendar, None):
self.fail("Calendar has %s%s?" % (no, component_name))
+
def test_PropertyFilter(self):
"""
Property filter element.
@@ -62,8 +65,10 @@
("UID", True),
("BOOGER", False),
):
- if has: no = "no "
- else: no = ""
+ if has:
+ no = "no "
+ else:
+ no = ""
if has != calendarqueryfilter.ComponentFilter(
ComponentFilter(
@@ -78,12 +83,14 @@
).match(self.calendar, None):
self.fail("Calendar has %sVEVENT with %s?" % (no, property_name))
+
def test_ParameterFilter(self):
"""
Parameter filter element.
"""
raise SkipTest("test unimplemented")
+
def test_TextMatch(self):
"""
Text match element.
@@ -94,8 +101,10 @@
("BOOGER", False, False),
("BOOGER", True, False),
):
- if has: no = "no "
- else: no = ""
+ if has:
+ no = "no "
+ else:
+ no = ""
if has != calendarqueryfilter.ComponentFilter(
ComponentFilter(
@@ -111,6 +120,7 @@
).match(self.calendar, None):
self.fail("Calendar has %sVEVENT with UID %s? (caseless=%s)" % (no, uid, caseless))
+
def test_TimeRange(self):
"""
Time range match element.
@@ -133,8 +143,10 @@
("20030102", "20030103", False),
("20021201", "20030101", False), # End is non-inclusive
):
- if has: no = "no "
- else: no = ""
+ if has:
+ no = "no "
+ else:
+ no = ""
if has != calendarqueryfilter.Filter(
Filter(
Modified: CalendarServer/trunk/twistedcaldav/test/test_xmlutil.py
===================================================================
--- CalendarServer/trunk/twistedcaldav/test/test_xmlutil.py 2013-05-16 02:38:34 UTC (rev 11199)
+++ CalendarServer/trunk/twistedcaldav/test/test_xmlutil.py 2013-05-16 15:40:19 UTC (rev 11200)
@@ -16,14 +16,14 @@
import twistedcaldav.test.util
from cStringIO import StringIO
-from twistedcaldav.xmlutil import readXML, writeXML, addSubElement,\
+from twistedcaldav.xmlutil import readXML, writeXML, addSubElement, \
changeSubElementText, createElement, elementToXML, readXMLString
class XMLUtil(twistedcaldav.test.util.TestCase):
"""
XML Util tests
"""
-
+
data1 = """<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE test SYSTEM "test.dtd">
<test>
@@ -87,53 +87,61 @@
writeXML(xmlfile, node)
newdata = open(xmlfile).read()
self.assertEqual(newdata, data)
-
+
+
def test_readXML_noverify(self):
-
+
io = StringIO(XMLUtil.data1)
etree, root = readXML(io)
self.assertEqual(etree.getroot(), root)
self.assertEqual(root.tag, "test")
+
def test_readXML_verify_ok(self):
-
+
io = StringIO(XMLUtil.data1)
etree, root = readXML(io, expectedRootTag="test")
self.assertEqual(etree.getroot(), root)
self.assertEqual(root.tag, "test")
+
def test_readXML_verify_bad(self):
-
+
io = StringIO(XMLUtil.data1)
self.assertRaises(ValueError, readXML, io, "test1")
+
def test_readXML_data_bad(self):
-
+
io = StringIO(XMLUtil.data2)
self.assertRaises(ValueError, readXML, io)
+
def test_writeXML(self):
-
+
io = StringIO(XMLUtil.data1)
_ignore_etree, root = readXML(io)
self._checkXML(root, XMLUtil.data3)
+
def test_addElement(self):
-
+
io = StringIO(XMLUtil.data1)
_ignore_etree, root = readXML(io)
addSubElement(root, "added", "added text")
self._checkXML(root, XMLUtil.data4)
+
def test_changeElement_existing(self):
-
+
io = StringIO(XMLUtil.data1)
_ignore_etree, root = readXML(io)
changeSubElementText(root, "help", "changed text")
self._checkXML(root, XMLUtil.data5)
+
def test_changeElement_new(self):
-
+
io = StringIO(XMLUtil.data1)
_ignore_etree, root = readXML(io)
changeSubElementText(root, "new", "new text")
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.macosforge.org/pipermail/calendarserver-changes/attachments/20130516/1592278c/attachment-0001.html>
More information about the calendarserver-changes
mailing list