source: trunk/pgdb.py @ 839

Last change on this file was made in r839, checked in by cito, 3 years ago

Cast to proper type when adapting datetime values

Otherwise PostgreSQL needs to guess the type from the context
which is not always possible.

1#! /usr/bin/python
2#
3# pgdb.py
4#
5# Written by D'Arcy J.M. Cain
6#
7# $Id: pgdb.py 839 2016-02-08 16:00:17Z cito $
8#
9
10"""pgdb - DB-API 2.0 compliant module for PygreSQL.
11
12(c) 1999, Pascal Andre <andre@via.ecp.fr>.
13See package documentation for further information on copyright.
14
15Inline documentation is sparse.
16See DB-API 2.0 specification for usage information:
17http://www.python.org/peps/pep-0249.html
18
19Basic usage:
20
21    pgdb.connect(connect_string) # open a connection
22    # connect_string = 'host:database:user:password:opt'
23    # All parts are optional. You may also pass host through
24    # password as keyword arguments. To pass a port,
25    # pass it in the host keyword parameter:
26    connection = pgdb.connect(host='localhost:5432')
27
28    cursor = connection.cursor() # open a cursor
29
30    cursor.execute(query[, params])
31    # Execute a query, binding params (a dictionary) if they are
32    # passed. The binding syntax is the same as the % operator
33    # for dictionaries; values are quoted automatically (example below).
34
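    # For example (an illustrative sketch; the "weather" table and its
    # "city" column are assumptions, not part of this module):
    cursor.execute("select * from weather where city = %(city)s",
        {'city': 'Berlin'})
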
35    cursor.executemany(query, list of params)
36    # Execute a query many times, binding each param dictionary
37    # from the list.
38
39    cursor.fetchone() # fetch one row, [value, value, ...]
40
41    cursor.fetchall() # fetch all rows, [[value, value, ...], ...]
42
43    cursor.fetchmany([size])
44    # returns size or cursor.arraysize number of rows,
45    # [[value, value, ...], ...] from result set.
46    # Default cursor.arraysize is 1.
47
48    cursor.description # returns information about the columns
49    #   [(column_name, type_name, display_size,
50    #           internal_size, precision, scale, null_ok), ...]
51    # Note that display_size and null_ok are not implemented;
52    # precision and scale are only provided for numeric types.
53
54    cursor.rowcount # number of rows available in the result set
55    # Available after a call to execute.
56
57    connection.commit() # commit transaction
58
59    connection.rollback() # or rollback transaction
60
61    cursor.close() # close the cursor
62
63    connection.close() # close the connection
64"""
65
66from __future__ import print_function
67
68from _pg import *
69
70__version__ = version
71
72from datetime import date, time, datetime, timedelta
73from time import localtime
74from decimal import Decimal
75from uuid import UUID
76from math import isnan, isinf
77from collections import namedtuple
78from functools import partial
79from re import compile as regex
80from json import loads as jsondecode, dumps as jsonencode
81
82try:
83    long
84except NameError:  # Python >= 3.0
85    long = int
86
87try:
88    unicode
89except NameError:  # Python >= 3.0
90    unicode = str
91
92try:
93    basestring
94except NameError:  # Python >= 3.0
95    basestring = (str, bytes)
96
97from collections import Iterable
98
99
100### Module Constants
101
102# compliant with DB API 2.0
103apilevel = '2.0'
104
105# module may be shared, but not connections
106threadsafety = 1
107
108# this module uses the extended Python format codes
109paramstyle = 'pyformat'
110
111# shortcut methods have been excluded from DB API 2 and
112# are not recommended by the DB SIG, but they can be handy
113shortcutmethods = 1
114
115
116### Internal Type Handling
117
118try:
119    from inspect import signature
120except ImportError:  # Python < 3.3
121    from inspect import getargspec
122
123    def get_args(func):
124        return getargspec(func).args
125else:
126
127    def get_args(func):
128        return list(signature(func).parameters)
129
130try:
131    if datetime.strptime('+0100', '%z') is None:
132        raise ValueError
133except ValueError:  # Python < 3.2
134    timezones = None
135else:
136    # time zones used in Postgres timestamptz output
137    timezones = dict(CET='+0100', EET='+0200', EST='-0500',
138        GMT='+0000', HST='-1000', MET='+0100', MST='-0700',
139        UCT='+0000', UTC='+0000', WET='+0000')
140
141
142def decimal_type(decimal_type=None):
143    """Get or set global type to be used for decimal values.
144
145    Note that connections cache cast functions. To be sure a global change
146    is picked up by a running connection, call con.type_cache.reset_typecast().
147    """
148    global Decimal
149    if decimal_type is not None:
150        Decimal = decimal_type
151        set_typecast('numeric', decimal_type)
152    return Decimal
153
154
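# Illustrative sketch (not part of the original module): switch the global
# decimal handling to float and make an already open connection pick up the
# change, as recommended in the docstring above.  The connection argument
# "con" is an assumption.

def _example_use_float_for_numeric(con):
    """Cast numeric database values to float instead of Decimal."""
    decimal_type(float)  # change the global typecast for 'numeric'
    con.type_cache.reset_typecast()  # refresh the cache of the open connection

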
155def cast_bool(value):
156    """Cast boolean value in database format to bool."""
157    if value:
158        return value[0] in ('t', 'T')
159
160
161def cast_money(value):
162    """Cast money value in database format to Decimal."""
163    if value:
164        value = value.replace('(', '-')
165        return Decimal(''.join(c for c in value if c.isdigit() or c in '.-'))
166
167
168def cast_int2vector(value):
169    """Cast an int2vector value."""
170    return [int(v) for v in value.split()]
171
172
173def cast_date(value, connection):
174    """Cast a date value."""
175    # The output format depends on the server setting DateStyle.  The default
176    # setting ISO and the setting for German are actually unambiguous.  The
177    # order of days and months in the other two settings is however ambiguous,
178    # so at least here we need to consult the setting to properly parse values.
179    if value == '-infinity':
180        return date.min
181    if value == 'infinity':
182        return date.max
183    value = value.split()
184    if value[-1] == 'BC':
185        return date.min
186    value = value[0]
187    if len(value) > 10:
188        return date.max
189    fmt = connection.date_format()
190    return datetime.strptime(value, fmt).date()
191
192
193def cast_time(value):
194    """Cast a time value."""
195    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
196    return datetime.strptime(value, fmt).time()
197
198
199_re_timezone = regex('(.*)([+-].*)')
200
201
202def cast_timetz(value):
203    """Cast a timetz value."""
204    tz = _re_timezone.match(value)
205    if tz:
206        value, tz = tz.groups()
207    else:
208        tz = '+0000'
209    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
210    if timezones:
211        if tz.startswith(('+', '-')):
212            if len(tz) < 5:
213                tz += '00'
214            else:
215                tz = tz.replace(':', '')
216        elif tz in timezones:
217            tz = timezones[tz]
218        else:
219            tz = '+0000'
220        value += tz
221        fmt += '%z'
222    return datetime.strptime(value, fmt).timetz()
223
224
225def cast_timestamp(value, connection):
226    """Cast a timestamp value."""
227    if value == '-infinity':
228        return datetime.min
229    if value == 'infinity':
230        return datetime.max
231    value = value.split()
232    if value[-1] == 'BC':
233        return datetime.min
234    fmt = connection.date_format()
235    if fmt.endswith('-%Y') and len(value) > 2:
236        value = value[1:5]
237        if len(value[3]) > 4:
238            return datetime.max
239        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
240            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
241    else:
242        if len(value[0]) > 10:
243            return datetime.max
244        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
245    return datetime.strptime(' '.join(value), ' '.join(fmt))
246
247
248def cast_timestamptz(value, connection):
249    """Cast a timestamptz value."""
250    if value == '-infinity':
251        return datetime.min
252    if value == 'infinity':
253        return datetime.max
254    value = value.split()
255    if value[-1] == 'BC':
256        return datetime.min
257    fmt = connection.date_format()
258    if fmt.endswith('-%Y') and len(value) > 2:
259        value = value[1:]
260        if len(value[3]) > 4:
261            return datetime.max
262        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
263            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
264        value, tz = value[:-1], value[-1]
265    else:
266        if fmt.startswith('%Y-'):
267            tz = _re_timezone.match(value[1])
268            if tz:
269                value[1], tz = tz.groups()
270            else:
271                tz = '+0000'
272        else:
273            value, tz = value[:-1], value[-1]
274        if len(value[0]) > 10:
275            return datetime.max
276        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
277    if timezones:
278        if tz.startswith(('+', '-')):
279            if len(tz) < 5:
280                tz += '00'
281            else:
282                tz = tz.replace(':', '')
283        elif tz in timezones:
284            tz = timezones[tz]
285        else:
286            tz = '+0000'
287        value.append(tz)
288        fmt.append('%z')
289    return datetime.strptime(' '.join(value), ' '.join(fmt))
290
291_re_interval_sql_standard = regex(
292    '(?:([+-])?([0-9]+)-([0-9]+) ?)?'
293    '(?:([+-]?[0-9]+)(?!:) ?)?'
294    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
295
296_re_interval_postgres = regex(
297    '(?:([+-]?[0-9]+) ?years? ?)?'
298    '(?:([+-]?[0-9]+) ?mons? ?)?'
299    '(?:([+-]?[0-9]+) ?days? ?)?'
300    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
301
302_re_interval_postgres_verbose = regex(
303    '@ ?(?:([+-]?[0-9]+) ?years? ?)?'
304    '(?:([+-]?[0-9]+) ?mons? ?)?'
305    '(?:([+-]?[0-9]+) ?days? ?)?'
306    '(?:([+-]?[0-9]+) ?hours? ?)?'
307    '(?:([+-]?[0-9]+) ?mins? ?)?'
308    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))? ?secs?)? ?(ago)?')
309
310_re_interval_iso_8601 = regex(
311    'P(?:([+-]?[0-9]+)Y)?'
312    '(?:([+-]?[0-9]+)M)?'
313    '(?:([+-]?[0-9]+)D)?'
314    '(?:T(?:([+-]?[0-9]+)H)?'
315    '(?:([+-]?[0-9]+)M)?'
316    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))?S)?)?')
317
318
319def cast_interval(value):
320    """Cast an interval value."""
321    # The output format depends on the server setting IntervalStyle, but it's
322    # not necessary to consult this setting to parse it.  It's faster to just
323    # check all possible formats, and there is no ambiguity here.
324    m = _re_interval_iso_8601.match(value)
325    if m:
326        m = [d or '0' for d in m.groups()]
327        secs_ago = m.pop(5) == '-'
328        m = [int(d) for d in m]
329        years, mons, days, hours, mins, secs, usecs = m
330        if secs_ago:
331            secs = -secs
332            usecs = -usecs
333    else:
334        m = _re_interval_postgres_verbose.match(value)
335        if m:
336            m, ago = [d or '0' for d in m.groups()[:8]], m.group(9)
337            secs_ago = m.pop(5) == '-'
338            m = [-int(d) for d in m] if ago else [int(d) for d in m]
339            years, mons, days, hours, mins, secs, usecs = m
340            if secs_ago:
341                secs = -secs
342                usecs = -usecs
343        else:
344            m = _re_interval_postgres.match(value)
345            if m and any(m.groups()):
346                m = [d or '0' for d in m.groups()]
347                hours_ago = m.pop(3) == '-'
348                m = [int(d) for d in m]
349                years, mons, days, hours, mins, secs, usecs = m
350                if hours_ago:
351                    hours = -hours
352                    mins = -mins
353                    secs = -secs
354                    usecs = -usecs
355            else:
356                m = _re_interval_sql_standard.match(value)
357                if m and any(m.groups()):
358                    m = [d or '0' for d in m.groups()]
359                    years_ago = m.pop(0) == '-'
360                    hours_ago = m.pop(3) == '-'
361                    m = [int(d) for d in m]
362                    years, mons, days, hours, mins, secs, usecs = m
363                    if years_ago:
364                        years = -years
365                        mons = -mons
366                    if hours_ago:
367                        hours = -hours
368                        mins = -mins
369                        secs = -secs
370                        usecs = -usecs
371                else:
372                    raise ValueError('Cannot parse interval: %s' % value)
373    days += 365 * years + 30 * mons
374    return timedelta(days=days, hours=hours, minutes=mins,
375        seconds=secs, microseconds=usecs)
376
377
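# Illustrative sketch (not part of the original module): what the interval
# parser above yields for a typical Postgres-style value, using the
# 30-day month and 365-day year approximation applied at the end.

def _example_interval_parsing():
    """Show how a mixed interval collapses into a single timedelta."""
    value = cast_interval('1 year 2 mons 3 days 04:05:06')
    # 365 + 2 * 30 + 3 = 428 days plus 4 hours, 5 minutes and 6 seconds
    assert value == timedelta(days=428, hours=4, minutes=5, seconds=6)
    return value

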
378class Typecasts(dict):
379    """Dictionary mapping database types to typecast functions.
380
381    The cast functions get passed the string representation of a value in
382    the database which they need to convert to a Python object.  The
383    passed string will never be None since NULL values are already
384    handled before the cast function is called.
385    """
386
387    # the default cast functions
388    # (str functions are ignored but have been added for faster access)
389    defaults = {'char': str, 'bpchar': str, 'name': str,
390        'text': str, 'varchar': str,
391        'bool': cast_bool, 'bytea': unescape_bytea,
392        'int2': int, 'int4': int, 'serial': int, 'int8': long, 'oid': int,
393        'hstore': cast_hstore, 'json': jsondecode, 'jsonb': jsondecode,
394        'float4': float, 'float8': float,
395        'numeric': Decimal, 'money': cast_money,
396        'date': cast_date, 'interval': cast_interval,
397        'time': cast_time, 'timetz': cast_timetz,
398        'timestamp': cast_timestamp, 'timestamptz': cast_timestamptz,
399        'int2vector': cast_int2vector, 'uuid': UUID,
400        'anyarray': cast_array, 'record': cast_record}
401
402    connection = None  # will be set in local connection specific instances
403
404    def __missing__(self, typ):
405        """Create a cast function if it is not cached.
406
407        Note that this class never raises a KeyError,
408        but returns None when no special cast function exists.
409        """
410        if not isinstance(typ, str):
411            raise TypeError('Invalid type: %s' % typ)
412        cast = self.defaults.get(typ)
413        if cast:
414            # store default for faster access
415            cast = self._add_connection(cast)
416            self[typ] = cast
417        elif typ.startswith('_'):
418            # create array cast
419            base_cast = self[typ[1:]]
420            cast = self.create_array_cast(base_cast)
421            if base_cast:
422                # store only if base type exists
423                self[typ] = cast
424        return cast
425
426    @staticmethod
427    def _needs_connection(func):
428        """Check if a typecast function needs a connection argument."""
429        try:
430            args = get_args(func)
431        except (TypeError, ValueError):
432            return False
433        else:
434            return 'connection' in args[1:]
435
436    def _add_connection(self, cast):
437        """Add a connection argument to the typecast function if necessary."""
438        if not self.connection or not self._needs_connection(cast):
439            return cast
440        return partial(cast, connection=self.connection)
441
442    def get(self, typ, default=None):
443        """Get the typecast function for the given database type."""
444        return self[typ] or default
445
446    def set(self, typ, cast):
447        """Set a typecast function for the specified database type(s)."""
448        if isinstance(typ, basestring):
449            typ = [typ]
450        if cast is None:
451            for t in typ:
452                self.pop(t, None)
453                self.pop('_%s' % t, None)
454        else:
455            if not callable(cast):
456                raise TypeError("Cast parameter must be callable")
457            for t in typ:
458                self[t] = self._add_connection(cast)
459                self.pop('_%s' % t, None)
460
461    def reset(self, typ=None):
462        """Reset the typecasts for the specified type(s) to their defaults.
463
464        When no type is specified, all typecasts will be reset.
465        """
466        defaults = self.defaults
467        if typ is None:
468            self.clear()
469            self.update(defaults)
470        else:
471            if isinstance(typ, basestring):
472                typ = [typ]
473            for t in typ:
474                cast = defaults.get(t)
475                if cast:
476                    self[t] = self._add_connection(cast)
477                    t = '_%s' % t
478                    cast = defaults.get(t)
479                    if cast:
480                        self[t] = self._add_connection(cast)
481                    else:
482                        self.pop(t, None)
483                else:
484                    self.pop(t, None)
485                    self.pop('_%s' % t, None)
486
487    def create_array_cast(self, basecast):
488        """Create an array typecast for the given base cast."""
489        def cast(v):
490            return cast_array(v, basecast)
491        return cast
492
493    def create_record_cast(self, name, fields, casts):
494        """Create a named record typecast for the given fields and casts."""
495        record = namedtuple(name, fields)
496        def cast(v):
497            return record(*cast_record(v, casts))
498        return cast
499
500
501_typecasts = Typecasts()  # this is the global typecast dictionary
502
503
504def get_typecast(typ):
505    """Get the global typecast function for the given database type(s)."""
506    return _typecasts.get(typ)
507
508
509def set_typecast(typ, cast):
510    """Set a global typecast function for the given database type(s).
511
512    Note that connections cache cast functions. To be sure a global change
513    is picked up by a running connection, call con.type_cache.reset_typecast().
514    """
515    _typecasts.set(typ, cast)
516
517
518def reset_typecast(typ=None):
519    """Reset the global typecasts for the given type(s) to their default.
520
521    When no type is specified, all typecasts will be reset.
522
523    Note that connections cache cast functions. To be sure a global change
524    is picked up by a running connection, call con.type_cache.reset_typecast().
525    """
526    _typecasts.reset(typ)
527
528
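# Illustrative sketch (not part of the original module): register a custom
# global typecast and undo it again.  Casting money values to plain float
# instead of Decimal is only an example choice.

def _example_custom_money_cast():
    """Temporarily cast money values to float instead of Decimal."""
    set_typecast('money', lambda v: float(cast_money(v)))
    # ... open connections and run queries here ...
    reset_typecast('money')  # restore the default cast_money behavior

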
529class LocalTypecasts(Typecasts):
530    """Map typecasts, including local composite types, to cast functions."""
531
532    defaults = _typecasts
533
534    connection = None  # will be set in a connection specific instance
535
536    def __missing__(self, typ):
537        """Create a cast function if it is not cached."""
538        if typ.startswith('_'):
539            base_cast = self[typ[1:]]
540            cast = self.create_array_cast(base_cast)
541            if base_cast:
542                self[typ] = cast
543        else:
544            cast = self.defaults.get(typ)
545            if cast:
546                cast = self._add_connection(cast)
547                self[typ] = cast
548            else:
549                fields = self.get_fields(typ)
550                if fields:
551                    casts = [self[field.type] for field in fields]
552                    fields = [field.name for field in fields]
553                    cast = self.create_record_cast(typ, fields, casts)
554                    self[typ] = cast
555        return cast
556
557    def get_fields(self, typ):
558        """Return the fields for the given record type.
559
560        This method will be replaced with a method that looks up the fields
561        using the type cache of the connection.
562        """
563        return []
564
565
566class TypeCode(str):
567    """Class representing the type_code used by the DB-API 2.0.
568
569    TypeCode objects are strings equal to the PostgreSQL type name,
570    but carry some additional information.
571    """
572
573    @classmethod
574    def create(cls, oid, name, len, type, category, delim, relid):
575        """Create a type code for a PostgreSQL data type."""
576        self = cls(name)
577        self.oid = oid
578        self.len = len
579        self.type = type
580        self.category = category
581        self.delim = delim
582        self.relid = relid
583        return self
584
585FieldInfo = namedtuple('FieldInfo', ['name', 'type'])
586
587
588class TypeCache(dict):
589    """Cache for database types.
590
591    This cache maps type OIDs and names to TypeCode strings containing
592    important information on the associated database type.
593    """
594
595    def __init__(self, cnx):
596        """Initialize type cache for connection."""
597        super(TypeCache, self).__init__()
598        self._escape_string = cnx.escape_string
599        self._src = cnx.source()
600        self._typecasts = LocalTypecasts()
601        self._typecasts.get_fields = self.get_fields
602        self._typecasts.connection = cnx
603
604    def __missing__(self, key):
605        """Get the type info from the database if it is not cached."""
606        if isinstance(key, int):
607            oid = key
608        else:
609            if '.' not in key and '"' not in key:
610                key = '"%s"' % key
611            oid = "'%s'::regtype" % self._escape_string(key)
612        try:
613            self._src.execute("SELECT oid, typname,"
614                 " typlen, typtype, typcategory, typdelim, typrelid"
615                " FROM pg_type WHERE oid=%s" % oid)
616        except ProgrammingError:
617            res = None
618        else:
619            res = self._src.fetch(1)
620        if not res:
621            raise KeyError('Type %s could not be found' % key)
622        res = res[0]
623        type_code = TypeCode.create(int(res[0]), res[1],
624            int(res[2]), res[3], res[4], res[5], int(res[6]))
625        self[type_code.oid] = self[str(type_code)] = type_code
626        return type_code
627
628    def get(self, key, default=None):
629        """Get the type even if it is not cached."""
630        try:
631            return self[key]
632        except KeyError:
633            return default
634
635    def get_fields(self, typ):
636        """Get the names and types of the fields of composite types."""
637        if not isinstance(typ, TypeCode):
638            typ = self.get(typ)
639            if not typ:
640                return None
641        if not typ.relid:
642            return None  # this type is not composite
643        self._src.execute("SELECT attname, atttypid"
644            " FROM pg_attribute WHERE attrelid=%s AND attnum>0"
645            " AND NOT attisdropped ORDER BY attnum" % typ.relid)
646        return [FieldInfo(name, self.get(int(oid)))
647            for name, oid in self._src.fetch(-1)]
648
649    def get_typecast(self, typ):
650        """Get the typecast function for the given database type."""
651        return self._typecasts.get(typ)
652
653    def set_typecast(self, typ, cast):
654        """Set a typecast function for the specified database type(s)."""
655        self._typecasts.set(typ, cast)
656
657    def reset_typecast(self, typ=None):
658        """Reset the typecast function for the specified database type(s)."""
659        self._typecasts.reset(typ)
660
661    def typecast(self, value, typ):
662        """Cast the given value according to the given database type."""
663        if value is None:
664            # for NULL values, no typecast is necessary
665            return None
666        cast = self.get_typecast(typ)
667        if not cast or cast is str:
668            # no typecast is necessary
669            return value
670        return cast(value)
671
672
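# Illustrative sketch (not part of the original module): look up type
# information through the per-connection type cache.  The connection
# argument "con" and the "inventory_item" composite type are assumptions.

def _example_type_lookup(con):
    """Inspect a database type and the fields of a composite type."""
    numeric = con.type_cache['numeric']  # a TypeCode, fetched once and cached
    fields = con.type_cache.get_fields('inventory_item')  # None if not composite
    return numeric.oid, fields

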
673class _quotedict(dict):
674    """Dictionary with auto quoting of its items.
675
676    The quote attribute must be set to the desired quote function.
677    """
678
679    def __getitem__(self, key):
680        return self.quote(super(_quotedict, self).__getitem__(key))
681
682
683### Error messages
684
685def _db_error(msg, cls=DatabaseError):
686    """Return DatabaseError with empty sqlstate attribute."""
687    error = cls(msg)
688    error.sqlstate = None
689    return error
690
691
692def _op_error(msg):
693    """Return OperationalError."""
694    return _db_error(msg, OperationalError)
695
696
697### Cursor Object
698
699class Cursor(object):
700    """Cursor object."""
701
702    def __init__(self, dbcnx):
703        """Create a cursor object for the database connection."""
704        self.connection = self._dbcnx = dbcnx
705        self._cnx = dbcnx._cnx
706        self.type_cache = dbcnx.type_cache
707        self._src = self._cnx.source()
708        # the official attribute for describing the result columns
709        self._description = None
710        if self.row_factory is Cursor.row_factory:
711            # the row factory needs to be determined dynamically
712            self.row_factory = None
713        else:
714            self.build_row_factory = None
715        self.rowcount = -1
716        self.arraysize = 1
717        self.lastrowid = None
718
719    def __iter__(self):
720        """Make cursor compatible to the iteration protocol."""
721        return self
722
723    def __enter__(self):
724        """Enter the runtime context for the cursor object."""
725        return self
726
727    def __exit__(self, et, ev, tb):
728        """Exit the runtime context for the cursor object."""
729        self.close()
730
731    def _quote(self, value):
732        """Quote value depending on its type."""
733        if value is None:
734            return 'NULL'
735        if isinstance(value, (Hstore, Json, UUID)):
736            value = str(value)
737        if isinstance(value, basestring):
738            if isinstance(value, Binary):
739                value = self._cnx.escape_bytea(value)
740                if bytes is not str:  # Python >= 3.0
741                    value = value.decode('ascii')
742            else:
743                value = self._cnx.escape_string(value)
744            return "'%s'" % value
745        if isinstance(value, float):
746            if isinf(value):
747                return "'-Infinity'" if value < 0 else "'Infinity'"
748            if isnan(value):
749                return "'NaN'"
750            return value
751        if isinstance(value, (int, long, Decimal, Literal)):
752            return value
753        if isinstance(value, datetime):
754            if value.tzinfo:
755                return "'%s'::timestamptz" % value
756            return "'%s'::timestamp" % value
757        if isinstance(value, date):
758            return "'%s'::date" % value
759        if isinstance(value, time):
760            if value.tzinfo:
761                return "'%s'::timetz" % value
762            return "'%s'::time" % value
763        if isinstance(value, timedelta):
764            return "'%s'::interval" % value
765        if isinstance(value, list):
766            # Quote value as an ARRAY constructor. This is better than using
767            # an array literal because it carries the information that this is
768            # an array and not a string.  One issue with this syntax is that
769            # you need to add an explicit typecast when passing empty arrays.
770            # The ARRAY keyword is actually only necessary at the top level.
771            q = self._quote
772            return 'ARRAY[%s]' % ','.join(str(q(v)) for v in value)
773        if isinstance(value, tuple):
774            # Quote as a ROW constructor.  This is better than using a record
775            # literal because it carries the information that this is a record
776            # and not a string.  We don't use the keyword ROW in order to make
777            # this usable with the IN syntax as well.  It is only necessary
778            # when the record has a single column, which is not really useful.
779            q = self._quote
780            return '(%s)' % ','.join(str(q(v)) for v in value)
781        try:
782            value = value.__pg_repr__()
783        except AttributeError:
784            raise InterfaceError(
785                'Do not know how to adapt type %s' % type(value))
786        if isinstance(value, (tuple, list)):
787            value = self._quote(value)
788        return value
789
790    def _quoteparams(self, string, parameters):
791        """Quote parameters.
792
793        This function works for both mappings and sequences.
794        """
795        if isinstance(parameters, dict):
796            parameters = _quotedict(parameters)
797            parameters.quote = self._quote
798        else:
799            parameters = tuple(map(self._quote, parameters))
800        return string % parameters
801
802    def _make_description(self, info):
803        """Make the description tuple for the given field info."""
804        name, typ, size, mod = info[1:]
805        type_code = self.type_cache[typ]
806        if mod > 0:
807            mod -= 4
808        if type_code == 'numeric':
809            precision, scale = mod >> 16, mod & 0xffff
810            size = precision
811        else:
812            if not size:
813                size = type_code.len  # TypeCode stores the length as 'len'
814            if size == -1:
815                size = mod
816            precision = scale = None
817        return CursorDescription(name, type_code,
818            None, size, precision, scale, None)
819
820    @property
821    def description(self):
822        """Read-only attribute describing the result columns."""
823        descr = self._description
824        if self._description is True:
825            make = self._make_description
826            descr = [make(info) for info in self._src.listinfo()]
827            self._description = descr
828        return descr
829
830    @property
831    def colnames(self):
832        """Unofficial convenience method for getting the column names."""
833        return [d[0] for d in self.description]
834
835    @property
836    def coltypes(self):
837        """Unofficial convenience method for getting the column types."""
838        return [d[1] for d in self.description]
839
840    def close(self):
841        """Close the cursor object."""
842        self._src.close()
843        self._description = None
844        self.rowcount = -1
845        self.lastrowid = None
846
847    def execute(self, operation, parameters=None):
848        """Prepare and execute a database operation (query or command)."""
849        # The parameters may also be specified as list of tuples to e.g.
850        # insert multiple rows in a single operation, but this kind of
851        # usage is deprecated.  We make several plausibility checks because
852        # tuples can also be passed with the meaning of ROW constructors.
853        if (parameters and isinstance(parameters, list)
854                and len(parameters) > 1
855                and all(isinstance(p, tuple) for p in parameters)
856                and all(len(p) == len(parameters[0]) for p in parameters[1:])):
857            return self.executemany(operation, parameters)
858        else:
859            # not a list of tuples
860            return self.executemany(operation, [parameters])
861
862    def executemany(self, operation, seq_of_parameters):
863        """Prepare operation and execute it against a parameter sequence."""
864        if not seq_of_parameters:
865            # don't do anything without parameters
866            return
867        self._description = None
868        self.rowcount = -1
869        # first try to execute all queries
870        rowcount = 0
871        sql = "BEGIN"
872        try:
873            if not self._dbcnx._tnx:
874                try:
875                    self._cnx.source().execute(sql)
876                except DatabaseError:
877                    raise  # database provides error message
878                except Exception:
879                    raise _op_error("Can't start transaction")
880                self._dbcnx._tnx = True
881            for parameters in seq_of_parameters:
882                sql = operation
883                if parameters:
884                    sql = self._quoteparams(sql, parameters)
885                rows = self._src.execute(sql)
886                if rows:  # true if not DML
887                    rowcount += rows
888                else:
889                    self.rowcount = -1
890        except DatabaseError:
891            raise  # database provides error message
892        except Error as err:
893            raise _db_error(
894                "Error in '%s': '%s' " % (sql, err), InterfaceError)
895        except Exception as err:
896            raise _op_error("Internal error in '%s': %s" % (sql, err))
897        # then initialize result raw count and description
898        if self._src.resulttype == RESULT_DQL:
899            self._description = True  # fetch on demand
900            self.rowcount = self._src.ntuples
901            self.lastrowid = None
902            if self.build_row_factory:
903                self.row_factory = self.build_row_factory()
904        else:
905            self.rowcount = rowcount
906            self.lastrowid = self._src.oidstatus()
907        # return the cursor object, so you can write statements such as
908        # "cursor.execute(...).fetchall()" or "for row in cursor.execute(...)"
909        return self
910
911    def fetchone(self):
912        """Fetch the next row of a query result set."""
913        res = self.fetchmany(1, False)
914        try:
915            return res[0]
916        except IndexError:
917            return None
918
919    def fetchall(self):
920        """Fetch all (remaining) rows of a query result."""
921        return self.fetchmany(-1, False)
922
923    def fetchmany(self, size=None, keep=False):
924        """Fetch the next set of rows of a query result.
925
926        The number of rows to fetch per call is specified by the
927        size parameter. If it is not given, the cursor's arraysize
928        determines the number of rows to be fetched. If you set
929        the keep parameter to true, it is kept as the new arraysize.
930        """
931        if size is None:
932            size = self.arraysize
933        if keep:
934            self.arraysize = size
935        try:
936            result = self._src.fetch(size)
937        except DatabaseError:
938            raise
939        except Error as err:
940            raise _db_error(str(err))
941        typecast = self.type_cache.typecast
942        return [self.row_factory([typecast(value, typ)
943            for typ, value in zip(self.coltypes, row)]) for row in result]
944
945    def callproc(self, procname, parameters=None):
946        """Call a stored database procedure with the given name.
947
948        The sequence of parameters must contain one entry for each input
949        argument that the procedure expects. The result of the call is the
950        same as this input sequence; replacement of output and input/output
951        parameters in the return value is currently not supported.
952
953        The procedure may also provide a result set as output. These can be
954        requested through the standard fetch methods of the cursor.
955        """
956        n = parameters and len(parameters) or 0
957        query = 'select * from "%s"(%s)' % (procname, ','.join(n * ['%s']))
958        self.execute(query, parameters)
959        return parameters
960
961    def copy_from(self, stream, table,
962            format=None, sep=None, null=None, size=None, columns=None):
963        """Copy data from an input stream to the specified table.
964
965        The input stream can be a file-like object with a read() method or
966        it can also be an iterable returning a row or multiple rows of input
967        on each iteration.
968
969        The format must be text, csv or binary. The sep option sets the
970    column separator (delimiter) used in the non-binary formats.
971        The null option sets the textual representation of NULL in the input.
972
973        The size option sets the size of the buffer used when reading data
974        from file-like objects.
975
976        The copy operation can be restricted to a subset of columns. If no
977        columns are specified, all of them will be copied.
978        """
979        binary_format = format == 'binary'
980        try:
981            read = stream.read
982        except AttributeError:
983            if size:
984                raise ValueError("Size must only be set for file-like objects")
985            if binary_format:
986                input_type = bytes
987                type_name = 'byte strings'
988            else:
989                input_type = basestring
990                type_name = 'strings'
991
992            if isinstance(stream, basestring):
993                if not isinstance(stream, input_type):
994                    raise ValueError("The input must be %s" % type_name)
995                if not binary_format:
996                    if isinstance(stream, str):
997                        if not stream.endswith('\n'):
998                            stream += '\n'
999                    else:
1000                        if not stream.endswith(b'\n'):
1001                            stream += b'\n'
1002
1003                def chunks():
1004                    yield stream
1005
1006            elif isinstance(stream, Iterable):
1007
1008                def chunks():
1009                    for chunk in stream:
1010                        if not isinstance(chunk, input_type):
1011                            raise ValueError(
1012                                "Input stream must consist of %s" % type_name)
1013                        if isinstance(chunk, str):
1014                            if not chunk.endswith('\n'):
1015                                chunk += '\n'
1016                        else:
1017                            if not chunk.endswith(b'\n'):
1018                                chunk += b'\n'
1019                        yield chunk
1020
1021            else:
1022                raise TypeError("Need an input stream to copy from")
1023        else:
1024            if size is None:
1025                size = 8192
1026            elif not isinstance(size, int):
1027                raise TypeError("The size option must be an integer")
1028            if size > 0:
1029
1030                def chunks():
1031                    while True:
1032                        buffer = read(size)
1033                        yield buffer
1034                        if not buffer or len(buffer) < size:
1035                            break
1036
1037            else:
1038
1039                def chunks():
1040                    yield read()
1041
1042        if not table or not isinstance(table, basestring):
1043            raise TypeError("Need a table to copy to")
1044        if table.lower().startswith('select'):
1045            raise ValueError("Must specify a table, not a query")
1046        else:
1047            table = '"%s"' % (table,)
1048        operation = ['copy %s' % (table,)]
1049        options = []
1050        params = []
1051        if format is not None:
1052            if not isinstance(format, basestring):
1053                raise TypeError("The format option must be a string")
1054            if format not in ('text', 'csv', 'binary'):
1055                raise ValueError("Invalid format")
1056            options.append('format %s' % (format,))
1057        if sep is not None:
1058            if not isinstance(sep, basestring):
1059                raise TypeError("The sep option must be a string")
1060            if format == 'binary':
1061                raise ValueError(
1062                    "The sep option is not allowed with binary format")
1063            if len(sep) != 1:
1064                raise ValueError(
1065                    "The sep option must be a single one-byte character")
1066            options.append('delimiter %s')
1067            params.append(sep)
1068        if null is not None:
1069            if not isinstance(null, basestring):
1070                raise TypeError("The null option must be a string")
1071            options.append('null %s')
1072            params.append(null)
1073        if columns:
1074            if not isinstance(columns, basestring):
1075                columns = ','.join('"%s"' % (col,) for col in columns)
1076            operation.append('(%s)' % (columns,))
1077        operation.append("from stdin")
1078        if options:
1079            operation.append('(%s)' % ','.join(options))
1080        operation = ' '.join(operation)
1081
1082        putdata = self._src.putdata
1083        self.execute(operation, params)
1084
1085        try:
1086            for chunk in chunks():
1087                putdata(chunk)
1088        except BaseException as error:
1089            self.rowcount = -1
1090            # the following call will re-raise the error
1091            putdata(error)
1092        else:
1093            self.rowcount = putdata(None)
1094
1095        # return the cursor object, so you can chain operations
1096        return self
1097
1098    def copy_to(self, stream, table,
1099            format=None, sep=None, null=None, decode=None, columns=None):
1100        """Copy data from the specified table to an output stream.
1101
1102        The output stream can be a file-like object with a write() method or
1103        it can also be None, in which case the method will return a generator
1104        yielding a row on each iteration.
1105
1106        Output will be returned as byte strings unless you set decode to true.
1107
1108        Note that you can also use a select query instead of the table name.
1109
1110        The format must be text, csv or binary. The sep option sets the
1111    column separator (delimiter) used in the non-binary formats.
1112        The null option sets the textual representation of NULL in the output.
1113
1114        The copy operation can be restricted to a subset of columns. If no
1115        columns are specified, all of them will be copied.
1116        """
1117        binary_format = format == 'binary'
1118        if stream is not None:
1119            try:
1120                write = stream.write
1121            except AttributeError:
1122                raise TypeError("Need an output stream to copy to")
1123        if not table or not isinstance(table, basestring):
1124            raise TypeError("Need a table to copy from")
1125        if table.lower().startswith('select'):
1126            if columns:
1127                raise ValueError("Columns must be specified in the query")
1128            table = '(%s)' % (table,)
1129        else:
1130            table = '"%s"' % (table,)
1131        operation = ['copy %s' % (table,)]
1132        options = []
1133        params = []
1134        if format is not None:
1135            if not isinstance(format, basestring):
1136                raise TypeError("The format option must be a string")
1137            if format not in ('text', 'csv', 'binary'):
1138                raise ValueError("Invalid format")
1139            options.append('format %s' % (format,))
1140        if sep is not None:
1141            if not isinstance(sep, basestring):
1142                raise TypeError("The sep option must be a string")
1143            if binary_format:
1144                raise ValueError(
1145                    "The sep option is not allowed with binary format")
1146            if len(sep) != 1:
1147                raise ValueError(
1148                    "The sep option must be a single one-byte character")
1149            options.append('delimiter %s')
1150            params.append(sep)
1151        if null is not None:
1152            if not isinstance(null, basestring):
1153                raise TypeError("The null option must be a string")
1154            options.append('null %s')
1155            params.append(null)
1156        if decode is None:
1157            if format == 'binary':
1158                decode = False
1159            else:
1160                decode = str is unicode
1161        else:
1162            if not isinstance(decode, (int, bool)):
1163                raise TypeError("The decode option must be a boolean")
1164            if decode and binary_format:
1165                raise ValueError(
1166                    "The decode option is not allowed with binary format")
1167        if columns:
1168            if not isinstance(columns, basestring):
1169                columns = ','.join('"%s"' % (col,) for col in columns)
1170            operation.append('(%s)' % (columns,))
1171
1172        operation.append("to stdout")
1173        if options:
1174            operation.append('(%s)' % ','.join(options))
1175        operation = ' '.join(operation)
1176
1177        getdata = self._src.getdata
1178        self.execute(operation, params)
1179
1180        def copy():
1181            self.rowcount = 0
1182            while True:
1183                row = getdata(decode)
1184                if isinstance(row, int):
1185                    if self.rowcount != row:
1186                        self.rowcount = row
1187                    break
1188                self.rowcount += 1
1189                yield row
1190
1191        if stream is None:
1192            # no output stream, return the generator
1193            return copy()
1194
1195        # write the rows to the file-like output stream
1196        for row in copy():
1197            write(row)
1198
1199        # return the cursor object, so you can chain operations
1200        return self
1201
1202    def __next__(self):
1203        """Return the next row (support for the iteration protocol)."""
1204        res = self.fetchone()
1205        if res is None:
1206            raise StopIteration
1207        return res
1208
1209    # Note that since Python 3.0 the iterator protocol uses __next__()
1210    # instead of next(); we keep next() only for backward compatibility of pgdb.
1211    next = __next__
1212
1213    @staticmethod
1214    def nextset():
1215        """Not supported."""
1216        raise NotSupportedError("The nextset() method is not supported")
1217
1218    @staticmethod
1219    def setinputsizes(sizes):
1220        """Not supported."""
1221        pass  # unsupported, but silently passed
1222
1223    @staticmethod
1224    def setoutputsize(size, column=0):
1225        """Not supported."""
1226        pass  # unsupported, but silently passed
1227
1228    @staticmethod
1229    def row_factory(row):
1230        """Process rows before they are returned.
1231
1232        You can overwrite this statically with a custom row factory, or
1233        you can build a row factory dynamically with build_row_factory().
1234
1235        For example, you can create a Cursor class that returns rows as
1236        Python dictionaries like this:
1237
1238            class DictCursor(pgdb.Cursor):
1239
1240                def row_factory(self, row):
1241                    return {desc[0]: value
1242                        for desc, value in zip(self.description, row)}
1243
1244            cur = DictCursor(con)  # get one DictCursor instance or
1245            con.cursor_type = DictCursor  # always use DictCursor instances
1246        """
1247        raise NotImplementedError
1248
1249    def build_row_factory(self):
1250        """Build a row factory based on the current description.
1251
1252        This implementation builds a row factory for creating named tuples.
1253        You can overwrite this method if you want to dynamically create
1254        different row factories whenever the column description changes.
1255        """
1256        colnames = self.colnames
1257        if colnames:
1258            try:
1259                try:
1260                    return namedtuple('Row', colnames, rename=True)._make
1261                except TypeError:  # Python 2.6 and 3.0 do not support rename
1262                    colnames = [v if v.isalnum() else 'column_%d' % n
1263                             for n, v in enumerate(colnames)]
1264                    return namedtuple('Row', colnames)._make
1265            except ValueError:  # there is still a problem with the field names
1266                colnames = ['column_%d' % n for n in range(len(colnames))]
1267                return namedtuple('Row', colnames)._make
1268
1269
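# Illustrative sketch (not part of the original module): typical cursor usage
# with pyformat parameters and the default named-tuple rows built by
# build_row_factory().  The "weather" table and its columns are assumptions.

def _example_cursor_usage(con):
    """Run a parameterized query and fetch the rows as named tuples."""
    cur = con.cursor()
    cur.execute("select city, temp_lo from weather where temp_lo > %(low)s",
        {'low': 20})
    rows = cur.fetchall()  # a list of named tuples
    cur.close()
    return [(row.city, row.temp_lo) for row in rows]

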
1270CursorDescription = namedtuple('CursorDescription',
1271    ['name', 'type_code', 'display_size', 'internal_size',
1272     'precision', 'scale', 'null_ok'])
1273
1274
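# Illustrative sketch (not part of the original module): bulk loading and
# dumping with the copy_from() and copy_to() methods defined above.  The
# "weather" table and the file name are assumptions.

def _example_copy(con):
    """Load rows from an iterable and dump the whole table to a CSV file."""
    cur = con.cursor()
    # copy_from() accepts a file-like object or any iterable of text rows
    cur.copy_from(['Berlin\t20\n', 'Hamburg\t18\n'], 'weather',
        format='text', columns=['city', 'temp_lo'])
    with open('weather.csv', 'w') as f:
        # with the default settings the rows arrive here as strings
        cur.copy_to(f, 'weather', format='csv')
    cur.close()

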
1275### Connection Objects
1276
1277class Connection(object):
1278    """Connection object."""
1279
1280    # expose the exceptions as attributes on the connection object
1281    Error = Error
1282    Warning = Warning
1283    InterfaceError = InterfaceError
1284    DatabaseError = DatabaseError
1285    InternalError = InternalError
1286    OperationalError = OperationalError
1287    ProgrammingError = ProgrammingError
1288    IntegrityError = IntegrityError
1289    DataError = DataError
1290    NotSupportedError = NotSupportedError
1291
1292    def __init__(self, cnx):
1293        """Create a database connection object."""
1294        self._cnx = cnx  # connection
1295        self._tnx = False  # transaction state
1296        self.type_cache = TypeCache(cnx)
1297        self.cursor_type = Cursor
1298        try:
1299            self._cnx.source()
1300        except Exception:
1301            raise _op_error("Invalid connection")
1302
1303    def __enter__(self):
1304        """Enter the runtime context for the connection object.
1305
1306        The runtime context can be used for running transactions.
1307        """
1308        return self
1309
1310    def __exit__(self, et, ev, tb):
1311        """Exit the runtime context for the connection object.
1312
1313        This does not close the connection, but it ends a transaction.
1314        """
1315        if et is None and ev is None and tb is None:
1316            self.commit()
1317        else:
1318            self.rollback()
1319
1320    def close(self):
1321        """Close the connection object."""
1322        if self._cnx:
1323            if self._tnx:
1324                try:
1325                    self.rollback()
1326                except DatabaseError:
1327                    pass
1328            self._cnx.close()
1329            self._cnx = None
1330        else:
1331            raise _op_error("Connection has been closed")
1332
1333    def commit(self):
1334        """Commit any pending transaction to the database."""
1335        if self._cnx:
1336            if self._tnx:
1337                self._tnx = False
1338                try:
1339                    self._cnx.source().execute("COMMIT")
1340                except DatabaseError:
1341                    raise
1342                except Exception:
1343                    raise _op_error("Can't commit")
1344        else:
1345            raise _op_error("Connection has been closed")
1346
1347    def rollback(self):
1348        """Roll back to the start of any pending transaction."""
1349        if self._cnx:
1350            if self._tnx:
1351                self._tnx = False
1352                try:
1353                    self._cnx.source().execute("ROLLBACK")
1354                except DatabaseError:
1355                    raise
1356                except Exception:
1357                    raise _op_error("Can't rollback")
1358        else:
1359            raise _op_error("Connection has been closed")
1360
1361    def cursor(self):
1362        """Return a new cursor object using the connection."""
1363        if self._cnx:
1364            try:
1365                return self.cursor_type(self)
1366            except Exception:
1367                raise _op_error("Invalid connection")
1368        else:
1369            raise _op_error("Connection has been closed")
1370
1371    if shortcutmethods:  # otherwise do not implement and document this
1372
1373        def execute(self, operation, params=None):
1374            """Shortcut method to run an operation on an implicit cursor."""
1375            cursor = self.cursor()
1376            cursor.execute(operation, params)
1377            return cursor
1378
1379        def executemany(self, operation, param_seq):
1380            """Shortcut method to run an operation against a sequence."""
1381            cursor = self.cursor()
1382            cursor.executemany(operation, param_seq)
1383            return cursor
1384
1385
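# Illustrative sketch (not part of the original module): the connection used
# as a context manager commits at the end of the block and rolls back if an
# exception is raised.  The "weather" table is an assumption.

def _example_transaction(con):
    """Insert a row inside a transaction handled by the context manager."""
    with con:  # commit on success, rollback on error; does not close
        cur = con.cursor()
        cur.execute("insert into weather (city, temp_lo) values (%s, %s)",
            ('Potsdam', 17))
        cur.close()

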
1386### Module Interface
1387
1388_connect = connect
1389
1390def connect(dsn=None,
1391        user=None, password=None,
1392        host=None, database=None):
1393    """Connect to a database."""
1394    # first get params from DSN
1395    dbport = -1
1396    dbhost = ""
1397    dbbase = ""
1398    dbuser = ""
1399    dbpasswd = ""
1400    dbopt = ""
1401    try:
1402        params = dsn.split(":")
1403        dbhost = params[0]
1404        dbbase = params[1]
1405        dbuser = params[2]
1406        dbpasswd = params[3]
1407        dbopt = params[4]
1408    except (AttributeError, IndexError, TypeError):
1409        pass
1410
1411    # override if necessary
1412    if user is not None:
1413        dbuser = user
1414    if password is not None:
1415        dbpasswd = password
1416    if database is not None:
1417        dbbase = database
1418    if host is not None:
1419        try:
1420            params = host.split(":")
1421            dbhost = params[0]
1422            dbport = int(params[1])
1423        except (AttributeError, IndexError, TypeError, ValueError):
1424            pass
1425
1426    # empty host is localhost
1427    if dbhost == "":
1428        dbhost = None
1429    if dbuser == "":
1430        dbuser = None
1431
1432    # open the connection
1433    cnx = _connect(dbbase, dbhost, dbport, dbopt, dbuser, dbpasswd)
1434    return Connection(cnx)
1435
1436
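# Illustrative sketch (not part of the original module): the different ways
# of passing connection parameters.  The host, database and user names used
# here are assumptions.

def _example_connect():
    """Open connections via a DSN string and via keyword arguments."""
    con1 = connect('myhost:mydb:myuser:mypassword')  # host:database:user:password
    con2 = connect(database='mydb', host='myhost:5433', user='myuser')
    return con1, con2

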
1437### Types Handling
1438
1439class Type(frozenset):
1440    """Type class for a couple of PostgreSQL data types.
1441
1442    PostgreSQL is object-oriented: types are dynamic.
1443    We must thus use type names as internal type codes.
1444    """
1445
1446    def __new__(cls, values):
1447        if isinstance(values, basestring):
1448            values = values.split()
1449        return super(Type, cls).__new__(cls, values)
1450
1451    def __eq__(self, other):
1452        if isinstance(other, basestring):
1453            if other.startswith('_'):
1454                other = other[1:]
1455            return other in self
1456        else:
1457            return super(Type, self).__eq__(other)
1458
1459    def __ne__(self, other):
1460        if isinstance(other, basestring):
1461            if other.startswith('_'):
1462                other = other[1:]
1463            return other not in self
1464        else:
1465            return super(Type, self).__ne__(other)
1466
1467
1468class ArrayType:
1469    """Type class for PostgreSQL array types."""
1470
1471    def __eq__(self, other):
1472        if isinstance(other, basestring):
1473            return other.startswith('_')
1474        else:
1475            return isinstance(other, ArrayType)
1476
1477    def __ne__(self, other):
1478        if isinstance(other, basestring):
1479            return not other.startswith('_')
1480        else:
1481            return not isinstance(other, ArrayType)
1482
1483
1484class RecordType:
1485    """Type class for PostgreSQL record types."""
1486
1487    def __eq__(self, other):
1488        if isinstance(other, TypeCode):
1489            return other.type == 'c'
1490        elif isinstance(other, basestring):
1491            return other == 'record'
1492        else:
1493            return isinstance(other, RecordType)
1494
1495    def __ne__(self, other):
1496        if isinstance(other, TypeCode):
1497            return other.type != 'c'
1498        elif isinstance(other, basestring):
1499            return other != 'record'
1500        else:
1501            return not isinstance(other, RecordType)
1502
1503
1504# Mandatory type objects defined by DB-API 2 specs:
1505
1506STRING = Type('char bpchar name text varchar')
1507BINARY = Type('bytea')
1508NUMBER = Type('int2 int4 serial int8 float4 float8 numeric money')
1509DATETIME = Type('date time timetz timestamp timestamptz interval'
1510    ' abstime reltime')  # these are very old
1511ROWID = Type('oid')
1512
1513
1514# Additional type objects (more specific):
1515
1516BOOL = Type('bool')
1517SMALLINT = Type('int2')
1518INTEGER = Type('int2 int4 int8 serial')
1519LONG = Type('int8')
1520FLOAT = Type('float4 float8')
1521NUMERIC = Type('numeric')
1522MONEY = Type('money')
1523DATE = Type('date')
1524TIME = Type('time timetz')
1525TIMESTAMP = Type('timestamp timestamptz')
1526INTERVAL = Type('interval')
1527HSTORE = Type('hstore')
1528JSON = Type('json jsonb')
1529
1530# Type object for arrays (also equate to their base types):
1531
1532ARRAY = ArrayType()
1533
1534# Type object for records (encompassing all composite types):
1535
1536RECORD = RecordType()
1537
1538
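# Illustrative sketch (not part of the original module): the type objects
# above compare equal to the type_code entries in cursor.description, so
# result columns can be classified without knowing the exact type names.

def _example_classify_columns(cursor):
    """Map each result column of an executed cursor to a rough category."""
    kinds = {}
    for name, type_code in zip(cursor.colnames, cursor.coltypes):
        if type_code == ARRAY:  # arrays also equate to their base types
            kinds[name] = 'array'
        elif type_code == NUMBER:
            kinds[name] = 'number'
        elif type_code == DATETIME:
            kinds[name] = 'datetime'
        else:
            kinds[name] = 'other'
    return kinds

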
1539# Mandatory type helpers defined by DB-API 2 specs:
1540
1541def Date(year, month, day):
1542    """Construct an object holding a date value."""
1543    return date(year, month, day)
1544
1545
1546def Time(hour, minute=0, second=0, microsecond=0, tzinfo=None):
1547    """Construct an object holding a time value."""
1548    return time(hour, minute, second, microsecond, tzinfo)
1549
1550
1551def Timestamp(year, month, day, hour=0, minute=0, second=0, microsecond=0,
1552        tzinfo=None):
1553    """Construct an object holding a time stamp value."""
1554    return datetime(year, month, day, hour, minute, second, microsecond, tzinfo)
1555
1556
1557def DateFromTicks(ticks):
1558    """Construct an object holding a date value from the given ticks value."""
1559    return Date(*localtime(ticks)[:3])
1560
1561
1562def TimeFromTicks(ticks):
1563    """Construct an object holding a time value from the given ticks value."""
1564    return Time(*localtime(ticks)[3:6])
1565
1566
1567def TimestampFromTicks(ticks):
1568    """Construct an object holding a time stamp from the given ticks value."""
1569    return Timestamp(*localtime(ticks)[:6])
1570
1571
1572class Binary(bytes):
1573    """Construct an object capable of holding a binary (long) string value."""
1574
1575
1576# Additional type helpers for PyGreSQL:
1577
1578def Interval(days, hours=0, minutes=0, seconds=0, microseconds=0):
1579    """Construct an object holding a time interval value."""
1580    return timedelta(days, hours=hours, minutes=minutes, seconds=seconds,
1581        microseconds=microseconds)
1582
1583
1584class Hstore(dict):
1585    """Wrapper class for marking hstore values."""
1586
1587    _re_quote = regex('^[Nn][Uu][Ll][Ll]$|[ ,=>]')
1588
1589    @classmethod
1590    def _quote(cls, s):
1591        if s is None:
1592            return 'NULL'
1593        if not s:
1594            return '""'
1595        s = s.replace('"', '\\"')
1596        if cls._re_quote.search(s):
1597            s = '"%s"' % s
1598        return s
1599
1600    def __str__(self):
1601        q = self._quote
1602        return ','.join('%s=>%s' % (q(k), q(v)) for k, v in self.items())
1603
1604
1605class Json:
1606    """Construct a wrapper for holding an object serializable to JSON."""
1607
1608    def __init__(self, obj, encode=None):
1609        self.obj = obj
1610        self.encode = encode or jsonencode
1611
1612    def __str__(self):
1613        obj = self.obj
1614        if isinstance(obj, basestring):
1615            return obj
1616        return self.encode(obj)
1617
1618
1619class Literal:
1620    """Construct a wrapper for holding a literal SQL string."""
1621
1622    def __init__(self, sql):
1623        self.sql = sql
1624
1625    def __str__(self):
1626        return self.sql
1627
1628    __pg_repr__ = __str__
1629
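# Illustrative sketch (not part of the original module): the wrapper classes
# above mark values for proper adaptation when they are passed as query
# parameters.  The "sensors" table and its columns are assumptions.

def _example_wrapper_types(con):
    """Pass hstore, JSON and literal SQL values as query parameters."""
    cur = con.cursor()
    cur.execute("insert into sensors (tags, payload, created)"
        " values (%s, %s, %s)",
        (Hstore({'room': 'lab', 'unit': 'C'}),
         Json({'temp': 21.5, 'ok': True}),
         Literal('current_timestamp')))
    con.commit()
    cur.close()

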
1630# If run as script, print some information:
1631
1632if __name__ == '__main__':
1633    print('PyGreSQL version', version)
1634    print('')
1635    print(__doc__)