source: trunk/pgdb.py @ 843

Last change on this file was revision 843, checked in by cito, 3 years ago

Silently accept unescaped quotes

  • Property svn:keywords set to Id
File size: 54.0 KB
1#! /usr/bin/python
2#
3# pgdb.py
4#
5# Written by D'Arcy J.M. Cain
6#
7# $Id: pgdb.py 843 2016-02-08 21:23:14Z cito $
8#
9
10"""pgdb - DB-API 2.0 compliant module for PygreSQL.
11
12(c) 1999, Pascal Andre <andre@via.ecp.fr>.
13See package documentation for further information on copyright.
14
15Inline documentation is sparse.
16See DB-API 2.0 specification for usage information:
17http://www.python.org/peps/pep-0249.html
18
19Basic usage:
20
21    pgdb.connect(connect_string) # open a connection
22    # connect_string = 'host:database:user:password:opt'
23    # All parts are optional. The parts from host through password
24    # may also be passed as keyword arguments. To pass a port,
25    # include it in the host keyword parameter:
26    connection = pgdb.connect(host='localhost:5432')
27
28    cursor = connection.cursor() # open a cursor
29
30    cursor.execute(query[, params])
31    # Execute a query, binding params (a dictionary) if they are
32    # passed. The binding syntax is the same as the % operator
33    # for dictionaries; parameter values are quoted automatically.
34
35    cursor.executemany(query, list of params)
36    # Execute a query many times, binding each param dictionary
37    # from the list.
38
39    cursor.fetchone() # fetch one row, [value, value, ...]
40
41    cursor.fetchall() # fetch all rows, [[value, value, ...], ...]
42
43    cursor.fetchmany([size])
44    # returns size or cursor.arraysize number of rows,
45    # [[value, value, ...], ...] from result set.
46    # Default cursor.arraysize is 1.
47
48    cursor.description # returns information about the columns
49    #   [(column_name, type_name, display_size,
50    #           internal_size, precision, scale, null_ok), ...]
51    # Note that display_size, precision, scale and null_ok
52    # are not implemented.
53
54    cursor.rowcount # number of rows available in the result set
55    # Available after a call to execute.
56
57    connection.commit() # commit transaction
58
59    connection.rollback() # or rollback transaction
60
61    cursor.close() # close the cursor
62
63    connection.close() # close the connection
64"""
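# Example (illustrative only, not executed here): binding parameters with the
# pyformat placeholders described above.  The table name "weather" and its
# columns are hypothetical; parameter values are quoted by the module:
#
#     import pgdb
#     con = pgdb.connect(database='testdb', host='localhost:5432')
#     cur = con.cursor()
#     cur.execute("insert into weather (city, temp) values (%(city)s, %(temp)s)",
#         {'city': "O'Brien's Bay", 'temp': 21.5})
#     con.commit()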
65
66from __future__ import print_function
67
68from _pg import *
69
70__version__ = version
71
72from datetime import date, time, datetime, timedelta
73from time import localtime
74from decimal import Decimal
75from uuid import UUID as Uuid
76from math import isnan, isinf
77from collections import namedtuple
78from functools import partial
79from re import compile as regex
80from json import loads as jsondecode, dumps as jsonencode
81
82try:
83    long
84except NameError:  # Python >= 3.0
85    long = int
86
87try:
88    unicode
89except NameError:  # Python >= 3.0
90    unicode = str
91
92try:
93    basestring
94except NameError:  # Python >= 3.0
95    basestring = (str, bytes)
96
97from collections import Iterable
98
99
100### Module Constants
101
102# compliant with DB API 2.0
103apilevel = '2.0'
104
105# module may be shared, but not connections
106threadsafety = 1
107
109# this module uses the extended Python format codes
109paramstyle = 'pyformat'
110
111# shortcut methods have been excluded from DB API 2 and
112# are not recommended by the DB SIG, but they can be handy
113shortcutmethods = 1
114
115
116### Internal Type Handling
117
118try:
119    from inspect import signature
120except ImportError:  # Python < 3.3
121    from inspect import getargspec
122
123    def get_args(func):
124        return getargspec(func).args
125else:
126
127    def get_args(func):
128        return list(signature(func).parameters)
129
130try:
131    if datetime.strptime('+0100', '%z') is None:
132        raise ValueError
133except ValueError:  # Python < 3.2
134    timezones = None
135else:
136    # time zones used in Postgres timestamptz output
137    timezones = dict(CET='+0100', EET='+0200', EST='-0500',
138        GMT='+0000', HST='-1000', MET='+0100', MST='-0700',
139        UCT='+0000', UTC='+0000', WET='+0000')
140
141
142def decimal_type(decimal_type=None):
143    """Get or set global type to be used for decimal values.
144
145    Note that connections cache cast functions. To be sure a global change
146    is picked up by a running connection, call con.type_cache.reset_typecast().
147    """
148    global Decimal
149    if decimal_type is not None:
150        Decimal = decimal_type
151        set_typecast('numeric', decimal_type)
152    return Decimal
153
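# Example (illustrative sketch): have numeric values returned as floats instead
# of Decimal instances module-wide; "con" stands for an already open connection,
# whose cached casts must be reset as noted above:
#
#     pgdb.decimal_type(float)
#     con.type_cache.reset_typecast()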
154
155def cast_bool(value):
156    """Cast boolean value in database format to bool."""
157    if value:
158        return value[0] in ('t', 'T')
159
160
161def cast_money(value):
162    """Cast money value in database format to Decimal."""
163    if value:
164        value = value.replace('(', '-')
165        return Decimal(''.join(c for c in value if c.isdigit() or c in '.-'))
166
167
168def cast_int2vector(value):
169    """Cast an int2vector value."""
170    return [int(v) for v in value.split()]
171
172
173def cast_date(value, connection):
174    """Cast a date value."""
175    # The output format depends on the server setting DateStyle.  The default
176    # setting ISO and the setting for German are actually unambiguous.  The
177    # order of days and months in the other two settings is however ambiguous,
178    # so at least here we need to consult the setting to properly parse values.
179    if value == '-infinity':
180        return date.min
181    if value == 'infinity':
182        return date.max
183    value = value.split()
184    if value[-1] == 'BC':
185        return date.min
186    value = value[0]
187    if len(value) > 10:
188        return date.max
189    fmt = connection.date_format()
190    return datetime.strptime(value, fmt).date()
191
192
193def cast_time(value):
194    """Cast a time value."""
195    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
196    return datetime.strptime(value, fmt).time()
197
198
199_re_timezone = regex('(.*)([+-].*)')
200
201
202def cast_timetz(value):
203    """Cast a timetz value."""
204    tz = _re_timezone.match(value)
205    if tz:
206        value, tz = tz.groups()
207    else:
208        tz = '+0000'
209    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
210    if timezones:
211        if tz.startswith(('+', '-')):
212            if len(tz) < 5:
213                tz += '00'
214            else:
215                tz = tz.replace(':', '')
216        elif tz in timezones:
217            tz = timezones[tz]
218        else:
219            tz = '+0000'
220        value += tz
221        fmt += '%z'
222    return datetime.strptime(value, fmt).timetz()
223
224
225def cast_timestamp(value, connection):
226    """Cast a timestamp value."""
227    if value == '-infinity':
228        return datetime.min
229    if value == 'infinity':
230        return datetime.max
231    value = value.split()
232    if value[-1] == 'BC':
233        return datetime.min
234    fmt = connection.date_format()
235    if fmt.endswith('-%Y') and len(value) > 2:
236        value = value[1:5]
237        if len(value[3]) > 4:
238            return datetime.max
239        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
240            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
241    else:
242        if len(value[0]) > 10:
243            return datetime.max
244        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
245    return datetime.strptime(' '.join(value), ' '.join(fmt))
246
247
248def cast_timestamptz(value, connection):
249    """Cast a timestamptz value."""
250    if value == '-infinity':
251        return datetime.min
252    if value == 'infinity':
253        return datetime.max
254    value = value.split()
255    if value[-1] == 'BC':
256        return datetime.min
257    fmt = connection.date_format()
258    if fmt.endswith('-%Y') and len(value) > 2:
259        value = value[1:]
260        if len(value[3]) > 4:
261            return datetime.max
262        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
263            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
264        value, tz = value[:-1], value[-1]
265    else:
266        if fmt.startswith('%Y-'):
267            tz = _re_timezone.match(value[1])
268            if tz:
269                value[1], tz = tz.groups()
270            else:
271                tz = '+0000'
272        else:
273            value, tz = value[:-1], value[-1]
274        if len(value[0]) > 10:
275            return datetime.max
276        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
277    if timezones:
278        if tz.startswith(('+', '-')):
279            if len(tz) < 5:
280                tz += '00'
281            else:
282                tz = tz.replace(':', '')
283        elif tz in timezones:
284            tz = timezones[tz]
285        else:
286            tz = '+0000'
287        value.append(tz)
288        fmt.append('%z')
289    return datetime.strptime(' '.join(value), ' '.join(fmt))
290
291_re_interval_sql_standard = regex(
292    '(?:([+-])?([0-9]+)-([0-9]+) ?)?'
293    '(?:([+-]?[0-9]+)(?!:) ?)?'
294    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
295
296_re_interval_postgres = regex(
297    '(?:([+-]?[0-9]+) ?years? ?)?'
298    '(?:([+-]?[0-9]+) ?mons? ?)?'
299    '(?:([+-]?[0-9]+) ?days? ?)?'
300    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
301
302_re_interval_postgres_verbose = regex(
303    '@ ?(?:([+-]?[0-9]+) ?years? ?)?'
304    '(?:([+-]?[0-9]+) ?mons? ?)?'
305    '(?:([+-]?[0-9]+) ?days? ?)?'
306    '(?:([+-]?[0-9]+) ?hours? ?)?'
307    '(?:([+-]?[0-9]+) ?mins? ?)?'
308    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))? ?secs?)? ?(ago)?')
309
310_re_interval_iso_8601 = regex(
311    'P(?:([+-]?[0-9]+)Y)?'
312    '(?:([+-]?[0-9]+)M)?'
313    '(?:([+-]?[0-9]+)D)?'
314    '(?:T(?:([+-]?[0-9]+)H)?'
315    '(?:([+-]?[0-9]+)M)?'
316    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))?S)?)?')
317
318
319def cast_interval(value):
320    """Cast an interval value."""
321    # The output format depends on the server setting IntervalStyle, but it's
322    # not necessary to consult this setting to parse it.  It's faster to just
323    # check all possible formats, and there is no ambiguity here.
324    m = _re_interval_iso_8601.match(value)
325    if m:
326        m = [d or '0' for d in m.groups()]
327        secs_ago = m.pop(5) == '-'
328        m = [int(d) for d in m]
329        years, mons, days, hours, mins, secs, usecs = m
330        if secs_ago:
331            secs = -secs
332            usecs = -usecs
333    else:
334        m = _re_interval_postgres_verbose.match(value)
335        if m:
336            m, ago = [d or '0' for d in m.groups()[:8]], m.group(9)
337            secs_ago = m.pop(5) == '-'
338            m = [-int(d) for d in m] if ago else [int(d) for d in m]
339            years, mons, days, hours, mins, secs, usecs = m
340            if secs_ago:
341                secs = -secs
342                usecs = -usecs
343        else:
344            m = _re_interval_postgres.match(value)
345            if m and any(m.groups()):
346                m = [d or '0' for d in m.groups()]
347                hours_ago = m.pop(3) == '-'
348                m = [int(d) for d in m]
349                years, mons, days, hours, mins, secs, usecs = m
350                if hours_ago:
351                    hours = -hours
352                    mins = -mins
353                    secs = -secs
354                    usecs = -usecs
355            else:
356                m = _re_interval_sql_standard.match(value)
357                if m and any(m.groups()):
358                    m = [d or '0' for d in m.groups()]
359                    years_ago = m.pop(0) == '-'
360                    hours_ago = m.pop(3) == '-'
361                    m = [int(d) for d in m]
362                    years, mons, days, hours, mins, secs, usecs = m
363                    if years_ago:
364                        years = -years
365                        mons = -mons
366                    if hours_ago:
367                        hours = -hours
368                        mins = -mins
369                        secs = -secs
370                        usecs = -usecs
371                else:
372                    raise ValueError('Cannot parse interval: %s' % value)
373    days += 365 * years + 30 * mons
374    return timedelta(days=days, hours=hours, minutes=mins,
375        seconds=secs, microseconds=usecs)
376
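# Example (illustrative): the interval styles matched by the regular expressions
# above all yield the same timedelta, e.g. for one day and two hours:
#
#     cast_interval('1 day 02:00:00')  # default Postgres style
#     cast_interval('P1DT2H')          # ISO 8601 style
#     # both return timedelta(days=1, hours=2)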
377
378class Typecasts(dict):
379    """Dictionary mapping database types to typecast functions.
380
381    The cast functions get passed the string representation of a value in
382    the database which they need to convert to a Python object.  The
383    passed string will never be None since NULL values are already
384    handled before the cast function is called.
385    """
386
387    # the default cast functions
388    # (str functions are ignored but have been added for faster access)
389    defaults = {'char': str, 'bpchar': str, 'name': str,
390        'text': str, 'varchar': str,
391        'bool': cast_bool, 'bytea': unescape_bytea,
392        'int2': int, 'int4': int, 'serial': int, 'int8': long, 'oid': int,
393        'hstore': cast_hstore, 'json': jsondecode, 'jsonb': jsondecode,
394        'float4': float, 'float8': float,
395        'numeric': Decimal, 'money': cast_money,
396        'date': cast_date, 'interval': cast_interval,
397        'time': cast_time, 'timetz': cast_timetz,
398        'timestamp': cast_timestamp, 'timestamptz': cast_timestamptz,
399        'int2vector': cast_int2vector, 'uuid': Uuid,
400        'anyarray': cast_array, 'record': cast_record}
401
402    connection = None  # will be set in local connection specific instances
403
404    def __missing__(self, typ):
405        """Create a cast function if it is not cached.
406
407        Note that this class never raises a KeyError,
408        but returns None when no special cast function exists.
409        """
410        if not isinstance(typ, str):
411            raise TypeError('Invalid type: %s' % typ)
412        cast = self.defaults.get(typ)
413        if cast:
414            # store default for faster access
415            cast = self._add_connection(cast)
416            self[typ] = cast
417        elif typ.startswith('_'):
418            # create array cast
419            base_cast = self[typ[1:]]
420            cast = self.create_array_cast(base_cast)
421            if base_cast:
422                # store only if base type exists
423                self[typ] = cast
424        return cast
425
426    @staticmethod
427    def _needs_connection(func):
428        """Check if a typecast function needs a connection argument."""
429        try:
430            args = get_args(func)
431        except (TypeError, ValueError):
432            return False
433        else:
434            return 'connection' in args[1:]
435
436    def _add_connection(self, cast):
437        """Add a connection argument to the typecast function if necessary."""
438        if not self.connection or not self._needs_connection(cast):
439            return cast
440        return partial(cast, connection=self.connection)
441
442    def get(self, typ, default=None):
443        """Get the typecast function for the given database type."""
444        return self[typ] or default
445
446    def set(self, typ, cast):
447        """Set a typecast function for the specified database type(s)."""
448        if isinstance(typ, basestring):
449            typ = [typ]
450        if cast is None:
451            for t in typ:
452                self.pop(t, None)
453                self.pop('_%s' % t, None)
454        else:
455            if not callable(cast):
456                raise TypeError("Cast parameter must be callable")
457            for t in typ:
458                self[t] = self._add_connection(cast)
459                self.pop('_%s' % t, None)
460
461    def reset(self, typ=None):
462        """Reset the typecasts for the specified type(s) to their defaults.
463
464        When no type is specified, all typecasts will be reset.
465        """
466        defaults = self.defaults
467        if typ is None:
468            self.clear()
469            self.update(defaults)
470        else:
471            if isinstance(typ, basestring):
472                typ = [typ]
473            for t in typ:
474                cast = defaults.get(t)
475                if cast:
476                    self[t] = self._add_connection(cast)
477                    t = '_%s' % t
478                    cast = defaults.get(t)
479                    if cast:
480                        self[t] = self._add_connection(cast)
481                    else:
482                        self.pop(t, None)
483                else:
484                    self.pop(t, None)
485                    self.pop('_%s' % t, None)
486
487    def create_array_cast(self, basecast):
488        """Create an array typecast for the given base cast."""
489        def cast(v):
490            return cast_array(v, basecast)
491        return cast
492
493    def create_record_cast(self, name, fields, casts):
494        """Create a named record typecast for the given fields and casts."""
495        record = namedtuple(name, fields)
496        def cast(v):
497            return record(*cast_record(v, casts))
498        return cast
499
500
501_typecasts = Typecasts()  # this is the global typecast dictionary
502
503
504def get_typecast(typ):
505    """Get the global typecast function for the given database type(s)."""
506    return _typecasts.get(typ)
507
508
509def set_typecast(typ, cast):
510    """Set a global typecast function for the given database type(s).
511
512    Note that connections cache cast functions. To be sure a global change
513    is picked up by a running connection, call con.type_cache.reset_typecast().
514    """
515    _typecasts.set(typ, cast)
516
517
518def reset_typecast(typ=None):
519    """Reset the global typecasts for the given type(s) to their default.
520
521    When no type is specified, all typecasts will be reset.
522
523    Note that connections cache cast functions. To be sure a global change
524    is picked up by a running connection, call con.type_cache.reset_typecast().
525    """
526    _typecasts.reset(typ)
527
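# Example (illustrative sketch): registering a global cast for another type.
# The cast function receives the string sent by the database; the "point"
# parser below is a hypothetical helper, not part of this module:
#
#     def cast_point(value):
#         x, y = value.strip('()').split(',')
#         return float(x), float(y)
#
#     set_typecast('point', cast_point)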
528
529class LocalTypecasts(Typecasts):
530    """Map typecasts, including local composite types, to cast functions."""
531
532    defaults = _typecasts
533
534    connection = None  # will be set in a connection specific instance
535
536    def __missing__(self, typ):
537        """Create a cast function if it is not cached."""
538        if typ.startswith('_'):
539            base_cast = self[typ[1:]]
540            cast = self.create_array_cast(base_cast)
541            if base_cast:
542                self[typ] = cast
543        else:
544            cast = self.defaults.get(typ)
545            if cast:
546                cast = self._add_connection(cast)
547                self[typ] = cast
548            else:
549                fields = self.get_fields(typ)
550                if fields:
551                    casts = [self[field.type] for field in fields]
552                    fields = [field.name for field in fields]
553                    cast = self.create_record_cast(typ, fields, casts)
554                    self[typ] = cast
555        return cast
556
557    def get_fields(self, typ):
558        """Return the fields for the given record type.
559
560        This method will be replaced with a method that looks up the fields
561        using the type cache of the connection.
562        """
563        return []
564
565
566class TypeCode(str):
567    """Class representing the type_code used by the DB-API 2.0.
568
569    TypeCode objects are strings equal to the PostgreSQL type name,
570    but carry some additional information.
571    """
572
573    @classmethod
574    def create(cls, oid, name, len, type, category, delim, relid):
575        """Create a type code for a PostgreSQL data type."""
576        self = cls(name)
577        self.oid = oid
578        self.len = len
579        self.type = type
580        self.category = category
581        self.delim = delim
582        self.relid = relid
583        return self
584
585FieldInfo = namedtuple('FieldInfo', ['name', 'type'])
586
587
588class TypeCache(dict):
589    """Cache for database types.
590
591    This cache maps type OIDs and names to TypeCode strings containing
592    important information on the associated database type.
593    """
594
595    def __init__(self, cnx):
596        """Initialize type cache for connection."""
597        super(TypeCache, self).__init__()
598        self._escape_string = cnx.escape_string
599        self._src = cnx.source()
600        self._typecasts = LocalTypecasts()
601        self._typecasts.get_fields = self.get_fields
602        self._typecasts.connection = cnx
603
604    def __missing__(self, key):
605        """Get the type info from the database if it is not cached."""
606        if isinstance(key, int):
607            oid = key
608        else:
609            if '.' not in key and '"' not in key:
610                key = '"%s"' % key
611            oid = "'%s'::regtype" % self._escape_string(key)
612        try:
613            self._src.execute("SELECT oid, typname,"
614                 " typlen, typtype, typcategory, typdelim, typrelid"
615                " FROM pg_type WHERE oid=%s" % oid)
616        except ProgrammingError:
617            res = None
618        else:
619            res = self._src.fetch(1)
620        if not res:
621            raise KeyError('Type %s could not be found' % key)
622        res = res[0]
623        type_code = TypeCode.create(int(res[0]), res[1],
624            int(res[2]), res[3], res[4], res[5], int(res[6]))
625        self[type_code.oid] = self[str(type_code)] = type_code
626        return type_code
627
628    def get(self, key, default=None):
629        """Get the type even if it is not cached."""
630        try:
631            return self[key]
632        except KeyError:
633            return default
634
635    def get_fields(self, typ):
636        """Get the names and types of the fields of composite types."""
637        if not isinstance(typ, TypeCode):
638            typ = self.get(typ)
639            if not typ:
640                return None
641        if not typ.relid:
642            return None  # this type is not composite
643        self._src.execute("SELECT attname, atttypid"
644            " FROM pg_attribute WHERE attrelid=%s AND attnum>0"
645            " AND NOT attisdropped ORDER BY attnum" % typ.relid)
646        return [FieldInfo(name, self.get(int(oid)))
647            for name, oid in self._src.fetch(-1)]
648
649    def get_typecast(self, typ):
650        """Get the typecast function for the given database type."""
651        return self._typecasts.get(typ)
652
653    def set_typecast(self, typ, cast):
654        """Set a typecast function for the specified database type(s)."""
655        self._typecasts.set(typ, cast)
656
657    def reset_typecast(self, typ=None):
658        """Reset the typecast function for the specified database type(s)."""
659        self._typecasts.reset(typ)
660
661    def typecast(self, value, typ):
662        """Cast the given value according to the given database type."""
663        if value is None:
664            # for NULL values, no typecast is necessary
665            return None
666        cast = self.get_typecast(typ)
667        if not cast or cast is str:
668            # no typecast is necessary
669            return value
670        return cast(value)
671
672
673class _quotedict(dict):
674    """Dictionary with auto quoting of its items.
675
676    The quote attribute must be set to the desired quote function.
677    """
678
679    def __getitem__(self, key):
680        return self.quote(super(_quotedict, self).__getitem__(key))
681
682
683### Error messages
684
685def _db_error(msg, cls=DatabaseError):
686    """Return DatabaseError with empty sqlstate attribute."""
687    error = cls(msg)
688    error.sqlstate = None
689    return error
690
691
692def _op_error(msg):
693    """Return OperationalError."""
694    return _db_error(msg, OperationalError)
695
696
697### Cursor Object
698
699class Cursor(object):
700    """Cursor object."""
701
702    def __init__(self, dbcnx):
703        """Create a cursor object for the database connection."""
704        self.connection = self._dbcnx = dbcnx
705        self._cnx = dbcnx._cnx
706        self.type_cache = dbcnx.type_cache
707        self._src = self._cnx.source()
708        # the official attribute for describing the result columns
709        self._description = None
710        if self.row_factory is Cursor.row_factory:
711            # the row factory needs to be determined dynamically
712            self.row_factory = None
713        else:
714            self.build_row_factory = None
715        self.rowcount = -1
716        self.arraysize = 1
717        self.lastrowid = None
718
719    def __iter__(self):
720        """Make the cursor compatible with the iteration protocol."""
721        return self
722
723    def __enter__(self):
724        """Enter the runtime context for the cursor object."""
725        return self
726
727    def __exit__(self, et, ev, tb):
728        """Exit the runtime context for the cursor object."""
729        self.close()
730
731    def _quote(self, value):
732        """Quote value depending on its type."""
733        if value is None:
734            return 'NULL'
735        if isinstance(value, (Hstore, Json)):
736            value = str(value)
737        if isinstance(value, basestring):
738            if isinstance(value, Binary):
739                value = self._cnx.escape_bytea(value)
740                if bytes is not str:  # Python >= 3.0
741                    value = value.decode('ascii')
742            else:
743                value = self._cnx.escape_string(value)
744            return "'%s'" % value
745        if isinstance(value, float):
746            if isinf(value):
747                return "'-Infinity'" if value < 0 else "'Infinity'"
748            if isnan(value):
749                return "'NaN'"
750            return value
751        if isinstance(value, (int, long, Decimal, Literal)):
752            return value
753        if isinstance(value, datetime):
754            if value.tzinfo:
755                return "'%s'::timestamptz" % value
756            return "'%s'::timestamp" % value
757        if isinstance(value, date):
758            return "'%s'::date" % value
759        if isinstance(value, time):
760            if value.tzinfo:
761                return "'%s'::timetz" % value
762            return "'%s'::time" % value
763        if isinstance(value, timedelta):
764            return "'%s'::interval" % value
765        if isinstance(value, Uuid):
766            return "'%s'::uuid" % value
767        if isinstance(value, list):
768            # Quote value as an ARRAY constructor. This is better than using
769            # an array literal because it carries the information that this is
770            # an array and not a string.  One issue with this syntax is that
771            # you need to add an explicit typecast when passing empty arrays.
772            # The ARRAY keyword is actually only necessary at the top level.
773            if not value:  # exception for empty array
774                return "'{}'"
775            q = self._quote
776            return 'ARRAY[%s]' % ','.join(str(q(v)) for v in value)
777        if isinstance(value, tuple):
778            # Quote as a ROW constructor.  This is better than using a record
779            # literal because it carries the information that this is a record
780            # and not a string.  We don't use the keyword ROW in order to make
781            # this usable with the IN syntax as well.  It is only necessary
782            # when the record has a single column, which is not really useful.
783            q = self._quote
784            return '(%s)' % ','.join(str(q(v)) for v in value)
785        try:
786            value = value.__pg_repr__()
787        except AttributeError:
788            raise InterfaceError(
789                'Do not know how to adapt type %s' % type(value))
790        if isinstance(value, (tuple, list)):
791            value = self._quote(value)
792        return value
793
794    def _quoteparams(self, string, parameters):
795        """Quote parameters.
796
797        This function works for both mappings and sequences.
798
799        The function should be used even when there are no parameters,
800        so that we have a consistent behavior regarding percent signs.
801        """
802        if not parameters:
803            try:
804                return string % ()  # unescape literal quotes if possible
805            except (TypeError, ValueError):
806                return string  # silently accept unescaped quotes
807        if isinstance(parameters, dict):
808            parameters = _quotedict(parameters)
809            parameters.quote = self._quote
810        else:
811            parameters = tuple(map(self._quote, parameters))
812        return string % parameters
813
814    def _make_description(self, info):
815        """Make the description tuple for the given field info."""
816        name, typ, size, mod = info[1:]
817        type_code = self.type_cache[typ]
818        if mod > 0:
819            mod -= 4
820        if type_code == 'numeric':
821            precision, scale = mod >> 16, mod & 0xffff
822            size = precision
823        else:
824            if not size:
825                size = type_code.size
826            if size == -1:
827                size = mod
828            precision = scale = None
829        return CursorDescription(name, type_code,
830            None, size, precision, scale, None)
831
832    @property
833    def description(self):
834        """Read-only attribute describing the result columns."""
835        descr = self._description
836        if self._description is True:
837            make = self._make_description
838            descr = [make(info) for info in self._src.listinfo()]
839            self._description = descr
840        return descr
841
842    @property
843    def colnames(self):
844        """Unofficial convenience method for getting the column names."""
845        return [d[0] for d in self.description]
846
847    @property
848    def coltypes(self):
849        """Unofficial convenience method for getting the column types."""
850        return [d[1] for d in self.description]
851
852    def close(self):
853        """Close the cursor object."""
854        self._src.close()
855        self._description = None
856        self.rowcount = -1
857        self.lastrowid = None
858
859    def execute(self, operation, parameters=None):
860        """Prepare and execute a database operation (query or command)."""
861        # The parameters may also be specified as a list of tuples to e.g.
862        # insert multiple rows in a single operation, but this kind of
863        # usage is deprecated.  We make several plausibility checks because
864        # tuples can also be passed with the meaning of ROW constructors.
865        if (parameters and isinstance(parameters, list)
866                and len(parameters) > 1
867                and all(isinstance(p, tuple) for p in parameters)
868                and all(len(p) == len(parameters[0]) for p in parameters[1:])):
869            return self.executemany(operation, parameters)
870        else:
871            # not a list of tuples
872            return self.executemany(operation, [parameters])
873
874    def executemany(self, operation, seq_of_parameters):
875        """Prepare operation and execute it against a parameter sequence."""
876        if not seq_of_parameters:
877            # don't do anything without parameters
878            return
879        self._description = None
880        self.rowcount = -1
881        # first try to execute all queries
882        rowcount = 0
883        sql = "BEGIN"
884        try:
885            if not self._dbcnx._tnx:
886                try:
887                    self._cnx.source().execute(sql)
888                except DatabaseError:
889                    raise  # database provides error message
890                except Exception:
891                    raise _op_error("Can't start transaction")
892                self._dbcnx._tnx = True
893            for parameters in seq_of_parameters:
894                sql = operation
895                sql = self._quoteparams(sql, parameters)
896                rows = self._src.execute(sql)
897                if rows:  # true if not DML
898                    rowcount += rows
899                else:
900                    self.rowcount = -1
901        except DatabaseError:
902            raise  # database provides error message
903        except Error as err:
904            raise _db_error(
905                "Error in '%s': '%s' " % (sql, err), InterfaceError)
906        except Exception as err:
907            raise _op_error("Internal error in '%s': %s" % (sql, err))
908        # then initialize result raw count and description
909        if self._src.resulttype == RESULT_DQL:
910            self._description = True  # fetch on demand
911            self.rowcount = self._src.ntuples
912            self.lastrowid = None
913            if self.build_row_factory:
914                self.row_factory = self.build_row_factory()
915        else:
916            self.rowcount = rowcount
917            self.lastrowid = self._src.oidstatus()
918        # return the cursor object, so you can write statements such as
919        # "cursor.execute(...).fetchall()" or "for row in cursor.execute(...)"
920        return self
921
922    def fetchone(self):
923        """Fetch the next row of a query result set."""
924        res = self.fetchmany(1, False)
925        try:
926            return res[0]
927        except IndexError:
928            return None
929
930    def fetchall(self):
931        """Fetch all (remaining) rows of a query result."""
932        return self.fetchmany(-1, False)
933
934    def fetchmany(self, size=None, keep=False):
935        """Fetch the next set of rows of a query result.
936
937        The number of rows to fetch per call is specified by the
938        size parameter. If it is not given, the cursor's arraysize
939        determines the number of rows to be fetched. If you set
940        the keep parameter to true, this is kept as the new arraysize.
941        """
942        if size is None:
943            size = self.arraysize
944        if keep:
945            self.arraysize = size
946        try:
947            result = self._src.fetch(size)
948        except DatabaseError:
949            raise
950        except Error as err:
951            raise _db_error(str(err))
952        typecast = self.type_cache.typecast
953        return [self.row_factory([typecast(value, typ)
954            for typ, value in zip(self.coltypes, row)]) for row in result]
955
956    def callproc(self, procname, parameters=None):
957        """Call a stored database procedure with the given name.
958
959        The sequence of parameters must contain one entry for each input
960        argument that the procedure expects. The result of the call is the
961        same as this input sequence; replacement of output and input/output
962        parameters in the return value is currently not supported.
963
964        The procedure may also provide a result set as output. This can be
965        requested through the standard fetch methods of the cursor.
966        """
967        n = parameters and len(parameters) or 0
968        query = 'select * from "%s"(%s)' % (procname, ','.join(n * ['%s']))
969        self.execute(query, parameters)
970        return parameters
971
972    def copy_from(self, stream, table,
973            format=None, sep=None, null=None, size=None, columns=None):
974        """Copy data from an input stream to the specified table.
975
976        The input stream can be a file-like object with a read() method or
977        it can also be an iterable returning a row or multiple rows of input
978        on each iteration.
979
980        The format must be text, csv or binary. The sep option sets the
981        column separator (delimiter) used in the non binary formats.
982        The null option sets the textual representation of NULL in the input.
983
984        The size option sets the size of the buffer used when reading data
985        from file-like objects.
986
987        The copy operation can be restricted to a subset of columns. If no
988        columns are specified, all of them will be copied.
989        """
990        binary_format = format == 'binary'
991        try:
992            read = stream.read
993        except AttributeError:
994            if size:
995                raise ValueError("Size must only be set for file-like objects")
996            if binary_format:
997                input_type = bytes
998                type_name = 'byte strings'
999            else:
1000                input_type = basestring
1001                type_name = 'strings'
1002
1003            if isinstance(stream, basestring):
1004                if not isinstance(stream, input_type):
1005                    raise ValueError("The input must be %s" % type_name)
1006                if not binary_format:
1007                    if isinstance(stream, str):
1008                        if not stream.endswith('\n'):
1009                            stream += '\n'
1010                    else:
1011                        if not stream.endswith(b'\n'):
1012                            stream += b'\n'
1013
1014                def chunks():
1015                    yield stream
1016
1017            elif isinstance(stream, Iterable):
1018
1019                def chunks():
1020                    for chunk in stream:
1021                        if not isinstance(chunk, input_type):
1022                            raise ValueError(
1023                                "Input stream must consist of %s" % type_name)
1024                        if isinstance(chunk, str):
1025                            if not chunk.endswith('\n'):
1026                                chunk += '\n'
1027                        else:
1028                            if not chunk.endswith(b'\n'):
1029                                chunk += b'\n'
1030                        yield chunk
1031
1032            else:
1033                raise TypeError("Need an input stream to copy from")
1034        else:
1035            if size is None:
1036                size = 8192
1037            elif not isinstance(size, int):
1038                raise TypeError("The size option must be an integer")
1039            if size > 0:
1040
1041                def chunks():
1042                    while True:
1043                        buffer = read(size)
1044                        yield buffer
1045                        if not buffer or len(buffer) < size:
1046                            break
1047
1048            else:
1049
1050                def chunks():
1051                    yield read()
1052
1053        if not table or not isinstance(table, basestring):
1054            raise TypeError("Need a table to copy to")
1055        if table.lower().startswith('select'):
1056            raise ValueError("Must specify a table, not a query")
1057        else:
1058            table = '"%s"' % (table,)
1059        operation = ['copy %s' % (table,)]
1060        options = []
1061        params = []
1062        if format is not None:
1063            if not isinstance(format, basestring):
1064                raise TypeError("The format option must be a string")
1065            if format not in ('text', 'csv', 'binary'):
1066                raise ValueError("Invalid format")
1067            options.append('format %s' % (format,))
1068        if sep is not None:
1069            if not isinstance(sep, basestring):
1070                raise TypeError("The sep option must be a string")
1071            if format == 'binary':
1072                raise ValueError(
1073                    "The sep option is not allowed with binary format")
1074            if len(sep) != 1:
1075                raise ValueError(
1076                    "The sep option must be a single one-byte character")
1077            options.append('delimiter %s')
1078            params.append(sep)
1079        if null is not None:
1080            if not isinstance(null, basestring):
1081                raise TypeError("The null option must be a string")
1082            options.append('null %s')
1083            params.append(null)
1084        if columns:
1085            if not isinstance(columns, basestring):
1086                columns = ','.join('"%s"' % (col,) for col in columns)
1087            operation.append('(%s)' % (columns,))
1088        operation.append("from stdin")
1089        if options:
1090            operation.append('(%s)' % ','.join(options))
1091        operation = ' '.join(operation)
1092
1093        putdata = self._src.putdata
1094        self.execute(operation, params)
1095
1096        try:
1097            for chunk in chunks():
1098                putdata(chunk)
1099        except BaseException as error:
1100            self.rowcount = -1
1101            # the following call will re-raise the error
1102            putdata(error)
1103        else:
1104            self.rowcount = putdata(None)
1105
1106        # return the cursor object, so you can chain operations
1107        return self
1108
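    # Example (illustrative): copy_from() accepts an open file or any iterable
    # of rows; the table "copytest" with columns "id" and "greeting" is
    # hypothetical.  With the default text format, columns are separated by
    # tabs and a missing trailing newline is added automatically:
    #
    #     cur.copy_from(['42\tHello', '43\tWorld'], 'copytest',
    #         columns=('id', 'greeting'))
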
1109    def copy_to(self, stream, table,
1110            format=None, sep=None, null=None, decode=None, columns=None):
1111        """Copy data from the specified table to an output stream.
1112
1113        The output stream can be a file-like object with a write() method or
1114        it can also be None, in which case the method will return a generator
1115        yielding a row on each iteration.
1116
1117        Output will be returned as byte strings unless you set decode to true.
1118
1119        Note that you can also use a select query instead of the table name.
1120
1121        The format must be text, csv or binary. The sep option sets the
1122        column separator (delimiter) used in the non binary formats.
1123        The null option sets the textual representation of NULL in the output.
1124
1125        The copy operation can be restricted to a subset of columns. If no
1126        columns are specified, all of them will be copied.
1127        """
1128        binary_format = format == 'binary'
1129        if stream is not None:
1130            try:
1131                write = stream.write
1132            except AttributeError:
1133                raise TypeError("Need an output stream to copy to")
1134        if not table or not isinstance(table, basestring):
1135            raise TypeError("Need a table to copy to")
1136        if table.lower().startswith('select'):
1137            if columns:
1138                raise ValueError("Columns must be specified in the query")
1139            table = '(%s)' % (table,)
1140        else:
1141            table = '"%s"' % (table,)
1142        operation = ['copy %s' % (table,)]
1143        options = []
1144        params = []
1145        if format is not None:
1146            if not isinstance(format, basestring):
1147                raise TypeError("The format option must be a string")
1148            if format not in ('text', 'csv', 'binary'):
1149                raise ValueError("Invalid format")
1150            options.append('format %s' % (format,))
1151        if sep is not None:
1152            if not isinstance(sep, basestring):
1153                raise TypeError("The sep option must be a string")
1154            if binary_format:
1155                raise ValueError(
1156                    "The sep option is not allowed with binary format")
1157            if len(sep) != 1:
1158                raise ValueError(
1159                    "The sep option must be a single one-byte character")
1160            options.append('delimiter %s')
1161            params.append(sep)
1162        if null is not None:
1163            if not isinstance(null, basestring):
1164                raise TypeError("The null option must be a string")
1165            options.append('null %s')
1166            params.append(null)
1167        if decode is None:
1168            if format == 'binary':
1169                decode = False
1170            else:
1171                decode = str is unicode
1172        else:
1173            if not isinstance(decode, (int, bool)):
1174                raise TypeError("The decode option must be a boolean")
1175            if decode and binary_format:
1176                raise ValueError(
1177                    "The decode option is not allowed with binary format")
1178        if columns:
1179            if not isinstance(columns, basestring):
1180                columns = ','.join('"%s"' % (col,) for col in columns)
1181            operation.append('(%s)' % (columns,))
1182
1183        operation.append("to stdout")
1184        if options:
1185            operation.append('(%s)' % ','.join(options))
1186        operation = ' '.join(operation)
1187
1188        getdata = self._src.getdata
1189        self.execute(operation, params)
1190
1191        def copy():
1192            self.rowcount = 0
1193            while True:
1194                row = getdata(decode)
1195                if isinstance(row, int):
1196                    if self.rowcount != row:
1197                        self.rowcount = row
1198                    break
1199                self.rowcount += 1
1200                yield row
1201
1202        if stream is None:
1203            # no input stream, return the generator
1204            return copy()
1205
1206        # write the rows to the file-like output stream
1207        for row in copy():
1208            write(row)
1209
1210        # return the cursor object, so you can chain operations
1211        return self
1212
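    # Example (illustrative, reusing the hypothetical "copytest" table): passing
    # None as the stream makes copy_to() return a generator of rows instead of
    # writing them to a file-like object:
    #
    #     for row in cur.copy_to(None, 'copytest', decode=True):
    #         print(row, end='')
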
1213    def __next__(self):
1214        """Return the next row (support for the iteration protocol)."""
1215        res = self.fetchone()
1216        if res is None:
1217            raise StopIteration
1218        return res
1219
1220    # Note that since Python 2.6 the iterator protocol uses __next__()
1221    # instead of next(); we keep next() only for backward compatibility of pgdb.
1222    next = __next__
1223
1224    @staticmethod
1225    def nextset():
1226        """Not supported."""
1227        raise NotSupportedError("The nextset() method is not supported")
1228
1229    @staticmethod
1230    def setinputsizes(sizes):
1231        """Not supported."""
1232        pass  # unsupported, but silently passed
1233
1234    @staticmethod
1235    def setoutputsize(size, column=0):
1236        """Not supported."""
1237        pass  # unsupported, but silently passed
1238
1239    @staticmethod
1240    def row_factory(row):
1241        """Process rows before they are returned.
1242
1243        You can overwrite this statically with a custom row factory, or
1244        you can build a row factory dynamically with build_row_factory().
1245
1246        For example, you can create a Cursor class that returns rows as
1247        Python dictionaries like this:
1248
1249            class DictCursor(pgdb.Cursor):
1250
1251                def row_factory(self, row):
1252                    return {desc[0]: value
1253                        for desc, value in zip(self.description, row)}
1254
1255            cur = DictCursor(con)  # get one DictCursor instance or
1256            con.cursor_type = DictCursor  # always use DictCursor instances
1257        """
1258        raise NotImplementedError
1259
1260    def build_row_factory(self):
1261        """Build a row factory based on the current description.
1262
1263        This implementation builds a row factory for creating named tuples.
1264        You can overwrite this method if you want to dynamically create
1265        different row factories whenever the column description changes.
1266        """
1267        colnames = self.colnames
1268        if colnames:
1269            try:
1270                try:
1271                    return namedtuple('Row', colnames, rename=True)._make
1272                except TypeError:  # Python 2.6 and 3.0 do not support rename
1273                    colnames = [v if v.isalnum() else 'column_%d' % n
1274                             for n, v in enumerate(colnames)]
1275                    return namedtuple('Row', colnames)._make
1276            except ValueError:  # there is still a problem with the field names
1277                colnames = ['column_%d' % n for n in range(len(colnames))]
1278                return namedtuple('Row', colnames)._make
1279
1280
1281CursorDescription = namedtuple('CursorDescription',
1282    ['name', 'type_code', 'display_size', 'internal_size',
1283     'precision', 'scale', 'null_ok'])
1284
1285
1286### Connection Objects
1287
1288class Connection(object):
1289    """Connection object."""
1290
1291    # expose the exceptions as attributes on the connection object
1292    Error = Error
1293    Warning = Warning
1294    InterfaceError = InterfaceError
1295    DatabaseError = DatabaseError
1296    InternalError = InternalError
1297    OperationalError = OperationalError
1298    ProgrammingError = ProgrammingError
1299    IntegrityError = IntegrityError
1300    DataError = DataError
1301    NotSupportedError = NotSupportedError
1302
1303    def __init__(self, cnx):
1304        """Create a database connection object."""
1305        self._cnx = cnx  # connection
1306        self._tnx = False  # transaction state
1307        self.type_cache = TypeCache(cnx)
1308        self.cursor_type = Cursor
1309        try:
1310            self._cnx.source()
1311        except Exception:
1312            raise _op_error("Invalid connection")
1313
1314    def __enter__(self):
1315        """Enter the runtime context for the connection object.
1316
1317        The runtime context can be used for running transactions.
1318        """
1319        return self
1320
1321    def __exit__(self, et, ev, tb):
1322        """Exit the runtime context for the connection object.
1323
1324        This does not close the connection, but it ends a transaction.
1325        """
1326        if et is None and ev is None and tb is None:
1327            self.commit()
1328        else:
1329            self.rollback()
1330
1331    def close(self):
1332        """Close the connection object."""
1333        if self._cnx:
1334            if self._tnx:
1335                try:
1336                    self.rollback()
1337                except DatabaseError:
1338                    pass
1339            self._cnx.close()
1340            self._cnx = None
1341        else:
1342            raise _op_error("Connection has been closed")
1343
1344    def commit(self):
1345        """Commit any pending transaction to the database."""
1346        if self._cnx:
1347            if self._tnx:
1348                self._tnx = False
1349                try:
1350                    self._cnx.source().execute("COMMIT")
1351                except DatabaseError:
1352                    raise
1353                except Exception:
1354                    raise _op_error("Can't commit")
1355        else:
1356            raise _op_error("Connection has been closed")
1357
1358    def rollback(self):
1359        """Roll back to the start of any pending transaction."""
1360        if self._cnx:
1361            if self._tnx:
1362                self._tnx = False
1363                try:
1364                    self._cnx.source().execute("ROLLBACK")
1365                except DatabaseError:
1366                    raise
1367                except Exception:
1368                    raise _op_error("Can't rollback")
1369        else:
1370            raise _op_error("Connection has been closed")
1371
1372    def cursor(self):
1373        """Return a new cursor object using the connection."""
1374        if self._cnx:
1375            try:
1376                return self.cursor_type(self)
1377            except Exception:
1378                raise _op_error("Invalid connection")
1379        else:
1380            raise _op_error("Connection has been closed")
1381
1382    if shortcutmethods:  # otherwise do not implement and document this
1383
1384        def execute(self, operation, params=None):
1385            """Shortcut method to run an operation on an implicit cursor."""
1386            cursor = self.cursor()
1387            cursor.execute(operation, params)
1388            return cursor
1389
1390        def executemany(self, operation, param_seq):
1391            """Shortcut method to run an operation against a sequence."""
1392            cursor = self.cursor()
1393            cursor.executemany(operation, param_seq)
1394            return cursor
1395
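# Example (illustrative): the connection's runtime context wraps a transaction,
# committing on success and rolling back on errors; it does not close the
# connection.  The "accounts" table is hypothetical:
#
#     with con:
#         cur = con.cursor()
#         cur.execute("update accounts set balance = balance - %s where id = %s",
#             (100, 1))
#         cur.execute("update accounts set balance = balance + %s where id = %s",
#             (100, 2))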
1396
1397### Module Interface
1398
1399_connect = connect
1400
1401def connect(dsn=None,
1402        user=None, password=None,
1403        host=None, database=None):
1404    """Connect to a database."""
1405    # first get params from DSN
1406    dbport = -1
1407    dbhost = ""
1408    dbbase = ""
1409    dbuser = ""
1410    dbpasswd = ""
1411    dbopt = ""
1412    try:
1413        params = dsn.split(":")
1414        dbhost = params[0]
1415        dbbase = params[1]
1416        dbuser = params[2]
1417        dbpasswd = params[3]
1418        dbopt = params[4]
1419    except (AttributeError, IndexError, TypeError):
1420        pass
1421
1422    # override if necessary
1423    if user is not None:
1424        dbuser = user
1425    if password is not None:
1426        dbpasswd = password
1427    if database is not None:
1428        dbbase = database
1429    if host is not None:
1430        try:
1431            params = host.split(":")
1432            dbhost = params[0]
1433            dbport = int(params[1])
1434        except (AttributeError, IndexError, TypeError, ValueError):
1435            pass
1436
1437    # empty host is localhost
1438    if dbhost == "":
1439        dbhost = None
1440    if dbuser == "":
1441        dbuser = None
1442
1443    # open the connection
1444    cnx = _connect(dbbase, dbhost, dbport, dbopt, dbuser, dbpasswd)
1445    return Connection(cnx)
1446
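# Example (illustrative, with made-up credentials): the same connection opened
# via a DSN string and via keyword arguments; a port is given as part of the
# host parameter:
#
#     con = connect('localhost:testdb:scott:tiger')
#     con = connect(host='localhost:5432', database='testdb',
#         user='scott', password='tiger')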
1447
1448### Types Handling
1449
1450class Type(frozenset):
1451    """Type class for a couple of PostgreSQL data types.
1452
1453    PostgreSQL is object-oriented: types are dynamic.
1454    We must thus use type names as internal type codes.
1455    """
1456
1457    def __new__(cls, values):
1458        if isinstance(values, basestring):
1459            values = values.split()
1460        return super(Type, cls).__new__(cls, values)
1461
1462    def __eq__(self, other):
1463        if isinstance(other, basestring):
1464            if other.startswith('_'):
1465                other = other[1:]
1466            return other in self
1467        else:
1468            return super(Type, self).__eq__(other)
1469
1470    def __ne__(self, other):
1471        if isinstance(other, basestring):
1472            if other.startswith('_'):
1473                other = other[1:]
1474            return other not in self
1475        else:
1476            return super(Type, self).__ne__(other)
1477
1478
1479class ArrayType:
1480    """Type class for PostgreSQL array types."""
1481
1482    def __eq__(self, other):
1483        if isinstance(other, basestring):
1484            return other.startswith('_')
1485        else:
1486            return isinstance(other, ArrayType)
1487
1488    def __ne__(self, other):
1489        if isinstance(other, basestring):
1490            return not other.startswith('_')
1491        else:
1492            return not isinstance(other, ArrayType)
1493
1494
1495class RecordType:
1496    """Type class for PostgreSQL record types."""
1497
1498    def __eq__(self, other):
1499        if isinstance(other, TypeCode):
1500            return other.type == 'c'
1501        elif isinstance(other, basestring):
1502            return other == 'record'
1503        else:
1504            return isinstance(other, RecordType)
1505
1506    def __ne__(self, other):
1507        if isinstance(other, TypeCode):
1508            return other.type != 'c'
1509        elif isinstance(other, basestring):
1510            return other != 'record'
1511        else:
1512            return not isinstance(other, RecordType)
1513
1514
1515# Mandatory type objects defined by DB-API 2 specs:
1516
1517STRING = Type('char bpchar name text varchar')
1518BINARY = Type('bytea')
1519NUMBER = Type('int2 int4 serial int8 float4 float8 numeric money')
1520DATETIME = Type('date time timetz timestamp timestamptz interval'
1521    ' abstime reltime')  # these are very old
1522ROWID = Type('oid')
1523
1524
1525# Additional type objects (more specific):
1526
1527BOOL = Type('bool')
1528SMALLINT = Type('int2')
1529INTEGER = Type('int2 int4 int8 serial')
1530LONG = Type('int8')
1531FLOAT = Type('float4 float8')
1532NUMERIC = Type('numeric')
1533MONEY = Type('money')
1534DATE = Type('date')
1535TIME = Type('time timetz')
1536TIMESTAMP = Type('timestamp timestamptz')
1537INTERVAL = Type('interval')
1538UUID = Type('uuid')
1539HSTORE = Type('hstore')
1540JSON = Type('json jsonb')
1541
1542# Type object for arrays (also equate to their base types):
1543
1544ARRAY = ArrayType()
1545
1546# Type object for records (encompassing all composite types):
1547
1548RECORD = RecordType()
1549
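# Example (illustrative, assuming a cursor "cur" with a result set): the type
# objects defined above compare equal to the type_code entries in
# cursor.description, so result columns can be checked against a whole group
# of types:
#
#     for d in cur.description:
#         if d.type_code == NUMBER:
#             print(d.name, 'holds numbers')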
1550
1551# Mandatory type helpers defined by DB-API 2 specs:
1552
1553def Date(year, month, day):
1554    """Construct an object holding a date value."""
1555    return date(year, month, day)
1556
1557
1558def Time(hour, minute=0, second=0, microsecond=0, tzinfo=None):
1559    """Construct an object holding a time value."""
1560    return time(hour, minute, second, microsecond, tzinfo)
1561
1562
1563def Timestamp(year, month, day, hour=0, minute=0, second=0, microsecond=0,
1564        tzinfo=None):
1565    """Construct an object holding a time stamp value."""
1566    return datetime(year, month, day, hour, minute, second, microsecond, tzinfo)
1567
1568
1569def DateFromTicks(ticks):
1570    """Construct an object holding a date value from the given ticks value."""
1571    return Date(*localtime(ticks)[:3])
1572
1573
1574def TimeFromTicks(ticks):
1575    """Construct an object holding a time value from the given ticks value."""
1576    return Time(*localtime(ticks)[3:6])
1577
1578
1579def TimestampFromTicks(ticks):
1580    """Construct an object holding a time stamp from the given ticks value."""
1581    return Timestamp(*localtime(ticks)[:6])
1582
1583
1584class Binary(bytes):
1585    """Construct an object capable of holding a binary (long) string value."""
1586
1587
1588# Additional type helpers for PyGreSQL:
1589
1590def Interval(days, hours=0, minutes=0, seconds=0, microseconds=0):
1591    """Construct an object holding a time interval value."""
1592    return timedelta(days, hours=hours, minutes=minutes, seconds=seconds,
1593        microseconds=microseconds)
1594
1595
1596Uuid = Uuid  # Construct an object holding a UUID value
1597
1598
1599class Hstore(dict):
1600    """Wrapper class for marking hstore values."""
1601
1602    _re_quote = regex('^[Nn][Uu][Ll][Ll]$|[ ,=>]')
1603    _re_escape = regex(r'(["\\])')
1604
1605    @classmethod
1606    def _quote(cls, s):
1607        if s is None:
1608            return 'NULL'
1609        if not s:
1610            return '""'
1611        quote = cls._re_quote.search(s)
1612        s = cls._re_escape.sub(r'\\\1', s)
1613        if quote:
1614            s = '"%s"' % s
1615        return s
1616
1617    def __str__(self):
1618        q = self._quote
1619        return ','.join('%s=>%s' % (q(k), q(v)) for k, v in self.items())
1620
1621
1622class Json:
1623    """Construct a wrapper for holding an object serializable to JSON."""
1624
1625    def __init__(self, obj, encode=None):
1626        self.obj = obj
1627        self.encode = encode or jsonencode
1628
1629    def __str__(self):
1630        obj = self.obj
1631        if isinstance(obj, basestring):
1632            return obj
1633        return self.encode(obj)
1634
1635
1636class Literal:
1637    """Construct a wrapper for holding a literal SQL string."""
1638
1639    def __init__(self, sql):
1640        self.sql = sql
1641
1642    def __str__(self):
1643        return self.sql
1644
1645    __pg_repr__ = __str__
1646
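# Example (illustrative, with a hypothetical "doc" table): the wrapper classes
# above mark parameter values for appropriate quoting; Literal inserts raw SQL
# instead of a quoted string:
#
#     cur.execute("insert into doc (data, tags) values (%s, %s)",
#         (Json({'title': 'Test'}), Hstore({'lang': 'en'})))
#     cur.execute("select * from doc order by %s", (Literal('id'),))
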
1647# If run as script, print some information:
1648
1649if __name__ == '__main__':
1650    print('PyGreSQL version', version)
1651    print('')
1652    print(__doc__)