source: trunk/pgdb.py @ 840

Last change on this file since 840 was 840, checked in by cito, 4 years ago

Improve adaptation of hstore and empty array

  • Property svn:keywords set to Id
File size: 53.5 KB
1#! /usr/bin/python
2#
3# pgdb.py
4#
5# Written by D'Arcy J.M. Cain
6#
7# $Id: pgdb.py 840 2016-02-08 17:32:50Z cito $
8#
9
10"""pgdb - DB-API 2.0 compliant module for PygreSQL.
11
12(c) 1999, Pascal Andre <andre@via.ecp.fr>.
13See package documentation for further information on copyright.
14
15Inline documentation is sparse.
16See DB-API 2.0 specification for usage information:
17http://www.python.org/peps/pep-0249.html
18
19Basic usage:
20
21    pgdb.connect(connect_string) # open a connection
22    # connect_string = 'host:database:user:password:opt'
23    # All parts are optional. You may also pass host, database,
24    # user and password as keyword arguments. To pass a port,
25    # append it to the host keyword parameter:
26    connection = pgdb.connect(host='localhost:5432')
27
28    cursor = connection.cursor() # open a cursor
29
30    cursor.execute(query[, params])
31    # Execute a query, binding params (a dictionary) if they are
32    # passed. The binding syntax is the same as the % operator
33    # for dictionaries, and no quoting is done.
34
35    cursor.executemany(query, list of params)
36    # Execute a query many times, binding each param dictionary
37    # from the list.
38
39    cursor.fetchone() # fetch one row, [value, value, ...]
40
41    cursor.fetchall() # fetch all rows, [[value, value, ...], ...]
42
43    cursor.fetchmany([size])
44    # returns size or cursor.arraysize number of rows,
45    # [[value, value, ...], ...] from result set.
46    # Default cursor.arraysize is 1.
47
48    cursor.description # returns information about the columns
49    #   [(column_name, type_name, display_size,
50    #           internal_size, precision, scale, null_ok), ...]
51    # Note that display_size, precision, scale and null_ok
52    # are not implemented.
53
54    cursor.rowcount # number of rows available in the result set
55    # Available after a call to execute.
56
57    connection.commit() # commit transaction
58
59    connection.rollback() # or rollback transaction
60
61    cursor.close() # close the cursor
62
63    connection.close() # close the connection
64"""
65
66from __future__ import print_function
67
68from _pg import *
69
70__version__ = version
71
72from datetime import date, time, datetime, timedelta
73from time import localtime
74from decimal import Decimal
75from uuid import UUID
76from math import isnan, isinf
77from collections import namedtuple
78from functools import partial
79from re import compile as regex
80from json import loads as jsondecode, dumps as jsonencode
81
82try:
83    long
84except NameError:  # Python >= 3.0
85    long = int
86
87try:
88    unicode
89except NameError:  # Python >= 3.0
90    unicode = str
91
92try:
93    basestring
94except NameError:  # Python >= 3.0
95    basestring = (str, bytes)
96
97from collections import Iterable
98
99
100### Module Constants
101
102# compliant with DB API 2.0
103apilevel = '2.0'
104
105# module may be shared, but not connections
106threadsafety = 1
107
108# this module uses extended Python format codes
109paramstyle = 'pyformat'
110
111# shortcut methods have been excluded from DB API 2 and
112# are not recommended by the DB SIG, but they can be handy
113shortcutmethods = 1
114
115
116### Internal Type Handling
117
118try:
119    from inspect import signature
120except ImportError:  # Python < 3.3
121    from inspect import getargspec
122
123    def get_args(func):
124        return getargspec(func).args
125else:
126
127    def get_args(func):
128        return list(signature(func).parameters)
129
130try:
131    if datetime.strptime('+0100', '%z') is None:
132        raise ValueError
133except ValueError:  # Python < 3.2
134    timezones = None
135else:
136    # time zones used in Postgres timestamptz output
137    timezones = dict(CET='+0100', EET='+0200', EST='-0500',
138        GMT='+0000', HST='-1000', MET='+0100', MST='-0700',
139        UCT='+0000', UTC='+0000', WET='+0000')
140
141
142def decimal_type(decimal_type=None):
143    """Get or set global type to be used for decimal values.
144
145    Note that connections cache cast functions. To be sure a global change
146    is picked up by a running connection, call con.type_cache.reset_typecast().
147    """
148    global Decimal
149    if decimal_type is not None:
150        Decimal = decimal_type
151        set_typecast('numeric', decimal_type)
152    return Decimal
153
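# A minimal usage sketch: switch the global type used for numeric values
# and restore the default later.  Running connections cache their cast
# functions, so an open connection `con` would also need a call to
# con.type_cache.reset_typecast() to pick up the change.
#
#     pgdb.decimal_type(float)    # numeric columns now come back as float
#     ...
#     pgdb.decimal_type(Decimal)  # restore the default Decimal handling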
154
155def cast_bool(value):
156    """Cast boolean value in database format to bool."""
157    if value:
158        return value[0] in ('t', 'T')
159
160
161def cast_money(value):
162    """Cast money value in database format to Decimal."""
163    if value:
164        value = value.replace('(', '-')
165        return Decimal(''.join(c for c in value if c.isdigit() or c in '.-'))
166
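# For illustration, based on the logic above: parentheses mark negative
# amounts and everything except digits, dots and minus signs is stripped,
# e.g.
#
#     cast_money('$1,234.56')  ->  Decimal('1234.56')
#     cast_money('($50.00)')   ->  Decimal('-50.00')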
167
168def cast_int2vector(value):
169    """Cast an int2vector value."""
170    return [int(v) for v in value.split()]
171
172
173def cast_date(value, connection):
174    """Cast a date value."""
175    # The output format depends on the server setting DateStyle.  The default
176    # setting ISO and the setting for German are actually unambiguous.  The
177    # order of days and months in the other two settings is however ambiguous,
178    # so at least here we need to consult the setting to properly parse values.
179    if value == '-infinity':
180        return date.min
181    if value == 'infinity':
182        return date.max
183    value = value.split()
184    if value[-1] == 'BC':
185        return date.min
186    value = value[0]
187    if len(value) > 10:
188        return date.max
189    fmt = connection.date_format()
190    return datetime.strptime(value, fmt).date()
191
192
193def cast_time(value):
194    """Cast a time value."""
195    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
196    return datetime.strptime(value, fmt).time()
197
198
199_re_timezone = regex('(.*)([+-].*)')
200
201
202def cast_timetz(value):
203    """Cast a timetz value."""
204    tz = _re_timezone.match(value)
205    if tz:
206        value, tz = tz.groups()
207    else:
208        tz = '+0000'
209    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
210    if timezones:
211        if tz.startswith(('+', '-')):
212            if len(tz) < 5:
213                tz += '00'
214            else:
215                tz = tz.replace(':', '')
216        elif tz in timezones:
217            tz = timezones[tz]
218        else:
219            tz = '+0000'
220        value += tz
221        fmt += '%z'
222    return datetime.strptime(value, fmt).timetz()
223
224
225def cast_timestamp(value, connection):
226    """Cast a timestamp value."""
227    if value == '-infinity':
228        return datetime.min
229    if value == 'infinity':
230        return datetime.max
231    value = value.split()
232    if value[-1] == 'BC':
233        return datetime.min
234    fmt = connection.date_format()
235    if fmt.endswith('-%Y') and len(value) > 2:
236        value = value[1:5]
237        if len(value[3]) > 4:
238            return datetime.max
239        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
240            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
241    else:
242        if len(value[0]) > 10:
243            return datetime.max
244        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
245    return datetime.strptime(' '.join(value), ' '.join(fmt))
246
247
248def cast_timestamptz(value, connection):
249    """Cast a timestamptz value."""
250    if value == '-infinity':
251        return datetime.min
252    if value == 'infinity':
253        return datetime.max
254    value = value.split()
255    if value[-1] == 'BC':
256        return datetime.min
257    fmt = connection.date_format()
258    if fmt.endswith('-%Y') and len(value) > 2:
259        value = value[1:]
260        if len(value[3]) > 4:
261            return datetime.max
262        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
263            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
264        value, tz = value[:-1], value[-1]
265    else:
266        if fmt.startswith('%Y-'):
267            tz = _re_timezone.match(value[1])
268            if tz:
269                value[1], tz = tz.groups()
270            else:
271                tz = '+0000'
272        else:
273            value, tz = value[:-1], value[-1]
274        if len(value[0]) > 10:
275            return datetime.max
276        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
277    if timezones:
278        if tz.startswith(('+', '-')):
279            if len(tz) < 5:
280                tz += '00'
281            else:
282                tz = tz.replace(':', '')
283        elif tz in timezones:
284            tz = timezones[tz]
285        else:
286            tz = '+0000'
287        value.append(tz)
288        fmt.append('%z')
289    return datetime.strptime(' '.join(value), ' '.join(fmt))
290
291_re_interval_sql_standard = regex(
292    '(?:([+-])?([0-9]+)-([0-9]+) ?)?'
293    '(?:([+-]?[0-9]+)(?!:) ?)?'
294    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
295
296_re_interval_postgres = regex(
297    '(?:([+-]?[0-9]+) ?years? ?)?'
298    '(?:([+-]?[0-9]+) ?mons? ?)?'
299    '(?:([+-]?[0-9]+) ?days? ?)?'
300    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
301
302_re_interval_postgres_verbose = regex(
303    '@ ?(?:([+-]?[0-9]+) ?years? ?)?'
304    '(?:([+-]?[0-9]+) ?mons? ?)?'
305    '(?:([+-]?[0-9]+) ?days? ?)?'
306    '(?:([+-]?[0-9]+) ?hours? ?)?'
307    '(?:([+-]?[0-9]+) ?mins? ?)?'
308    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))? ?secs?)? ?(ago)?')
309
310_re_interval_iso_8601 = regex(
311    'P(?:([+-]?[0-9]+)Y)?'
312    '(?:([+-]?[0-9]+)M)?'
313    '(?:([+-]?[0-9]+)D)?'
314    '(?:T(?:([+-]?[0-9]+)H)?'
315    '(?:([+-]?[0-9]+)M)?'
316    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))?S)?)?')
317
318
319def cast_interval(value):
320    """Cast an interval value."""
321    # The output format depends on the server setting IntervalStyle, but it's
322    # not necessary to consult this setting to parse it.  It's faster to just
323    # check all possible formats, and there is no ambiguity here.
324    m = _re_interval_iso_8601.match(value)
325    if m:
326        m = [d or '0' for d in m.groups()]
327        secs_ago = m.pop(5) == '-'
328        m = [int(d) for d in m]
329        years, mons, days, hours, mins, secs, usecs = m
330        if secs_ago:
331            secs = -secs
332            usecs = -usecs
333    else:
334        m = _re_interval_postgres_verbose.match(value)
335        if m:
336            m, ago = [d or '0' for d in m.groups()[:8]], m.group(9)
337            secs_ago = m.pop(5) == '-'
338            m = [-int(d) for d in m] if ago else [int(d) for d in m]
339            years, mons, days, hours, mins, secs, usecs = m
340            if secs_ago:
341                secs = - secs
342                usecs = -usecs
343        else:
344            m = _re_interval_postgres.match(value)
345            if m and any(m.groups()):
346                m = [d or '0' for d in m.groups()]
347                hours_ago = m.pop(3) == '-'
348                m = [int(d) for d in m]
349                years, mons, days, hours, mins, secs, usecs = m
350                if hours_ago:
351                    hours = -hours
352                    mins = -mins
353                    secs = -secs
354                    usecs = -usecs
355            else:
356                m = _re_interval_sql_standard.match(value)
357                if m and any(m.groups()):
358                    m = [d or '0' for d in m.groups()]
359                    years_ago = m.pop(0) == '-'
360                    hours_ago = m.pop(3) == '-'
361                    m = [int(d) for d in m]
362                    years, mons, days, hours, mins, secs, usecs = m
363                    if years_ago:
364                        years = -years
365                        mons = -mons
366                    if hours_ago:
367                        hours = -hours
368                        mins = -mins
369                        secs = -secs
370                        usecs = -usecs
371                else:
372                    raise ValueError('Cannot parse interval: %s' % value)
373    days += 365 * years + 30 * mons
374    return timedelta(days=days, hours=hours, minutes=mins,
375        seconds=secs, microseconds=usecs)
376
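# For illustration, based on the regexes above: all four IntervalStyle
# output formats are accepted, and years/months are approximated as
# 365/30 days since timedelta has no month or year fields, e.g.
#
#     cast_interval('1 day 02:00:00')  ->  timedelta(days=1, hours=2)
#     cast_interval('P1DT2H')          ->  timedelta(days=1, hours=2)
#     cast_interval('2 mons')          ->  timedelta(days=60)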
377
378class Typecasts(dict):
379    """Dictionary mapping database types to typecast functions.
380
381    The cast functions get passed the string representation of a value in
382    the database which they need to convert to a Python object.  The
383    passed string will never be None since NULL values are already be
384    handled before the cast function is called.
385    """
386
387    # the default cast functions
388    # (str functions are ignored but have been added for faster access)
389    defaults = {'char': str, 'bpchar': str, 'name': str,
390        'text': str, 'varchar': str,
391        'bool': cast_bool, 'bytea': unescape_bytea,
392        'int2': int, 'int4': int, 'serial': int, 'int8': long, 'oid': int,
393        'hstore': cast_hstore, 'json': jsondecode, 'jsonb': jsondecode,
394        'float4': float, 'float8': float,
395        'numeric': Decimal, 'money': cast_money,
396        'date': cast_date, 'interval': cast_interval,
397        'time': cast_time, 'timetz': cast_timetz,
398        'timestamp': cast_timestamp, 'timestamptz': cast_timestamptz,
399        'int2vector': cast_int2vector, 'uuid': UUID,
400        'anyarray': cast_array, 'record': cast_record}
401
402    connection = None  # will be set in local connection specific instances
403
404    def __missing__(self, typ):
405        """Create a cast function if it is not cached.
406
407        Note that this class never raises a KeyError,
408        but returns None when no special cast function exists.
409        """
410        if not isinstance(typ, str):
411            raise TypeError('Invalid type: %s' % typ)
412        cast = self.defaults.get(typ)
413        if cast:
414            # store default for faster access
415            cast = self._add_connection(cast)
416            self[typ] = cast
417        elif typ.startswith('_'):
418            # create array cast
419            base_cast = self[typ[1:]]
420            cast = self.create_array_cast(base_cast)
421            if base_cast:
422                # store only if base type exists
423                self[typ] = cast
424        return cast
425
426    @staticmethod
427    def _needs_connection(func):
428        """Check if a typecast function needs a connection argument."""
429        try:
430            args = get_args(func)
431        except (TypeError, ValueError):
432            return False
433        else:
434            return 'connection' in args[1:]
435
436    def _add_connection(self, cast):
437        """Add a connection argument to the typecast function if necessary."""
438        if not self.connection or not self._needs_connection(cast):
439            return cast
440        return partial(cast, connection=self.connection)
441
442    def get(self, typ, default=None):
443        """Get the typecast function for the given database type."""
444        return self[typ] or default
445
446    def set(self, typ, cast):
447        """Set a typecast function for the specified database type(s)."""
448        if isinstance(typ, basestring):
449            typ = [typ]
450        if cast is None:
451            for t in typ:
452                self.pop(t, None)
453                self.pop('_%s' % t, None)
454        else:
455            if not callable(cast):
456                raise TypeError("Cast parameter must be callable")
457            for t in typ:
458                self[t] = self._add_connection(cast)
459                self.pop('_%s' % t, None)
460
461    def reset(self, typ=None):
462        """Reset the typecasts for the specified type(s) to their defaults.
463
464        When no type is specified, all typecasts will be reset.
465        """
466        defaults = self.defaults
467        if typ is None:
468            self.clear()
469            self.update(defaults)
470        else:
471            if isinstance(typ, basestring):
472                typ = [typ]
473            for t in typ:
474                cast = defaults.get(t)
475                if cast:
476                    self[t] = self._add_connection(cast)
477                    t = '_%s' % t
478                    cast = defaults.get(t)
479                    if cast:
480                        self[t] = self._add_connection(cast)
481                    else:
482                        self.pop(t, None)
483                else:
484                    self.pop(t, None)
485                    self.pop('_%s' % t, None)
486
487    def create_array_cast(self, basecast):
488        """Create an array typecast for the given base cast."""
489        def cast(v):
490            return cast_array(v, basecast)
491        return cast
492
493    def create_record_cast(self, name, fields, casts):
494        """Create a named record typecast for the given fields and casts."""
495        record = namedtuple(name, fields)
496        def cast(v):
497            return record(*cast_record(v, casts))
498        return cast
499
500
501_typecasts = Typecasts()  # this is the global typecast dictionary
502
503
504def get_typecast(typ):
505    """Get the global typecast function for the given database type(s)."""
506    return _typecasts.get(typ)
507
508
509def set_typecast(typ, cast):
510    """Set a global typecast function for the given database type(s).
511
512    Note that connections cache cast functions. To be sure a global change
513    is picked up by a running connection, call con.type_cache.reset_typecast().
514    """
515    _typecasts.set(typ, cast)
516
517
518def reset_typecast(typ=None):
519    """Reset the global typecasts for the given type(s) to their default.
520
521    When no type is specified, all typecasts will be reset.
522
523    Note that connections cache cast functions. To be sure a global change
524    is picked up by a running connection, call con.type_cache.reset_typecast().
525    """
526    _typecasts.reset(typ)
527
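# A usage sketch (the 'inet' type and the ipaddress cast are only an
# example, not a built-in default): register a global typecast so that
# inet values come back as ipaddress objects instead of plain strings.
#
#     import ipaddress
#     pgdb.set_typecast('inet', ipaddress.ip_interface)
#     ...
#     pgdb.reset_typecast('inet')  # back to the default (plain strings)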
528
529class LocalTypecasts(Typecasts):
530    """Map typecasts, including local composite types, to cast functions."""
531
532    defaults = _typecasts
533
534    connection = None  # will be set in a connection specific instance
535
536    def __missing__(self, typ):
537        """Create a cast function if it is not cached."""
538        if typ.startswith('_'):
539            base_cast = self[typ[1:]]
540            cast = self.create_array_cast(base_cast)
541            if base_cast:
542                self[typ] = cast
543        else:
544            cast = self.defaults.get(typ)
545            if cast:
546                cast = self._add_connection(cast)
547                self[typ] = cast
548            else:
549                fields = self.get_fields(typ)
550                if fields:
551                    casts = [self[field.type] for field in fields]
552                    fields = [field.name for field in fields]
553                    cast = self.create_record_cast(typ, fields, casts)
554                    self[typ] = cast
555        return cast
556
557    def get_fields(self, typ):
558        """Return the fields for the given record type.
559
560        This method will be replaced with a method that looks up the fields
561        using the type cache of the connection.
562        """
563        return []
564
565
566class TypeCode(str):
567    """Class representing the type_code used by the DB-API 2.0.
568
569    TypeCode objects are strings equal to the PostgreSQL type name,
570    but carry some additional information.
571    """
572
573    @classmethod
574    def create(cls, oid, name, len, type, category, delim, relid):
575        """Create a type code for a PostgreSQL data type."""
576        self = cls(name)
577        self.oid = oid
578        self.len = len
579        self.type = type
580        self.category = category
581        self.delim = delim
582        self.relid = relid
583        return self
584
585FieldInfo = namedtuple('FieldInfo', ['name', 'type'])
586
587
588class TypeCache(dict):
589    """Cache for database types.
590
591    This cache maps type OIDs and names to TypeCode strings containing
592    important information on the associated database type.
593    """
594
595    def __init__(self, cnx):
596        """Initialize type cache for connection."""
597        super(TypeCache, self).__init__()
598        self._escape_string = cnx.escape_string
599        self._src = cnx.source()
600        self._typecasts = LocalTypecasts()
601        self._typecasts.get_fields = self.get_fields
602        self._typecasts.connection = cnx
603
604    def __missing__(self, key):
605        """Get the type info from the database if it is not cached."""
606        if isinstance(key, int):
607            oid = key
608        else:
609            if '.' not in key and '"' not in key:
610                key = '"%s"' % key
611            oid = "'%s'::regtype" % self._escape_string(key)
612        try:
613            self._src.execute("SELECT oid, typname,"
614                 " typlen, typtype, typcategory, typdelim, typrelid"
615                " FROM pg_type WHERE oid=%s" % oid)
616        except ProgrammingError:
617            res = None
618        else:
619            res = self._src.fetch(1)
620        if not res:
621            raise KeyError('Type %s could not be found' % key)
622        res = res[0]
623        type_code = TypeCode.create(int(res[0]), res[1],
624            int(res[2]), res[3], res[4], res[5], int(res[6]))
625        self[type_code.oid] = self[str(type_code)] = type_code
626        return type_code
627
628    def get(self, key, default=None):
629        """Get the type even if it is not cached."""
630        try:
631            return self[key]
632        except KeyError:
633            return default
634
635    def get_fields(self, typ):
636        """Get the names and types of the fields of composite types."""
637        if not isinstance(typ, TypeCode):
638            typ = self.get(typ)
639            if not typ:
640                return None
641        if not typ.relid:
642            return None  # this type is not composite
643        self._src.execute("SELECT attname, atttypid"
644            " FROM pg_attribute WHERE attrelid=%s AND attnum>0"
645            " AND NOT attisdropped ORDER BY attnum" % typ.relid)
646        return [FieldInfo(name, self.get(int(oid)))
647            for name, oid in self._src.fetch(-1)]
648
649    def get_typecast(self, typ):
650        """Get the typecast function for the given database type."""
651        return self._typecasts.get(typ)
652
653    def set_typecast(self, typ, cast):
654        """Set a typecast function for the specified database type(s)."""
655        self._typecasts.set(typ, cast)
656
657    def reset_typecast(self, typ=None):
658        """Reset the typecast function for the specified database type(s)."""
659        self._typecasts.reset(typ)
660
661    def typecast(self, value, typ):
662        """Cast the given value according to the given database type."""
663        if value is None:
664            # for NULL values, no typecast is necessary
665            return None
666        cast = self.get_typecast(typ)
667        if not cast or cast is str:
668            # no typecast is necessary
669            return value
670        return cast(value)
671
672
673class _quotedict(dict):
674    """Dictionary with auto quoting of its items.
675
676    The quote attribute must be set to the desired quote function.
677    """
678
679    def __getitem__(self, key):
680        return self.quote(super(_quotedict, self).__getitem__(key))
681
682
683### Error messages
684
685def _db_error(msg, cls=DatabaseError):
686    """Return DatabaseError with empty sqlstate attribute."""
687    error = cls(msg)
688    error.sqlstate = None
689    return error
690
691
692def _op_error(msg):
693    """Return OperationalError."""
694    return _db_error(msg, OperationalError)
695
696
697### Cursor Object
698
699class Cursor(object):
700    """Cursor object."""
701
702    def __init__(self, dbcnx):
703        """Create a cursor object for the database connection."""
704        self.connection = self._dbcnx = dbcnx
705        self._cnx = dbcnx._cnx
706        self.type_cache = dbcnx.type_cache
707        self._src = self._cnx.source()
708        # the official attribute for describing the result columns
709        self._description = None
710        if self.row_factory is Cursor.row_factory:
711            # the row factory needs to be determined dynamically
712            self.row_factory = None
713        else:
714            self.build_row_factory = None
715        self.rowcount = -1
716        self.arraysize = 1
717        self.lastrowid = None
718
719    def __iter__(self):
720        """Make cursor compatible to the iteration protocol."""
721        return self
722
723    def __enter__(self):
724        """Enter the runtime context for the cursor object."""
725        return self
726
727    def __exit__(self, et, ev, tb):
728        """Exit the runtime context for the cursor object."""
729        self.close()
730
731    def _quote(self, value):
732        """Quote value depending on its type."""
733        if value is None:
734            return 'NULL'
735        if isinstance(value, (Hstore, Json, UUID)):
736            value = str(value)
737        if isinstance(value, basestring):
738            if isinstance(value, Binary):
739                value = self._cnx.escape_bytea(value)
740                if bytes is not str:  # Python >= 3.0
741                    value = value.decode('ascii')
742            else:
743                value = self._cnx.escape_string(value)
744            return "'%s'" % value
745        if isinstance(value, float):
746            if isinf(value):
747                return "'-Infinity'" if value < 0 else "'Infinity'"
748            if isnan(value):
749                return "'NaN'"
750            return value
751        if isinstance(value, (int, long, Decimal, Literal)):
752            return value
753        if isinstance(value, datetime):
754            if value.tzinfo:
755                return "'%s'::timestamptz" % value
756            return "'%s'::timestamp" % value
757        if isinstance(value, date):
758            return "'%s'::date" % value
759        if isinstance(value, time):
760            if value.tzinfo:
761                return "'%s'::timetz" % value
762            return "'%s'::time" % value
763        if isinstance(value, timedelta):
764            return "'%s'::interval" % value
765        if isinstance(value, list):
766            # Quote value as an ARRAY constructor. This is better than using
767            # an array literal because it carries the information that this is
768            # an array and not a string.  One issue with this syntax is that
769            # you need to add an explicit typecast when passing empty arrays.
770            # The ARRAY keyword is actually only necessary at the top level.
771            if not value:  # exception for empty array
772                return "'{}'"
773            q = self._quote
774            return 'ARRAY[%s]' % ','.join(str(q(v)) for v in value)
775        if isinstance(value, tuple):
776            # Quote as a ROW constructor.  This is better than using a record
777            # literal because it carries the information that this is a record
778            # and not a string.  We don't use the keyword ROW in order to make
779            # this usable with the IN syntax as well.  It is only necessary
780    # when the record has a single column, which is not very useful.
781            q = self._quote
782            return '(%s)' % ','.join(str(q(v)) for v in value)
783        try:
784            value = value.__pg_repr__()
785        except AttributeError:
786            raise InterfaceError(
787                'Do not know how to adapt type %s' % type(value))
788        if isinstance(value, (tuple, list)):
789            value = self._quote(value)
790        return value
791
792    def _quoteparams(self, string, parameters):
793        """Quote parameters.
794
795        This function works for both mappings and sequences.
796        """
797        if isinstance(parameters, dict):
798            parameters = _quotedict(parameters)
799            parameters.quote = self._quote
800        else:
801            parameters = tuple(map(self._quote, parameters))
802        return string % parameters
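    # For illustration: with the 'pyformat' paramstyle used by this module,
    # execute() fills the placeholders via _quoteparams() with values quoted
    # by _quote(), so no manual quoting is needed.  The table "foo" and its
    # columns are placeholders.
    #
    #     cur.execute("insert into foo values (%(num)s, %(txt)s)",
    #                 {'num': 42, 'txt': "it's"})
    #     cur.execute("insert into foo values (%s, %s)", (42, "it's"))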
803
804    def _make_description(self, info):
805        """Make the description tuple for the given field info."""
806        name, typ, size, mod = info[1:]
807        type_code = self.type_cache[typ]
808        if mod > 0:
809            mod -= 4
810        if type_code == 'numeric':
811            precision, scale = mod >> 16, mod & 0xffff
812            size = precision
813        else:
814            if not size:
815                size = type_code.len
816            if size == -1:
817                size = mod
818            precision = scale = None
819        return CursorDescription(name, type_code,
820            None, size, precision, scale, None)
821
822    @property
823    def description(self):
824        """Read-only attribute describing the result columns."""
825        descr = self._description
826        if self._description is True:
827            make = self._make_description
828            descr = [make(info) for info in self._src.listinfo()]
829            self._description = descr
830        return descr
831
832    @property
833    def colnames(self):
834        """Unofficial convenience method for getting the column names."""
835        return [d[0] for d in self.description]
836
837    @property
838    def coltypes(self):
839        """Unofficial convenience method for getting the column types."""
840        return [d[1] for d in self.description]
841
842    def close(self):
843        """Close the cursor object."""
844        self._src.close()
845        self._description = None
846        self.rowcount = -1
847        self.lastrowid = None
848
849    def execute(self, operation, parameters=None):
850        """Prepare and execute a database operation (query or command)."""
851        # The parameters may also be specified as list of tuples to e.g.
852        # insert multiple rows in a single operation, but this kind of
853        # usage is deprecated.  We make several plausibility checks because
854        # tuples can also be passed with the meaning of ROW constructors.
855        if (parameters and isinstance(parameters, list)
856                and len(parameters) > 1
857                and all(isinstance(p, tuple) for p in parameters)
858                and all(len(p) == len(parameters[0]) for p in parameters[1:])):
859            return self.executemany(operation, parameters)
860        else:
861            # not a list of tuples
862            return self.executemany(operation, [parameters])
863
864    def executemany(self, operation, seq_of_parameters):
865        """Prepare operation and execute it against a parameter sequence."""
866        if not seq_of_parameters:
867            # don't do anything without parameters
868            return
869        self._description = None
870        self.rowcount = -1
871        # first try to execute all queries
872        rowcount = 0
873        sql = "BEGIN"
874        try:
875            if not self._dbcnx._tnx:
876                try:
877                    self._cnx.source().execute(sql)
878                except DatabaseError:
879                    raise  # database provides error message
880                except Exception:
881                    raise _op_error("Can't start transaction")
882                self._dbcnx._tnx = True
883            for parameters in seq_of_parameters:
884                sql = operation
885                if parameters:
886                    sql = self._quoteparams(sql, parameters)
887                rows = self._src.execute(sql)
888                if rows:  # true if not DML
889                    rowcount += rows
890                else:
891                    self.rowcount = -1
892        except DatabaseError:
893            raise  # database provides error message
894        except Error as err:
895            raise _db_error(
896                "Error in '%s': '%s' " % (sql, err), InterfaceError)
897        except Exception as err:
898            raise _op_error("Internal error in '%s': %s" % (sql, err))
899        # then initialize result raw count and description
900        if self._src.resulttype == RESULT_DQL:
901            self._description = True  # fetch on demand
902            self.rowcount = self._src.ntuples
903            self.lastrowid = None
904            if self.build_row_factory:
905                self.row_factory = self.build_row_factory()
906        else:
907            self.rowcount = rowcount
908            self.lastrowid = self._src.oidstatus()
909        # return the cursor object, so you can write statements such as
910        # "cursor.execute(...).fetchall()" or "for row in cursor.execute(...)"
911        return self
912
913    def fetchone(self):
914        """Fetch the next row of a query result set."""
915        res = self.fetchmany(1, False)
916        try:
917            return res[0]
918        except IndexError:
919            return None
920
921    def fetchall(self):
922        """Fetch all (remaining) rows of a query result."""
923        return self.fetchmany(-1, False)
924
925    def fetchmany(self, size=None, keep=False):
926        """Fetch the next set of rows of a query result.
927
928        The number of rows to fetch per call is specified by the
929        size parameter. If it is not given, the cursor's arraysize
930        determines the number of rows to be fetched. If you set
931        the keep parameter to true, this is kept as new arraysize.
932        """
933        if size is None:
934            size = self.arraysize
935        if keep:
936            self.arraysize = size
937        try:
938            result = self._src.fetch(size)
939        except DatabaseError:
940            raise
941        except Error as err:
942            raise _db_error(str(err))
943        typecast = self.type_cache.typecast
944        return [self.row_factory([typecast(value, typ)
945            for typ, value in zip(self.coltypes, row)]) for row in result]
946
947    def callproc(self, procname, parameters=None):
948        """Call a stored database procedure with the given name.
949
950        The sequence of parameters must contain one entry for each input
951        argument that the procedure expects. The result of the call is the
952        same as this input sequence; replacement of output and input/output
953        parameters in the return value is currently not supported.
954
955        The procedure may also provide a result set as output. These can be
956        requested through the standard fetch methods of the cursor.
957        """
958        n = parameters and len(parameters) or 0
959        query = 'select * from "%s"(%s)' % (procname, ','.join(n * ['%s']))
960        self.execute(query, parameters)
961        return parameters
962
963    def copy_from(self, stream, table,
964            format=None, sep=None, null=None, size=None, columns=None):
965        """Copy data from an input stream to the specified table.
966
967        The input stream can be a file-like object with a read() method or
968        it can also be an iterable returning a row or multiple rows of input
969        on each iteration.
970
971        The format must be text, csv or binary. The sep option sets the
972        column separator (delimiter) used in the non binary formats.
973        The null option sets the textual representation of NULL in the input.
974
975        The size option sets the size of the buffer used when reading data
976        from file-like objects.
977
978        The copy operation can be restricted to a subset of columns. If no
979        columns are specified, all of them will be copied.
980        """
981        binary_format = format == 'binary'
982        try:
983            read = stream.read
984        except AttributeError:
985            if size:
986                raise ValueError("Size must only be set for file-like objects")
987            if binary_format:
988                input_type = bytes
989                type_name = 'byte strings'
990            else:
991                input_type = basestring
992                type_name = 'strings'
993
994            if isinstance(stream, basestring):
995                if not isinstance(stream, input_type):
996                    raise ValueError("The input must be %s" % type_name)
997                if not binary_format:
998                    if isinstance(stream, str):
999                        if not stream.endswith('\n'):
1000                            stream += '\n'
1001                    else:
1002                        if not stream.endswith(b'\n'):
1003                            stream += b'\n'
1004
1005                def chunks():
1006                    yield stream
1007
1008            elif isinstance(stream, Iterable):
1009
1010                def chunks():
1011                    for chunk in stream:
1012                        if not isinstance(chunk, input_type):
1013                            raise ValueError(
1014                                "Input stream must consist of %s" % type_name)
1015                        if isinstance(chunk, str):
1016                            if not chunk.endswith('\n'):
1017                                chunk += '\n'
1018                        else:
1019                            if not chunk.endswith(b'\n'):
1020                                chunk += b'\n'
1021                        yield chunk
1022
1023            else:
1024                raise TypeError("Need an input stream to copy from")
1025        else:
1026            if size is None:
1027                size = 8192
1028            elif not isinstance(size, int):
1029                raise TypeError("The size option must be an integer")
1030            if size > 0:
1031
1032                def chunks():
1033                    while True:
1034                        buffer = read(size)
1035                        yield buffer
1036                        if not buffer or len(buffer) < size:
1037                            break
1038
1039            else:
1040
1041                def chunks():
1042                    yield read()
1043
1044        if not table or not isinstance(table, basestring):
1045            raise TypeError("Need a table to copy to")
1046        if table.lower().startswith('select'):
1047            raise ValueError("Must specify a table, not a query")
1048        else:
1049            table = '"%s"' % (table,)
1050        operation = ['copy %s' % (table,)]
1051        options = []
1052        params = []
1053        if format is not None:
1054            if not isinstance(format, basestring):
1055                raise TypeError("The format option must be a string")
1056            if format not in ('text', 'csv', 'binary'):
1057                raise ValueError("Invalid format")
1058            options.append('format %s' % (format,))
1059        if sep is not None:
1060            if not isinstance(sep, basestring):
1061                raise TypeError("The sep option must be a string")
1062            if format == 'binary':
1063                raise ValueError(
1064                    "The sep option is not allowed with binary format")
1065            if len(sep) != 1:
1066                raise ValueError(
1067                    "The sep option must be a single one-byte character")
1068            options.append('delimiter %s')
1069            params.append(sep)
1070        if null is not None:
1071            if not isinstance(null, basestring):
1072                raise TypeError("The null option must be a string")
1073            options.append('null %s')
1074            params.append(null)
1075        if columns:
1076            if not isinstance(columns, basestring):
1077                columns = ','.join('"%s"' % (col,) for col in columns)
1078            operation.append('(%s)' % (columns,))
1079        operation.append("from stdin")
1080        if options:
1081            operation.append('(%s)' % ','.join(options))
1082        operation = ' '.join(operation)
1083
1084        putdata = self._src.putdata
1085        self.execute(operation, params)
1086
1087        try:
1088            for chunk in chunks():
1089                putdata(chunk)
1090        except BaseException as error:
1091            self.rowcount = -1
1092            # the following call will re-raise the error
1093            putdata(error)
1094        else:
1095            self.rowcount = putdata(None)
1096
1097        # return the cursor object, so you can chain operations
1098        return self
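    # A usage sketch (table, columns and data are placeholders): copy rows
    # from an in-memory iterable of tab-separated lines into a table.
    #
    #     rows = ['1\tapple\n', '2\tbanana\n']
    #     cur.copy_from(rows, 'fruits', columns=('id', 'name'))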
1099
1100    def copy_to(self, stream, table,
1101            format=None, sep=None, null=None, decode=None, columns=None):
1102        """Copy data from the specified table to an output stream.
1103
1104        The output stream can be a file-like object with a write() method or
1105        it can also be None, in which case the method will return a generator
1106        yielding a row on each iteration.
1107
1108        Output will be returned as byte strings unless you set decode to true.
1109
1110        Note that you can also use a select query instead of the table name.
1111
1112        The format must be text, csv or binary. The sep option sets the
1113        column separator (delimiter) used in the non binary formats.
1114        The null option sets the textual representation of NULL in the output.
1115
1116        The copy operation can be restricted to a subset of columns. If no
1117        columns are specified, all of them will be copied.
1118        """
1119        binary_format = format == 'binary'
1120        if stream is not None:
1121            try:
1122                write = stream.write
1123            except AttributeError:
1124                raise TypeError("Need an output stream to copy to")
1125        if not table or not isinstance(table, basestring):
1126            raise TypeError("Need a table to copy from")
1127        if table.lower().startswith('select'):
1128            if columns:
1129                raise ValueError("Columns must be specified in the query")
1130            table = '(%s)' % (table,)
1131        else:
1132            table = '"%s"' % (table,)
1133        operation = ['copy %s' % (table,)]
1134        options = []
1135        params = []
1136        if format is not None:
1137            if not isinstance(format, basestring):
1138                raise TypeError("The format option must be a string")
1139            if format not in ('text', 'csv', 'binary'):
1140                raise ValueError("Invalid format")
1141            options.append('format %s' % (format,))
1142        if sep is not None:
1143            if not isinstance(sep, basestring):
1144                raise TypeError("The sep option must be a string")
1145            if binary_format:
1146                raise ValueError(
1147                    "The sep option is not allowed with binary format")
1148            if len(sep) != 1:
1149                raise ValueError(
1150                    "The sep option must be a single one-byte character")
1151            options.append('delimiter %s')
1152            params.append(sep)
1153        if null is not None:
1154            if not isinstance(null, basestring):
1155                raise TypeError("The null option must be a string")
1156            options.append('null %s')
1157            params.append(null)
1158        if decode is None:
1159            if format == 'binary':
1160                decode = False
1161            else:
1162                decode = str is unicode
1163        else:
1164            if not isinstance(decode, (int, bool)):
1165                raise TypeError("The decode option must be a boolean")
1166            if decode and binary_format:
1167                raise ValueError(
1168                    "The decode option is not allowed with binary format")
1169        if columns:
1170            if not isinstance(columns, basestring):
1171                columns = ','.join('"%s"' % (col,) for col in columns)
1172            operation.append('(%s)' % (columns,))
1173
1174        operation.append("to stdout")
1175        if options:
1176            operation.append('(%s)' % ','.join(options))
1177        operation = ' '.join(operation)
1178
1179        getdata = self._src.getdata
1180        self.execute(operation, params)
1181
1182        def copy():
1183            self.rowcount = 0
1184            while True:
1185                row = getdata(decode)
1186                if isinstance(row, int):
1187                    if self.rowcount != row:
1188                        self.rowcount = row
1189                    break
1190                self.rowcount += 1
1191                yield row
1192
1193        if stream is None:
1194            # no output stream given, return the generator
1195            return copy()
1196
1197        # write the rows to the file-like output stream
1198        for row in copy():
1199            write(row)
1200
1201        # return the cursor object, so you can chain operations
1202        return self
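    # A usage sketch (file and table names are placeholders): dump a table
    # to a CSV file, or iterate over its rows without a target stream.
    #
    #     with open('fruits.csv', 'w') as f:
    #         cur.copy_to(f, 'fruits', format='csv')
    #     for row in cur.copy_to(None, 'fruits', decode=True):
    #         print(row, end='')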
1203
1204    def __next__(self):
1205        """Return the next row (support for the iteration protocol)."""
1206        res = self.fetchone()
1207        if res is None:
1208            raise StopIteration
1209        return res
1210
1211    # Note that since Python 2.6 the iterator protocol uses __next__()
1212    # instead of next(); next() is kept for backward compatibility only.
1213    next = __next__
1214
1215    @staticmethod
1216    def nextset():
1217        """Not supported."""
1218        raise NotSupportedError("The nextset() method is not supported")
1219
1220    @staticmethod
1221    def setinputsizes(sizes):
1222        """Not supported."""
1223        pass  # unsupported, but silently passed
1224
1225    @staticmethod
1226    def setoutputsize(size, column=0):
1227        """Not supported."""
1228        pass  # unsupported, but silently passed
1229
1230    @staticmethod
1231    def row_factory(row):
1232        """Process rows before they are returned.
1233
1234        You can overwrite this statically with a custom row factory, or
1235        you can build a row factory dynamically with build_row_factory().
1236
1237        For example, you can create a Cursor class that returns rows as
1238        Python dictionaries like this:
1239
1240            class DictCursor(pgdb.Cursor):
1241
1242                def row_factory(self, row):
1243                    return {desc[0]: value
1244                        for desc, value in zip(self.description, row)}
1245
1246            cur = DictCursor(con)  # get one DictCursor instance or
1247            con.cursor_type = DictCursor  # always use DictCursor instances
1248        """
1249        raise NotImplementedError
1250
1251    def build_row_factory(self):
1252        """Build a row factory based on the current description.
1253
1254        This implementation builds a row factory for creating named tuples.
1255        You can overwrite this method if you want to dynamically create
1256        different row factories whenever the column description changes.
1257        """
1258        colnames = self.colnames
1259        if colnames:
1260            try:
1261                try:
1262                    return namedtuple('Row', colnames, rename=True)._make
1263                except TypeError:  # Python 2.6 and 3.0 do not support rename
1264                    colnames = [v if v.isalnum() else 'column_%d' % n
1265                             for n, v in enumerate(colnames)]
1266                    return namedtuple('Row', colnames)._make
1267            except ValueError:  # there is still a problem with the field names
1268                colnames = ['column_%d' % n for n in range(len(colnames))]
1269                return namedtuple('Row', colnames)._make
1270
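    # For illustration: with the default build_row_factory(), fetched rows
    # are named tuples built from the column names, so both attribute and
    # index access work, e.g.
    #
    #     cur.execute("select 1 as foo, 'two' as bar")
    #     row = cur.fetchone()
    #     row.foo, row[1]   # -> (1, 'two')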
1271
1272CursorDescription = namedtuple('CursorDescription',
1273    ['name', 'type_code', 'display_size', 'internal_size',
1274     'precision', 'scale', 'null_ok'])
1275
1276
1277### Connection Objects
1278
1279class Connection(object):
1280    """Connection object."""
1281
1282    # expose the exceptions as attributes on the connection object
1283    Error = Error
1284    Warning = Warning
1285    InterfaceError = InterfaceError
1286    DatabaseError = DatabaseError
1287    InternalError = InternalError
1288    OperationalError = OperationalError
1289    ProgrammingError = ProgrammingError
1290    IntegrityError = IntegrityError
1291    DataError = DataError
1292    NotSupportedError = NotSupportedError
1293
1294    def __init__(self, cnx):
1295        """Create a database connection object."""
1296        self._cnx = cnx  # connection
1297        self._tnx = False  # transaction state
1298        self.type_cache = TypeCache(cnx)
1299        self.cursor_type = Cursor
1300        try:
1301            self._cnx.source()
1302        except Exception:
1303            raise _op_error("Invalid connection")
1304
1305    def __enter__(self):
1306        """Enter the runtime context for the connection object.
1307
1308        The runtime context can be used for running transactions.
1309        """
1310        return self
1311
1312    def __exit__(self, et, ev, tb):
1313        """Exit the runtime context for the connection object.
1314
1315        This does not close the connection, but it ends a transaction.
1316        """
1317        if et is None and ev is None and tb is None:
1318            self.commit()
1319        else:
1320            self.rollback()
1321
1322    def close(self):
1323        """Close the connection object."""
1324        if self._cnx:
1325            if self._tnx:
1326                try:
1327                    self.rollback()
1328                except DatabaseError:
1329                    pass
1330            self._cnx.close()
1331            self._cnx = None
1332        else:
1333            raise _op_error("Connection has been closed")
1334
1335    def commit(self):
1336        """Commit any pending transaction to the database."""
1337        if self._cnx:
1338            if self._tnx:
1339                self._tnx = False
1340                try:
1341                    self._cnx.source().execute("COMMIT")
1342                except DatabaseError:
1343                    raise
1344                except Exception:
1345                    raise _op_error("Can't commit")
1346        else:
1347            raise _op_error("Connection has been closed")
1348
1349    def rollback(self):
1350        """Roll back to the start of any pending transaction."""
1351        if self._cnx:
1352            if self._tnx:
1353                self._tnx = False
1354                try:
1355                    self._cnx.source().execute("ROLLBACK")
1356                except DatabaseError:
1357                    raise
1358                except Exception:
1359                    raise _op_error("Can't rollback")
1360        else:
1361            raise _op_error("Connection has been closed")
1362
1363    def cursor(self):
1364        """Return a new cursor object using the connection."""
1365        if self._cnx:
1366            try:
1367                return self.cursor_type(self)
1368            except Exception:
1369                raise _op_error("Invalid connection")
1370        else:
1371            raise _op_error("Connection has been closed")
1372
1373    if shortcutmethods:  # otherwise do not implement and document this
1374
1375        def execute(self, operation, params=None):
1376            """Shortcut method to run an operation on an implicit cursor."""
1377            cursor = self.cursor()
1378            cursor.execute(operation, params)
1379            return cursor
1380
1381        def executemany(self, operation, param_seq):
1382            """Shortcut method to run an operation against a sequence."""
1383            cursor = self.cursor()
1384            cursor.executemany(operation, param_seq)
1385            return cursor
1386
1387
1388### Module Interface
1389
1390_connect = connect
1391
1392def connect(dsn=None,
1393        user=None, password=None,
1394        host=None, database=None):
1395    """Connect to a database."""
1396    # first get params from DSN
1397    dbport = -1
1398    dbhost = ""
1399    dbbase = ""
1400    dbuser = ""
1401    dbpasswd = ""
1402    dbopt = ""
1403    try:
1404        params = dsn.split(":")
1405        dbhost = params[0]
1406        dbbase = params[1]
1407        dbuser = params[2]
1408        dbpasswd = params[3]
1409        dbopt = params[4]
1410    except (AttributeError, IndexError, TypeError):
1411        pass
1412
1413    # override if necessary
1414    if user is not None:
1415        dbuser = user
1416    if password is not None:
1417        dbpasswd = password
1418    if database is not None:
1419        dbbase = database
1420    if host is not None:
1421        try:
1422            params = host.split(":")
1423            dbhost = params[0]
1424            dbport = int(params[1])
1425        except (AttributeError, IndexError, TypeError, ValueError):
1426            pass
1427
1428    # empty host is localhost
1429    if dbhost == "":
1430        dbhost = None
1431    if dbuser == "":
1432        dbuser = None
1433
1434    # open the connection
1435    cnx = _connect(dbbase, dbhost, dbport, dbopt, dbuser, dbpasswd)
1436    return Connection(cnx)
1437
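# A usage sketch (host, database and credentials are placeholders); the
# port, if needed, is given as part of the host string:
#
#     con = pgdb.connect('localhost:mydb:postgres:secret:')
#     con = pgdb.connect(host='localhost:5432', database='mydb',
#                        user='postgres', password='secret')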
1438
1439### Types Handling
1440
1441class Type(frozenset):
1442    """Type class for a couple of PostgreSQL data types.
1443
1444    PostgreSQL is object-oriented: types are dynamic.
1445    We must thus use type names as internal type codes.
1446    """
1447
1448    def __new__(cls, values):
1449        if isinstance(values, basestring):
1450            values = values.split()
1451        return super(Type, cls).__new__(cls, values)
1452
1453    def __eq__(self, other):
1454        if isinstance(other, basestring):
1455            if other.startswith('_'):
1456                other = other[1:]
1457            return other in self
1458        else:
1459            return super(Type, self).__eq__(other)
1460
1461    def __ne__(self, other):
1462        if isinstance(other, basestring):
1463            if other.startswith('_'):
1464                other = other[1:]
1465            return other not in self
1466        else:
1467            return super(Type, self).__ne__(other)
1468
1469
1470class ArrayType:
1471    """Type class for PostgreSQL array types."""
1472
1473    def __eq__(self, other):
1474        if isinstance(other, basestring):
1475            return other.startswith('_')
1476        else:
1477            return isinstance(other, ArrayType)
1478
1479    def __ne__(self, other):
1480        if isinstance(other, basestring):
1481            return not other.startswith('_')
1482        else:
1483            return not isinstance(other, ArrayType)
1484
1485
1486class RecordType:
1487    """Type class for PostgreSQL record types."""
1488
1489    def __eq__(self, other):
1490        if isinstance(other, TypeCode):
1491            return other.type == 'c'
1492        elif isinstance(other, basestring):
1493            return other == 'record'
1494        else:
1495            return isinstance(other, RecordType)
1496
1497    def __ne__(self, other):
1498        if isinstance(other, TypeCode):
1499            return other.type != 'c'
1500        elif isinstance(other, basestring):
1501            return other != 'record'
1502        else:
1503            return not isinstance(other, RecordType)
1504
1505
1506# Mandatory type objects defined by DB-API 2 specs:
1507
1508STRING = Type('char bpchar name text varchar')
1509BINARY = Type('bytea')
1510NUMBER = Type('int2 int4 serial int8 float4 float8 numeric money')
1511DATETIME = Type('date time timetz timestamp timestamptz interval'
1512    ' abstime reltime')  # these are very old
1513ROWID = Type('oid')
1514
1515
1516# Additional type objects (more specific):
1517
1518BOOL = Type('bool')
1519SMALLINT = Type('int2')
1520INTEGER = Type('int2 int4 int8 serial')
1521LONG = Type('int8')
1522FLOAT = Type('float4 float8')
1523NUMERIC = Type('numeric')
1524MONEY = Type('money')
1525DATE = Type('date')
1526TIME = Type('time timetz')
1527TIMESTAMP = Type('timestamp timestamptz')
1528INTERVAL = Type('interval')
1529HSTORE = Type('hstore')
1530JSON = Type('json jsonb')
1531
1532# Type object for arrays (also equate to their base types):
1533
1534ARRAY = ArrayType()
1535
1536# Type object for records (encompassing all composite types):
1537
1538RECORD = RecordType()
1539
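# For illustration: these type objects compare equal to the type_code
# entries in cursor.description, e.g.
#
#     cur.execute("select 123 as num, 'abc'::text as txt")
#     [d.type_code == NUMBER for d in cur.description]  # -> [True, False]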
1540
1541# Mandatory type helpers defined by DB-API 2 specs:
1542
1543def Date(year, month, day):
1544    """Construct an object holding a date value."""
1545    return date(year, month, day)
1546
1547
1548def Time(hour, minute=0, second=0, microsecond=0, tzinfo=None):
1549    """Construct an object holding a time value."""
1550    return time(hour, minute, second, microsecond, tzinfo)
1551
1552
1553def Timestamp(year, month, day, hour=0, minute=0, second=0, microsecond=0,
1554        tzinfo=None):
1555    """Construct an object holding a time stamp value."""
1556    return datetime(year, month, day, hour, minute, second, microsecond, tzinfo)
1557
1558
1559def DateFromTicks(ticks):
1560    """Construct an object holding a date value from the given ticks value."""
1561    return Date(*localtime(ticks)[:3])
1562
1563
1564def TimeFromTicks(ticks):
1565    """Construct an object holding a time value from the given ticks value."""
1566    return Time(*localtime(ticks)[3:6])
1567
1568
1569def TimestampFromTicks(ticks):
1570    """Construct an object holding a time stamp from the given ticks value."""
1571    return Timestamp(*localtime(ticks)[:6])
1572
1573
1574class Binary(bytes):
1575    """Construct an object capable of holding a binary (long) string value."""
1576
1577
1578# Additional type helpers for PyGreSQL:
1579
1580def Interval(days, hours=0, minutes=0, seconds=0, microseconds=0):
1581    """Construct an object holding a time inverval value."""
1582    return timedelta(days, hours=hours, minutes=minutes, seconds=seconds,
1583        microseconds=microseconds)
1584
1585
1586class Hstore(dict):
1587    """Wrapper class for marking hstore values."""
1588
1589    _re_quote = regex('^[Nn][Uu][Ll][Ll]$|[ ,=>]')
1590    _re_escape = regex(r'(["\\])')
1591
1592    @classmethod
1593    def _quote(cls, s):
1594        if s is None:
1595            return 'NULL'
1596        if not s:
1597            return '""'
1598        quote = cls._re_quote.search(s)
1599        s = cls._re_escape.sub(r'\\\1', s)
1600        if quote:
1601            s = '"%s"' % s
1602        return s
1603
1604    def __str__(self):
1605        q = self._quote
1606        return ','.join('%s=>%s' % (q(k), q(v)) for k, v in self.items())
1607
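# A usage sketch (table and column are placeholders and the hstore
# extension must be installed): wrap a dict as Hstore so that it is
# adapted to an hstore literal when passed as a query parameter.
#
#     cur.execute("insert into books (attrs) values (%s)",
#                 [Hstore({'author': 'Adams', 'pages': '42'})])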
1608
1609class Json:
1610    """Construct a wrapper for holding an object serializable to JSON."""
1611
1612    def __init__(self, obj, encode=None):
1613        self.obj = obj
1614        self.encode = encode or jsonencode
1615
1616    def __str__(self):
1617        obj = self.obj
1618        if isinstance(obj, basestring):
1619            return obj
1620        return self.encode(obj)
1621
1622
1623class Literal:
1624    """Construct a wrapper for holding a literal SQL string."""
1625
1626    def __init__(self, sql):
1627        self.sql = sql
1628
1629    def __str__(self):
1630        return self.sql
1631
1632    __pg_repr__ = __str__
1633
1634# If run as script, print some information:
1635
1636if __name__ == '__main__':
1637    print('PyGreSQL version', version)
1638    print('')
1639    print(__doc__)