1#! /usr/bin/python
2#
3# pgdb.py
4#
5# Written by D'Arcy J.M. Cain
6#
7# $Id: pgdb.py 841 2016-02-08 20:05:37Z cito $
8#
9
10"""pgdb - DB-API 2.0 compliant module for PygreSQL.
11
12(c) 1999, Pascal Andre <andre@via.ecp.fr>.
13See package documentation for further information on copyright.
14
15Inline documentation is sparse.
16See DB-API 2.0 specification for usage information:
17http://www.python.org/peps/pep-0249.html
18
19Basic usage:
20
21    pgdb.connect(connect_string) # open a connection
22    # connect_string = 'host:database:user:password:opt'
23    # All parts are optional. You may also pass host, database,
24    # user and password as individual keyword arguments. To pass
25    # a port, append it to the host keyword parameter:
26    connection = pgdb.connect(host='localhost:5432')
27
28    cursor = connection.cursor() # open a cursor
29
30    cursor.execute(query[, params])
31    # Execute a query, binding params (a dictionary) if they are
32    # passed. The binding syntax is the same as the % operator
33    # for dictionaries, and no quoting is done.
34
35    cursor.executemany(query, list of params)
36    # Execute a query many times, binding each param dictionary
37    # from the list.
38
39    cursor.fetchone() # fetch one row, [value, value, ...]
40
41    cursor.fetchall() # fetch all rows, [[value, value, ...], ...]
42
43    cursor.fetchmany([size])
44    # returns size or cursor.arraysize number of rows,
45    # [[value, value, ...], ...] from result set.
46    # Default cursor.arraysize is 1.
47
48    cursor.description # returns information about the columns
49    #   [(column_name, type_name, display_size,
50    #           internal_size, precision, scale, null_ok), ...]
51    # Note that display_size and null_ok are not implemented;
52    # precision and scale are only provided for numeric columns.
53
54    cursor.rowcount # number of rows available in the result set
55    # Available after a call to execute.
56
57    connection.commit() # commit transaction
58
59    connection.rollback() # or rollback transaction
60
61    cursor.close() # close the cursor
62
63    connection.close() # close the connection
64"""
65
66from __future__ import print_function
67
68from _pg import *
69
70__version__ = version
71
72from datetime import date, time, datetime, timedelta
73from time import localtime
74from decimal import Decimal
75from uuid import UUID as Uuid
76from math import isnan, isinf
77from collections import namedtuple
78from functools import partial
79from re import compile as regex
80from json import loads as jsondecode, dumps as jsonencode
81
82try:
83    long
84except NameError:  # Python >= 3.0
85    long = int
86
87try:
88    unicode
89except NameError:  # Python >= 3.0
90    unicode = str
91
92try:
93    basestring
94except NameError:  # Python >= 3.0
95    basestring = (str, bytes)
96
97from collections import Iterable
98
99
100### Module Constants
101
102# compliant with DB API 2.0
103apilevel = '2.0'
104
105# module may be shared, but not connections
106threadsafety = 1
107
108# this module use extended python format codes
109paramstyle = 'pyformat'
110
111# shortcut methods have been excluded from DB API 2 and
112# are not recommended by the DB SIG, but they can be handy
113shortcutmethods = 1
114
115
116### Internal Type Handling
117
118try:
119    from inspect import signature
120except ImportError:  # Python < 3.3
121    from inspect import getargspec
122
123    def get_args(func):
124        return getargspec(func).args
125else:
126
127    def get_args(func):
128        return list(signature(func).parameters)
129
130try:
131    if datetime.strptime('+0100', '%z') is None:
132        raise ValueError
133except ValueError:  # Python < 3.2
134    timezones = None
135else:
136    # time zones used in Postgres timestamptz output
137    timezones = dict(CET='+0100', EET='+0200', EST='-0500',
138        GMT='+0000', HST='-1000', MET='+0100', MST='-0700',
139        UCT='+0000', UTC='+0000', WET='+0000')
140
141
142def decimal_type(decimal_type=None):
143    """Get or set global type to be used for decimal values.
144
145    Note that connections cache cast functions. To be sure a global change
146    is picked up by a running connection, call con.type_cache.reset_typecast().
147    """
148    global Decimal
149    if decimal_type is not None:
150        Decimal = decimal_type
151        set_typecast('numeric', decimal_type)
152    return Decimal
153
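
# Illustrative sketch (not part of the original module): switching the global
# decimal type, e.g. to plain float, and restoring it afterwards.  Running
# connections cache cast functions, so they would additionally need a call
# to con.type_cache.reset_typecast() to pick up such a change.
def _example_decimal_type():
    previous = decimal_type()   # remember the currently used decimal type
    decimal_type(float)         # numeric values are now cast to float
    decimal_type(previous)      # restore the previous decimal type
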
154
155def cast_bool(value):
156    """Cast boolean value in database format to bool."""
157    if value:
158        return value[0] in ('t', 'T')
159
160
161def cast_money(value):
162    """Cast money value in database format to Decimal."""
163    if value:
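        # depending on the locale settings, negative amounts may be
        # rendered in accounting style, i.e. wrapped in parentheses
        # instead of carrying a leading minus sign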
164        value = value.replace('(', '-')
165        return Decimal(''.join(c for c in value if c.isdigit() or c in '.-'))
166
167
168def cast_int2vector(value):
169    """Cast an int2vector value."""
170    return [int(v) for v in value.split()]
171
172
173def cast_date(value, connection):
174    """Cast a date value."""
175    # The output format depends on the server setting DateStyle.  The default
176    # setting ISO and the setting for German are actually unambiguous.  The
177    # order of days and months in the other two settings is however ambiguous,
178    # so at least here we need to consult the setting to properly parse values.
179    if value == '-infinity':
180        return date.min
181    if value == 'infinity':
182        return date.max
183    value = value.split()
184    if value[-1] == 'BC':
185        return date.min
186    value = value[0]
187    if len(value) > 10:
188        return date.max
189    fmt = connection.date_format()
190    return datetime.strptime(value, fmt).date()
191
192
193def cast_time(value):
194    """Cast a time value."""
195    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
196    return datetime.strptime(value, fmt).time()
197
198
199_re_timezone = regex('(.*)([+-].*)')
200
201
202def cast_timetz(value):
203    """Cast a timetz value."""
204    tz = _re_timezone.match(value)
205    if tz:
206        value, tz = tz.groups()
207    else:
208        tz = '+0000'
209    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
210    if timezones:
211        if tz.startswith(('+', '-')):
212            if len(tz) < 5:
213                tz += '00'
214            else:
215                tz = tz.replace(':', '')
216        elif tz in timezones:
217            tz = timezones[tz]
218        else:
219            tz = '+0000'
220        value += tz
221        fmt += '%z'
222    return datetime.strptime(value, fmt).timetz()
223
224
225def cast_timestamp(value, connection):
226    """Cast a timestamp value."""
227    if value == '-infinity':
228        return datetime.min
229    if value == 'infinity':
230        return datetime.max
231    value = value.split()
232    if value[-1] == 'BC':
233        return datetime.min
234    fmt = connection.date_format()
235    if fmt.endswith('-%Y') and len(value) > 2:
236        value = value[1:5]
237        if len(value[3]) > 4:
238            return datetime.max
239        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
240            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
241    else:
242        if len(value[0]) > 10:
243            return datetime.max
244        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
245    return datetime.strptime(' '.join(value), ' '.join(fmt))
246
247
248def cast_timestamptz(value, connection):
249    """Cast a timestamptz value."""
250    if value == '-infinity':
251        return datetime.min
252    if value == 'infinity':
253        return datetime.max
254    value = value.split()
255    if value[-1] == 'BC':
256        return datetime.min
257    fmt = connection.date_format()
258    if fmt.endswith('-%Y') and len(value) > 2:
259        value = value[1:]
260        if len(value[3]) > 4:
261            return datetime.max
262        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
263            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
264        value, tz = value[:-1], value[-1]
265    else:
266        if fmt.startswith('%Y-'):
267            tz = _re_timezone.match(value[1])
268            if tz:
269                value[1], tz = tz.groups()
270            else:
271                tz = '+0000'
272        else:
273            value, tz = value[:-1], value[-1]
274        if len(value[0]) > 10:
275            return datetime.max
276        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
277    if timezones:
278        if tz.startswith(('+', '-')):
279            if len(tz) < 5:
280                tz += '00'
281            else:
282                tz = tz.replace(':', '')
283        elif tz in timezones:
284            tz = timezones[tz]
285        else:
286            tz = '+0000'
287        value.append(tz)
288        fmt.append('%z')
289    return datetime.strptime(' '.join(value), ' '.join(fmt))
290
291_re_interval_sql_standard = regex(
292    '(?:([+-])?([0-9]+)-([0-9]+) ?)?'
293    '(?:([+-]?[0-9]+)(?!:) ?)?'
294    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
295
296_re_interval_postgres = regex(
297    '(?:([+-]?[0-9]+) ?years? ?)?'
298    '(?:([+-]?[0-9]+) ?mons? ?)?'
299    '(?:([+-]?[0-9]+) ?days? ?)?'
300    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
301
302_re_interval_postgres_verbose = regex(
303    '@ ?(?:([+-]?[0-9]+) ?years? ?)?'
304    '(?:([+-]?[0-9]+) ?mons? ?)?'
305    '(?:([+-]?[0-9]+) ?days? ?)?'
306    '(?:([+-]?[0-9]+) ?hours? ?)?'
307    '(?:([+-]?[0-9]+) ?mins? ?)?'
308    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))? ?secs?)? ?(ago)?')
309
310_re_interval_iso_8601 = regex(
311    'P(?:([+-]?[0-9]+)Y)?'
312    '(?:([+-]?[0-9]+)M)?'
313    '(?:([+-]?[0-9]+)D)?'
314    '(?:T(?:([+-]?[0-9]+)H)?'
315    '(?:([+-]?[0-9]+)M)?'
316    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))?S)?)?')
317
318
319def cast_interval(value):
320    """Cast an interval value."""
321    # The output format depends on the server setting IntervalStyle, but it's
322    # not necessary to consult this setting to parse it.  It's faster to just
323    # check all possible formats, and there is no ambiguity here.
324    m = _re_interval_iso_8601.match(value)
325    if m:
326        m = [d or '0' for d in m.groups()]
327        secs_ago = m.pop(5) == '-'
328        m = [int(d) for d in m]
329        years, mons, days, hours, mins, secs, usecs = m
330        if secs_ago:
331            secs = -secs
332            usecs = -usecs
333    else:
334        m = _re_interval_postgres_verbose.match(value)
335        if m:
336            m, ago = [d or '0' for d in m.groups()[:8]], m.group(9)
337            secs_ago = m.pop(5) == '-'
338            m = [-int(d) for d in m] if ago else [int(d) for d in m]
339            years, mons, days, hours, mins, secs, usecs = m
340            if secs_ago:
341                secs = -secs
342                usecs = -usecs
343        else:
344            m = _re_interval_postgres.match(value)
345            if m and any(m.groups()):
346                m = [d or '0' for d in m.groups()]
347                hours_ago = m.pop(3) == '-'
348                m = [int(d) for d in m]
349                years, mons, days, hours, mins, secs, usecs = m
350                if hours_ago:
351                    hours = -hours
352                    mins = -mins
353                    secs = -secs
354                    usecs = -usecs
355            else:
356                m = _re_interval_sql_standard.match(value)
357                if m and any(m.groups()):
358                    m = [d or '0' for d in m.groups()]
359                    years_ago = m.pop(0) == '-'
360                    hours_ago = m.pop(3) == '-'
361                    m = [int(d) for d in m]
362                    years, mons, days, hours, mins, secs, usecs = m
363                    if years_ago:
364                        years = -years
365                        mons = -mons
366                    if hours_ago:
367                        hours = -hours
368                        mins = -mins
369                        secs = -secs
370                        usecs = -usecs
371                else:
372                    raise ValueError('Cannot parse interval: %s' % value)
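    # timedelta cannot represent months and years exactly, so they are
    # approximated below with 30-day months and 365-day years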
373    days += 365 * years + 30 * mons
374    return timedelta(days=days, hours=hours, minutes=mins,
375        seconds=secs, microseconds=usecs)
376
377
378class Typecasts(dict):
379    """Dictionary mapping database types to typecast functions.
380
381    The cast functions get passed the string representation of a value in
382    the database which they need to convert to a Python object.  The
383    passed string will never be None since NULL values are already be
384    passed string will never be None since NULL values are already
385    """
386
387    # the default cast functions
388    # (str functions are ignored but have been added for faster access)
389    defaults = {'char': str, 'bpchar': str, 'name': str,
390        'text': str, 'varchar': str,
391        'bool': cast_bool, 'bytea': unescape_bytea,
392        'int2': int, 'int4': int, 'serial': int, 'int8': long, 'oid': int,
393        'hstore': cast_hstore, 'json': jsondecode, 'jsonb': jsondecode,
394        'float4': float, 'float8': float,
395        'numeric': Decimal, 'money': cast_money,
396        'date': cast_date, 'interval': cast_interval,
397        'time': cast_time, 'timetz': cast_timetz,
398        'timestamp': cast_timestamp, 'timestamptz': cast_timestamptz,
399        'int2vector': cast_int2vector, 'uuid': Uuid,
400        'anyarray': cast_array, 'record': cast_record}
401
402    connection = None  # will be set in local connection specific instances
403
404    def __missing__(self, typ):
405        """Create a cast function if it is not cached.
406
407        Note that this class never raises a KeyError,
408        but returns None when no special cast function exists.
409        """
410        if not isinstance(typ, str):
411            raise TypeError('Invalid type: %s' % typ)
412        cast = self.defaults.get(typ)
413        if cast:
414            # store default for faster access
415            cast = self._add_connection(cast)
416            self[typ] = cast
417        elif typ.startswith('_'):
418            # create array cast
419            base_cast = self[typ[1:]]
420            cast = self.create_array_cast(base_cast)
421            if base_cast:
422                # store only if base type exists
423                self[typ] = cast
424        return cast
425
426    @staticmethod
427    def _needs_connection(func):
428        """Check if a typecast function needs a connection argument."""
429        try:
430            args = get_args(func)
431        except (TypeError, ValueError):
432            return False
433        else:
434            return 'connection' in args[1:]
435
436    def _add_connection(self, cast):
437        """Add a connection argument to the typecast function if necessary."""
438        if not self.connection or not self._needs_connection(cast):
439            return cast
440        return partial(cast, connection=self.connection)
441
442    def get(self, typ, default=None):
443        """Get the typecast function for the given database type."""
444        return self[typ] or default
445
446    def set(self, typ, cast):
447        """Set a typecast function for the specified database type(s)."""
448        if isinstance(typ, basestring):
449            typ = [typ]
450        if cast is None:
451            for t in typ:
452                self.pop(t, None)
453                self.pop('_%s' % t, None)
454        else:
455            if not callable(cast):
456                raise TypeError("Cast parameter must be callable")
457            for t in typ:
458                self[t] = self._add_connection(cast)
459                self.pop('_%s' % t, None)
460
461    def reset(self, typ=None):
462        """Reset the typecasts for the specified type(s) to their defaults.
463
464        When no type is specified, all typecasts will be reset.
465        """
466        defaults = self.defaults
467        if typ is None:
468            self.clear()
469            self.update(defaults)
470        else:
471            if isinstance(typ, basestring):
472                typ = [typ]
473            for t in typ:
474                cast = defaults.get(t)
475                if cast:
476                    self[t] = self._add_connection(cast)
477                    t = '_%s' % t
478                    cast = defaults.get(t)
479                    if cast:
480                        self[t] = self._add_connection(cast)
481                    else:
482                        self.pop(t, None)
483                else:
484                    self.pop(t, None)
485                    self.pop('_%s' % t, None)
486
487    def create_array_cast(self, basecast):
488        """Create an array typecast for the given base cast."""
489        def cast(v):
490            return cast_array(v, basecast)
491        return cast
492
493    def create_record_cast(self, name, fields, casts):
494        """Create a named record typecast for the given fields and casts."""
495        record = namedtuple(name, fields)
496        def cast(v):
497            return record(*cast_record(v, casts))
498        return cast
499
500
501_typecasts = Typecasts()  # this is the global typecast dictionary
502
503
504def get_typecast(typ):
505    """Get the global typecast function for the given database type(s)."""
506    return _typecasts.get(typ)
507
508
509def set_typecast(typ, cast):
510    """Set a global typecast function for the given database type(s).
511
512    Note that connections cache cast functions. To be sure a global change
513    is picked up by a running connection, call con.type_cache.reset_typecast().
514    """
515    _typecasts.set(typ, cast)
516
517
518def reset_typecast(typ=None):
519    """Reset the global typecasts for the given type(s) to their default.
520
521    When no type is specified, all typecasts will be reset.
522
523    Note that connections cache cast functions. To be sure a global change
524    is picked up by a running connection, call con.type_cache.reset_typecast().
525    """
526    _typecasts.reset(typ)
527
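
# Illustrative sketch (not part of the original module): registering a custom
# global typecast.  The geometric "point" type and the parsing logic below are
# examples only; any callable taking the string output of the database works.
def _example_custom_typecast():
    def cast_point(value):
        # the server renders a point as '(x,y)'
        x, y = value.strip('()').split(',')
        return float(x), float(y)
    set_typecast('point', cast_point)   # use the custom cast globally
    reset_typecast('point')             # later, restore the default behavior
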
528
529class LocalTypecasts(Typecasts):
530    """Map typecasts, including local composite types, to cast functions."""
531
532    defaults = _typecasts
533
534    connection = None  # will be set in a connection specific instance
535
536    def __missing__(self, typ):
537        """Create a cast function if it is not cached."""
538        if typ.startswith('_'):
539            base_cast = self[typ[1:]]
540            cast = self.create_array_cast(base_cast)
541            if base_cast:
542                self[typ] = cast
543        else:
544            cast = self.defaults.get(typ)
545            if cast:
546                cast = self._add_connection(cast)
547                self[typ] = cast
548            else:
549                fields = self.get_fields(typ)
550                if fields:
551                    casts = [self[field.type] for field in fields]
552                    fields = [field.name for field in fields]
553                    cast = self.create_record_cast(typ, fields, casts)
554                    self[typ] = cast
555        return cast
556
557    def get_fields(self, typ):
558        """Return the fields for the given record type.
559
560        This method will be replaced with a method that looks up the fields
561        using the type cache of the connection.
562        """
563        return []
564
565
566class TypeCode(str):
567    """Class representing the type_code used by the DB-API 2.0.
568
569    TypeCode objects are strings equal to the PostgreSQL type name,
570    but carry some additional information.
571    """
572
573    @classmethod
574    def create(cls, oid, name, len, type, category, delim, relid):
575        """Create a type code for a PostgreSQL data type."""
576        self = cls(name)
577        self.oid = oid
578        self.len = len
579        self.type = type
580        self.category = category
581        self.delim = delim
582        self.relid = relid
583        return self
584
585FieldInfo = namedtuple('FieldInfo', ['name', 'type'])
586
587
588class TypeCache(dict):
589    """Cache for database types.
590
591    This cache maps type OIDs and names to TypeCode strings containing
592    important information on the associated database type.
593    """
594
595    def __init__(self, cnx):
596        """Initialize type cache for connection."""
597        super(TypeCache, self).__init__()
598        self._escape_string = cnx.escape_string
599        self._src = cnx.source()
600        self._typecasts = LocalTypecasts()
601        self._typecasts.get_fields = self.get_fields
602        self._typecasts.connection = cnx
603
604    def __missing__(self, key):
605        """Get the type info from the database if it is not cached."""
606        if isinstance(key, int):
607            oid = key
608        else:
609            if '.' not in key and '"' not in key:
610                key = '"%s"' % key
611            oid = "'%s'::regtype" % self._escape_string(key)
612        try:
613            self._src.execute("SELECT oid, typname,"
614                 " typlen, typtype, typcategory, typdelim, typrelid"
615                " FROM pg_type WHERE oid=%s" % oid)
616        except ProgrammingError:
617            res = None
618        else:
619            res = self._src.fetch(1)
620        if not res:
621            raise KeyError('Type %s could not be found' % key)
622        res = res[0]
623        type_code = TypeCode.create(int(res[0]), res[1],
624            int(res[2]), res[3], res[4], res[5], int(res[6]))
625        self[type_code.oid] = self[str(type_code)] = type_code
626        return type_code
627
628    def get(self, key, default=None):
629        """Get the type even if it is not cached."""
630        try:
631            return self[key]
632        except KeyError:
633            return default
634
635    def get_fields(self, typ):
636        """Get the names and types of the fields of composite types."""
637        if not isinstance(typ, TypeCode):
638            typ = self.get(typ)
639            if not typ:
640                return None
641        if not typ.relid:
642            return None  # this type is not composite
643        self._src.execute("SELECT attname, atttypid"
644            " FROM pg_attribute WHERE attrelid=%s AND attnum>0"
645            " AND NOT attisdropped ORDER BY attnum" % typ.relid)
646        return [FieldInfo(name, self.get(int(oid)))
647            for name, oid in self._src.fetch(-1)]
648
649    def get_typecast(self, typ):
650        """Get the typecast function for the given database type."""
651        return self._typecasts.get(typ)
652
653    def set_typecast(self, typ, cast):
654        """Set a typecast function for the specified database type(s)."""
655        self._typecasts.set(typ, cast)
656
657    def reset_typecast(self, typ=None):
658        """Reset the typecast function for the specified database type(s)."""
659        self._typecasts.reset(typ)
660
661    def typecast(self, value, typ):
662        """Cast the given value according to the given database type."""
663        if value is None:
664            # for NULL values, no typecast is necessary
665            return None
666        cast = self.get_typecast(typ)
667        if not cast or cast is str:
668            # no typecast is necessary
669            return value
670        return cast(value)
671
672
673class _quotedict(dict):
674    """Dictionary with auto quoting of its items.
675
676    The quote attribute must be set to the desired quote function.
677    """
678
679    def __getitem__(self, key):
680        return self.quote(super(_quotedict, self).__getitem__(key))
681
682
683### Error messages
684
685def _db_error(msg, cls=DatabaseError):
686    """Return DatabaseError with empty sqlstate attribute."""
687    error = cls(msg)
688    error.sqlstate = None
689    return error
690
691
692def _op_error(msg):
693    """Return OperationalError."""
694    return _db_error(msg, OperationalError)
695
696
697### Cursor Object
698
699class Cursor(object):
700    """Cursor object."""
701
702    def __init__(self, dbcnx):
703        """Create a cursor object for the database connection."""
704        self.connection = self._dbcnx = dbcnx
705        self._cnx = dbcnx._cnx
706        self.type_cache = dbcnx.type_cache
707        self._src = self._cnx.source()
708        # the official attribute for describing the result columns
709        self._description = None
710        if self.row_factory is Cursor.row_factory:
711            # the row factory needs to be determined dynamically
712            self.row_factory = None
713        else:
714            self.build_row_factory = None
715        self.rowcount = -1
716        self.arraysize = 1
717        self.lastrowid = None
718
719    def __iter__(self):
720        """Make cursor compatible to the iteration protocol."""
721        return self
722
723    def __enter__(self):
724        """Enter the runtime context for the cursor object."""
725        return self
726
727    def __exit__(self, et, ev, tb):
728        """Exit the runtime context for the cursor object."""
729        self.close()
730
731    def _quote(self, value):
732        """Quote value depending on its type."""
733        if value is None:
734            return 'NULL'
735        if isinstance(value, (Hstore, Json)):
736            value = str(value)
737        if isinstance(value, basestring):
738            if isinstance(value, Binary):
739                value = self._cnx.escape_bytea(value)
740                if bytes is not str:  # Python >= 3.0
741                    value = value.decode('ascii')
742            else:
743                value = self._cnx.escape_string(value)
744            return "'%s'" % value
745        if isinstance(value, float):
746            if isinf(value):
747                return "'-Infinity'" if value < 0 else "'Infinity'"
748            if isnan(value):
749                return "'NaN'"
750            return value
751        if isinstance(value, (int, long, Decimal, Literal)):
752            return value
753        if isinstance(value, datetime):
754            if value.tzinfo:
755                return "'%s'::timestamptz" % value
756            return "'%s'::timestamp" % value
757        if isinstance(value, date):
758            return "'%s'::date" % value
759        if isinstance(value, time):
760            if value.tzinfo:
761                return "'%s'::timetz" % value
762            return "'%s'::time" % value
763        if isinstance(value, timedelta):
764            return "'%s'::interval" % value
765        if isinstance(value, Uuid):
766            return "'%s'::uuid" % value
767        if isinstance(value, list):
768            # Quote value as an ARRAY constructor. This is better than using
769            # an array literal because it carries the information that this is
770            # an array and not a string.  One issue with this syntax is that
771            # you need to add an explicit typecast when passing empty arrays.
772            # The ARRAY keyword is actually only necessary at the top level.
773            if not value:  # exception for empty array
774                return "'{}'"
775            q = self._quote
776            return 'ARRAY[%s]' % ','.join(str(q(v)) for v in value)
777        if isinstance(value, tuple):
778            # Quote as a ROW constructor.  This is better than using a record
779            # literal because it carries the information that this is a record
780            # and not a string.  We don't use the keyword ROW in order to make
781            # this usable with the IN syntax as well.  It is only necessary
782            # when the record has a single column, which is not really useful.
783            q = self._quote
784            return '(%s)' % ','.join(str(q(v)) for v in value)
785        try:
786            value = value.__pg_repr__()
787        except AttributeError:
788            raise InterfaceError(
789                'Do not know how to adapt type %s' % type(value))
790        if isinstance(value, (tuple, list)):
791            value = self._quote(value)
792        return value
793
794    def _quoteparams(self, string, parameters):
795        """Quote parameters.
796
797        This function works for both mappings and sequences.
798        """
799        if isinstance(parameters, dict):
800            parameters = _quotedict(parameters)
801            parameters.quote = self._quote
802        else:
803            parameters = tuple(map(self._quote, parameters))
804        return string % parameters
805
806    def _make_description(self, info):
807        """Make the description tuple for the given field info."""
808        name, typ, size, mod = info[1:]
809        type_code = self.type_cache[typ]
810        if mod > 0:
811            mod -= 4
812        if type_code == 'numeric':
813            precision, scale = mod >> 16, mod & 0xffff
814            size = precision
815        else:
816            if not size:
817                size = type_code.len
818            if size == -1:
819                size = mod
820            precision = scale = None
821        return CursorDescription(name, type_code,
822            None, size, precision, scale, None)
823
824    @property
825    def description(self):
826        """Read-only attribute describing the result columns."""
827        descr = self._description
828        if self._description is True:
829            make = self._make_description
830            descr = [make(info) for info in self._src.listinfo()]
831            self._description = descr
832        return descr
833
834    @property
835    def colnames(self):
836        """Unofficial convenience method for getting the column names."""
837        return [d[0] for d in self.description]
838
839    @property
840    def coltypes(self):
841        """Unofficial convenience method for getting the column types."""
842        return [d[1] for d in self.description]
843
844    def close(self):
845        """Close the cursor object."""
846        self._src.close()
847        self._description = None
848        self.rowcount = -1
849        self.lastrowid = None
850
851    def execute(self, operation, parameters=None):
852        """Prepare and execute a database operation (query or command)."""
853        # The parameters may also be specified as list of tuples to e.g.
854        # insert multiple rows in a single operation, but this kind of
855        # usage is deprecated.  We make several plausibility checks because
856        # tuples can also be passed with the meaning of ROW constructors.
857        if (parameters and isinstance(parameters, list)
858                and len(parameters) > 1
859                and all(isinstance(p, tuple) for p in parameters)
860                and all(len(p) == len(parameters[0]) for p in parameters[1:])):
861            return self.executemany(operation, parameters)
862        else:
863            # not a list of tuples
864            return self.executemany(operation, [parameters])
865
866    def executemany(self, operation, seq_of_parameters):
867        """Prepare operation and execute it against a parameter sequence."""
868        if not seq_of_parameters:
869            # don't do anything without parameters
870            return
871        self._description = None
872        self.rowcount = -1
873        # first try to execute all queries
874        rowcount = 0
875        sql = "BEGIN"
876        try:
877            if not self._dbcnx._tnx:
878                try:
879                    self._cnx.source().execute(sql)
880                except DatabaseError:
881                    raise  # database provides error message
882                except Exception:
883                    raise _op_error("Can't start transaction")
884                self._dbcnx._tnx = True
885            for parameters in seq_of_parameters:
886                sql = operation
887                if parameters:
888                    sql = self._quoteparams(sql, parameters)
889                rows = self._src.execute(sql)
890                if rows:  # true if not DML
891                    rowcount += rows
892                else:
893                    self.rowcount = -1
894        except DatabaseError:
895            raise  # database provides error message
896        except Error as err:
897            raise _db_error(
898                "Error in '%s': '%s' " % (sql, err), InterfaceError)
899        except Exception as err:
900            raise _op_error("Internal error in '%s': %s" % (sql, err))
901        # then initialize result raw count and description
902        if self._src.resulttype == RESULT_DQL:
903            self._description = True  # fetch on demand
904            self.rowcount = self._src.ntuples
905            self.lastrowid = None
906            if self.build_row_factory:
907                self.row_factory = self.build_row_factory()
908        else:
909            self.rowcount = rowcount
910            self.lastrowid = self._src.oidstatus()
911        # return the cursor object, so you can write statements such as
912        # "cursor.execute(...).fetchall()" or "for row in cursor.execute(...)"
913        return self
914
915    def fetchone(self):
916        """Fetch the next row of a query result set."""
917        res = self.fetchmany(1, False)
918        try:
919            return res[0]
920        except IndexError:
921            return None
922
923    def fetchall(self):
924        """Fetch all (remaining) rows of a query result."""
925        return self.fetchmany(-1, False)
926
927    def fetchmany(self, size=None, keep=False):
928        """Fetch the next set of rows of a query result.
929
930        The number of rows to fetch per call is specified by the
931        size parameter. If it is not given, the cursor's arraysize
932        determines the number of rows to be fetched. If you set
933        the keep parameter to true, this is kept as new arraysize.
934        """
935        if size is None:
936            size = self.arraysize
937        if keep:
938            self.arraysize = size
939        try:
940            result = self._src.fetch(size)
941        except DatabaseError:
942            raise
943        except Error as err:
944            raise _db_error(str(err))
945        typecast = self.type_cache.typecast
946        return [self.row_factory([typecast(value, typ)
947            for typ, value in zip(self.coltypes, row)]) for row in result]
948
949    def callproc(self, procname, parameters=None):
950        """Call a stored database procedure with the given name.
951
952        The sequence of parameters must contain one entry for each input
953        argument that the procedure expects. The result of the call is the
954        same as this input sequence; replacement of output and input/output
955        parameters in the return value is currently not supported.
956
957        The procedure may also provide a result set as output. These can be
958        requested through the standard fetch methods of the cursor.
959        """
960        n = parameters and len(parameters) or 0
961        query = 'select * from "%s"(%s)' % (procname, ','.join(n * ['%s']))
962        self.execute(query, parameters)
963        return parameters
964
965    def copy_from(self, stream, table,
966            format=None, sep=None, null=None, size=None, columns=None):
967        """Copy data from an input stream to the specified table.
968
969        The input stream can be a file-like object with a read() method or
970        it can also be an iterable returning a row or multiple rows of input
971        on each iteration.
972
973        The format must be text, csv or binary. The sep option sets the
974        column separator (delimiter) used in the non binary formats.
975        The null option sets the textual representation of NULL in the input.
976
977        The size option sets the size of the buffer used when reading data
978        from file-like objects.
979
980        The copy operation can be restricted to a subset of columns. If no
981        columns are specified, all of them will be copied.
982        """
983        binary_format = format == 'binary'
984        try:
985            read = stream.read
986        except AttributeError:
987            if size:
988                raise ValueError("Size must only be set for file-like objects")
989            if binary_format:
990                input_type = bytes
991                type_name = 'byte strings'
992            else:
993                input_type = basestring
994                type_name = 'strings'
995
996            if isinstance(stream, basestring):
997                if not isinstance(stream, input_type):
998                    raise ValueError("The input must be %s" % type_name)
999                if not binary_format:
1000                    if isinstance(stream, str):
1001                        if not stream.endswith('\n'):
1002                            stream += '\n'
1003                    else:
1004                        if not stream.endswith(b'\n'):
1005                            stream += b'\n'
1006
1007                def chunks():
1008                    yield stream
1009
1010            elif isinstance(stream, Iterable):
1011
1012                def chunks():
1013                    for chunk in stream:
1014                        if not isinstance(chunk, input_type):
1015                            raise ValueError(
1016                                "Input stream must consist of %s" % type_name)
1017                        if isinstance(chunk, str):
1018                            if not chunk.endswith('\n'):
1019                                chunk += '\n'
1020                        else:
1021                            if not chunk.endswith(b'\n'):
1022                                chunk += b'\n'
1023                        yield chunk
1024
1025            else:
1026                raise TypeError("Need an input stream to copy from")
1027        else:
1028            if size is None:
1029                size = 8192
1030            elif not isinstance(size, int):
1031                raise TypeError("The size option must be an integer")
1032            if size > 0:
1033
1034                def chunks():
1035                    while True:
1036                        buffer = read(size)
1037                        yield buffer
1038                        if not buffer or len(buffer) < size:
1039                            break
1040
1041            else:
1042
1043                def chunks():
1044                    yield read()
1045
1046        if not table or not isinstance(table, basestring):
1047            raise TypeError("Need a table to copy to")
1048        if table.lower().startswith('select'):
1049            raise ValueError("Must specify a table, not a query")
1050        else:
1051            table = '"%s"' % (table,)
1052        operation = ['copy %s' % (table,)]
1053        options = []
1054        params = []
1055        if format is not None:
1056            if not isinstance(format, basestring):
1057                raise TypeError("The format option must be a string")
1058            if format not in ('text', 'csv', 'binary'):
1059                raise ValueError("Invalid format")
1060            options.append('format %s' % (format,))
1061        if sep is not None:
1062            if not isinstance(sep, basestring):
1063                raise TypeError("The sep option must be a string")
1064            if format == 'binary':
1065                raise ValueError(
1066                    "The sep option is not allowed with binary format")
1067            if len(sep) != 1:
1068                raise ValueError(
1069                    "The sep option must be a single one-byte character")
1070            options.append('delimiter %s')
1071            params.append(sep)
1072        if null is not None:
1073            if not isinstance(null, basestring):
1074                raise TypeError("The null option must be a string")
1075            options.append('null %s')
1076            params.append(null)
1077        if columns:
1078            if not isinstance(columns, basestring):
1079                columns = ','.join('"%s"' % (col,) for col in columns)
1080            operation.append('(%s)' % (columns,))
1081        operation.append("from stdin")
1082        if options:
1083            operation.append('(%s)' % ','.join(options))
1084        operation = ' '.join(operation)
1085
1086        putdata = self._src.putdata
1087        self.execute(operation, params)
1088
1089        try:
1090            for chunk in chunks():
1091                putdata(chunk)
1092        except BaseException as error:
1093            self.rowcount = -1
1094            # the following call will re-raise the error
1095            putdata(error)
1096        else:
1097            self.rowcount = putdata(None)
1098
1099        # return the cursor object, so you can chain operations
1100        return self
1101
1102    def copy_to(self, stream, table,
1103            format=None, sep=None, null=None, decode=None, columns=None):
1104        """Copy data from the specified table to an output stream.
1105
1106        The output stream can be a file-like object with a write() method or
1107        it can also be None, in which case the method will return a generator
1108        yielding a row on each iteration.
1109
1110        Output will be returned as byte strings unless you set decode to true.
1111
1112        Note that you can also use a select query instead of the table name.
1113
1114        The format must be text, csv or binary. The sep option sets the
1115        column separator (delimiter) used in the non binary formats.
1116        The null option sets the textual representation of NULL in the output.
1117
1118        The copy operation can be restricted to a subset of columns. If no
1119        columns are specified, all of them will be copied.
1120        """
1121        binary_format = format == 'binary'
1122        if stream is not None:
1123            try:
1124                write = stream.write
1125            except AttributeError:
1126                raise TypeError("Need an output stream to copy to")
1127        if not table or not isinstance(table, basestring):
1128            raise TypeError("Need a table to copy from")
1129        if table.lower().startswith('select'):
1130            if columns:
1131                raise ValueError("Columns must be specified in the query")
1132            table = '(%s)' % (table,)
1133        else:
1134            table = '"%s"' % (table,)
1135        operation = ['copy %s' % (table,)]
1136        options = []
1137        params = []
1138        if format is not None:
1139            if not isinstance(format, basestring):
1140                raise TypeError("The format option must be a string")
1141            if format not in ('text', 'csv', 'binary'):
1142                raise ValueError("Invalid format")
1143            options.append('format %s' % (format,))
1144        if sep is not None:
1145            if not isinstance(sep, basestring):
1146                raise TypeError("The sep option must be a string")
1147            if binary_format:
1148                raise ValueError(
1149                    "The sep option is not allowed with binary format")
1150            if len(sep) != 1:
1151                raise ValueError(
1152                    "The sep option must be a single one-byte character")
1153            options.append('delimiter %s')
1154            params.append(sep)
1155        if null is not None:
1156            if not isinstance(null, basestring):
1157                raise TypeError("The null option must be a string")
1158            options.append('null %s')
1159            params.append(null)
1160        if decode is None:
1161            if format == 'binary':
1162                decode = False
1163            else:
1164                decode = str is unicode
1165        else:
1166            if not isinstance(decode, (int, bool)):
1167                raise TypeError("The decode option must be a boolean")
1168            if decode and binary_format:
1169                raise ValueError(
1170                    "The decode option is not allowed with binary format")
1171        if columns:
1172            if not isinstance(columns, basestring):
1173                columns = ','.join('"%s"' % (col,) for col in columns)
1174            operation.append('(%s)' % (columns,))
1175
1176        operation.append("to stdout")
1177        if options:
1178            operation.append('(%s)' % ','.join(options))
1179        operation = ' '.join(operation)
1180
1181        getdata = self._src.getdata
1182        self.execute(operation, params)
1183
1184        def copy():
1185            self.rowcount = 0
1186            while True:
1187                row = getdata(decode)
1188                if isinstance(row, int):
1189                    if self.rowcount != row:
1190                        self.rowcount = row
1191                    break
1192                self.rowcount += 1
1193                yield row
1194
1195        if stream is None:
1196            # no output stream, return the generator
1197            return copy()
1198
1199        # write the rows to the file-like output stream
1200        for row in copy():
1201            write(row)
1202
1203        # return the cursor object, so you can chain operations
1204        return self
1205
1206    def __next__(self):
1207        """Return the next row (support for the iteration protocol)."""
1208        res = self.fetchone()
1209        if res is None:
1210            raise StopIteration
1211        return res
1212
1213    # Note that since Python 3.0 the iterator protocol uses __next__()
1214    # instead of next(); the next alias is kept only for backward compatibility of pgdb.
1215    next = __next__
1216
1217    @staticmethod
1218    def nextset():
1219        """Not supported."""
1220        raise NotSupportedError("The nextset() method is not supported")
1221
1222    @staticmethod
1223    def setinputsizes(sizes):
1224        """Not supported."""
1225        pass  # unsupported, but silently passed
1226
1227    @staticmethod
1228    def setoutputsize(size, column=0):
1229        """Not supported."""
1230        pass  # unsupported, but silently passed
1231
1232    @staticmethod
1233    def row_factory(row):
1234        """Process rows before they are returned.
1235
1236        You can overwrite this statically with a custom row factory, or
1237        you can build a row factory dynamically with build_row_factory().
1238
1239        For example, you can create a Cursor class that returns rows as
1240        Python dictionaries like this:
1241
1242            class DictCursor(pgdb.Cursor):
1243
1244                def row_factory(self, row):
1245                    return {desc[0]: value
1246                        for desc, value in zip(self.description, row)}
1247
1248            cur = DictCursor(con)  # get one DictCursor instance or
1249            con.cursor_type = DictCursor  # always use DictCursor instances
1250        """
1251        raise NotImplementedError
1252
1253    def build_row_factory(self):
1254        """Build a row factory based on the current description.
1255
1256        This implementation builds a row factory for creating named tuples.
1257        You can overwrite this method if you want to dynamically create
1258        different row factories whenever the column description changes.
1259        """
1260        colnames = self.colnames
1261        if colnames:
1262            try:
1263                try:
1264                    return namedtuple('Row', colnames, rename=True)._make
1265                except TypeError:  # Python 2.6 and 3.0 do not support rename
1266                    colnames = [v if v.isalnum() else 'column_%d' % n
1267                             for n, v in enumerate(colnames)]
1268                    return namedtuple('Row', colnames)._make
1269            except ValueError:  # there is still a problem with the field names
1270                colnames = ['column_%d' % n for n in range(len(colnames))]
1271                return namedtuple('Row', colnames)._make
1272
1273
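# Illustrative sketch (not part of the original module): bulk loading and
# dumping rows with the non-standard copy_from()/copy_to() cursor methods.
# The table name "copy_test" and its two columns are assumptions; the input
# is an iterable of text-format rows (tab-separated, newline-terminated).
def _example_copy(cursor):
    rows = ['1\tone\n', '2\ttwo\n']
    cursor.copy_from(rows, 'copy_test', format='text')
    return list(cursor.copy_to(None, 'copy_test', decode=True))

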
1274CursorDescription = namedtuple('CursorDescription',
1275    ['name', 'type_code', 'display_size', 'internal_size',
1276     'precision', 'scale', 'null_ok'])
1277
1278
1279### Connection Objects
1280
1281class Connection(object):
1282    """Connection object."""
1283
1284    # expose the exceptions as attributes on the connection object
1285    Error = Error
1286    Warning = Warning
1287    InterfaceError = InterfaceError
1288    DatabaseError = DatabaseError
1289    InternalError = InternalError
1290    OperationalError = OperationalError
1291    ProgrammingError = ProgrammingError
1292    IntegrityError = IntegrityError
1293    DataError = DataError
1294    NotSupportedError = NotSupportedError
1295
1296    def __init__(self, cnx):
1297        """Create a database connection object."""
1298        self._cnx = cnx  # connection
1299        self._tnx = False  # transaction state
1300        self.type_cache = TypeCache(cnx)
1301        self.cursor_type = Cursor
1302        try:
1303            self._cnx.source()
1304        except Exception:
1305            raise _op_error("Invalid connection")
1306
1307    def __enter__(self):
1308        """Enter the runtime context for the connection object.
1309
1310        The runtime context can be used for running transactions.
1311        """
1312        return self
1313
1314    def __exit__(self, et, ev, tb):
1315        """Exit the runtime context for the connection object.
1316
1317        This does not close the connection, but it ends a transaction.
1318        """
1319        if et is None and ev is None and tb is None:
1320            self.commit()
1321        else:
1322            self.rollback()
1323
1324    def close(self):
1325        """Close the connection object."""
1326        if self._cnx:
1327            if self._tnx:
1328                try:
1329                    self.rollback()
1330                except DatabaseError:
1331                    pass
1332            self._cnx.close()
1333            self._cnx = None
1334        else:
1335            raise _op_error("Connection has been closed")
1336
1337    def commit(self):
1338        """Commit any pending transaction to the database."""
1339        if self._cnx:
1340            if self._tnx:
1341                self._tnx = False
1342                try:
1343                    self._cnx.source().execute("COMMIT")
1344                except DatabaseError:
1345                    raise
1346                except Exception:
1347                    raise _op_error("Can't commit")
1348        else:
1349            raise _op_error("Connection has been closed")
1350
1351    def rollback(self):
1352        """Roll back to the start of any pending transaction."""
1353        if self._cnx:
1354            if self._tnx:
1355                self._tnx = False
1356                try:
1357                    self._cnx.source().execute("ROLLBACK")
1358                except DatabaseError:
1359                    raise
1360                except Exception:
1361                    raise _op_error("Can't rollback")
1362        else:
1363            raise _op_error("Connection has been closed")
1364
1365    def cursor(self):
1366        """Return a new cursor object using the connection."""
1367        if self._cnx:
1368            try:
1369                return self.cursor_type(self)
1370            except Exception:
1371                raise _op_error("Invalid connection")
1372        else:
1373            raise _op_error("Connection has been closed")
1374
1375    if shortcutmethods:  # otherwise do not implement and document this
1376
1377        def execute(self, operation, params=None):
1378            """Shortcut method to run an operation on an implicit cursor."""
1379            cursor = self.cursor()
1380            cursor.execute(operation, params)
1381            return cursor
1382
1383        def executemany(self, operation, param_seq):
1384            """Shortcut method to run an operation against a sequence."""
1385            cursor = self.cursor()
1386            cursor.executemany(operation, param_seq)
1387            return cursor
1388
1389
1390### Module Interface
1391
1392_connect = connect
1393
1394def connect(dsn=None,
1395        user=None, password=None,
1396        host=None, database=None):
1397    """Connect to a database."""
1398    # first get params from DSN
1399    dbport = -1
1400    dbhost = ""
1401    dbbase = ""
1402    dbuser = ""
1403    dbpasswd = ""
1404    dbopt = ""
1405    try:
1406        params = dsn.split(":")
1407        dbhost = params[0]
1408        dbbase = params[1]
1409        dbuser = params[2]
1410        dbpasswd = params[3]
1411        dbopt = params[4]
1412    except (AttributeError, IndexError, TypeError):
1413        pass
1414
1415    # override if necessary
1416    if user is not None:
1417        dbuser = user
1418    if password is not None:
1419        dbpasswd = password
1420    if database is not None:
1421        dbbase = database
1422    if host is not None:
1423        try:
1424            params = host.split(":")
1425            dbhost = params[0]
1426            dbport = int(params[1])
1427        except (AttributeError, IndexError, TypeError, ValueError):
1428            pass
1429
1430    # empty host is localhost
1431    if dbhost == "":
1432        dbhost = None
1433    if dbuser == "":
1434        dbuser = None
1435
1436    # open the connection
1437    cnx = _connect(dbbase, dbhost, dbport, dbopt, dbuser, dbpasswd)
1438    return Connection(cnx)
1439
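
# Illustrative end-to-end sketch (not part of the original module), assuming
# a PostgreSQL server on localhost with a database named "test":
def _example_connect():
    con = connect(database='test', host='localhost:5432')
    try:
        cur = con.cursor()
        cur.execute("select %(a)s || %(b)s as greeting",
            {'a': 'Hello, ', 'b': 'world!'})
        row = cur.fetchone()   # rows are named tuples by default
        con.commit()
        return row.greeting
    finally:
        con.close()
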
1440
1441### Types Handling
1442
1443class Type(frozenset):
1444    """Type class for a couple of PostgreSQL data types.
1445
1446    PostgreSQL is object-oriented: types are dynamic.
1447    We must thus use type names as internal type codes.
1448    """
1449
1450    def __new__(cls, values):
1451        if isinstance(values, basestring):
1452            values = values.split()
1453        return super(Type, cls).__new__(cls, values)
1454
1455    def __eq__(self, other):
1456        if isinstance(other, basestring):
1457            if other.startswith('_'):
1458                other = other[1:]
1459            return other in self
1460        else:
1461            return super(Type, self).__eq__(other)
1462
1463    def __ne__(self, other):
1464        if isinstance(other, basestring):
1465            if other.startswith('_'):
1466                other = other[1:]
1467            return other not in self
1468        else:
1469            return super(Type, self).__ne__(other)
1470
1471
1472class ArrayType:
1473    """Type class for PostgreSQL array types."""
1474
1475    def __eq__(self, other):
1476        if isinstance(other, basestring):
1477            return other.startswith('_')
1478        else:
1479            return isinstance(other, ArrayType)
1480
1481    def __ne__(self, other):
1482        if isinstance(other, basestring):
1483            return not other.startswith('_')
1484        else:
1485            return not isinstance(other, ArrayType)
1486
1487
1488class RecordType:
1489    """Type class for PostgreSQL record types."""
1490
1491    def __eq__(self, other):
1492        if isinstance(other, TypeCode):
1493            return other.type == 'c'
1494        elif isinstance(other, basestring):
1495            return other == 'record'
1496        else:
1497            return isinstance(other, RecordType)
1498
1499    def __ne__(self, other):
1500        if isinstance(other, TypeCode):
1501            return other.type != 'c'
1502        elif isinstance(other, basestring):
1503            return other != 'record'
1504        else:
1505            return not isinstance(other, RecordType)
1506
1507
1508# Mandatory type objects defined by DB-API 2 specs:
1509
1510STRING = Type('char bpchar name text varchar')
1511BINARY = Type('bytea')
1512NUMBER = Type('int2 int4 serial int8 float4 float8 numeric money')
1513DATETIME = Type('date time timetz timestamp timestamptz interval'
1514    ' abstime reltime')  # these are very old
1515ROWID = Type('oid')
1516
1517
1518# Additional type objects (more specific):
1519
1520BOOL = Type('bool')
1521SMALLINT = Type('int2')
1522INTEGER = Type('int2 int4 int8 serial')
1523LONG = Type('int8')
1524FLOAT = Type('float4 float8')
1525NUMERIC = Type('numeric')
1526MONEY = Type('money')
1527DATE = Type('date')
1528TIME = Type('time timetz')
1529TIMESTAMP = Type('timestamp timestamptz')
1530INTERVAL = Type('interval')
1531UUID = Type('uuid')
1532HSTORE = Type('hstore')
1533JSON = Type('json jsonb')
1534
1535# Type object for arrays (also equate to their base types):
1536
1537ARRAY = ArrayType()
1538
1539# Type object for records (encompassing all composite types):
1540
1541RECORD = RecordType()
1542
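
# Illustrative sketch (not part of the original module): the type objects
# above compare equal to the type_code values found in cursor.description.
def _example_type_objects(cursor):
    cursor.execute("select 42 as answer, now() as ts")
    codes = [d.type_code for d in cursor.description]
    return codes[0] == NUMBER and codes[1] == DATETIME  # both hold true
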
1543
1544# Mandatory type helpers defined by DB-API 2 specs:
1545
1546def Date(year, month, day):
1547    """Construct an object holding a date value."""
1548    return date(year, month, day)
1549
1550
1551def Time(hour, minute=0, second=0, microsecond=0, tzinfo=None):
1552    """Construct an object holding a time value."""
1553    return time(hour, minute, second, microsecond, tzinfo)
1554
1555
1556def Timestamp(year, month, day, hour=0, minute=0, second=0, microsecond=0,
1557        tzinfo=None):
1558    """Construct an object holding a time stamp value."""
1559    return datetime(year, month, day, hour, minute, second, microsecond, tzinfo)
1560
1561
1562def DateFromTicks(ticks):
1563    """Construct an object holding a date value from the given ticks value."""
1564    return Date(*localtime(ticks)[:3])
1565
1566
1567def TimeFromTicks(ticks):
1568    """Construct an object holding a time value from the given ticks value."""
1569    return Time(*localtime(ticks)[3:6])
1570
1571
1572def TimestampFromTicks(ticks):
1573    """Construct an object holding a time stamp from the given ticks value."""
1574    return Timestamp(*localtime(ticks)[:6])
1575
1576
1577class Binary(bytes):
1578    """Construct an object capable of holding a binary (long) string value."""
1579
1580
1581# Additional type helpers for PyGreSQL:
1582
1583def Interval(days, hours=0, minutes=0, seconds=0, microseconds=0):
1584    """Construct an object holding a time interval value."""
1585    return timedelta(days, hours=hours, minutes=minutes, seconds=seconds,
1586        microseconds=microseconds)
1587
1588
1589Uuid = Uuid  # Construct an object holding a UUID value
1590
1591
1592class Hstore(dict):
1593    """Wrapper class for marking hstore values."""
1594
1595    _re_quote = regex('^[Nn][Uu][Ll][Ll]$|[ ,=>]')
1596    _re_escape = regex(r'(["\\])')
1597
1598    @classmethod
1599    def _quote(cls, s):
1600        if s is None:
1601            return 'NULL'
1602        if not s:
1603            return '""'
1604        quote = cls._re_quote.search(s)
1605        s = cls._re_escape.sub(r'\\\1', s)
1606        if quote:
1607            s = '"%s"' % s
1608        return s
1609
1610    def __str__(self):
1611        q = self._quote
1612        return ','.join('%s=>%s' % (q(k), q(v)) for k, v in self.items())
1613
1614
1615class Json:
1616    """Construct a wrapper for holding an object serializable to JSON."""
1617
1618    def __init__(self, obj, encode=None):
1619        self.obj = obj
1620        self.encode = encode or jsonencode
1621
1622    def __str__(self):
1623        obj = self.obj
1624        if isinstance(obj, basestring):
1625            return obj
1626        return self.encode(obj)
1627
1628
1629class Literal:
1630    """Construct a wrapper for holding a literal SQL string."""
1631
1632    def __init__(self, sql):
1633        self.sql = sql
1634
1635    def __str__(self):
1636        return self.sql
1637
1638    __pg_repr__ = __str__
1639
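
# Illustrative sketch (not part of the original module): passing the Hstore,
# Json and Literal wrappers as query parameters.  The table "wrapper_test"
# and its columns are assumptions, and the hstore extension must be installed
# for the first parameter to be accepted by the server.
def _example_wrappers(cursor):
    cursor.execute(
        "insert into wrapper_test (tags, doc, created) values"
        " (%(tags)s, %(doc)s, %(created)s)", {
            'tags': Hstore({'lang': 'en', 'topic': 'demo'}),
            'doc': Json({'answer': 42}),
            'created': Literal('current_timestamp')})
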
1640# If run as script, print some information:
1641
1642if __name__ == '__main__':
1643    print('PyGreSQL version', version)
1644    print('')
1645    print(__doc__)