source: trunk/pgdb.py @ 842

Last change on this file since 842 was 842, checked in by cito, 3 years ago

Treat percent signs in SQL strings always the same

1#! /usr/bin/python
2#
3# pgdb.py
4#
5# Written by D'Arcy J.M. Cain
6#
7# $Id: pgdb.py 842 2016-02-08 21:02:10Z cito $
8#
9
10"""pgdb - DB-API 2.0 compliant module for PygreSQL.
11
12(c) 1999, Pascal Andre <andre@via.ecp.fr>.
13See package documentation for further information on copyright.
14
15Inline documentation is sparse.
16See DB-API 2.0 specification for usage information:
17http://www.python.org/peps/pep-0249.html
18
19Basic usage:
20
21    pgdb.connect(connect_string) # open a connection
22    # connect_string = 'host:database:user:password:opt'
23    # All parts are optional. You may also pass the host, database,
24    # user and password as individual keyword arguments. To pass a port,
25    # include it in the host parameter:
26    connection = pgdb.connect(host='localhost:5432')
27
28    cursor = connection.cursor() # open a cursor
29
30    cursor.execute(query[, params])
31    # Execute a query, binding params (a dictionary) if they are
32    # passed. The binding syntax is the same as the % operator
33    # for dictionaries; the values are quoted automatically.
34
35    cursor.executemany(query, list of params)
36    # Execute a query many times, binding each param dictionary
37    # from the list.
38
39    cursor.fetchone() # fetch one row, [value, value, ...]
40
41    cursor.fetchall() # fetch all rows, [[value, value, ...], ...]
42
43    cursor.fetchmany([size])
44    # returns size or cursor.arraysize number of rows,
45    # [[value, value, ...], ...] from result set.
46    # Default cursor.arraysize is 1.
47
48    cursor.description # returns information about the columns
49    #   [(column_name, type_name, display_size,
50    #           internal_size, precision, scale, null_ok), ...]
51    # Note that display_size, precision, scale and null_ok
52    # are not implemented.
53
54    cursor.rowcount # number of rows available in the result set
55    # Available after a call to execute.
56
57    connection.commit() # commit transaction
58
59    connection.rollback() # or rollback transaction
60
61    cursor.close() # close the cursor
62
63    connection.close() # close the connection
64"""
65
66from __future__ import print_function
67
68from _pg import *
69
70__version__ = version
71
72from datetime import date, time, datetime, timedelta
73from time import localtime
74from decimal import Decimal
75from uuid import UUID as Uuid
76from math import isnan, isinf
77from collections import namedtuple
78from functools import partial
79from re import compile as regex
80from json import loads as jsondecode, dumps as jsonencode
81
82try:
83    long
84except NameError:  # Python >= 3.0
85    long = int
86
87try:
88    unicode
89except NameError:  # Python >= 3.0
90    unicode = str
91
92try:
93    basestring
94except NameError:  # Python >= 3.0
95    basestring = (str, bytes)
96
97from collections import Iterable
98
99
100### Module Constants
101
102# compliant with DB API 2.0
103apilevel = '2.0'
104
105# module may be shared, but not connections
106threadsafety = 1
107
109# this module uses extended Python format codes
109paramstyle = 'pyformat'
110
111# shortcut methods have been excluded from DB API 2 and
112# are not recommended by the DB SIG, but they can be handy
113shortcutmethods = 1
114
115
116### Internal Type Handling
117
118try:
119    from inspect import signature
120except ImportError:  # Python < 3.3
121    from inspect import getargspec
122
123    def get_args(func):
124        return getargspec(func).args
125else:
126
127    def get_args(func):
128        return list(signature(func).parameters)
129
130try:
131    if datetime.strptime('+0100', '%z') is None:
132        raise ValueError
133except ValueError:  # Python < 3.2
134    timezones = None
135else:
136    # time zones used in Postgres timestamptz output
137    timezones = dict(CET='+0100', EET='+0200', EST='-0500',
138        GMT='+0000', HST='-1000', MET='+0100', MST='-0700',
139        UCT='+0000', UTC='+0000', WET='+0000')
140
141
142def decimal_type(decimal_type=None):
143    """Get or set global type to be used for decimal values.
144
145    Note that connections cache cast functions. To be sure a global change
146    is picked up by a running connection, call con.type_cache.reset_typecast().
147    """
148    global Decimal
149    if decimal_type is not None:
150        Decimal = decimal_type
151        set_typecast('numeric', decimal_type)
152    return Decimal
153
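As an illustration, numeric columns can be returned as plain floats instead of
Decimal by swapping the global decimal type (a sketch; note that connections
opened earlier keep their cached casts until they are reset):

    from decimal import Decimal

    pgdb.decimal_type(float)          # numeric values are now cast with float()
    # con.type_cache.reset_typecast() # refresh an already open connection
    pgdb.decimal_type(Decimal)        # restore the default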
154
155def cast_bool(value):
156    """Cast boolean value in database format to bool."""
157    if value:
158        return value[0] in ('t', 'T')
159
160
161def cast_money(value):
162    """Cast money value in database format to Decimal."""
163    if value:
164        value = value.replace('(', '-')
165        return Decimal(''.join(c for c in value if c.isdigit() or c in '.-'))
166
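Two quick examples of these low-level casts (the money strings assume a locale
that renders negative amounts in parentheses):

    cast_bool('t')          # -> True
    cast_money('$123.45')   # -> Decimal('123.45')
    cast_money('($20.00)')  # -> Decimal('-20.00')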
167
168def cast_int2vector(value):
169    """Cast an int2vector value."""
170    return [int(v) for v in value.split()]
171
172
173def cast_date(value, connection):
174    """Cast a date value."""
175    # The output format depends on the server setting DateStyle.  The default
176    # setting ISO and the setting for German are actually unambiguous.  The
177    # order of days and months in the other two settings is however ambiguous,
178    # so at least here we need to consult the setting to properly parse values.
179    if value == '-infinity':
180        return date.min
181    if value == 'infinity':
182        return date.max
183    value = value.split()
184    if value[-1] == 'BC':
185        return date.min
186    value = value[0]
187    if len(value) > 10:
188        return date.max
189    fmt = connection.date_format()
190    return datetime.strptime(value, fmt).date()
191
192
193def cast_time(value):
194    """Cast a time value."""
195    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
196    return datetime.strptime(value, fmt).time()
197
198
199_re_timezone = regex('(.*)([+-].*)')
200
201
202def cast_timetz(value):
203    """Cast a timetz value."""
204    tz = _re_timezone.match(value)
205    if tz:
206        value, tz = tz.groups()
207    else:
208        tz = '+0000'
209    fmt = '%H:%M:%S.%f' if len(value) > 8 else '%H:%M:%S'
210    if timezones:
211        if tz.startswith(('+', '-')):
212            if len(tz) < 5:
213                tz += '00'
214            else:
215                tz = tz.replace(':', '')
216        elif tz in timezones:
217            tz = timezones[tz]
218        else:
219            tz = '+0000'
220        value += tz
221        fmt += '%z'
222    return datetime.strptime(value, fmt).timetz()
223
224
225def cast_timestamp(value, connection):
226    """Cast a timestamp value."""
227    if value == '-infinity':
228        return datetime.min
229    if value == 'infinity':
230        return datetime.max
231    value = value.split()
232    if value[-1] == 'BC':
233        return datetime.min
234    fmt = connection.date_format()
235    if fmt.endswith('-%Y') and len(value) > 2:
236        value = value[1:5]
237        if len(value[3]) > 4:
238            return datetime.max
239        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
240            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
241    else:
242        if len(value[0]) > 10:
243            return datetime.max
244        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
245    return datetime.strptime(' '.join(value), ' '.join(fmt))
246
247
248def cast_timestamptz(value, connection):
249    """Cast a timestamptz value."""
250    if value == '-infinity':
251        return datetime.min
252    if value == 'infinity':
253        return datetime.max
254    value = value.split()
255    if value[-1] == 'BC':
256        return datetime.min
257    fmt = connection.date_format()
258    if fmt.endswith('-%Y') and len(value) > 2:
259        value = value[1:]
260        if len(value[3]) > 4:
261            return datetime.max
262        fmt = ['%d %b' if fmt.startswith('%d') else '%b %d',
263            '%H:%M:%S.%f' if len(value[2]) > 8 else '%H:%M:%S', '%Y']
264        value, tz = value[:-1], value[-1]
265    else:
266        if fmt.startswith('%Y-'):
267            tz = _re_timezone.match(value[1])
268            if tz:
269                value[1], tz = tz.groups()
270            else:
271                tz = '+0000'
272        else:
273            value, tz = value[:-1], value[-1]
274        if len(value[0]) > 10:
275            return datetime.max
276        fmt = [fmt, '%H:%M:%S.%f' if len(value[1]) > 8 else '%H:%M:%S']
277    if timezones:
278        if tz.startswith(('+', '-')):
279            if len(tz) < 5:
280                tz += '00'
281            else:
282                tz = tz.replace(':', '')
283        elif tz in timezones:
284            tz = timezones[tz]
285        else:
286            tz = '+0000'
287        value.append(tz)
288        fmt.append('%z')
289    return datetime.strptime(' '.join(value), ' '.join(fmt))
290
291_re_interval_sql_standard = regex(
292    '(?:([+-])?([0-9]+)-([0-9]+) ?)?'
293    '(?:([+-]?[0-9]+)(?!:) ?)?'
294    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
295
296_re_interval_postgres = regex(
297    '(?:([+-]?[0-9]+) ?years? ?)?'
298    '(?:([+-]?[0-9]+) ?mons? ?)?'
299    '(?:([+-]?[0-9]+) ?days? ?)?'
300    '(?:([+-])?([0-9]+):([0-9]+):([0-9]+)(?:\\.([0-9]+))?)?')
301
302_re_interval_postgres_verbose = regex(
303    '@ ?(?:([+-]?[0-9]+) ?years? ?)?'
304    '(?:([+-]?[0-9]+) ?mons? ?)?'
305    '(?:([+-]?[0-9]+) ?days? ?)?'
306    '(?:([+-]?[0-9]+) ?hours? ?)?'
307    '(?:([+-]?[0-9]+) ?mins? ?)?'
308    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))? ?secs?)? ?(ago)?')
309
310_re_interval_iso_8601 = regex(
311    'P(?:([+-]?[0-9]+)Y)?'
312    '(?:([+-]?[0-9]+)M)?'
313    '(?:([+-]?[0-9]+)D)?'
314    '(?:T(?:([+-]?[0-9]+)H)?'
315    '(?:([+-]?[0-9]+)M)?'
316    '(?:([+-])?([0-9]+)(?:\\.([0-9]+))?S)?)?')
317
318
319def cast_interval(value):
320    """Cast an interval value."""
321    # The output format depends on the server setting IntervalStyle, but it's
322    # not necessary to consult this setting to parse it.  It's faster to just
323    # check all possible formats, and there is no ambiguity here.
324    m = _re_interval_iso_8601.match(value)
325    if m:
326        m = [d or '0' for d in m.groups()]
327        secs_ago = m.pop(5) == '-'
328        m = [int(d) for d in m]
329        years, mons, days, hours, mins, secs, usecs = m
330        if secs_ago:
331            secs = -secs
332            usecs = -usecs
333    else:
334        m = _re_interval_postgres_verbose.match(value)
335        if m:
336            m, ago = [d or '0' for d in m.groups()[:8]], m.group(9)
337            secs_ago = m.pop(5) == '-'
338            m = [-int(d) for d in m] if ago else [int(d) for d in m]
339            years, mons, days, hours, mins, secs, usecs = m
340            if secs_ago:
341                secs = -secs
342                usecs = -usecs
343        else:
344            m = _re_interval_postgres.match(value)
345            if m and any(m.groups()):
346                m = [d or '0' for d in m.groups()]
347                hours_ago = m.pop(3) == '-'
348                m = [int(d) for d in m]
349                years, mons, days, hours, mins, secs, usecs = m
350                if hours_ago:
351                    hours = -hours
352                    mins = -mins
353                    secs = -secs
354                    usecs = -usecs
355            else:
356                m = _re_interval_sql_standard.match(value)
357                if m and any(m.groups()):
358                    m = [d or '0' for d in m.groups()]
359                    years_ago = m.pop(0) == '-'
360                    hours_ago = m.pop(3) == '-'
361                    m = [int(d) for d in m]
362                    years, mons, days, hours, mins, secs, usecs = m
363                    if years_ago:
364                        years = -years
365                        mons = -mons
366                    if hours_ago:
367                        hours = -hours
368                        mins = -mins
369                        secs = -secs
370                        usecs = -usecs
371                else:
372                    raise ValueError('Cannot parse interval: %s' % value)
373    days += 365 * years + 30 * mons
374    return timedelta(days=days, hours=hours, minutes=mins,
375        seconds=secs, microseconds=usecs)
376
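All of the interval output styles accepted above map to plain timedeltas; for
example, using the 365/30-day approximation for years and months shown below:

    from datetime import timedelta

    cast_interval('P1Y2M3DT4H5M6S')                  # ISO-8601 style
    cast_interval('1 year 2 mons 3 days 04:05:06')   # default Postgres style
    # both yield timedelta(days=428, hours=4, minutes=5, seconds=6)
    cast_interval('1 day 12:00:00') == timedelta(days=1, hours=12)   # True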
377
378class Typecasts(dict):
379    """Dictionary mapping database types to typecast functions.
380
381    The cast functions get passed the string representation of a value in
382    the database which they need to convert to a Python object.  The
383    passed string will never be None since NULL values are already
384    handled before the cast function is called.
385    """
386
387    # the default cast functions
388    # (str functions are ignored but have been added for faster access)
389    defaults = {'char': str, 'bpchar': str, 'name': str,
390        'text': str, 'varchar': str,
391        'bool': cast_bool, 'bytea': unescape_bytea,
392        'int2': int, 'int4': int, 'serial': int, 'int8': long, 'oid': int,
393        'hstore': cast_hstore, 'json': jsondecode, 'jsonb': jsondecode,
394        'float4': float, 'float8': float,
395        'numeric': Decimal, 'money': cast_money,
396        'date': cast_date, 'interval': cast_interval,
397        'time': cast_time, 'timetz': cast_timetz,
398        'timestamp': cast_timestamp, 'timestamptz': cast_timestamptz,
399        'int2vector': cast_int2vector, 'uuid': Uuid,
400        'anyarray': cast_array, 'record': cast_record}
401
402    connection = None  # will be set in local connection specific instances
403
404    def __missing__(self, typ):
405        """Create a cast function if it is not cached.
406
407        Note that this class never raises a KeyError,
408        but returns None when no special cast function exists.
409        """
410        if not isinstance(typ, str):
411            raise TypeError('Invalid type: %s' % typ)
412        cast = self.defaults.get(typ)
413        if cast:
414            # store default for faster access
415            cast = self._add_connection(cast)
416            self[typ] = cast
417        elif typ.startswith('_'):
418            # create array cast
419            base_cast = self[typ[1:]]
420            cast = self.create_array_cast(base_cast)
421            if base_cast:
422                # store only if base type exists
423                self[typ] = cast
424        return cast
425
426    @staticmethod
427    def _needs_connection(func):
428        """Check if a typecast function needs a connection argument."""
429        try:
430            args = get_args(func)
431        except (TypeError, ValueError):
432            return False
433        else:
434            return 'connection' in args[1:]
435
436    def _add_connection(self, cast):
437        """Add a connection argument to the typecast function if necessary."""
438        if not self.connection or not self._needs_connection(cast):
439            return cast
440        return partial(cast, connection=self.connection)
441
442    def get(self, typ, default=None):
443        """Get the typecast function for the given database type."""
444        return self[typ] or default
445
446    def set(self, typ, cast):
447        """Set a typecast function for the specified database type(s)."""
448        if isinstance(typ, basestring):
449            typ = [typ]
450        if cast is None:
451            for t in typ:
452                self.pop(t, None)
453                self.pop('_%s' % t, None)
454        else:
455            if not callable(cast):
456                raise TypeError("Cast parameter must be callable")
457            for t in typ:
458                self[t] = self._add_connection(cast)
459                self.pop('_%s' % t, None)
460
461    def reset(self, typ=None):
462        """Reset the typecasts for the specified type(s) to their defaults.
463
464        When no type is specified, all typecasts will be reset.
465        """
466        defaults = self.defaults
467        if typ is None:
468            self.clear()
469            self.update(defaults)
470        else:
471            if isinstance(typ, basestring):
472                typ = [typ]
473            for t in typ:
474                cast = defaults.get(t)
475                if cast:
476                    self[t] = self._add_connection(cast)
477                    t = '_%s' % t
478                    cast = defaults.get(t)
479                    if cast:
480                        self[t] = self._add_connection(cast)
481                    else:
482                        self.pop(t, None)
483                else:
484                    self.pop(t, None)
485                    self.pop('_%s' % t, None)
486
487    def create_array_cast(self, basecast):
488        """Create an array typecast for the given base cast."""
489        def cast(v):
490            return cast_array(v, basecast)
491        return cast
492
493    def create_record_cast(self, name, fields, casts):
494        """Create a named record typecast for the given fields and casts."""
495        record = namedtuple(name, fields)
496        def cast(v):
497            return record(*cast_record(v, casts))
498        return cast
499
500
501_typecasts = Typecasts()  # this is the global typecast dictionary
502
503
504def get_typecast(typ):
505    """Get the global typecast function for the given database type(s)."""
506    return _typecasts.get(typ)
507
508
509def set_typecast(typ, cast):
510    """Set a global typecast function for the given database type(s).
511
512    Note that connections cache cast functions. To be sure a global change
513    is picked up by a running connection, call con.type_cache.reset_typecast().
514    """
515    _typecasts.set(typ, cast)
516
517
518def reset_typecast(typ=None):
519    """Reset the global typecasts for the given type(s) to their default.
520
521    When no type is specified, all typecasts will be reset.
522
523    Note that connections cache cast functions. To be sure a global change
524    is picked up by a running connection, call con.type_cache.reset_typecast().
525    """
526    _typecasts.reset(typ)
527
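As a sketch of how these global hooks can be used, a cast for the PostgreSQL
inet type could be registered with the standard ipaddress module (the choice
of ip_interface here is only an example):

    import ipaddress

    set_typecast('inet', ipaddress.ip_interface)
    get_typecast('inet')     # -> <function ip_interface>
    reset_typecast('inet')   # inet values come back as plain strings again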
528
529class LocalTypecasts(Typecasts):
530    """Map typecasts, including local composite types, to cast functions."""
531
532    defaults = _typecasts
533
534    connection = None  # will be set in a connection specific instance
535
536    def __missing__(self, typ):
537        """Create a cast function if it is not cached."""
538        if typ.startswith('_'):
539            base_cast = self[typ[1:]]
540            cast = self.create_array_cast(base_cast)
541            if base_cast:
542                self[typ] = cast
543        else:
544            cast = self.defaults.get(typ)
545            if cast:
546                cast = self._add_connection(cast)
547                self[typ] = cast
548            else:
549                fields = self.get_fields(typ)
550                if fields:
551                    casts = [self[field.type] for field in fields]
552                    fields = [field.name for field in fields]
553                    cast = self.create_record_cast(typ, fields, casts)
554                    self[typ] = cast
555        return cast
556
557    def get_fields(self, typ):
558        """Return the fields for the given record type.
559
560        This method will be replaced with a method that looks up the fields
561        using the type cache of the connection.
562        """
563        return []
564
565
566class TypeCode(str):
567    """Class representing the type_code used by the DB-API 2.0.
568
569    TypeCode objects are strings equal to the PostgreSQL type name,
570    but carry some additional information.
571    """
572
573    @classmethod
574    def create(cls, oid, name, len, type, category, delim, relid):
575        """Create a type code for a PostgreSQL data type."""
576        self = cls(name)
577        self.oid = oid
578        self.len = len
579        self.type = type
580        self.category = category
581        self.delim = delim
582        self.relid = relid
583        return self
584
585FieldInfo = namedtuple('FieldInfo', ['name', 'type'])
586
587
588class TypeCache(dict):
589    """Cache for database types.
590
591    This cache maps type OIDs and names to TypeCode strings containing
592    important information on the associated database type.
593    """
594
595    def __init__(self, cnx):
596        """Initialize type cache for connection."""
597        super(TypeCache, self).__init__()
598        self._escape_string = cnx.escape_string
599        self._src = cnx.source()
600        self._typecasts = LocalTypecasts()
601        self._typecasts.get_fields = self.get_fields
602        self._typecasts.connection = cnx
603
604    def __missing__(self, key):
605        """Get the type info from the database if it is not cached."""
606        if isinstance(key, int):
607            oid = key
608        else:
609            if '.' not in key and '"' not in key:
610                key = '"%s"' % key
611            oid = "'%s'::regtype" % self._escape_string(key)
612        try:
613            self._src.execute("SELECT oid, typname,"
614                 " typlen, typtype, typcategory, typdelim, typrelid"
615                " FROM pg_type WHERE oid=%s" % oid)
616        except ProgrammingError:
617            res = None
618        else:
619            res = self._src.fetch(1)
620        if not res:
621            raise KeyError('Type %s could not be found' % key)
622        res = res[0]
623        type_code = TypeCode.create(int(res[0]), res[1],
624            int(res[2]), res[3], res[4], res[5], int(res[6]))
625        self[type_code.oid] = self[str(type_code)] = type_code
626        return type_code
627
628    def get(self, key, default=None):
629        """Get the type even if it is not cached."""
630        try:
631            return self[key]
632        except KeyError:
633            return default
634
635    def get_fields(self, typ):
636        """Get the names and types of the fields of composite types."""
637        if not isinstance(typ, TypeCode):
638            typ = self.get(typ)
639            if not typ:
640                return None
641        if not typ.relid:
642            return None  # this type is not composite
643        self._src.execute("SELECT attname, atttypid"
644            " FROM pg_attribute WHERE attrelid=%s AND attnum>0"
645            " AND NOT attisdropped ORDER BY attnum" % typ.relid)
646        return [FieldInfo(name, self.get(int(oid)))
647            for name, oid in self._src.fetch(-1)]
648
649    def get_typecast(self, typ):
650        """Get the typecast function for the given database type."""
651        return self._typecasts.get(typ)
652
653    def set_typecast(self, typ, cast):
654        """Set a typecast function for the specified database type(s)."""
655        self._typecasts.set(typ, cast)
656
657    def reset_typecast(self, typ=None):
658        """Reset the typecast function for the specified database type(s)."""
659        self._typecasts.reset(typ)
660
661    def typecast(self, value, typ):
662        """Cast the given value according to the given database type."""
663        if value is None:
664            # for NULL values, no typecast is necessary
665            return None
666        cast = self.get_typecast(typ)
667        if not cast or cast is str:
668            # no typecast is necessary
669            return value
670        return cast(value)
671
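On an open connection the same hooks are available per connection through this
cache; for instance (con is an open pgdb connection and 'inventory_item' is a
hypothetical composite type):

    con.type_cache.set_typecast('numeric', float)  # only for this connection
    con.type_cache.get_fields('inventory_item')    # -> [FieldInfo(name=..., type=...), ...]
    con.type_cache.reset_typecast()                # back to the defaults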
672
673class _quotedict(dict):
674    """Dictionary with auto quoting of its items.
675
676    The quote attribute must be set to the desired quote function.
677    """
678
679    def __getitem__(self, key):
680        return self.quote(super(_quotedict, self).__getitem__(key))
681
682
683### Error messages
684
685def _db_error(msg, cls=DatabaseError):
686    """Return DatabaseError with empty sqlstate attribute."""
687    error = cls(msg)
688    error.sqlstate = None
689    return error
690
691
692def _op_error(msg):
693    """Return OperationalError."""
694    return _db_error(msg, OperationalError)
695
696
697### Cursor Object
698
699class Cursor(object):
700    """Cursor object."""
701
702    def __init__(self, dbcnx):
703        """Create a cursor object for the database connection."""
704        self.connection = self._dbcnx = dbcnx
705        self._cnx = dbcnx._cnx
706        self.type_cache = dbcnx.type_cache
707        self._src = self._cnx.source()
708        # the official attribute for describing the result columns
709        self._description = None
710        if self.row_factory is Cursor.row_factory:
711            # the row factory needs to be determined dynamically
712            self.row_factory = None
713        else:
714            self.build_row_factory = None
715        self.rowcount = -1
716        self.arraysize = 1
717        self.lastrowid = None
718
719    def __iter__(self):
720        """Make cursor compatible to the iteration protocol."""
721        return self
722
723    def __enter__(self):
724        """Enter the runtime context for the cursor object."""
725        return self
726
727    def __exit__(self, et, ev, tb):
728        """Exit the runtime context for the cursor object."""
729        self.close()
730
731    def _quote(self, value):
732        """Quote value depending on its type."""
733        if value is None:
734            return 'NULL'
735        if isinstance(value, (Hstore, Json)):
736            value = str(value)
737        if isinstance(value, basestring):
738            if isinstance(value, Binary):
739                value = self._cnx.escape_bytea(value)
740                if bytes is not str:  # Python >= 3.0
741                    value = value.decode('ascii')
742            else:
743                value = self._cnx.escape_string(value)
744            return "'%s'" % value
745        if isinstance(value, float):
746            if isinf(value):
747                return "'-Infinity'" if value < 0 else "'Infinity'"
748            if isnan(value):
749                return "'NaN'"
750            return value
751        if isinstance(value, (int, long, Decimal, Literal)):
752            return value
753        if isinstance(value, datetime):
754            if value.tzinfo:
755                return "'%s'::timestamptz" % value
756            return "'%s'::timestamp" % value
757        if isinstance(value, date):
758            return "'%s'::date" % value
759        if isinstance(value, time):
760            if value.tzinfo:
761                return "'%s'::timetz" % value
762            return "'%s'::time" % value
763        if isinstance(value, timedelta):
764            return "'%s'::interval" % value
765        if isinstance(value, Uuid):
766            return "'%s'::uuid" % value
767        if isinstance(value, list):
768            # Quote value as an ARRAY constructor. This is better than using
769            # an array literal because it carries the information that this is
770            # an array and not a string.  One issue with this syntax is that
771            # you need to add an explicit typecast when passing empty arrays.
772            # The ARRAY keyword is actually only necessary at the top level.
773            if not value:  # exception for empty array
774                return "'{}'"
775            q = self._quote
776            return 'ARRAY[%s]' % ','.join(str(q(v)) for v in value)
777        if isinstance(value, tuple):
778            # Quote as a ROW constructor.  This is better than using a record
779            # literal because it carries the information that this is a record
780            # and not a string.  We don't use the keyword ROW in order to make
781            # this usable with the IN syntax as well.  It is only necessary
782            # when the record has a single column, which is not really useful.
783            q = self._quote
784            return '(%s)' % ','.join(str(q(v)) for v in value)
785        try:
786            value = value.__pg_repr__()
787        except AttributeError:
788            raise InterfaceError(
789                'Do not know how to adapt type %s' % type(value))
790        if isinstance(value, (tuple, list)):
791            value = self._quote(value)
792        return value
793
794    def _quoteparams(self, string, parameters):
795        """Quote parameters.
796
797        This function works for both mappings and sequences.
798
799        The function should be used even when there are no parameters,
800        so that we have a consistent behavior regarding percent signs.
801        """
802        if parameters:
803            if isinstance(parameters, dict):
804                parameters = _quotedict(parameters)
805                parameters.quote = self._quote
806            else:
807                parameters = tuple(map(self._quote, parameters))
808        else:
809            parameters = {}
810        return string % parameters
811
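To make the quoting concrete: a dict parameter set is wrapped in _quotedict so
that every %(name)s substitution runs through _quote (these are internal
helpers, shown here only for illustration; the exact escaping depends on the
server's standard_conforming_strings setting):

    from datetime import date

    cur._quoteparams("select %(name)s, %(age)s, %(born)s",
        {'name': "O'Connor", 'age': 42, 'born': date(1970, 1, 1)})
    # -> "select 'O''Connor', 42, '1970-01-01'::date"
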
812    def _make_description(self, info):
813        """Make the description tuple for the given field info."""
814        name, typ, size, mod = info[1:]
815        type_code = self.type_cache[typ]
816        if mod > 0:
817            mod -= 4
818        if type_code == 'numeric':
819            precision, scale = mod >> 16, mod & 0xffff
820            size = precision
821        else:
822            if not size:
823                size = type_code.len
824            if size == -1:
825                size = mod
826            precision = scale = None
827        return CursorDescription(name, type_code,
828            None, size, precision, scale, None)
829
830    @property
831    def description(self):
832        """Read-only attribute describing the result columns."""
833        descr = self._description
834        if self._description is True:
835            make = self._make_description
836            descr = [make(info) for info in self._src.listinfo()]
837            self._description = descr
838        return descr
839
840    @property
841    def colnames(self):
842        """Unofficial convenience method for getting the column names."""
843        return [d[0] for d in self.description]
844
845    @property
846    def coltypes(self):
847        """Unofficial convenience method for getting the column types."""
848        return [d[1] for d in self.description]
849
850    def close(self):
851        """Close the cursor object."""
852        self._src.close()
853        self._description = None
854        self.rowcount = -1
855        self.lastrowid = None
856
857    def execute(self, operation, parameters=None):
858        """Prepare and execute a database operation (query or command)."""
859        # The parameters may also be specified as list of tuples to e.g.
860        # insert multiple rows in a single operation, but this kind of
861        # usage is deprecated.  We make several plausibility checks because
862        # tuples can also be passed with the meaning of ROW constructors.
863        if (parameters and isinstance(parameters, list)
864                and len(parameters) > 1
865                and all(isinstance(p, tuple) for p in parameters)
866                and all(len(p) == len(parameters[0]) for p in parameters[1:])):
867            return self.executemany(operation, parameters)
868        else:
869            # not a list of tuples
870            return self.executemany(operation, [parameters])
871
872    def executemany(self, operation, seq_of_parameters):
873        """Prepare operation and execute it against a parameter sequence."""
874        if not seq_of_parameters:
875            # don't do anything without parameters
876            return
877        self._description = None
878        self.rowcount = -1
879        # first try to execute all queries
880        rowcount = 0
881        sql = "BEGIN"
882        try:
883            if not self._dbcnx._tnx:
884                try:
885                    self._cnx.source().execute(sql)
886                except DatabaseError:
887                    raise  # database provides error message
888                except Exception:
889                    raise _op_error("Can't start transaction")
890                self._dbcnx._tnx = True
891            for parameters in seq_of_parameters:
892                sql = operation
893                sql = self._quoteparams(sql, parameters)
894                rows = self._src.execute(sql)
895                if rows:  # true if not DML
896                    rowcount += rows
897                else:
898                    self.rowcount = -1
899        except DatabaseError:
900            raise  # database provides error message
901        except Error as err:
902            raise _db_error(
903                "Error in '%s': '%s' " % (sql, err), InterfaceError)
904        except Exception as err:
905            raise _op_error("Internal error in '%s': %s" % (sql, err))
906        # then initialize result raw count and description
907        if self._src.resulttype == RESULT_DQL:
908            self._description = True  # fetch on demand
909            self.rowcount = self._src.ntuples
910            self.lastrowid = None
911            if self.build_row_factory:
912                self.row_factory = self.build_row_factory()
913        else:
914            self.rowcount = rowcount
915            self.lastrowid = self._src.oidstatus()
916        # return the cursor object, so you can write statements such as
917        # "cursor.execute(...).fetchall()" or "for row in cursor.execute(...)"
918        return self
919
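Since execute() and executemany() return the cursor itself, calls can be
chained as noted in the comment above; for example, with an open cursor cur
and a hypothetical fruits table:

    cur.executemany("insert into fruits (name, price) values"
        " (%(name)s, %(price)s)",
        [{'name': 'apple', 'price': 1}, {'name': 'pear', 'price': 2}])
    rows = cur.execute("select * from fruits").fetchall()
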
920    def fetchone(self):
921        """Fetch the next row of a query result set."""
922        res = self.fetchmany(1, False)
923        try:
924            return res[0]
925        except IndexError:
926            return None
927
928    def fetchall(self):
929        """Fetch all (remaining) rows of a query result."""
930        return self.fetchmany(-1, False)
931
932    def fetchmany(self, size=None, keep=False):
933        """Fetch the next set of rows of a query result.
934
935        The number of rows to fetch per call is specified by the
936        size parameter. If it is not given, the cursor's arraysize
937        determines the number of rows to be fetched. If you set
938        the keep parameter to true, this is kept as new arraysize.
939        """
940        if size is None:
941            size = self.arraysize
942        if keep:
943            self.arraysize = size
944        try:
945            result = self._src.fetch(size)
946        except DatabaseError:
947            raise
948        except Error as err:
949            raise _db_error(str(err))
950        typecast = self.type_cache.typecast
951        return [self.row_factory([typecast(value, typ)
952            for typ, value in zip(self.coltypes, row)]) for row in result]
953
954    def callproc(self, procname, parameters=None):
955        """Call a stored database procedure with the given name.
956
957        The sequence of parameters must contain one entry for each input
958        argument that the procedure expects. The result of the call is the
959        same as this input sequence; replacement of output and input/output
960        parameters in the return value is currently not supported.
961
962        The procedure may also provide a result set as output. These can be
963        requested through the standard fetch methods of the cursor.
964        """
965        n = parameters and len(parameters) or 0
966        query = 'select * from "%s"(%s)' % (procname, ','.join(n * ['%s']))
967        self.execute(query, parameters)
968        return parameters
969
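A sketch of calling a server-side function via callproc (the function
add_numbers is hypothetical):

    cur.callproc('add_numbers', (2, 3))  # runs: select * from "add_numbers"(2,3)
    cur.fetchone()                       # -> the function's result row
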
970    def copy_from(self, stream, table,
971            format=None, sep=None, null=None, size=None, columns=None):
972        """Copy data from an input stream to the specified table.
973
974        The input stream can be a file-like object with a read() method or
975        it can also be an iterable returning a row or multiple rows of input
976        on each iteration.
977
978        The format must be text, csv or binary. The sep option sets the
979        column separator (delimiter) used in the non-binary formats.
980        The null option sets the textual representation of NULL in the input.
981
982        The size option sets the size of the buffer used when reading data
983        from file-like objects.
984
985        The copy operation can be restricted to a subset of columns. If no
986        columns are specified, all of them will be copied.
987        """
988        binary_format = format == 'binary'
989        try:
990            read = stream.read
991        except AttributeError:
992            if size:
993                raise ValueError("Size must only be set for file-like objects")
994            if binary_format:
995                input_type = bytes
996                type_name = 'byte strings'
997            else:
998                input_type = basestring
999                type_name = 'strings'
1000
1001            if isinstance(stream, basestring):
1002                if not isinstance(stream, input_type):
1003                    raise ValueError("The input must be %s" % type_name)
1004                if not binary_format:
1005                    if isinstance(stream, str):
1006                        if not stream.endswith('\n'):
1007                            stream += '\n'
1008                    else:
1009                        if not stream.endswith(b'\n'):
1010                            stream += b'\n'
1011
1012                def chunks():
1013                    yield stream
1014
1015            elif isinstance(stream, Iterable):
1016
1017                def chunks():
1018                    for chunk in stream:
1019                        if not isinstance(chunk, input_type):
1020                            raise ValueError(
1021                                "Input stream must consist of %s" % type_name)
1022                        if isinstance(chunk, str):
1023                            if not chunk.endswith('\n'):
1024                                chunk += '\n'
1025                        else:
1026                            if not chunk.endswith(b'\n'):
1027                                chunk += b'\n'
1028                        yield chunk
1029
1030            else:
1031                raise TypeError("Need an input stream to copy from")
1032        else:
1033            if size is None:
1034                size = 8192
1035            elif not isinstance(size, int):
1036                raise TypeError("The size option must be an integer")
1037            if size > 0:
1038
1039                def chunks():
1040                    while True:
1041                        buffer = read(size)
1042                        yield buffer
1043                        if not buffer or len(buffer) < size:
1044                            break
1045
1046            else:
1047
1048                def chunks():
1049                    yield read()
1050
1051        if not table or not isinstance(table, basestring):
1052            raise TypeError("Need a table to copy to")
1053        if table.lower().startswith('select'):
1054            raise ValueError("Must specify a table, not a query")
1055        else:
1056            table = '"%s"' % (table,)
1057        operation = ['copy %s' % (table,)]
1058        options = []
1059        params = []
1060        if format is not None:
1061            if not isinstance(format, basestring):
1062                raise TypeError("The format option must be a string")
1063            if format not in ('text', 'csv', 'binary'):
1064                raise ValueError("Invalid format")
1065            options.append('format %s' % (format,))
1066        if sep is not None:
1067            if not isinstance(sep, basestring):
1068                raise TypeError("The sep option must be a string")
1069            if format == 'binary':
1070                raise ValueError(
1071                    "The sep option is not allowed with binary format")
1072            if len(sep) != 1:
1073                raise ValueError(
1074                    "The sep option must be a single one-byte character")
1075            options.append('delimiter %s')
1076            params.append(sep)
1077        if null is not None:
1078            if not isinstance(null, basestring):
1079                raise TypeError("The null option must be a string")
1080            options.append('null %s')
1081            params.append(null)
1082        if columns:
1083            if not isinstance(columns, basestring):
1084                columns = ','.join('"%s"' % (col,) for col in columns)
1085            operation.append('(%s)' % (columns,))
1086        operation.append("from stdin")
1087        if options:
1088            operation.append('(%s)' % ','.join(options))
1089        operation = ' '.join(operation)
1090
1091        putdata = self._src.putdata
1092        self.execute(operation, params)
1093
1094        try:
1095            for chunk in chunks():
1096                putdata(chunk)
1097        except BaseException as error:
1098            self.rowcount = -1
1099            # the following call will re-raise the error
1100            putdata(error)
1101        else:
1102            self.rowcount = putdata(None)
1103
1104        # return the cursor object, so you can chain operations
1105        return self
1106
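For example, rows can be bulk-loaded from an iterable of text lines in the
default tab-separated text format (the fruits table is hypothetical; a missing
trailing newline is added per chunk as shown above):

    data = ['apple\t1', 'pear\t2', 'cherry\t3']
    cur.copy_from(data, 'fruits', columns=('name', 'price'))
    cur.rowcount   # -> 3
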
1107    def copy_to(self, stream, table,
1108            format=None, sep=None, null=None, decode=None, columns=None):
1109        """Copy data from the specified table to an output stream.
1110
1111        The output stream can be a file-like object with a write() method or
1112        it can also be None, in which case the method will return a generator
1113        yielding a row on each iteration.
1114
1115        Output will be returned as byte strings unless you set decode to true.
1116
1117        Note that you can also use a select query instead of the table name.
1118
1119        The format must be text, csv or binary. The sep option sets the
1120        column separator (delimiter) used in the non-binary formats.
1121        The null option sets the textual representation of NULL in the output.
1122
1123        The copy operation can be restricted to a subset of columns. If no
1124        columns are specified, all of them will be copied.
1125        """
1126        binary_format = format == 'binary'
1127        if stream is not None:
1128            try:
1129                write = stream.write
1130            except AttributeError:
1131                raise TypeError("Need an output stream to copy to")
1132        if not table or not isinstance(table, basestring):
1133            raise TypeError("Need a table to copy to")
1134        if table.lower().startswith('select'):
1135            if columns:
1136                raise ValueError("Columns must be specified in the query")
1137            table = '(%s)' % (table,)
1138        else:
1139            table = '"%s"' % (table,)
1140        operation = ['copy %s' % (table,)]
1141        options = []
1142        params = []
1143        if format is not None:
1144            if not isinstance(format, basestring):
1145                raise TypeError("The format option must be a string")
1146            if format not in ('text', 'csv', 'binary'):
1147                raise ValueError("Invalid format")
1148            options.append('format %s' % (format,))
1149        if sep is not None:
1150            if not isinstance(sep, basestring):
1151                raise TypeError("The sep option must be a string")
1152            if binary_format:
1153                raise ValueError(
1154                    "The sep option is not allowed with binary format")
1155            if len(sep) != 1:
1156                raise ValueError(
1157                    "The sep option must be a single one-byte character")
1158            options.append('delimiter %s')
1159            params.append(sep)
1160        if null is not None:
1161            if not isinstance(null, basestring):
1162                raise TypeError("The null option must be a string")
1163            options.append('null %s')
1164            params.append(null)
1165        if decode is None:
1166            if format == 'binary':
1167                decode = False
1168            else:
1169                decode = str is unicode
1170        else:
1171            if not isinstance(decode, (int, bool)):
1172                raise TypeError("The decode option must be a boolean")
1173            if decode and binary_format:
1174                raise ValueError(
1175                    "The decode option is not allowed with binary format")
1176        if columns:
1177            if not isinstance(columns, basestring):
1178                columns = ','.join('"%s"' % (col,) for col in columns)
1179            operation.append('(%s)' % (columns,))
1180
1181        operation.append("to stdout")
1182        if options:
1183            operation.append('(%s)' % ','.join(options))
1184        operation = ' '.join(operation)
1185
1186        getdata = self._src.getdata
1187        self.execute(operation, params)
1188
1189        def copy():
1190            self.rowcount = 0
1191            while True:
1192                row = getdata(decode)
1193                if isinstance(row, int):
1194                    if self.rowcount != row:
1195                        self.rowcount = row
1196                    break
1197                self.rowcount += 1
1198                yield row
1199
1200        if stream is None:
1201            # no input stream, return the generator
1202            return copy()
1203
1204        # write the rows to the file-like output stream
1205        for row in copy():
1206            write(row)
1207
1208        # return the cursor object, so you can chain operations
1209        return self
1210
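Conversely, passing None as the stream turns copy_to() into a generator, which
is handy for exporting a table (or a select query) row by row; for example:

    for line in cur.copy_to(None, 'fruits', format='csv', decode=True):
        print(line, end='')   # each line is one CSV row, newline included
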
1211    def __next__(self):
1212        """Return the next row (support for the iteration protocol)."""
1213        res = self.fetchone()
1214        if res is None:
1215            raise StopIteration
1216        return res
1217
1218    # Note that since Python 3 the iterator protocol uses __next__()
1219    # instead of next(); we keep next() only for backward compatibility of pgdb.
1220    next = __next__
1221
1222    @staticmethod
1223    def nextset():
1224        """Not supported."""
1225        raise NotSupportedError("The nextset() method is not supported")
1226
1227    @staticmethod
1228    def setinputsizes(sizes):
1229        """Not supported."""
1230        pass  # unsupported, but silently passed
1231
1232    @staticmethod
1233    def setoutputsize(size, column=0):
1234        """Not supported."""
1235        pass  # unsupported, but silently passed
1236
1237    @staticmethod
1238    def row_factory(row):
1239        """Process rows before they are returned.
1240
1241        You can overwrite this statically with a custom row factory, or
1242        you can build a row factory dynamically with build_row_factory().
1243
1244        For example, you can create a Cursor class that returns rows as
1245        Python dictionaries like this:
1246
1247            class DictCursor(pgdb.Cursor):
1248
1249                def row_factory(self, row):
1250                    return {desc[0]: value
1251                        for desc, value in zip(self.description, row)}
1252
1253            cur = DictCursor(con)  # get one DictCursor instance or
1254            con.cursor_type = DictCursor  # always use DictCursor instances
1255        """
1256        raise NotImplementedError
1257
1258    def build_row_factory(self):
1259        """Build a row factory based on the current description.
1260
1261        This implementation builds a row factory for creating named tuples.
1262        You can overwrite this method if you want to dynamically create
1263        different row factories whenever the column description changes.
1264        """
1265        colnames = self.colnames
1266        if colnames:
1267            try:
1268                try:
1269                    return namedtuple('Row', colnames, rename=True)._make
1270                except TypeError:  # Python 2.6 and 3.0 do not support rename
1271                    colnames = [v if v.isalnum() else 'column_%d' % n
1272                             for n, v in enumerate(colnames)]
1273                    return namedtuple('Row', colnames)._make
1274            except ValueError:  # there is still a problem with the field names
1275                colnames = ['column_%d' % n for n in range(len(colnames))]
1276                return namedtuple('Row', colnames)._make
1277
1278
1279CursorDescription = namedtuple('CursorDescription',
1280    ['name', 'type_code', 'display_size', 'internal_size',
1281     'precision', 'scale', 'null_ok'])
1282
1283
1284### Connection Objects
1285
1286class Connection(object):
1287    """Connection object."""
1288
1289    # expose the exceptions as attributes on the connection object
1290    Error = Error
1291    Warning = Warning
1292    InterfaceError = InterfaceError
1293    DatabaseError = DatabaseError
1294    InternalError = InternalError
1295    OperationalError = OperationalError
1296    ProgrammingError = ProgrammingError
1297    IntegrityError = IntegrityError
1298    DataError = DataError
1299    NotSupportedError = NotSupportedError
1300
1301    def __init__(self, cnx):
1302        """Create a database connection object."""
1303        self._cnx = cnx  # connection
1304        self._tnx = False  # transaction state
1305        self.type_cache = TypeCache(cnx)
1306        self.cursor_type = Cursor
1307        try:
1308            self._cnx.source()
1309        except Exception:
1310            raise _op_error("Invalid connection")
1311
1312    def __enter__(self):
1313        """Enter the runtime context for the connection object.
1314
1315        The runtime context can be used for running transactions.
1316        """
1317        return self
1318
1319    def __exit__(self, et, ev, tb):
1320        """Exit the runtime context for the connection object.
1321
1322        This does not close the connection, but it ends a transaction.
1323        """
1324        if et is None and ev is None and tb is None:
1325            self.commit()
1326        else:
1327            self.rollback()
1328
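The runtime context demarcates one transaction per with block (it does not
close the connection); a sketch with a hypothetical fruits table:

    with con:
        cur = con.cursor()
        cur.execute("insert into fruits (name) values (%(name)s)",
            {'name': 'banana'})
    # committed here if no exception was raised, rolled back otherwise
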
1329    def close(self):
1330        """Close the connection object."""
1331        if self._cnx:
1332            if self._tnx:
1333                try:
1334                    self.rollback()
1335                except DatabaseError:
1336                    pass
1337            self._cnx.close()
1338            self._cnx = None
1339        else:
1340            raise _op_error("Connection has been closed")
1341
1342    def commit(self):
1343        """Commit any pending transaction to the database."""
1344        if self._cnx:
1345            if self._tnx:
1346                self._tnx = False
1347                try:
1348                    self._cnx.source().execute("COMMIT")
1349                except DatabaseError:
1350                    raise
1351                except Exception:
1352                    raise _op_error("Can't commit")
1353        else:
1354            raise _op_error("Connection has been closed")
1355
1356    def rollback(self):
1357        """Roll back to the start of any pending transaction."""
1358        if self._cnx:
1359            if self._tnx:
1360                self._tnx = False
1361                try:
1362                    self._cnx.source().execute("ROLLBACK")
1363                except DatabaseError:
1364                    raise
1365                except Exception:
1366                    raise _op_error("Can't rollback")
1367        else:
1368            raise _op_error("Connection has been closed")
1369
1370    def cursor(self):
1371        """Return a new cursor object using the connection."""
1372        if self._cnx:
1373            try:
1374                return self.cursor_type(self)
1375            except Exception:
1376                raise _op_error("Invalid connection")
1377        else:
1378            raise _op_error("Connection has been closed")
1379
1380    if shortcutmethods:  # otherwise do not implement and document this
1381
1382        def execute(self, operation, params=None):
1383            """Shortcut method to run an operation on an implicit cursor."""
1384            cursor = self.cursor()
1385            cursor.execute(operation, params)
1386            return cursor
1387
1388        def executemany(self, operation, param_seq):
1389            """Shortcut method to run an operation against a sequence."""
1390            cursor = self.cursor()
1391            cursor.executemany(operation, param_seq)
1392            return cursor
1393
1394
1395### Module Interface
1396
1397_connect = connect
1398
1399def connect(dsn=None,
1400        user=None, password=None,
1401        host=None, database=None):
1402    """Connect to a database."""
1403    # first get params from DSN
1404    dbport = -1
1405    dbhost = ""
1406    dbbase = ""
1407    dbuser = ""
1408    dbpasswd = ""
1409    dbopt = ""
1410    try:
1411        params = dsn.split(":")
1412        dbhost = params[0]
1413        dbbase = params[1]
1414        dbuser = params[2]
1415        dbpasswd = params[3]
1416        dbopt = params[4]
1417    except (AttributeError, IndexError, TypeError):
1418        pass
1419
1420    # override if necessary
1421    if user is not None:
1422        dbuser = user
1423    if password is not None:
1424        dbpasswd = password
1425    if database is not None:
1426        dbbase = database
1427    if host is not None:
1428        try:
1429            params = host.split(":")
1430            dbhost = params[0]
1431            dbport = int(params[1])
1432        except (AttributeError, IndexError, TypeError, ValueError):
1433            pass
1434
1435    # empty host is localhost
1436    if dbhost == "":
1437        dbhost = None
1438    if dbuser == "":
1439        dbuser = None
1440
1441    # open the connection
1442    cnx = _connect(dbbase, dbhost, dbport, dbopt, dbuser, dbpasswd)
1443    return Connection(cnx)
1444
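The DSN string and the keyword arguments can be combined, with keywords taking
precedence over the corresponding DSN parts; for example (all credentials
hypothetical):

    con = connect('myhost:mydb:scott:tiger')
    con = connect(dsn='myhost:mydb', user='scott', password='tiger')
    # a non-default port goes into the host part:
    con = connect(host='myhost:5433', database='mydb', user='scott')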
1445
1446### Types Handling
1447
1448class Type(frozenset):
1449    """Type class for a couple of PostgreSQL data types.
1450
1451    PostgreSQL is object-oriented: types are dynamic.
1452    We must thus use type names as internal type codes.
1453    """
1454
1455    def __new__(cls, values):
1456        if isinstance(values, basestring):
1457            values = values.split()
1458        return super(Type, cls).__new__(cls, values)
1459
1460    def __eq__(self, other):
1461        if isinstance(other, basestring):
1462            if other.startswith('_'):
1463                other = other[1:]
1464            return other in self
1465        else:
1466            return super(Type, self).__eq__(other)
1467
1468    def __ne__(self, other):
1469        if isinstance(other, basestring):
1470            if other.startswith('_'):
1471                other = other[1:]
1472            return other not in self
1473        else:
1474            return super(Type, self).__ne__(other)
1475
1476
1477class ArrayType:
1478    """Type class for PostgreSQL array types."""
1479
1480    def __eq__(self, other):
1481        if isinstance(other, basestring):
1482            return other.startswith('_')
1483        else:
1484            return isinstance(other, ArrayType)
1485
1486    def __ne__(self, other):
1487        if isinstance(other, basestring):
1488            return not other.startswith('_')
1489        else:
1490            return not isinstance(other, ArrayType)
1491
1492
1493class RecordType:
1494    """Type class for PostgreSQL record types."""
1495
1496    def __eq__(self, other):
1497        if isinstance(other, TypeCode):
1498            return other.type == 'c'
1499        elif isinstance(other, basestring):
1500            return other == 'record'
1501        else:
1502            return isinstance(other, RecordType)
1503
1504    def __ne__(self, other):
1505        if isinstance(other, TypeCode):
1506            return other.type != 'c'
1507        elif isinstance(other, basestring):
1508            return other != 'record'
1509        else:
1510            return not isinstance(other, RecordType)
1511
1512
1513# Mandatory type objects defined by DB-API 2 specs:
1514
1515STRING = Type('char bpchar name text varchar')
1516BINARY = Type('bytea')
1517NUMBER = Type('int2 int4 serial int8 float4 float8 numeric money')
1518DATETIME = Type('date time timetz timestamp timestamptz interval'
1519    ' abstime reltime')  # these are very old
1520ROWID = Type('oid')
1521
1522
1523# Additional type objects (more specific):
1524
1525BOOL = Type('bool')
1526SMALLINT = Type('int2')
1527INTEGER = Type('int2 int4 int8 serial')
1528LONG = Type('int8')
1529FLOAT = Type('float4 float8')
1530NUMERIC = Type('numeric')
1531MONEY = Type('money')
1532DATE = Type('date')
1533TIME = Type('time timetz')
1534TIMESTAMP = Type('timestamp timestamptz')
1535INTERVAL = Type('interval')
1536UUID = Type('uuid')
1537HSTORE = Type('hstore')
1538JSON = Type('json jsonb')
1539
1540# Type object for arrays (also equate to their base types):
1541
1542ARRAY = ArrayType()
1543
1544# Type object for records (encompassing all composite types):
1545
1546RECORD = RecordType()
1547
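These singleton type objects compare equal to the type_code values in cursor
descriptions, which is how DB-API clients are expected to check column types;
for example:

    cur.execute("select 'hello'::text, 42::int4, array[1, 2]")
    d = cur.description
    d[0].type_code == STRING    # True
    d[1].type_code == NUMBER    # True
    d[2].type_code == ARRAY     # True
    d[2].type_code == INTEGER   # True (arrays equate to their base type, too)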
1548
1549# Mandatory type helpers defined by DB-API 2 specs:
1550
1551def Date(year, month, day):
1552    """Construct an object holding a date value."""
1553    return date(year, month, day)
1554
1555
1556def Time(hour, minute=0, second=0, microsecond=0, tzinfo=None):
1557    """Construct an object holding a time value."""
1558    return time(hour, minute, second, microsecond, tzinfo)
1559
1560
1561def Timestamp(year, month, day, hour=0, minute=0, second=0, microsecond=0,
1562        tzinfo=None):
1563    """Construct an object holding a time stamp value."""
1564    return datetime(year, month, day, hour, minute, second, microsecond, tzinfo)
1565
1566
1567def DateFromTicks(ticks):
1568    """Construct an object holding a date value from the given ticks value."""
1569    return Date(*localtime(ticks)[:3])
1570
1571
1572def TimeFromTicks(ticks):
1573    """Construct an object holding a time value from the given ticks value."""
1574    return Time(*localtime(ticks)[3:6])
1575
1576
1577def TimestampFromTicks(ticks):
1578    """Construct an object holding a time stamp from the given ticks value."""
1579    return Timestamp(*localtime(ticks)[:6])
1580
1581
1582class Binary(bytes):
1583    """Construct an object capable of holding a binary (long) string value."""
1584
1585
1586# Additional type helpers for PyGreSQL:
1587
1588def Interval(days, hours=0, minutes=0, seconds=0, microseconds=0):
1589    """Construct an object holding a time interval value."""
1590    return timedelta(days, hours=hours, minutes=minutes, seconds=seconds,
1591        microseconds=microseconds)
1592
1593
1594Uuid = Uuid  # Construct an object holding a UUID value
1595
1596
1597class Hstore(dict):
1598    """Wrapper class for marking hstore values."""
1599
1600    _re_quote = regex('^[Nn][Uu][Ll][Ll]$|[ ,=>]')
1601    _re_escape = regex(r'(["\\])')
1602
1603    @classmethod
1604    def _quote(cls, s):
1605        if s is None:
1606            return 'NULL'
1607        if not s:
1608            return '""'
1609        quote = cls._re_quote.search(s)
1610        s = cls._re_escape.sub(r'\\\1', s)
1611        if quote:
1612            s = '"%s"' % s
1613        return s
1614
1615    def __str__(self):
1616        q = self._quote
1617        return ','.join('%s=>%s' % (q(k), q(v)) for k, v in self.items())
1618
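When such a value is passed as a query parameter, the cursor converts it with
str() and quotes the result as a string literal; for example:

    str(Hstore({'street': 'Mill Lane', 'city': None}))
    # -> 'street=>"Mill Lane",city=>NULL'  (key order follows dict iteration)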
1619
1620class Json:
1621    """Construct a wrapper for holding an object serializable to JSON."""
1622
1623    def __init__(self, obj, encode=None):
1624        self.obj = obj
1625        self.encode = encode or jsonencode
1626
1627    def __str__(self):
1628        obj = self.obj
1629        if isinstance(obj, basestring):
1630            return obj
1631        return self.encode(obj)
1632
1633
1634class Literal:
1635    """Construct a wrapper for holding a literal SQL string."""
1636
1637    def __init__(self, sql):
1638        self.sql = sql
1639
1640    def __str__(self):
1641        return self.sql
1642
1643    __pg_repr__ = __str__
1644
1645# If run as script, print some information:
1646
1647if __name__ == '__main__':
1648    print('PyGreSQL version', version)
1649    print('')
1650    print(__doc__)