#!/bin/env python
# -*- coding: utf-8 -*-

"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases, including:
- SQLite & SpatiaLite
- MySQL
- Postgres
- Firebird
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- Informix (9+ and SE)
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
- Teradata
- IMAP (experimental)

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
...          folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')
<Row {'id': 1, 'name': 'Jim'}>

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,
...                            groupby=person.name, limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload
blob time date datetime
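list:string list:integer list:reference json bigint
(see the adapter `types` maps below for the backend-specific column types)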

Supported DAL URI strings:
'sqlite://test.db'
'spatialite://test.db'
'sqlite:memory'
'spatialite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'ingres://database' # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name'
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass;DATABASE=database' # experimental
'imap://user:password@server:port' # experimental
'mongodb://user:password@server:port/database' # experimental

For more info:
help(DAL)
help(Field)
"""


__all__ = ['DAL', 'Field']

DEFAULTLENGTH = {'string': 512,
                 'password': 512,
                 'upload': 512,
                 'text': 2**15,
                 'blob': 2**31}
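# NOTE: the lengths above are the fallbacks used when a Field of the given
# type is defined without an explicit length, e.g. 'string' -> CHAR(512)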
TIMINGSSIZE = 100
SPATIALLIBS = {
    'Windows': 'libspatialite',
    'Linux': 'libspatialite.so',
    'Darwin': 'libspatialite.dylib'
    }
DEFAULT_URI = 'sqlite://dummy.db'

import re
import sys
import locale
import os
import types
import datetime
import threading
import time
import csv
import cgi
import copy
import socket
import logging
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob
import traceback
import platform

PYTHON_VERSION = sys.version_info[:3]
if PYTHON_VERSION[0] == 2:
    import cPickle as pickle
    import cStringIO as StringIO
    import copy_reg as copyreg
    hashlib_md5 = hashlib.md5
    bytes, unicode = str, unicode
else:
    import pickle
    from io import StringIO as StringIO
    import copyreg
    long = int
    hashlib_md5 = lambda s: hashlib.md5(bytes(s, 'utf8'))
    bytes, unicode = bytes, str

if PYTHON_VERSION[:2] < (2, 7):
    from gluon.contrib.ordereddict import OrderedDict
else:
    from collections import OrderedDict

CALLABLETYPES = (types.LambdaType, types.FunctionType,
                 types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

TABLE_ARGS = set(
    ('migrate', 'primarykey', 'fake_migrate', 'format', 'redefine',
     'singular', 'plural', 'trigger_name', 'sequence_name', 'fields',
     'common_filter', 'polymodel', 'table_class', 'on_define', 'rname'))

SELECT_ARGS = set(
    ('orderby', 'groupby', 'limitby', 'required', 'cache', 'left',
     'distinct', 'having', 'join', 'for_update', 'processor', 'cacheable',
     'orderby_on_limitby'))

ogetattr = object.__getattribute__
osetattr = object.__setattr__
exists = os.path.exists
pjoin = os.path.join

try:
    from gluon.utils import web2py_uuid
except (ImportError, SystemError):
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    from gluon import serializers
    have_serializers = True
except ImportError:
    have_serializers = False
    try:
        import json as simplejson
    except ImportError:
        try:
            import gluon.contrib.simplejson as simplejson
        except ImportError:
            simplejson = None

LOGGER = logging.getLogger("web2py.dal")
DEFAULT = lambda: 0

GLOBAL_LOCKER = threading.RLock()
THREAD_LOCAL = threading.local()

REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.([^.]+)$')
REGEX_NO_GREEDY_ENTITY_NAME = r'(.+?)'
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)(\.(?P<name>\w+))?\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
REGEX_QUOTES = re.compile("'[^']*'")
REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$')
REGEX_PASSWORD = re.compile('\://([^:@]*)\:')
REGEX_NOPASSWD = re.compile('\/\/[\w\.\-]+[\:\/](.+)(?=@)')
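# A couple of worked examples of the patterns above:
#   REGEX_TABLE_DOT_FIELD.match('person.name').groups() == ('person', 'name')
#   REGEX_UNPACK splits only on single pipes, so the stored list '|1|2|3|'
#   yields ['', '1', '2', '3', ''] and escaped '||' sequences are preserved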

DRIVERS = []

try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.ext import ndb
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key
    from google.appengine.ext.db.polymodel import PolyModel
    from google.appengine.ext.ndb.polymodel import PolyModel as NDBPolyModel
    DRIVERS.append('google')
except ImportError:
    pass

if not 'google' in DRIVERS:

    try:
        from pysqlite2 import dbapi2 as sqlite2
        DRIVERS.append('SQLite(sqlite2)')
    except ImportError:
        LOGGER.debug('no SQLite drivers pysqlite2.dbapi2')

    try:
        from sqlite3 import dbapi2 as sqlite3
        DRIVERS.append('SQLite(sqlite3)')
    except ImportError:
        LOGGER.debug('no SQLite drivers sqlite3')

    try:
        # first try the contrib driver, then from site-packages (if installed)
        try:
            import gluon.contrib.pymysql as pymysql
            # monkeypatch pymysql because they havent fixed the bug:
            # https://github.com/petehunt/PyMySQL/issues/86
            pymysql.ESCAPE_REGEX = re.compile("'")
            pymysql.ESCAPE_MAP = {"'": "''"}
            # end monkeypatch
        except ImportError:
            import pymysql
        DRIVERS.append('MySQL(pymysql)')
    except ImportError:
        LOGGER.debug('no MySQL driver pymysql')

    try:
        import MySQLdb
        DRIVERS.append('MySQL(MySQLdb)')
    except ImportError:
        LOGGER.debug('no MySQL driver MySQLDB')

    try:
        import mysql.connector as mysqlconnector
        DRIVERS.append("MySQL(mysqlconnector)")
    except ImportError:
        LOGGER.debug("no driver mysql.connector")

    try:
        import psycopg2
        from psycopg2.extensions import adapt as psycopg2_adapt
        DRIVERS.append('PostgreSQL(psycopg2)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver psycopg2')

    try:
        # first try the contrib driver, then from site-packages (if installed)
        try:
            import gluon.contrib.pg8000.dbapi as pg8000
        except ImportError:
            import pg8000.dbapi as pg8000
        DRIVERS.append('PostgreSQL(pg8000)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver pg8000')

    try:
        import cx_Oracle
        DRIVERS.append('Oracle(cx_Oracle)')
    except ImportError:
        LOGGER.debug('no Oracle driver cx_Oracle')

    try:
        try:
            import pyodbc
        except ImportError:
            try:
                import gluon.contrib.pypyodbc as pyodbc
            except Exception, e:
                raise ImportError(str(e))
        DRIVERS.append('MSSQL(pyodbc)')
        DRIVERS.append('DB2(pyodbc)')
        DRIVERS.append('Teradata(pyodbc)')
        DRIVERS.append('Ingres(pyodbc)')
    except ImportError:
        LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc')

    try:
        import Sybase
        DRIVERS.append('Sybase(Sybase)')
    except ImportError:
        LOGGER.debug('no Sybase driver')

    try:
        import kinterbasdb
        DRIVERS.append('Interbase(kinterbasdb)')
        DRIVERS.append('Firebird(kinterbasdb)')
    except ImportError:
        LOGGER.debug('no Firebird/Interbase driver kinterbasdb')

    try:
        import fdb
        DRIVERS.append('Firebird(fdb)')
    except ImportError:
        LOGGER.debug('no Firebird driver fdb')

    try:
        import firebirdsql
        DRIVERS.append('Firebird(firebirdsql)')
    except ImportError:
        LOGGER.debug('no Firebird driver firebirdsql')

    try:
        import informixdb
        DRIVERS.append('Informix(informixdb)')
        LOGGER.warning('Informix support is experimental')
    except ImportError:
        LOGGER.debug('no Informix driver informixdb')

    try:
        import sapdb
        DRIVERS.append('SQL(sapdb)')
        LOGGER.warning('SAPDB support is experimental')
    except ImportError:
        LOGGER.debug('no SAP driver sapdb')

    try:
        import cubriddb
        DRIVERS.append('Cubrid(cubriddb)')
        LOGGER.warning('Cubrid support is experimental')
    except ImportError:
        LOGGER.debug('no Cubrid driver cubriddb')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql
        # load the SQLite JDBC driver so java.sql can use it
        from org.sqlite import JDBC
        zxJDBC_sqlite = java.sql.DriverManager
        DRIVERS.append('PostgreSQL(zxJDBC)')
        DRIVERS.append('SQLite(zxJDBC)')
        LOGGER.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
        is_jdbc = False

    try:
        import couchdb
        DRIVERS.append('CouchDB(couchdb)')
    except ImportError:
        LOGGER.debug('no Couchdb driver couchdb')

    try:
        import pymongo
        DRIVERS.append('MongoDB(pymongo)')
    except:
        LOGGER.debug('no MongoDB driver pymongo')

    try:
        import imaplib
        DRIVERS.append('IMAP(imaplib)')
    except:
        LOGGER.debug('no IMAP driver imaplib')

PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]
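# Each rule is a (search, sub, replacement) triple: the first regex whose
# search() matches selects the rule, then sub() builds the plural, e.g.
#   re.compile('l([eo])af$').sub('l\\1aves', 'leaf') -> 'leaves'
# The final ('$' -> 's') rule is a catch-all for anything unmatched.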

def pluralize(singular, rules=PLURALIZE_RULES):
    for line in rules:
        re_search, re_sub, replace = line
        plural = re_search.search(singular) and re_sub.sub(replace, singular)
        if plural: return plural

def hide_password(uri):
    if isinstance(uri, (list, tuple)):
        return [hide_password(item) for item in uri]
    return REGEX_NOPASSWD.sub('******', uri)

def OR(a, b):
    return a | b

def AND(a, b):
    return a & b

def IDENTITY(x): return x

def varquote_aux(name, quotestr='%s'):
    return name if REGEX_W.match(name) else quotestr % name

def quote_keyword(a, keyword='timestamp'):
    regex = re.compile('\.keyword(?=\w)')
    a = regex.sub('."%s"' % keyword, a)
    return a

if 'google' in DRIVERS:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(self, **kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def get_value_for_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return str(value)

        def make_value_from_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string."\
                                    % self.name)

    class NDBDecimalProperty(ndb.StringProperty):
        """
        NDB decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def _to_base_type(self, value):
            if value is None or value == '':
                return None
            else:
                return str(value)

        def _from_base_type(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def _validate(self, value):
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise TypeError("Property %s must be a Decimal or string."\
                            % self._name)

class ConnectionPool(object):

    POOLS = {}
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        THREAD_LOCAL.folder = folder

    def close(self, action='commit', really=True):
        if action:
            if callable(action):
                action(self)
            else:
                getattr(self, action)()
        # if you want pools, recycle this connection
        really = True
        if self.pool_size:
            GLOBAL_LOCKER.acquire()
            pool = ConnectionPool.POOLS[self.uri]
            if len(pool) < self.pool_size:
                pool.append(self.connection)
                really = False
            GLOBAL_LOCKER.release()
        if really:
            self.close_connection()
        self.connection = None

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL, 'db_instances', {}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db, '_adapter'):
                    db._adapter.close(action)
        getattr(THREAD_LOCAL, 'db_instances', {}).clear()
        getattr(THREAD_LOCAL, 'db_instances_zombie', {}).clear()
        if callable(action):
            action(None)
        return

    def after_connection_hook(self):
        """hook for the after_connection parameter"""
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

    def after_connection(self):
        """ this is supposed to be overloaded by adapters"""
        pass


class BaseAdapter(ConnectionPool):

    __metaclass__ = AdapterMeta

    native_json = False
    driver = None
    driver_name = None
    drivers = ()
    connection = None
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True
    dbpath = None
    folder = None
    connector = lambda *args, **kwargs: None

    TRUE = 'T'
    FALSE = 'F'
    T_SEP = ' '
    QUOTE_TEMPLATE = '"%s"'

    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'INTEGER',
        'float': 'DOUBLE',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        # the two below are only used when bigint ids are enabled and
        # replace 'id' and 'reference'
        'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT "FK_%(constraint_name)s" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }
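    # Every entry is a template completed per field at CREATE TABLE time,
    # e.g. types['string'] % dict(length=512) -> 'CHAR(512)'; dialect-specific
    # adapters override this mapping (see create_table below).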

    def isOperationalError(self, exception):
        if not hasattr(self.driver, "OperationalError"):
            return None
        return isinstance(exception, self.driver.OperationalError)

    def isProgrammingError(self, exception):
        if not hasattr(self.driver, "ProgrammingError"):
            return None
        return isinstance(exception, self.driver.ProgrammingError)

    def id_query(self, table):
        pkeys = getattr(table, '_primarykey', None)
        if pkeys:
            return table[pkeys[0]] != None
        else:
            return table._id != None

    def adapt(self, obj):
        return "'%s'" % obj.replace("'", "''")

    def smart_adapt(self, obj):
        if isinstance(obj, (int, float)):
            return str(obj)
        return self.adapt(str(obj))

    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if have_portalocker and lock:
            fileobj = portalocker.LockedFile(filename, mode)
        else:
            fileobj = open(filename, mode)
        return fileobj

    def file_close(self, fileobj):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if fileobj:
            fileobj.close()

    def file_delete(self, filename):
        os.unlink(filename)

    def find_driver(self, adapter_args, uri=None):
        self.adapter_args = adapter_args
        if getattr(self, 'driver', None) != None:
            return
        drivers_available = [driver for driver in self.drivers
                             if driver in globals()]
        if uri:
            items = uri.split('://', 1)[0].split(':')
            request_driver = items[1] if len(items) > 1 else None
        else:
            request_driver = None
        request_driver = request_driver or adapter_args.get('driver')
        if request_driver:
            if request_driver in drivers_available:
                self.driver_name = request_driver
                self.driver = globals().get(request_driver)
            else:
                raise RuntimeError("driver %s not available" % request_driver)
        elif drivers_available:
            self.driver_name = drivers_available[0]
            self.driver = globals().get(self.driver_name)
        else:
            raise RuntimeError("no driver available %s" % str(self.drivers))

    def log(self, message, table=None):
        """ Logs migrations

        It will not log changes if a logfile is not specified. Defaults
        to sql.log
        """

        isabs = None
        logfilename = self.adapter_args.get('logfile', 'sql.log')
        writelog = bool(logfilename)
        if writelog:
            isabs = os.path.isabs(logfilename)

        if table and table._dbt and writelog and self.folder:
            if isabs:
                table._loggername = logfilename
            else:
                table._loggername = pjoin(self.folder, logfilename)
            logfile = self.file_open(table._loggername, 'a')
            logfile.write(message)
            self.file_close(logfile)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={}, do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self, tablename):
        return '%s_sequence' % tablename

    def trigger_name(self, tablename):
        return '%s_sequence' % tablename

    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        db = table._db
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type, SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)

                try:
                    rtable = db[referenced]
                    rfield = rtable._id
                    rfieldname = rfield.name
                    rtablename = referenced
                except (KeyError, ValueError, AttributeError), e:
                    LOGGER.debug('Error: %s' % e)
                    try:
                        rtablename, rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                    except Exception, e:
                        LOGGER.debug('Error: %s' % e)
                        raise KeyError('Cannot resolve reference %s in %s definition' % (referenced, table._tablename))

                # must be a reference to a primary key or unique field
                if getattr(rtable, '_primarykey', None) and rfieldname in rtable._primarykey or \
                        rfield.unique:
                    ftype = types[rfield.type[:9]] % \
                        dict(length=rfield.length)
                    # multicolumn primary key reference?
                    if not rfield.unique and len(rtable._primarykey) > 1:
                        # then it has to be a table level FK
                        if rtablename not in TFK:
                            TFK[rtablename] = {}
                        TFK[rtablename][rfieldname] = field_name
                    else:
                        ftype = ftype + \
                            types['reference FK'] % dict(
                                constraint_name = constraint_name,
                                foreign_key = rtable.sqlsafe + ' (' + rfield.sqlsafe_name + ')',
                                table_name = table.sqlsafe,
                                field_name = field.sqlsafe_name,
                                on_delete_action = field.ondelete)
                else:
                    # guess the id field name for the referenced table
                    if referenced in db:
                        id_fieldname = db[referenced]._id.sqlsafe_name
                    elif referenced == tablename:
                        id_fieldname = table._id.sqlsafe_name
                    else:
                        id_fieldname = self.QUOTE_TEMPLATE % 'id'

                    if referenced == tablename:
                        real_referenced = db[referenced].sqlsafe
                    else:
                        real_referenced = (referenced in db
                                           and db[referenced].sqlsafe
                                           or referenced)
                    rfield = db[referenced]._id
                    ftype = types[field_type[:9]] % dict(
                        index_name = self.QUOTE_TEMPLATE % (field_name + '__idx'),
                        field_name = field.sqlsafe_name,
                        constraint_name = self.QUOTE_TEMPLATE % constraint_name,
                        foreign_key = '%s (%s)' % (real_referenced, rfield.sqlsafe_name),
                        on_delete_action = field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                precision, scale = map(int, field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision, scale=scale)
            elif field_type.startswith('geo'):
                if not hasattr(self, 'srid'):
                    raise RuntimeError('Adapter does not support geometry')
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError(
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # parameters: schema, srid, dimension
                    dimension = 2
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError('Field: unknown field type: %s for %s' % \
                    (field_type, field_name))
            else:
                ftype = types[field_type] \
                    % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier

            # add to the list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)

            if field.notnull and not field.default is None:
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and sql_fields_aux
                # is used for create tables.
                # The reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)

            # geometry fields are added after the table has been created
            if not (self.dbengine == 'postgres' and \
                    field_type.startswith('geom')):
                fields.append('%s %s' % (field.sqlsafe_name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY (%s)' % (self.QUOTE_TEMPLATE % table._id.name))
            engine = self.adapter_args.get('engine', 'InnoDB')
            other = ' ENGINE=%s CHARACTER SET utf8;' % engine

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = [self.QUOTE_TEMPLATE % pk for pk in db[rtablename]._primarykey]
            fkeys = [self.QUOTE_TEMPLATE % rfields[k].name for k in pkeys]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                    table_name = table.sqlsafe,
                    field_name = ', '.join(fkeys),
                    foreign_table = table.sqlsafe,
                    foreign_key = ', '.join(pkeys),
                    on_delete_action = field.ondelete)

        table_rname = table.sqlsafe

        if getattr(table, '_primarykey', None):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (table.sqlsafe, fields,
                 self.PRIMARY_KEY(', '.join([self.QUOTE_TEMPLATE % pk for pk in table._primarykey])), other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (table.sqlsafe, fields, other)

        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')] \
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory') \
                or self.uri.startswith('spatialite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))

        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                self.log('timestamp: %s\n%s\n'
                         % (datetime.datetime.today().isoformat(),
                            query), table)
            if not fake_migrate:
                self.create_sequence_and_triggers(query, table)
                table._db.commit()
                # Postgres geometry fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    self.log('faked!\n', table)
                else:
                    self.log('success!\n', table)
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                raise RuntimeError('File %s appears corrupted' % table._dbt)
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(
                    table,
                    sql_fields, sql_fields_old,
                    sql_fields_aux, None,
                    fake_migrate=fake_migrate
                    )
        return query

    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):

        # the logfile argument is deprecated (logging moved to self.log)
        db = table._db
        db._migrated.append(table._tablename)
        tablename = table._tablename
        def fix(item):
            k, v = item
            if not isinstance(v, dict):
                v = dict(type='unknown', sql=v)
            if self.ignore_field_case is not True: return k, v
            return k.lower(), v
        # make sure all field names are lower case to avoid
        # migrations because of case change
        sql_fields = dict(map(fix, sql_fields.iteritems()))
        sql_fields_old = dict(map(fix, sql_fields_old.iteritems()))
        sql_fields_aux = dict(map(fix, sql_fields_aux.iteritems()))
        if db._debug:
            logging.debug('migrating %s to %s' % (sql_fields_old, sql_fields))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        new_add = self.concat_add(tablename)

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                if self.dbengine in ('postgres',) and \
                        sql_fields[key]['type'].startswith('geometry'):
                    # 'sql' == ftype in sql
                    query = [sql_fields[key]['sql']]
                else:
                    query = ['ALTER TABLE %s ADD %s %s;' % \
                             (table.sqlsafe, key,
                              sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine in ('sqlite', 'spatialite'):
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                ftype = sql_fields_old[key]['type']
                if (self.dbengine in ('postgres',) and
                        ftype.startswith('geometry')):
                    geotype, parms = ftype[:-1].split('(')
                    schema = parms.split(',')[0]
                    query = [("SELECT DropGeometryColumn ('%(schema)s', "
                              "'%(table)s', '%(field)s');") %
                             dict(schema=schema, table=tablename, field=key)]
                elif self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP %s;' %
                             (self.QUOTE_TEMPLATE % tablename, self.QUOTE_TEMPLATE % key)]
                else:
                    query = ['ALTER TABLE %s DROP COLUMN %s;' %
                             (self.QUOTE_TEMPLATE % tablename, self.QUOTE_TEMPLATE % key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                    and not (key in table.fields and
                             isinstance(table[key].type, SQLCustomType)) \
                    and not sql_fields[key]['type'].startswith('reference') \
                    and not sql_fields[key]['type'].startswith('double') \
                    and not sql_fields[key]['type'].startswith('id'):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if self.dbengine in ('firebird',):
                    drop_expr = 'ALTER TABLE %s DROP %s;'
                else:
                    drop_expr = 'ALTER TABLE %s DROP COLUMN %s;'
                key_tmp = key + '__tmp'
                query = ['ALTER TABLE %s ADD %s %s;' % (self.QUOTE_TEMPLATE % t, self.QUOTE_TEMPLATE % key_tmp, tt),
                         'UPDATE %s SET %s=%s;' %
                         (self.QUOTE_TEMPLATE % t, self.QUOTE_TEMPLATE % key_tmp, self.QUOTE_TEMPLATE % key),
                         drop_expr % (self.QUOTE_TEMPLATE % t, self.QUOTE_TEMPLATE % key),
                         'ALTER TABLE %s ADD %s %s;' %
                         (self.QUOTE_TEMPLATE % t, self.QUOTE_TEMPLATE % key, tt),
                         'UPDATE %s SET %s=%s;' %
                         (self.QUOTE_TEMPLATE % t, self.QUOTE_TEMPLATE % key, self.QUOTE_TEMPLATE % key_tmp),
                         drop_expr % (self.QUOTE_TEMPLATE % t, self.QUOTE_TEMPLATE % key_tmp)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                self.log('timestamp: %s\n'
                         % datetime.datetime.today().isoformat(), table)
                db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    self.log(sub_query + '\n', table)
                    if fake_migrate:
                        if db._adapter.commit_on_alter_table:
                            self.save_dbt(table, sql_fields_current)
                        self.log('faked!\n', table)
                    else:
                        self.execute(sub_query)
                        # Caveat: mysql, oracle and firebird do not allow
                        # multiple ALTER TABLE statements in one transaction,
                        # so we must commit partial transactions and
                        # update table._dbt after each alter table.
                        if db._adapter.commit_on_alter_table:
                            db.commit()
                            self.save_dbt(table, sql_fields_current)
                            self.log('success!\n', table)

            elif metadata_change:
                self.save_dbt(table, sql_fields_current)

        if metadata_change and not (query and db._adapter.commit_on_alter_table):
            db.commit()
            self.save_dbt(table, sql_fields_current)
            self.log('success!\n', table)

    def save_dbt(self, table, sql_fields_current):
        tfile = self.file_open(table._dbt, 'w')
        pickle.dump(sql_fields_current, tfile)
        self.file_close(tfile)

    def LOWER(self, first):
        return 'LOWER(%s)' % self.expand(first)

    def UPPER(self, first):
        return 'UPPER(%s)' % self.expand(first)

    def COUNT(self, first, distinct=None):
        return ('COUNT(%s)' if not distinct else 'COUNT(DISTINCT %s)') \
            % self.expand(first)

    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def EPOCH(self, first):
        return self.EXTRACT(first, 'epoch')

    def LENGTH(self, first):
        return "LENGTH(%s)" % self.expand(first)

    def AGGREGATE(self, first, what):
        return "%s(%s)" % (what, self.expand(first))

    def JOIN(self):
        return 'JOIN'

    def LEFT_JOIN(self):
        return 'LEFT JOIN'

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default, field_type)

    def COALESCE(self, first, second):
        expressions = [self.expand(first)] + [self.expand(e) for e in second]
        return 'COALESCE(%s)' % ','.join(expressions)

    def COALESCE_ZERO(self, first):
        return 'COALESCE(%s,0)' % self.expand(first)

    def RAW(self, first):
        return first

    def ALLOW_NULL(self):
        return ''

    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key

    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table.sqlsafe]

    def drop(self, table, mode=''):
        db = table._db
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                self.log(query + '\n', table)
            self.execute(query)
        db.commit()
        del db[table._tablename]
        del db.tables[db.tables.index(table._tablename)]
        db._remove_references_to(table)
        if table._dbt:
            self.file_delete(table._dbt)
            self.log('success!\n', table)

    def _insert(self, table, fields):
        table_rname = table.sqlsafe
        if fields:
            keys = ','.join(f.sqlsafe_name for f, v in fields)
            values = ','.join(self.expand(v, f.type) for f, v in fields)
            return 'INSERT INTO %s(%s) VALUES (%s);' % (table_rname, keys, values)
        else:
            return self._insert_empty(table)

    def _insert_empty(self, table):
        return 'INSERT INTO %s DEFAULT VALUES;' % (table.sqlsafe)

    def insert(self, table, fields):
        query = self._insert(table, fields)
        try:
            self.execute(query)
        except Exception:
            e = sys.exc_info()[1]
            if hasattr(table, '_on_insert_error'):
                return table._on_insert_error(table, fields, e)
            raise e
        if hasattr(table, '_primarykey'):
            mydict = dict([(k[0].name, k[1]) for k in fields if k[0].name in table._primarykey])
            if mydict != {}:
                return mydict
        id = self.lastrowid(table)
        if hasattr(table, '_primarykey') and len(table._primarykey) == 1:
            id = {table._primarykey[0]: id}
        if not isinstance(id, (int, long)):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self, table, items):
        return [self.insert(table, item) for item in items]

    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        if not second:
            return '(1=0)'
        items = ','.join(self.expand(item, first.type) for item in second)
        return '(%s IN (%s))' % (self.expand(first), items)
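        # e.g. BELONGS(person.id, [1, 2, 3]) renders '(person.id IN (1,2,3))';
        # an empty candidate list short-circuits to the always-false '(1=0)'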

    def REGEXP(self, first, second):
        "regular expression operator"
        raise NotImplementedError

    def LIKE(self, first, second):
        "case sensitive like operator"
        raise NotImplementedError

    def ILIKE(self, first, second):
        "case insensitive like operator"
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second + '%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand('%' + second, 'string'))

    def CONTAINS(self, first, second, case_sensitive=False):
        if first.type in ('string', 'text', 'json'):
            if isinstance(second, Expression):
                second = Expression(None, self.CONCAT('%', Expression(
                    None, self.REPLACE(second, ('%', '%%'))), '%'))
            else:
                second = '%' + str(second).replace('%', '%%') + '%'
        elif first.type.startswith('list:'):
            if isinstance(second, Expression):
                second = Expression(None, self.CONCAT(
                    '%|', Expression(None, self.REPLACE(
                        Expression(None, self.REPLACE(
                            second, ('%', '%%'))), ('|', '||'))), '|%'))
            else:
                second = '%|' + str(second).replace('%', '%%') \
                                           .replace('|', '||') + '|%'
        op = case_sensitive and self.LIKE or self.ILIKE
        return op(first, second)
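        # e.g. contains('ab') on a string/text field matches via LIKE '%ab%';
        # on a list:* field the pattern becomes '%|ab|%' so that only whole
        # pipe-delimited stored items can match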

    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def LT(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s < None" % first)
        return '(%s < %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def LE(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s <= None" % first)
        return '(%s <= %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def GT(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s > None" % first)
        return '(%s > %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def GE(self, first, second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s >= None" % first)
        return '(%s >= %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def is_numerical_type(self, ftype):
        return ftype in ('integer', 'boolean', 'double', 'bigint') or \
            ftype.startswith('decimal')

    def REPLACE(self, first, (second, third)):
        return 'REPLACE(%s,%s,%s)' % (self.expand(first, 'string'),
                                      self.expand(second, 'string'),
                                      self.expand(third, 'string'))

    def CONCAT(self, *items):
        return '(%s)' % ' || '.join(self.expand(x, 'string') for x in items)

    def ADD(self, first, second):
        if self.is_numerical_type(first.type) or isinstance(first.type, Field):
            return '(%s + %s)' % (self.expand(first),
                                  self.expand(second, first.type))
        else:
            return self.CONCAT(first, second)

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        table_rname = self.table_alias(first)
        if use_common_filters(second):
            second = self.common_filter(second, [first._tablename])
        return ('%s ON %s') % (self.expand(table_rname), self.expand(second))

    def INVERT(self, first):
        return '%s DESC' % self.expand(first)

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))

    def CAST(self, first, second):
        return 'CAST(%s AS %s)' % (first, second)

    def expand(self, expression, field_type=None, colnames=False):
        if isinstance(expression, Field):
            et = expression.table
            if not colnames:
                table_rname = et._ot and self.QUOTE_TEMPLATE % et._tablename or et._rname or self.QUOTE_TEMPLATE % et._tablename
                out = '%s.%s' % (table_rname, expression._rname or (self.QUOTE_TEMPLATE % (expression.name)))
            else:
                out = '%s.%s' % (self.QUOTE_TEMPLATE % et._tablename, self.QUOTE_TEMPLATE % expression.name)
            if field_type == 'string' and not expression.type in (
                    'string', 'text', 'json', 'password'):
                out = self.CAST(out, self.types['text'])
            return out
        elif isinstance(expression, (Expression, Query)):
            first = expression.first
            second = expression.second
            op = expression.op
            optional_args = expression.optional_args or {}
            if not second is None:
                out = op(first, second, **optional_args)
            elif not first is None:
                out = op(first, **optional_args)
            elif isinstance(op, str):
                if op.endswith(';'):
                    op = op[:-1]
                out = '(%s)' % op
            else:
                out = op()
            return out
        elif field_type:
            return str(self.represent(expression, field_type))
        elif isinstance(expression, (list, tuple)):
            return ','.join(self.represent(item, field_type) \
                            for item in expression)
        elif isinstance(expression, bool):
            return '1' if expression else '0'
        else:
            return str(expression)

    def table_alias(self, tbl):
        if isinstance(tbl, basestring):
            tbl = self.db[tbl]
        return tbl.sqlsafe_alias

    def alias(self, table, alias):
        """
        Given a table object, makes a new table object
        with alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._ot or other.sqlsafe
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = other
        return other

    def _truncate(self, table, mode=''):
        return ['TRUNCATE TABLE %s %s;' % (table.sqlsafe, mode or '')]

    def truncate(self, table, mode=''):
        try:
            queries = table._db._adapter._truncate(table, mode)
            for query in queries:
                self.log(query + '\n', table)
                self.execute(query)
            self.log('success!\n', table)
        finally:
            pass

    def _update(self, tablename, query, fields):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.sqlsafe_name,
                                     self.expand(value, field.type)) \
                          for (field, value) in fields])
        tablename = self.db[tablename].sqlsafe
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)
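        # sketch of the generated SQL, assuming the default '"%s"' quoting:
        #   UPDATE "person" SET "name"='Jim' WHERE ("person"."id" = 1);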

    def update(self, tablename, query, fields):
        sql = self._update(tablename, query, fields)
        try:
            self.execute(sql)
        except Exception:
            e = sys.exc_info()[1]
            table = self.db[tablename]
            if hasattr(table, '_on_update_error'):
                return table._on_update_error(table, query, fields, e)
            raise e
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self, tablename, query):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        tablename = self.db[tablename].sqlsafe
        return 'DELETE FROM %s%s;' % (tablename, sql_w)

    def delete(self, tablename, query):
        sql = self._delete(tablename, query)
        ## special code to handle CASCADE in SQLite & SpatiaLite
        db = self.db
        table = db[tablename]
        if self.dbengine in ('sqlite', 'spatialite') and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]
        ## end special code
        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None
        ## special code to handle CASCADE in SQLite & SpatiaLite
        if self.dbengine in ('sqlite', 'spatialite') and counter:
            for field in table._referenced_by:
                if field.type == 'reference ' + table._tablename \
                        and field.ondelete == 'CASCADE':
                    db(field.belongs(deleted)).delete()
        ## end special code
        return counter

    def get_table(self, query):
        tablenames = self.tables(query)
        if len(tablenames) == 1:
            return tablenames[0]
        elif len(tablenames) < 1:
            raise RuntimeError("No table selected")
        else:
            raise RuntimeError("Too many tables selected")

    def expand_all(self, fields, tablenames):
        db = self.db
        new_fields = []
        append = new_fields.append
        for item in fields:
            if isinstance(item, SQLALL):
                new_fields += item._table
            elif isinstance(item, str):
                m = self.REGEX_TABLE_DOT_FIELD.match(item)
                if m:
                    tablename, fieldname = m.groups()
                    append(db[tablename][fieldname])
                else:
                    append(Expression(db, lambda item=item: item))
            else:
                append(item)
        # if no fields specified take them all from the requested tables
        if not new_fields:
            for table in tablenames:
                for field in db[table]:
                    append(field)
        return new_fields

    def _select(self, query, fields, attributes):
        tables = self.tables
        for key in set(attributes.keys()) - SELECT_ARGS:
            raise SyntaxError('invalid select attribute: %s' % key)
        args_get = attributes.get
        tablenames = tables(query)
        tablenames_for_common_filters = tablenames
        for field in fields:
            if isinstance(field, basestring):
                m = self.REGEX_TABLE_DOT_FIELD.match(field)
                if m:
                    tn, fn = m.groups()
                    field = self.db[tn][fn]
            for tablename in tables(field):
                if not tablename in tablenames:
                    tablenames.append(tablename)

        if len(tablenames) < 1:
            raise SyntaxError('Set: no tables selected')
        def colexpand(field):
            return self.expand(field, colnames=True)
        self._colnames = map(colexpand, fields)
        def geoexpand(field):
            if isinstance(field.type, str) and field.type.startswith('geometry') and isinstance(field, Field):
                field = field.st_astext()
            return self.expand(field)
        sql_f = ', '.join(map(geoexpand, fields))
        sql_o = ''
        sql_s = ''
        left = args_get('left', False)
        inner_join = args_get('join', False)
        distinct = args_get('distinct', False)
        groupby = args_get('groupby', False)
        orderby = args_get('orderby', False)
        having = args_get('having', False)
        limitby = args_get('limitby', False)
        orderby_on_limitby = args_get('orderby_on_limitby', True)
        for_update = args_get('for_update', False)
        if self.can_select_for_update is False and for_update is True:
            raise SyntaxError('invalid select attribute: for_update')
        if distinct is True:
            sql_s += 'DISTINCT'
        elif distinct:
            sql_s += 'DISTINCT ON (%s)' % distinct
        if inner_join:
            icommand = self.JOIN()
            if not isinstance(inner_join, (tuple, list)):
                inner_join = [inner_join]
            ijoint = [t._tablename for t in inner_join
                      if not isinstance(t, Expression)]
            ijoinon = [t for t in inner_join if isinstance(t, Expression)]
            itables_to_merge = {}
            [itables_to_merge.update(
                dict.fromkeys(tables(t))) for t in ijoinon]
            ijoinont = [t.first._tablename for t in ijoinon]
            [itables_to_merge.pop(t) for t in ijoinont
             if t in itables_to_merge]
            iimportant_tablenames = ijoint + ijoinont + itables_to_merge.keys()
            iexcluded = [t for t in tablenames
                         if not t in iimportant_tablenames]
        if left:
            join = attributes['left']
            command = self.LEFT_JOIN()
            if not isinstance(join, (tuple, list)):
                join = [join]
            joint = [t._tablename for t in join
                     if not isinstance(t, Expression)]
            joinon = [t for t in join if isinstance(t, Expression)]

            tables_to_merge = {}
            [tables_to_merge.update(
                dict.fromkeys(tables(t))) for t in joinon]
            joinont = [t.first._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
            tablenames_for_common_filters = [t for t in tablenames
                                             if not t in joinont]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames
                        if not t in important_tablenames]
        else:
            excluded = tablenames

        if use_common_filters(query):
            query = self.common_filter(query, tablenames_for_common_filters)
        sql_w = ' WHERE ' + self.expand(query) if query else ''

        if inner_join and not left:
            sql_t = ', '.join([self.table_alias(t) for t in iexcluded + \
                                   itables_to_merge.keys()])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, t)
        elif not inner_join and left:
            sql_t = ', '.join([self.table_alias(t) for t in excluded + \
                                   tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command,
                                     ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, t)
        elif inner_join and left:
            all_tables_in_query = set(important_tablenames + \
                                      iimportant_tablenames + \
                                      tablenames)
            tables_in_joinon = set(joinont + ijoinont)
            tables_not_in_joinon = \
                all_tables_in_query.difference(tables_in_joinon)
            sql_t = ','.join([self.table_alias(t) for t in tables_not_in_joinon])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, t)
            if joint:
                sql_t += ' %s %s' % (command,
                                     ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, t)
        else:
            sql_t = ', '.join(self.table_alias(t) for t in tablenames)
        if groupby:
            if isinstance(groupby, (list, tuple)):
                groupby = xorify(groupby)
            sql_o += ' GROUP BY %s' % self.expand(groupby)
            if having:
                sql_o += ' HAVING %s' % attributes['having']
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self.RANDOM()
            else:
                sql_o += ' ORDER BY %s' % self.expand(orderby)
        if (limitby and not groupby and tablenames and orderby_on_limitby and not orderby):
            sql_o += ' ORDER BY %s' % ', '.join(
                [self.db[t].sqlsafe + '.' + self.db[t][x].sqlsafe_name for t in tablenames for x in (
                    hasattr(self.db[t], '_primarykey') and self.db[t]._primarykey
                    or ['_id']
                    )
                 ]
                )

        sql = self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
        if for_update and self.can_select_for_update is True:
            sql = sql.rstrip(';') + ' FOR UPDATE;'
        return sql

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % \
            (sql_s, sql_f, sql_t, sql_w, sql_o)
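        # e.g. limitby=(10, 30) appends ' LIMIT 20 OFFSET 10' (20 rows starting
        # at row 10); adapters for dialects without LIMIT/OFFSET override this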

    def _fetchall(self):
        return self.cursor.fetchall()

    def _select_aux(self, sql, fields, attributes):
        args_get = attributes.get
        cache = args_get('cache', None)
        if not cache:
            self.execute(sql)
            rows = self._fetchall()
        else:
            (cache_model, time_expire) = cache
            key = self.uri + '/' + sql + '/rows'
            if len(key) > 200: key = hashlib_md5(key).hexdigest()
            def _select_aux2():
                self.execute(sql)
                return self._fetchall()
            rows = cache_model(key, _select_aux2, time_expire)
        if isinstance(rows, tuple):
            rows = list(rows)
        limitby = args_get('limitby', None) or (0,)
        rows = self.rowslice(rows, limitby[0], None)
        processor = args_get('processor', self.parse)
        cacheable = args_get('cacheable', False)
        return processor(rows, fields, self._colnames, cacheable=cacheable)

    def select(self, query, fields, attributes):
        """
        Always returns a Rows object, possibly empty.
        """
        sql = self._select(query, fields, attributes)
        cache = attributes.get('cache', None)
        if cache and attributes.get('cacheable', False):
            del attributes['cache']
            (cache_model, time_expire) = cache
            key = self.uri + '/' + sql
            if len(key) > 200: key = hashlib_md5(key).hexdigest()
            args = (sql, fields, attributes)
            return cache_model(
                key,
                lambda self=self, args=args: self._select_aux(*args),
                time_expire)
        else:
            return self._select_aux(sql, fields, attributes)

    def _count(self, query, distinct=None):
        tablenames = self.tables(query)
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, tablenames)
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_t = ','.join(self.table_alias(t) for t in tablenames)
        if distinct:
            if isinstance(distinct, (list, tuple)):
                distinct = xorify(distinct)
            sql_d = self.expand(distinct)
            return 'SELECT count(DISTINCT %s) FROM %s%s;' % \
                (sql_d, sql_t, sql_w)
        return 'SELECT count(*) FROM %s%s;' % (sql_t, sql_w)

    def count(self, query, distinct=None):
        self.execute(self._count(query, distinct))
        return self.cursor.fetchone()[0]
1904
1916
1920
1924
1930
1933
1936
1939
1942
1945
1947 return '%s_%s__constraint' % (table,fieldname)
1948
1951
    def log_execute(self, *a, **b):
        if not self.connection: raise ValueError(a[0])
        command = a[0]
        if hasattr(self, 'filter_sql_command'):
            command = self.filter_sql_command(command)
        if self.db._debug:
            LOGGER.debug('SQL: %s' % command)
        self.db._lastsql = command
        t0 = time.time()
        ret = self.cursor.execute(command, *a[1:], **b)
        self.db._timings.append((command, time.time() - t0))
        del self.db._timings[:-TIMINGSSIZE]
        return ret

    def execute(self, *a, **b):
        return self.log_execute(*a, **b)

    def represent(self, obj, fieldtype):
        field_is_type = fieldtype.startswith
        if isinstance(obj, CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            value = fieldtype.encoder(obj)
            if fieldtype.type in ('string', 'text', 'json'):
                return self.adapt(value)
            return value
        if isinstance(obj, (Expression, Field)):
            return str(obj)
        if field_is_type('list:'):
            if not obj:
                obj = []
            elif not isinstance(obj, (list, tuple)):
                obj = [obj]
            if field_is_type('list:string'):
                obj = map(str, obj)
            else:
                obj = map(int, [o for o in obj if o != ''])
        # we don't want to bar_encode json objects
        if isinstance(obj, (list, tuple)) and (not fieldtype == "json"):
            obj = bar_encode(obj)
        if obj is None:
            return 'NULL'
        if obj == '' and not fieldtype[:2] in ['st', 'te', 'js', 'pa', 'up']:
            return 'NULL'
        r = self.represent_exceptions(obj, fieldtype)
        if not r is None:
            return r
        if fieldtype == 'boolean':
            if obj and not str(obj)[:1].upper() in '0F':
                return self.smart_adapt(self.TRUE)
            else:
                return self.smart_adapt(self.FALSE)
        if fieldtype == 'id' or fieldtype == 'integer':
            return str(long(obj))
        if field_is_type('decimal'):
            return str(obj)
        elif field_is_type('reference'):
            # check for tablename first
            referenced = fieldtype[9:].strip()
            if referenced in self.db.tables:
                return str(long(obj))
            p = referenced.partition('.')
            if p[2] != '':
                try:
                    ftype = self.db[p[0]][p[2]].type
                    return self.represent(obj, ftype)
                except (ValueError, KeyError):
                    return repr(obj)
            elif isinstance(obj, (Row, Reference)):
                return str(obj['id'])
            return str(long(obj))
        elif fieldtype == 'double':
            return repr(float(obj))
        if isinstance(obj, unicode):
            obj = obj.encode(self.db_codec)
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat(self.T_SEP)[:19]
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10] + self.T_SEP + '00:00:00'
            else:
                obj = str(obj)
        elif fieldtype == 'time':
            if isinstance(obj, datetime.time):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        elif fieldtype == 'json':
            if not self.native_json:
                if have_serializers:
                    obj = serializers.json(obj)
                elif simplejson:
                    obj = simplejson.dumps(obj)
                else:
                    raise RuntimeError("missing simplejson")
        if not isinstance(obj, bytes):
            obj = bytes(obj)
        try:
            obj.decode(self.db_codec)
        except:
            obj = obj.decode('latin1').encode(self.db_codec)
        return self.adapt(obj)

    def represent_exceptions(self, obj, fieldtype):
        return None

    def lastrowid(self, table):
        return None

    def rowslice(self, rows, minimum=0, maximum=None):
        """
        By default this function does nothing;
        overload when db does not do slicing.
        """
        return rows

    def parse_value(self, value, field_type, blob_decode=True):
        if field_type != 'blob' and isinstance(value, str):
            try:
                value = value.decode(self.db._db_codec)
            except Exception:
                pass
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        if isinstance(field_type, SQLCustomType):
            value = field_type.decoder(value)
        if not isinstance(field_type, str) or value is None:
            return value
        elif field_type in ('string', 'text', 'password', 'upload', 'dict'):
            return value
        elif field_type.startswith('geo'):
            return value
        elif field_type == 'blob' and not blob_decode:
            return value
        else:
            key = REGEX_TYPE.match(field_type).group(0)
            return self.parsemap[key](value, field_type)
2098
2100 referee = field_type[10:].strip()
2101 if not '.' in referee:
2102 value = Reference(value)
2103 value._table, value._record = self.db[referee], None
2104 return value
2105
2107 return value == self.TRUE or str(value)[:1].lower() == 't'
2108
2110 if isinstance(value, datetime.datetime):
2111 return value.date()
2112 if not isinstance(value, (datetime.date,datetime.datetime)):
2113 (y, m, d) = map(int, str(value)[:10].strip().split('-'))
2114 value = datetime.date(y, m, d)
2115 return value
2116
2118 if not isinstance(value, datetime.time):
2119 time_items = map(int,str(value)[:8].strip().split(':')[:3])
2120 if len(time_items) == 3:
2121 (h, mi, s) = time_items
2122 else:
2123 (h, mi, s) = time_items + [0]
2124 value = datetime.time(h, mi, s)
2125 return value
2126
2128 if not isinstance(value, datetime.datetime):
2129 value = str(value)
2130 date_part,time_part,timezone = value[:10],value[11:19],value[19:]
2131 if '+' in timezone:
2132 ms,tz = timezone.split('+')
2133 h,m = tz.split(':')
2134 dt = datetime.timedelta(seconds=3600*int(h)+60*int(m))
2135 elif '-' in timezone:
2136 ms,tz = timezone.split('-')
2137 h,m = tz.split(':')
2138 dt = -datetime.timedelta(seconds=3600*int(h)+60*int(m))
2139 else:
2140 dt = None
2141 (y, m, d) = map(int,date_part.split('-'))
2142 time_parts = time_part and time_part.split(':')[:3] or (0,0,0)
2143 while len(time_parts)<3: time_parts.append(0)
2144 time_items = map(int,time_parts)
2145 (h, mi, s) = time_items
2146 value = datetime.datetime(y, m, d, h, mi, s)
2147 if dt:
2148 value = value + dt
2149 return value
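        # e.g. '2014-03-02 09:30:00+02:00' is parsed to 09:30 and the +2h
        # offset is then added, giving a naive datetime of 11:30 (no tzinfo)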
2150
2152 return base64.b64decode(str(value))
2153
2155 decimals = int(field_type[8:-1].split(',')[-1])
2156 if self.dbengine in ('sqlite', 'spatialite'):
2157 value = ('%.' + str(decimals) + 'f') % value
2158 if not isinstance(value, decimal.Decimal):
2159 value = decimal.Decimal(str(value))
2160 return value
2161
2166
2171
2176
2177 - def parse_id(self, value, field_type):
2179
2182
2185
2187 if not self.native_json:
2188 if not isinstance(value, basestring):
2189 raise RuntimeError('json data not a string')
2190 if isinstance(value, unicode):
2191 value = value.encode('utf-8')
2192 if have_serializers:
2193 value = serializers.loads_json(value)
2194 elif simplejson:
2195 value = simplejson.loads(value)
2196 else:
2197 raise RuntimeError("missing simplejson")
2198 return value
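# Example (illustrative, non-native path): the stored text is deserialized
# with whichever JSON backend is available:
#   parse_json('{"a": 1}', 'json')  -> the decoded dict {"a": 1}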
2199
2201 self.parsemap = {
2202 'id':self.parse_id,
2203 'integer':self.parse_integer,
2204 'bigint':self.parse_integer,
2205 'float':self.parse_double,
2206 'double':self.parse_double,
2207 'reference':self.parse_reference,
2208 'boolean':self.parse_boolean,
2209 'date':self.parse_date,
2210 'time':self.parse_time,
2211 'datetime':self.parse_datetime,
2212 'blob':self.parse_blob,
2213 'decimal':self.parse_decimal,
2214 'json':self.parse_json,
2215 'list:integer':self.parse_list_integers,
2216 'list:reference':self.parse_list_references,
2217 'list:string':self.parse_list_strings,
2218 }
2219
2220 - def parse(self, rows, fields, colnames, blob_decode=True,
2221 cacheable = False):
2222 db = self.db
2223 virtualtables = []
2224 new_rows = []
2225 tmps = []
2226 for colname in colnames:
2227 col_m = self.REGEX_TABLE_DOT_FIELD.match(colname)
2228 if not col_m:
2229 tmps.append(None)
2230 else:
2231 tablename, fieldname = col_m.groups()
2232 table = db[tablename]
2233 field = table[fieldname]
2234 ft = field.type
2235 tmps.append((tablename, fieldname, table, field, ft))
2236 for (i,row) in enumerate(rows):
2237 new_row = Row()
2238 for (j,colname) in enumerate(colnames):
2239 value = row[j]
2240 tmp = tmps[j]
2241 if tmp:
2242 (tablename,fieldname,table,field,ft) = tmp
2243 colset = new_row.get(tablename, None)
2244 if colset is None:
2245 colset = new_row[tablename] = Row()
2246 if tablename not in virtualtables:
2247 virtualtables.append(tablename)
2248 value = self.parse_value(value,ft,blob_decode)
2249 if field.filter_out:
2250 value = field.filter_out(value)
2251 colset[fieldname] = value
2252
2253
2254 if ft=='id' and fieldname!='id' and \
2255 not 'id' in table.fields:
2256 colset['id'] = value
2257
2258 if ft == 'id' and not cacheable:
2259
2260
2261
2262 if isinstance(self, GoogleDatastoreAdapter):
2263 id = value.key.id() if self.use_ndb else value.key().id_or_name()
2264 colset[fieldname] = id
2265 colset.gae_item = value
2266 else:
2267 id = value
2268 colset.update_record = RecordUpdater(colset,table,id)
2269 colset.delete_record = RecordDeleter(table,id)
2270 if table._db._lazy_tables:
2271 colset['__get_lazy_reference__'] = LazyReferenceGetter(table, id)
2272 for rfield in table._referenced_by:
2273 referee_link = db._referee_name and \
2274 db._referee_name % dict(
2275 table=rfield.tablename,field=rfield.name)
2276 if referee_link and not referee_link in colset:
2277 colset[referee_link] = LazySet(rfield,id)
2278 else:
2279 if not '_extra' in new_row:
2280 new_row['_extra'] = Row()
2281 new_row['_extra'][colname] = \
2282 self.parse_value(value,
2283 fields[j].type,blob_decode)
2284 new_column_name = \
2285 REGEX_SELECT_AS_PARSER.search(colname)
2286 if new_column_name is not None:
2287 column_name = new_column_name.groups(0)
2288 setattr(new_row,column_name[0],value)
2289 new_rows.append(new_row)
2290 rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
2291
2292
2293 for tablename in virtualtables:
2294 table = db[tablename]
2295 fields_virtual = [(f,v) for (f,v) in table.iteritems()
2296 if isinstance(v,FieldVirtual)]
2297 fields_lazy = [(f,v) for (f,v) in table.iteritems()
2298 if isinstance(v,FieldMethod)]
2299 if fields_virtual or fields_lazy:
2300 for row in rowsobj.records:
2301 box = row[tablename]
2302 for f,v in fields_virtual:
2303 try:
2304 box[f] = v.f(row)
2305 except AttributeError:
2306 pass
2307 for f,v in fields_lazy:
2308 try:
2309 box[f] = (v.handler or VirtualCommand)(v.f,row)
2310 except AttributeError:
2311 pass
2312
2313
2314 for item in table.virtualfields:
2315 try:
2316 rowsobj = rowsobj.setvirtualfields(**{tablename:item})
2317 except (KeyError, AttributeError):
2318
2319 pass
2320 return rowsobj
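# Illustrative usage (names assumed; normally invoked internally by select):
# parse() turns the raw driver tuples into a Rows object whose records carry
# the usual DAL helpers:
#   rows = db._adapter.parse(raw_rows, fields, colnames)
#   rows[0].person.update_record(name='Jim')   # attached where ft == 'id'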
2321
2323 tenant_fieldname = self.db._request_tenant
2324
2325 for tablename in tablenames:
2326 table = self.db[tablename]
2327
2328
2329 if table._common_filter is not None:
2330 query = query & table._common_filter(query)
2331
2332
2333 if tenant_fieldname in table:
2334 default = table[tenant_fieldname].default
2335 if default is not None:
2336 newquery = table[tenant_fieldname] == default
2337 if query is None:
2338 query = newquery
2339 else:
2340 query = query & newquery
2341 return query
2342
2343 - def CASE(self,query,t,f):
2344 def represent(x):
2345 types = {type(True):'boolean',type(0):'integer',type(1.0):'double'}
2346 if x is None: return 'NULL'
2347 elif isinstance(x,Expression): return str(x)
2348 else: return self.represent(x,types.get(type(x),'string'))
2349 return Expression(self.db,'CASE WHEN %s THEN %s ELSE %s END' % \
2350 (self.expand(query),represent(t),represent(f)))
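# Illustrative usage (assuming the Query.case wrapper delegates here, as in
# the public DAL API):
#   cond = db.person.name.like('J%')
#   db().select(cond.case('J-name', 'other'))
# expands to: CASE WHEN person.name LIKE 'J%' THEN 'J-name' ELSE 'other' END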
2351
2356
2359
2365 drivers = ('sqlite2','sqlite3')
2366
2367 can_select_for_update = None
2368
2370 return "web2py_extract('%s',%s)" % (what, self.expand(field))
2371
2372 @staticmethod
2374 table = {
2375 'year': (0, 4),
2376 'month': (5, 7),
2377 'day': (8, 10),
2378 'hour': (11, 13),
2379 'minute': (14, 16),
2380 'second': (17, 19),
2381 }
2382 try:
2383 if lookup != 'epoch':
2384 (i, j) = table[lookup]
2385 return int(s[i:j])
2386 else:
2387 return time.mktime(datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timetuple())
2388 except Exception:
2389 return None
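# Example (illustrative): this helper is registered with SQLite as a
# connection-level function (the registration itself happens elsewhere), so
# EXTRACT-style queries work on the stored ISO strings:
#   web2py_extract('year', '2015-03-01 12:30:45')  -> 2015
#   web2py_extract('epoch', '2015-03-01 12:30:45') -> seconds since 1970 (local time)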
2390
2391 @staticmethod
2393 return re.compile(expression).search(item) is not None
2394
2395 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
2396 credential_decoder=IDENTITY, driver_args={},
2397 adapter_args={}, do_connect=True, after_connection=None):
2398 self.db = db
2399 self.dbengine = "sqlite"
2400 self.uri = uri
2401 self.adapter_args = adapter_args
2402 if do_connect: self.find_driver(adapter_args)
2403 self.pool_size = 0
2404 self.folder = folder
2405 self.db_codec = db_codec
2406 self._after_connection = after_connection
2407 self.find_or_make_work_folder()
2408 path_encoding = sys.getfilesystemencoding() \
2409 or locale.getdefaultlocale()[1] or 'utf8'
2410 if uri.startswith('sqlite:memory'):
2411 self.dbpath = ':memory:'
2412 else:
2413 self.dbpath = uri.split('://',1)[1]
2414 if self.dbpath[0] != '/':
2415 if PYTHON_VERSION[0] == 2:
2416 self.dbpath = pjoin(
2417 self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
2418 else:
2419 self.dbpath = pjoin(self.folder, self.dbpath)
2420 if not 'check_same_thread' in driver_args:
2421 driver_args['check_same_thread'] = False
2422 if not 'detect_types' in driver_args and do_connect:
2423 driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
2424 def connector(dbpath=self.dbpath, driver_args=driver_args):
2425 return self.driver.Connection(dbpath, **driver_args)
2426 self.connector = connector
2427 if do_connect: self.reconnect()
2428
2437
2439 tablename = table._tablename
2440 return ['DELETE FROM %s;' % tablename,
2441 "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]
2442
2445
2446 - def REGEXP(self,first,second):
2447 return '(%s REGEXP %s)' % (self.expand(first),
2448 self.expand(second,'string'))
2449
2450 - def select(self, query, fields, attributes):
2451 """
2452 Simulate SELECT ... FOR UPDATE with BEGIN IMMEDIATE TRANSACTION.
2453 Note that the entire database, rather than one record, is locked
2454 (it will be locked eventually anyway by the following UPDATE).
2455 """
2456 if attributes.get('for_update', False) and not 'cache' in attributes:
2457 self.execute('BEGIN IMMEDIATE TRANSACTION;')
2458 return super(SQLiteAdapter, self).select(query, fields, attributes)
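# Illustrative usage (assuming an open SQLite connection):
#   db(db.person.id == 1).select(for_update=True)
# issues BEGIN IMMEDIATE TRANSACTION before the SELECT, locking the database.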
2459
2461 drivers = ('sqlite3','sqlite2')
2462
2463 types = copy.copy(BaseAdapter.types)
2464 types.update(geometry='GEOMETRY')
2465
2466 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
2467 credential_decoder=IDENTITY, driver_args={},
2468 adapter_args={}, do_connect=True, srid=4326, after_connection=None):
2469 self.db = db
2470 self.dbengine = "spatialite"
2471 self.uri = uri
2472 if do_connect: self.find_driver(adapter_args)
2473 self.pool_size = 0
2474 self.folder = folder
2475 self.db_codec = db_codec
2476 self._after_connection = after_connection
2477 self.find_or_make_work_folder()
2478 self.srid = srid
2479 path_encoding = sys.getfilesystemencoding() \
2480 or locale.getdefaultlocale()[1] or 'utf8'
2481 if uri.startswith('spatialite:memory'):
2482 self.dbpath = ':memory:'
2483 else:
2484 self.dbpath = uri.split('://',1)[1]
2485 if self.dbpath[0] != '/':
2486 self.dbpath = pjoin(
2487 self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
2488 if not 'check_same_thread' in driver_args:
2489 driver_args['check_same_thread'] = False
2490 if not 'detect_types' in driver_args and do_connect:
2491 driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
2492 def connector(dbpath=self.dbpath, driver_args=driver_args):
2493 return self.driver.Connection(dbpath, **driver_args)
2494 self.connector = connector
2495 if do_connect: self.reconnect()
2496
2509
2510
2511
2513 return 'AsGeoJSON(%s,%s,%s)' %(self.expand(first),
2514 second['precision'], second['options'])
2515
2516 - def ST_ASTEXT(self, first):
2517 return 'AsText(%s)' %(self.expand(first))
2518
2522
2526
2530
2534
2538
2540 return 'Simplify(%s,%s)' %(self.expand(first),
2541 self.expand(second, 'double'))
2542
2546
2550
2552 field_is_type = fieldtype.startswith
2553 if field_is_type('geo'):
2554 srid = 4326
2555 geotype, parms = fieldtype[:-1].split('(')
2556 parms = parms.split(',')
2557 if len(parms) >= 2:
2558 schema, srid = parms[:2]
2559
2560 value = "ST_GeomFromText('%s',%s)" %(obj, srid)
2561
2562
2563
2564
2565 return value
2566 return BaseAdapter.represent(self, obj, fieldtype)
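# Example (illustrative, assuming the 'geometry(schema,srid,dims)' type
# format used elsewhere in this file):
#   represent('POINT(-95 45)', 'geometry(public,4326,2)')
#       -> "ST_GeomFromText('POINT(-95 45)',4326)"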
2567
2570 drivers = ('zxJDBC_sqlite',)
2571
2572 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
2573 credential_decoder=IDENTITY, driver_args={},
2574 adapter_args={}, do_connect=True, after_connection=None):
2597 self.connector = connector
2598 if do_connect: self.reconnect()
2599
2604
2607
2610 drivers = ('MySQLdb','pymysql', 'mysqlconnector')
2611
2612 commit_on_alter_table = True
2613 support_distributed_transaction = True
2614 types = {
2615 'boolean': 'CHAR(1)',
2616 'string': 'VARCHAR(%(length)s)',
2617 'text': 'LONGTEXT',
2618 'json': 'LONGTEXT',
2619 'password': 'VARCHAR(%(length)s)',
2620 'blob': 'LONGBLOB',
2621 'upload': 'VARCHAR(%(length)s)',
2622 'integer': 'INT',
2623 'bigint': 'BIGINT',
2624 'float': 'FLOAT',
2625 'double': 'DOUBLE',
2626 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2627 'date': 'DATE',
2628 'time': 'TIME',
2629 'datetime': 'DATETIME',
2630 'id': 'INT AUTO_INCREMENT NOT NULL',
2631 'reference': 'INT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2632 'list:integer': 'LONGTEXT',
2633 'list:string': 'LONGTEXT',
2634 'list:reference': 'LONGTEXT',
2635 'big-id': 'BIGINT AUTO_INCREMENT NOT NULL',
2636 'big-reference': 'BIGINT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2637 'reference FK': ', CONSTRAINT `FK_%(constraint_name)s` FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2638 }
2639
2640 QUOTE_TEMPLATE = "`%s`"
2641
2644
2647
2649 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field),
2650 parameters[0], parameters[1])
2651
2652 - def EPOCH(self, first):
2654
2656 return 'CONCAT(%s)' % ','.join(self.expand(x,'string') for x in items)
2657
2658 - def REGEXP(self,first,second):
2659 return '(%s REGEXP %s)' % (self.expand(first),
2660 self.expand(second,'string'))
2661
2662 - def _drop(self,table,mode):
2663
2664 table_rname = table.sqlsafe
2665 return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table_rname,
2666 'SET FOREIGN_KEY_CHECKS=1;']
2667
2669 return 'INSERT INTO %s VALUES (DEFAULT);' % (table.sqlsafe)
2670
2673
2677
2680
2683
2684 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
2685
2686 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2687 credential_decoder=IDENTITY, driver_args={},
2688 adapter_args={}, do_connect=True, after_connection=None):
2689 self.db = db
2690 self.dbengine = "mysql"
2691 self.uri = uri
2692 if do_connect: self.find_driver(adapter_args,uri)
2693 self.pool_size = pool_size
2694 self.folder = folder
2695 self.db_codec = db_codec
2696 self._after_connection = after_connection
2697 self.find_or_make_work_folder()
2698 ruri = uri.split('://',1)[1]
2699 m = self.REGEX_URI.match(ruri)
2700 if not m:
2701 raise SyntaxError(
2702 "Invalid URI string in DAL: %s" % self.uri)
2703 user = credential_decoder(m.group('user'))
2704 if not user:
2705 raise SyntaxError('User required')
2706 password = credential_decoder(m.group('password'))
2707 if not password:
2708 password = ''
2709 host = m.group('host')
2710 if not host:
2711 raise SyntaxError('Host name required')
2712 db = m.group('db')
2713 if not db:
2714 raise SyntaxError('Database name required')
2715 port = int(m.group('port') or '3306')
2716 charset = m.group('charset') or 'utf8'
2717 driver_args.update(db=db,
2718 user=credential_decoder(user),
2719 passwd=credential_decoder(password),
2720 host=host,
2721 port=port,
2722 charset=charset)
2723
2724
2725 def connector(driver_args=driver_args):
2726 return self.driver.connect(**driver_args)
2727 self.connector = connector
2728 if do_connect: self.reconnect()
2729
2731 self.execute('SET FOREIGN_KEY_CHECKS=1;')
2732 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
2733
2735 self.execute('select last_insert_id();')
2736 return int(self.cursor.fetchone()[0])
2737
2738
2739 -class PostgreSQLAdapter(BaseAdapter):
2740 drivers = ('psycopg2','pg8000')
2741
2742 QUOTE_TEMPLATE = '"%s"'
2743
2744 support_distributed_transaction = True
2745 types = {
2746 'boolean': 'CHAR(1)',
2747 'string': 'VARCHAR(%(length)s)',
2748 'text': 'TEXT',
2749 'json': 'TEXT',
2750 'password': 'VARCHAR(%(length)s)',
2751 'blob': 'BYTEA',
2752 'upload': 'VARCHAR(%(length)s)',
2753 'integer': 'INTEGER',
2754 'bigint': 'BIGINT',
2755 'float': 'FLOAT',
2756 'double': 'FLOAT8',
2757 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2758 'date': 'DATE',
2759 'time': 'TIME',
2760 'datetime': 'TIMESTAMP',
2761 'id': 'SERIAL PRIMARY KEY',
2762 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2763 'list:integer': 'TEXT',
2764 'list:string': 'TEXT',
2765 'list:reference': 'TEXT',
2766 'geometry': 'GEOMETRY',
2767 'geography': 'GEOGRAPHY',
2768 'big-id': 'BIGSERIAL PRIMARY KEY',
2769 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2770 'reference FK': ', CONSTRAINT "FK_%(constraint_name)s" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2771 'reference TFK': ' CONSTRAINT "FK_%(foreign_table)s_PK" FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2772
2773 }
2774
2775
2776 - def varquote(self,name):
2777 return varquote_aux(name,'"%s"')
2778
2779 - def adapt(self,obj):
2780 if self.driver_name == 'psycopg2':
2781 return psycopg2_adapt(obj).getquoted()
2782 elif self.driver_name == 'pg8000':
2783 return "'%s'" % str(obj).replace("%","%%").replace("'","''")
2784 else:
2785 return "'%s'" % str(obj).replace("'","''")
2786
2787 - def sequence_name(self,table):
2788 return self.QUOTE_TEMPLATE % (table + '_id_seq')
2789
2792
2793 - def ADD(self, first, second):
2794 t = first.type
2795 if t in ('text','string','password', 'json', 'upload','blob'):
2796 return '(%s || %s)' % (self.expand(first), self.expand(second, t))
2797 else:
2798 return '(%s + %s)' % (self.expand(first), self.expand(second, t))
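# Example (illustrative): '+' means concatenation for text-like types and
# arithmetic otherwise:
#   db.person.name + '!'  -> (person.name || '!')
#   db.person.id + 1      -> (person.id + 1)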
2799
2802
2803 - def prepare(self,key):
2804 self.execute("PREPARE TRANSACTION '%s';" % key)
2805
2806 - def commit_prepared(self,key):
2807 self.execute("COMMIT PREPARED '%s';" % key)
2808
2809 - def rollback_prepared(self,key):
2810 self.execute("ROLLBACK PREPARED '%s';" % key)
2811
2812 - def create_sequence_and_triggers(self, query, table, **args):
2813
2814
2815
2816
2817 self.execute(query)
2818
2819 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
2820
2821 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2822 credential_decoder=IDENTITY, driver_args={},
2823 adapter_args={}, do_connect=True, srid=4326,
2824 after_connection=None):
2825 self.db = db
2826 self.dbengine = "postgres"
2827 self.uri = uri
2828 if do_connect: self.find_driver(adapter_args,uri)
2829 self.pool_size = pool_size
2830 self.folder = folder
2831 self.db_codec = db_codec
2832 self._after_connection = after_connection
2833 self.srid = srid
2834 self.find_or_make_work_folder()
2835 ruri = uri.split('://',1)[1]
2836 m = self.REGEX_URI.match(ruri)
2837 if not m:
2838 raise SyntaxError("Invalid URI string in DAL")
2839 user = credential_decoder(m.group('user'))
2840 if not user:
2841 raise SyntaxError('User required')
2842 password = credential_decoder(m.group('password'))
2843 if not password:
2844 password = ''
2845 host = m.group('host')
2846 if not host:
2847 raise SyntaxError('Host name required')
2848 db = m.group('db')
2849 if not db:
2850 raise SyntaxError('Database name required')
2851 port = m.group('port') or '5432'
2852 sslmode = m.group('sslmode')
2853 if sslmode:
2854 msg = ("dbname='%s' user='%s' host='%s' "
2855 "port=%s password='%s' sslmode='%s'") \
2856 % (db, user, host, port, password, sslmode)
2857 else:
2858 msg = ("dbname='%s' user='%s' host='%s' "
2859 "port=%s password='%s'") \
2860 % (db, user, host, port, password)
2861
2862 if self.driver:
2863 self.__version__ = "%s %s" % (self.driver.__name__,
2864 self.driver.__version__)
2865 else:
2866 self.__version__ = None
2867 def connector(msg=msg,driver_args=driver_args):
2868 return self.driver.connect(msg,**driver_args)
2869 self.connector = connector
2870 if do_connect: self.reconnect()
2871
2872 - def after_connection(self):
2873 self.connection.set_client_encoding('UTF8')
2874 self.execute("SET standard_conforming_strings=on;")
2875 self.try_json()
2876
2877 - def lastrowid(self,table = None):
2878 self.execute("select lastval()")
2879 return int(self.cursor.fetchone()[0])
2880
2881 - def try_json(self):
2882
2883
2884 if self.driver_name == "pg8000":
2885 supports_json = self.connection.server_version >= "9.2.0"
2886 elif (self.driver_name == "psycopg2") and \
2887 (self.driver.__version__ >= "2.0.12"):
2888 supports_json = self.connection.server_version >= 90200
2889 elif self.driver_name == "zxJDBC":
2890 supports_json = self.connection.dbversion >= "9.2.0"
2891 else: supports_json = None
2892 if supports_json:
2893 self.types["json"] = "JSON"
2894 self.native_json = True
2895 else: LOGGER.debug("Your database version does not support the JSON data type (using TEXT instead)")
2896
2897 - def LIKE(self,first,second):
2898 args = (self.expand(first), self.expand(second,'string'))
2899 if not first.type in ('string', 'text', 'json'):
2900 return '(%s LIKE %s)' % (
2901 self.CAST(args[0], 'CHAR(%s)' % first.length), args[1])
2902 else:
2903 return '(%s LIKE %s)' % args
2904
2905 - def ILIKE(self,first,second):
2906 args = (self.expand(first), self.expand(second,'string'))
2907 if not first.type in ('string', 'text', 'json'):
2908 return '(%s LIKE %s)' % (
2909 self.CAST(args[0], 'CHAR(%s)' % first.length), args[1])
2910 else:
2911 return '(%s ILIKE %s)' % args
2912
2913 - def REGEXP(self,first,second):
2914 return '(%s ~ %s)' % (self.expand(first),
2915 self.expand(second,'string'))
2916
2917 - def STARTSWITH(self,first,second):
2918 return '(%s ILIKE %s)' % (self.expand(first),
2919 self.expand(second+'%','string'))
2920
2921 - def ENDSWITH(self,first,second):
2922 return '(%s ILIKE %s)' % (self.expand(first),
2923 self.expand('%'+second,'string'))
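# Note (illustrative): STARTSWITH/ENDSWITH deliberately use ILIKE here, so
#   db(db.person.name.startswith('j')).select()
# matches 'James' as well as 'james' on PostgreSQL.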
2924
2925
2926
2927 - def ST_ASGEOJSON(self, first, second):
2928 """
2929 http://postgis.org/docs/ST_AsGeoJSON.html
2930 """
2931 return 'ST_AsGeoJSON(%s,%s,%s,%s)' %(second['version'],
2932 self.expand(first), second['precision'], second['options'])
2933
2934 - def ST_ASTEXT(self, first):
2935 """
2936 http://postgis.org/docs/ST_AsText.html
2937 """
2938 return 'ST_AsText(%s)' %(self.expand(first))
2939
2940 - def ST_X(self, first):
2941 """
2942 http://postgis.org/docs/ST_X.html
2943 """
2944 return 'ST_X(%s)' %(self.expand(first))
2945
2946 - def ST_Y(self, first):
2947 """
2948 http://postgis.org/docs/ST_Y.html
2949 """
2950 return 'ST_Y(%s)' %(self.expand(first))
2951
2952 - def ST_CONTAINS(self, first, second):
2953 """
2954 http://postgis.org/docs/ST_Contains.html
2955 """
2956 return 'ST_Contains(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2957
2958 - def ST_DISTANCE(self, first, second):
2959 """
2960 http://postgis.org/docs/ST_Distance.html
2961 """
2962 return 'ST_Distance(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2963
2964 - def ST_EQUALS(self, first, second):
2965 """
2966 http://postgis.org/docs/ST_Equals.html
2967 """
2968 return 'ST_Equals(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2969
2970 - def ST_INTERSECTS(self, first, second):
2971 """
2972 http://postgis.org/docs/ST_Intersects.html
2973 """
2974 return 'ST_Intersects(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2975
2976 - def ST_OVERLAPS(self, first, second):
2977 """
2978 http://postgis.org/docs/ST_Overlaps.html
2979 """
2980 return 'ST_Overlaps(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2981
2982 - def ST_SIMPLIFY(self, first, second):
2983 """
2984 http://postgis.org/docs/ST_Simplify.html
2985 """
2986 return 'ST_Simplify(%s,%s)' %(self.expand(first), self.expand(second, 'double'))
2987
2988 - def ST_TOUCHES(self, first, second):
2989 """
2990 http://postgis.org/docs/ST_Touches.html
2991 """
2992 return 'ST_Touches(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2993
2994 - def ST_WITHIN(self, first, second):
2995 """
2996 http://postgis.org/docs/ST_Within.html
2997 """
2998 return 'ST_Within(%s,%s)' %(self.expand(first), self.expand(second, first.type))
2999
3000 - def ST_DWITHIN(self, first, (second, third)):
3001 """
3002 http://postgis.org/docs/ST_DWithin.html
3003 """
3004 return 'ST_DWithin(%s,%s,%s)' %(self.expand(first),
3005 self.expand(second, first.type),
3006 self.expand(third, 'double'))
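# Illustrative spatial query (assuming the Field.st_dwithin helper routes
# through this method):
#   db(db.place.location.st_dwithin('POINT(-95 45)', 1000)).select()
# renders an ST_DWithin(...) call with the distance expanded as a double.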
3007
3008 - def represent(self, obj, fieldtype):
3009 field_is_type = fieldtype.startswith
3010 if field_is_type('geo'):
3011 srid = 4326
3012 geotype, parms = fieldtype[:-1].split('(')
3013 parms = parms.split(',')
3014 if len(parms) >= 2:
3015 schema, srid = parms[:2]
3016 if field_is_type('geometry'):
3017 value = "ST_GeomFromText('%s',%s)" %(obj, srid)
3018 elif field_is_type('geography'):
3019 value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
3020
3021
3022 return value
3023 return BaseAdapter.represent(self, obj, fieldtype)
3024
3025 - def _drop(self, table, mode='restrict'):
3026 if mode not in ['restrict', 'cascade', '']:
3027 raise ValueError('Invalid mode: %s' % mode)
3028 return ['DROP TABLE ' + table.sqlsafe + ' ' + str(mode) + ';']
3029
3030 -class NewPostgreSQLAdapter(PostgreSQLAdapter):
3031 drivers = ('psycopg2','pg8000')
3032
3033 types = {
3034 'boolean': 'CHAR(1)',
3035 'string': 'VARCHAR(%(length)s)',
3036 'text': 'TEXT',
3037 'json': 'TEXT',
3038 'password': 'VARCHAR(%(length)s)',
3039 'blob': 'BYTEA',
3040 'upload': 'VARCHAR(%(length)s)',
3041 'integer': 'INTEGER',
3042 'bigint': 'BIGINT',
3043 'float': 'FLOAT',
3044 'double': 'FLOAT8',
3045 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3046 'date': 'DATE',
3047 'time': 'TIME',
3048 'datetime': 'TIMESTAMP',
3049 'id': 'SERIAL PRIMARY KEY',
3050 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3051 'list:integer': 'BIGINT[]',
3052 'list:string': 'TEXT[]',
3053 'list:reference': 'BIGINT[]',
3054 'geometry': 'GEOMETRY',
3055 'geography': 'GEOGRAPHY',
3056 'big-id': 'BIGSERIAL PRIMARY KEY',
3057 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3058 }
3059
3060 - def parse_list_integers(self, value, field_type):
3062
3063 - def parse_list_references(self, value, field_type):
3064 return [self.parse_reference(r, field_type[5:]) for r in value]
3065
3066 - def parse_list_strings(self, value, field_type):
3068
3069 - def represent(self, obj, fieldtype):
3070 field_is_type = fieldtype.startswith
3071 if field_is_type('list:'):
3072 if not obj:
3073 obj = []
3074 elif not isinstance(obj, (list, tuple)):
3075 obj = [obj]
3076 if field_is_type('list:string'):
3077 obj = map(str,obj)
3078 else:
3079 obj = map(int,obj)
3080 return 'ARRAY[%s]' % ','.join(repr(item) for item in obj)
3081 return BaseAdapter.represent(self, obj, fieldtype)
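# Example (illustrative): list types map onto native PostgreSQL arrays:
#   represent([1, 2, 3], 'list:integer') -> "ARRAY[1,2,3]"
#   represent('a', 'list:string')        -> "ARRAY['a']"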
3082
3083
3084 -class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
3085 drivers = ('zxJDBC',)
3086
3087 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')
3088
3089 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3090 credential_decoder=IDENTITY, driver_args={},
3091 adapter_args={}, do_connect=True, after_connection=None ):
3092 self.db = db
3093 self.dbengine = "postgres"
3094 self.uri = uri
3095 if do_connect: self.find_driver(adapter_args,uri)
3096 self.pool_size = pool_size
3097 self.folder = folder
3098 self.db_codec = db_codec
3099 self._after_connection = after_connection
3100 self.find_or_make_work_folder()
3101 ruri = uri.split('://',1)[1]
3102 m = self.REGEX_URI.match(ruri)
3103 if not m:
3104 raise SyntaxError("Invalid URI string in DAL")
3105 user = credential_decoder(m.group('user'))
3106 if not user:
3107 raise SyntaxError('User required')
3108 password = credential_decoder(m.group('password'))
3109 if not password:
3110 password = ''
3111 host = m.group('host')
3112 if not host:
3113 raise SyntaxError('Host name required')
3114 db = m.group('db')
3115 if not db:
3116 raise SyntaxError('Database name required')
3117 port = m.group('port') or '5432'
3118 msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
3119 def connector(msg=msg,driver_args=driver_args):
3120 return self.driver.connect(*msg,**driver_args)
3121 self.connector = connector
3122 if do_connect: self.reconnect()
3123
3124 - def after_connection(self):
3125 self.connection.set_client_encoding('UTF8')
3126 self.execute('BEGIN;')
3127 self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
3128 self.try_json()
3129
3132 drivers = ('cx_Oracle',)
3133
3134 commit_on_alter_table = False
3135 types = {
3136 'boolean': 'CHAR(1)',
3137 'string': 'VARCHAR2(%(length)s)',
3138 'text': 'CLOB',
3139 'json': 'CLOB',
3140 'password': 'VARCHAR2(%(length)s)',
3141 'blob': 'CLOB',
3142 'upload': 'VARCHAR2(%(length)s)',
3143 'integer': 'INT',
3144 'bigint': 'NUMBER',
3145 'float': 'FLOAT',
3146 'double': 'BINARY_DOUBLE',
3147 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3148 'date': 'DATE',
3149 'time': 'CHAR(8)',
3150 'datetime': 'DATE',
3151 'id': 'NUMBER PRIMARY KEY',
3152 'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3153 'list:integer': 'CLOB',
3154 'list:string': 'CLOB',
3155 'list:reference': 'CLOB',
3156 'big-id': 'NUMBER PRIMARY KEY',
3157 'big-reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3158 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3159 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3160 }
3161
3162
3164 return '%s_trigger' % tablename
3165
3167 return 'LEFT OUTER JOIN'
3168
3170 return 'dbms_random.value'
3171
3172 - def NOT_NULL(self,default,field_type):
3173 return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
3174
3175 - def _drop(self,table,mode):
3178
3179 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3180 if limitby:
3181 (lmin, lmax) = limitby
3182 if len(sql_w) > 1:
3183 sql_w_row = sql_w + ' AND w_row > %i' % lmin
3184 else:
3185 sql_w_row = 'WHERE w_row > %i' % lmin
3186 return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
3187 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
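# Illustrative expansion: limitby=(10, 20) nests the plain select inside a
# ROWNUM window, roughly:
#   SELECT ... FROM (SELECT w_tmp.*, ROWNUM w_row FROM (...) w_tmp
#                    WHERE ROWNUM <= 20) ... WHERE w_row > 10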
3188
3194
3196 if fieldtype == 'blob':
3197 obj = base64.b64encode(str(obj))
3198 return ":CLOB('%s')" % obj
3199 elif fieldtype == 'date':
3200 if isinstance(obj, (datetime.date, datetime.datetime)):
3201 obj = obj.isoformat()[:10]
3202 else:
3203 obj = str(obj)
3204 return "to_date('%s','yyyy-mm-dd')" % obj
3205 elif fieldtype == 'datetime':
3206 if isinstance(obj, datetime.datetime):
3207 obj = obj.isoformat()[:19].replace('T',' ')
3208 elif isinstance(obj, datetime.date):
3209 obj = obj.isoformat()[:10]+' 00:00:00'
3210 else:
3211 obj = str(obj)
3212 return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
3213 return None
3214
3215 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3216 credential_decoder=IDENTITY, driver_args={},
3217 adapter_args={}, do_connect=True, after_connection=None):
3218 self.db = db
3219 self.dbengine = "oracle"
3220 self.uri = uri
3221 if do_connect: self.find_driver(adapter_args,uri)
3222 self.pool_size = pool_size
3223 self.folder = folder
3224 self.db_codec = db_codec
3225 self._after_connection = after_connection
3226 self.find_or_make_work_folder()
3227 ruri = uri.split('://',1)[1]
3228 if not 'threaded' in driver_args:
3229 driver_args['threaded']=True
3230 def connector(uri=ruri,driver_args=driver_args):
3231 return self.driver.connect(uri,**driver_args)
3232 self.connector = connector
3233 if do_connect: self.reconnect()
3234
3236 self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
3237 self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
3238
3239 oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
3240
3241 - def execute(self, command, args=None):
3242 args = args or []
3243 i = 1
3244 while True:
3245 m = self.oracle_fix.match(command)
3246 if not m:
3247 break
3248 command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
3249 args.append(m.group('clob')[6:-2].replace("''", "'"))
3250 i += 1
3251 if command[-1:]==';':
3252 command = command[:-1]
3253 return self.log_execute(command, args)
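# Illustrative rewrite: inline :CLOB('...') literals become positional bind
# variables before execution, e.g.
#   "INSERT INTO t(x) VALUES (:CLOB('hello'))"
#       -> command "INSERT INTO t(x) VALUES (:1)", args ['hello']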
3254
3256 tablename = table._tablename
3257 id_name = table._id.name
3258 sequence_name = table._sequence_name
3259 trigger_name = table._trigger_name
3260 self.execute(query)
3261 self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE MINVALUE -1;' % sequence_name)
3262 self.execute("""
3263 CREATE OR REPLACE TRIGGER %(trigger_name)s BEFORE INSERT ON %(tablename)s FOR EACH ROW
3264 DECLARE
3265 curr_val NUMBER;
3266 diff_val NUMBER;
3267 PRAGMA autonomous_transaction;
3268 BEGIN
3269 IF :NEW.%(id)s IS NOT NULL THEN
3270 EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
3271 diff_val := :NEW.%(id)s - curr_val - 1;
3272 IF diff_val != 0 THEN
3273 EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by '|| diff_val;
3274 EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
3275 EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by 1';
3276 END IF;
3277 END IF;
3278 SELECT %(sequence_name)s.nextval INTO :NEW.%(id)s FROM DUAL;
3279 END;
3280 """ % dict(trigger_name=trigger_name, tablename=tablename,
3281 sequence_name=sequence_name,id=id_name))
3282
3287
3288
3289
3290
3291
3292
3293
3294
3295
3296
3298 if any(x[1]==cx_Oracle.CLOB for x in self.cursor.description):
3299 return [tuple([(c.read() if type(c) == cx_Oracle.LOB else c) \
3300 for c in r]) for r in self.cursor]
3301 else:
3302 return self.cursor.fetchall()
3303
3309
3312 drivers = ('pyodbc',)
3313 T_SEP = 'T'
3314
3315 QUOTE_TEMPLATE = '"%s"'
3316
3317 types = {
3318 'boolean': 'BIT',
3319 'string': 'VARCHAR(%(length)s)',
3320 'text': 'TEXT',
3321 'json': 'TEXT',
3322 'password': 'VARCHAR(%(length)s)',
3323 'blob': 'IMAGE',
3324 'upload': 'VARCHAR(%(length)s)',
3325 'integer': 'INT',
3326 'bigint': 'BIGINT',
3327 'float': 'FLOAT',
3328 'double': 'FLOAT',
3329 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3330 'date': 'DATETIME',
3331 'time': 'CHAR(8)',
3332 'datetime': 'DATETIME',
3333 'id': 'INT IDENTITY PRIMARY KEY',
3334 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3335 'list:integer': 'TEXT',
3336 'list:string': 'TEXT',
3337 'list:reference': 'TEXT',
3338 'geometry': 'geometry',
3339 'geography': 'geography',
3340 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
3341 'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3342 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3343 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3344 }
3345
3347 return '; ALTER TABLE %s ADD ' % tablename
3348
3351
3353 return "DATEPART(%s,%s)" % (what, self.expand(field))
3354
3356 return 'LEFT OUTER JOIN'
3357
3360
3363
3364 - def CAST(self, first, second):
3366
3368 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
3369
3371 return 'PRIMARY KEY CLUSTERED (%s)' % key
3372
3374 if what == 'LENGTH':
3375 what = 'LEN'
3376 return "%s(%s)" % (what, self.expand(first))
3377
3378
3379 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3380 if limitby:
3381 (lmin, lmax) = limitby
3382 sql_s += ' TOP %i' % lmax
3383 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
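# Note (illustrative): T-SQL TOP cannot express an offset, so limitby=(10, 20)
# renders SELECT TOP 20 ... and rowslice() below discards the first 10 rows
# client-side.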
3384
3385 TRUE = 1
3386 FALSE = 0
3387
3388 REGEX_DSN = re.compile('^(?P<dsn>.+)$')
3389 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$')
3390 REGEX_ARGPATTERN = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
3391
3392 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3393 credential_decoder=IDENTITY, driver_args={},
3394 adapter_args={}, do_connect=True, srid=4326,
3395 after_connection=None):
3396 self.db = db
3397 self.dbengine = "mssql"
3398 self.uri = uri
3399 if do_connect: self.find_driver(adapter_args,uri)
3400 self.pool_size = pool_size
3401 self.folder = folder
3402 self.db_codec = db_codec
3403 self._after_connection = after_connection
3404 self.srid = srid
3405 self.find_or_make_work_folder()
3406
3407 ruri = uri.split('://',1)[1]
3408 if '@' not in ruri:
3409 try:
3410 m = self.REGEX_DSN.match(ruri)
3411 if not m:
3412 raise SyntaxError(
3413 'Parsing URI string (%s) has no result' % self.uri)
3414 dsn = m.group('dsn')
3415 if not dsn:
3416 raise SyntaxError('DSN required')
3417 except SyntaxError:
3418 e = sys.exc_info()[1]
3419 LOGGER.error('NdGpatch error')
3420 raise e
3421
3422 cnxn = dsn
3423 else:
3424 m = self.REGEX_URI.match(ruri)
3425 if not m:
3426 raise SyntaxError(
3427 "Invalid URI string in DAL: %s" % self.uri)
3428 user = credential_decoder(m.group('user'))
3429 if not user:
3430 raise SyntaxError('User required')
3431 password = credential_decoder(m.group('password'))
3432 if not password:
3433 password = ''
3434 host = m.group('host')
3435 if not host:
3436 raise SyntaxError('Host name required')
3437 db = m.group('db')
3438 if not db:
3439 raise SyntaxError('Database name required')
3440 port = m.group('port') or '1433'
3441
3442
3443
3444 argsdict = { 'DRIVER':'{SQL Server}' }
3445 urlargs = m.group('urlargs') or ''
3446 for argmatch in self.REGEX_ARGPATTERN.finditer(urlargs):
3447 argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
3448 urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.iteritems()])
3449 cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
3450 % (host, port, db, user, password, urlargs)
3451 def connector(cnxn=cnxn,driver_args=driver_args):
3452 return self.driver.connect(cnxn,**driver_args)
3453 self.connector = connector
3454 if do_connect: self.reconnect()
3455
3457
3458 self.execute('SELECT SCOPE_IDENTITY();')
3459 return long(self.cursor.fetchone()[0])
3460
3461 - def rowslice(self,rows,minimum=0,maximum=None):
3462 if maximum is None:
3463 return rows[minimum:]
3464 return rows[minimum:maximum]
3465
3466 - def EPOCH(self, first):
3467 return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
3468
3471
3472
3473
3474
3475
3476 - def ST_ASTEXT(self, first):
3477 return '%s.STAsText()' %(self.expand(first))
3478
3481
3484
3487
3490
3493
3494
3495
3498
3501
3503 field_is_type = fieldtype.startswith
3504 if field_is_type('geometry'):
3505 srid = 0
3506 geotype, parms = fieldtype[:-1].split('(')
3507 if parms:
3508 srid = parms
3509 return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
3510 elif fieldtype == 'geography':
3511 srid = 4326
3512 geotype, parms = fieldtype[:-1].split('(')
3513 if parms:
3514 srid = parms
3515 return "geography::STGeomFromText('%s',%s)" %(obj, srid)
3516
3517
3518 return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
3519 return BaseAdapter.represent(self, obj, fieldtype)
3520
3523 """ experimental support for pagination in MSSQL"""
3524 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3525 if limitby:
3526 (lmin, lmax) = limitby
3527 if lmin == 0:
3528 sql_s += ' TOP %i' % lmax
3529 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
3530 lmin += 1
3531 sql_o_inner = sql_o[sql_o.find('ORDER BY ')+9:]
3532 sql_g_inner = sql_o[:sql_o.find('ORDER BY ')]
3533 sql_f_outer = ['f_%s' % f for f in range(len(sql_f.split(',')))]
3534 sql_f_inner = [f for f in sql_f.split(',')]
3535 sql_f_iproxy = ['%s AS %s' % (o, n) for (o, n) in zip(sql_f_inner, sql_f_outer)]
3536 sql_f_iproxy = ', '.join(sql_f_iproxy)
3537 sql_f_oproxy = ', '.join(sql_f_outer)
3538 return 'SELECT %s %s FROM (SELECT %s ROW_NUMBER() OVER (ORDER BY %s) AS w_row, %s FROM %s%s%s) TMP WHERE w_row BETWEEN %i AND %s;' % (sql_s,sql_f_oproxy,sql_s,sql_f,sql_f_iproxy,sql_t,sql_w,sql_g_inner,lmin,lmax)
3539 return 'SELECT %s %s FROM %s%s%s;' % (sql_s,sql_f,sql_t,sql_w,sql_o)
3540 - def rowslice(self,rows,minimum=0,maximum=None):
3542
3544 """ support for true pagination in MSSQL >= 2012"""
3545
3546 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3547 if limitby:
3548 (lmin, lmax) = limitby
3549 if lmin == 0:
3550
3551
3552
3553 sql_s += ' TOP %i' % lmax
3554 else:
3555 if not sql_o:
3556
3557
3558 sql_o += ' ORDER BY %s' % self.RANDOM()
3559 sql_o += ' OFFSET %i ROWS FETCH NEXT %i ROWS ONLY' % (lmin, lmax - lmin)
3560 return 'SELECT %s %s FROM %s%s%s;' % \
3561 (sql_s, sql_f, sql_t, sql_w, sql_o)
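# Illustrative expansion: limitby=(10, 20) becomes
#   ... ORDER BY <order> OFFSET 10 ROWS FETCH NEXT 10 ROWS ONLY
# (OFFSET/FETCH requires an ORDER BY, hence the RANDOM() fallback above).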
3562
3563 - def rowslice(self,rows,minimum=0,maximum=None):
3565
3567 drivers = ('pyodbc',)
3568
3569 types = {
3570 'boolean': 'CHAR(1)',
3571 'string': 'NVARCHAR(%(length)s)',
3572 'text': 'NTEXT',
3573 'json': 'NTEXT',
3574 'password': 'NVARCHAR(%(length)s)',
3575 'blob': 'IMAGE',
3576 'upload': 'NVARCHAR(%(length)s)',
3577 'integer': 'INT',
3578 'bigint': 'BIGINT',
3579 'float': 'FLOAT',
3580 'double': 'FLOAT',
3581 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3582 'date': 'DATETIME',
3583 'time': 'CHAR(8)',
3584 'datetime': 'DATETIME',
3585 'id': 'INT IDENTITY PRIMARY KEY',
3586 'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3587 'list:integer': 'NTEXT',
3588 'list:string': 'NTEXT',
3589 'list:reference': 'NTEXT',
3590 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
3591 'big-reference': 'BIGINT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3592 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3593 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3594 }
3595
3597 value = BaseAdapter.represent(self, obj, fieldtype)
3598 if fieldtype in ('string','text', 'json') and value[:1]=="'":
3599 value = 'N'+value
3600 return value
3601
3604
3606 drivers = ('pyodbc',)
3607 T_SEP = ' '
3608
3609 types = {
3610 'boolean': 'BOOLEAN',
3611 'string': 'VARCHAR(%(length)s)',
3612 'text': 'BYTEA',
3613 'json': 'VARCHAR(%(length)s)',
3614 'password': 'VARCHAR(%(length)s)',
3615 'blob': 'BYTEA',
3616 'upload': 'VARCHAR(%(length)s)',
3617 'integer': 'INT',
3618 'bigint': 'BIGINT',
3619 'float': 'FLOAT',
3620 'double': 'DOUBLE PRECISION',
3621 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
3622 'date': 'DATE',
3623 'time': 'TIME',
3624 'datetime': 'DATETIME',
3625 'id': 'IDENTITY',
3626 'reference': 'INT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3627 'list:integer': 'BYTEA',
3628 'list:string': 'BYTEA',
3629 'list:reference': 'BYTEA',
3630 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3631 }
3632
3633
3635 return "DATE_PART('%s', TIMESTAMP %s)" % (what, self.expand(first))
3636
3638 tablename = table._tablename
3639 return ['TRUNCATE %s %s;' % (tablename, mode or '')]
3640
3641 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3642 if limitby:
3643 (lmin, lmax) = limitby
3644 sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
3645 return 'SELECT %s %s FROM %s%s%s;' % \
3646 (sql_s, sql_f, sql_t, sql_w, sql_o)
3647
3649 self.execute('SELECT LAST_INSERT_ID();')
3650 return long(self.cursor.fetchone()[0])
3651
3654
3656 drivers = ('Sybase',)
3657
3658 types = {
3659 'boolean': 'BIT',
3660 'string': 'CHAR VARYING(%(length)s)',
3661 'text': 'TEXT',
3662 'json': 'TEXT',
3663 'password': 'CHAR VARYING(%(length)s)',
3664 'blob': 'IMAGE',
3665 'upload': 'CHAR VARYING(%(length)s)',
3666 'integer': 'INT',
3667 'bigint': 'BIGINT',
3668 'float': 'FLOAT',
3669 'double': 'FLOAT',
3670 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
3671 'date': 'DATETIME',
3672 'time': 'CHAR(8)',
3673 'datetime': 'DATETIME',
3674 'id': 'INT IDENTITY PRIMARY KEY',
3675 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3676 'list:integer': 'TEXT',
3677 'list:string': 'TEXT',
3678 'list:reference': 'TEXT',
3679 'geometry': 'geometry',
3680 'geography': 'geography',
3681 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
3682 'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3683 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3684 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
3685 }
3686
3687
3688 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3689 credential_decoder=IDENTITY, driver_args={},
3690 adapter_args={}, do_connect=True, srid=4326,
3691 after_connection=None):
3692 self.db = db
3693 self.dbengine = "sybase"
3694 self.uri = uri
3695 if do_connect: self.find_driver(adapter_args,uri)
3696 self.pool_size = pool_size
3697 self.folder = folder
3698 self.db_codec = db_codec
3699 self._after_connection = after_connection
3700 self.srid = srid
3701 self.find_or_make_work_folder()
3702
3703 ruri = uri.split('://',1)[1]
3704 if '@' not in ruri:
3705 try:
3706 m = self.REGEX_DSN.match(ruri)
3707 if not m:
3708 raise SyntaxError(
3709 'Parsing URI string (%s) has no result' % self.uri)
3710 dsn = m.group('dsn')
3711 if not dsn:
3712 raise SyntaxError('DSN required')
3713 except SyntaxError:
3714 e = sys.exc_info()[1]
3715 LOGGER.error('NdGpatch error')
3716 raise e
3717 else:
3718 m = self.REGEX_URI.match(ruri)
3719 if not m:
3720 raise SyntaxError(
3721 "Invalid URI string in DAL: %s" % self.uri)
3722 user = credential_decoder(m.group('user'))
3723 if not user:
3724 raise SyntaxError('User required')
3725 password = credential_decoder(m.group('password'))
3726 if not password:
3727 password = ''
3728 host = m.group('host')
3729 if not host:
3730 raise SyntaxError('Host name required')
3731 db = m.group('db')
3732 if not db:
3733 raise SyntaxError('Database name required')
3734 port = m.group('port') or '1433'
3735
3736 dsn = 'sybase:host=%s:%s;dbname=%s' % (host,port,db)
3737
3738 driver_args.update(user = credential_decoder(user),
3739 password = credential_decoder(password))
3740
3741 def connector(dsn=dsn,driver_args=driver_args):
3742 return self.driver.connect(dsn,**driver_args)
3743 self.connector = connector
3744 if do_connect: self.reconnect()
3745
3748 drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
3749
3750 commit_on_alter_table = False
3751 support_distributed_transaction = True
3752 types = {
3753 'boolean': 'CHAR(1)',
3754 'string': 'VARCHAR(%(length)s)',
3755 'text': 'BLOB SUB_TYPE 1',
3756 'json': 'BLOB SUB_TYPE 1',
3757 'password': 'VARCHAR(%(length)s)',
3758 'blob': 'BLOB SUB_TYPE 0',
3759 'upload': 'VARCHAR(%(length)s)',
3760 'integer': 'INTEGER',
3761 'bigint': 'BIGINT',
3762 'float': 'FLOAT',
3763 'double': 'DOUBLE PRECISION',
3764 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
3765 'date': 'DATE',
3766 'time': 'TIME',
3767 'datetime': 'TIMESTAMP',
3768 'id': 'INTEGER PRIMARY KEY',
3769 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3770 'list:integer': 'BLOB SUB_TYPE 1',
3771 'list:string': 'BLOB SUB_TYPE 1',
3772 'list:reference': 'BLOB SUB_TYPE 1',
3773 'big-id': 'BIGINT PRIMARY KEY',
3774 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
3775 }
3776
3779
3781 return 'trg_id_%s' % tablename
3782
3785
3786 - def EPOCH(self, first):
3787 return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
3788
3789 - def NOT_NULL(self,default,field_type):
3790 return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
3791
3793 return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])
3794
3797
3798 - def CONTAINS(self,first,second,case_sensitive=False):
3804
3805 - def _drop(self,table,mode):
3808
3809 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
3810 if limitby:
3811 (lmin, lmax) = limitby
3812 sql_s = ' FIRST %i SKIP %i %s' % (lmax - lmin, lmin, sql_s)
3813 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
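# Illustrative expansion: limitby=(10, 20) uses Firebird's prefix syntax:
#   SELECT FIRST 10 SKIP 10 <fields> FROM ...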
3814
3816 return ['DELETE FROM %s;' % table._tablename,
3817 'SET GENERATOR %s TO 0;' % table._sequence_name]
3818
3819 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$')
3820
3821 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3822 credential_decoder=IDENTITY, driver_args={},
3823 adapter_args={}, do_connect=True, after_connection=None):
3824 self.db = db
3825 self.dbengine = "firebird"
3826 self.uri = uri
3827 if do_connect: self.find_driver(adapter_args,uri)
3828 self.pool_size = pool_size
3829 self.folder = folder
3830 self.db_codec = db_codec
3831 self._after_connection = after_connection
3832 self.find_or_make_work_folder()
3833 ruri = uri.split('://',1)[1]
3834 m = self.REGEX_URI.match(ruri)
3835 if not m:
3836 raise SyntaxError("Invalid URI string in DAL: %s" % self.uri)
3837 user = credential_decoder(m.group('user'))
3838 if not user:
3839 raise SyntaxError('User required')
3840 password = credential_decoder(m.group('password'))
3841 if not password:
3842 password = ''
3843 host = m.group('host')
3844 if not host:
3845 raise SyntaxError('Host name required')
3846 port = int(m.group('port') or 3050)
3847 db = m.group('db')
3848 if not db:
3849 raise SyntaxError('Database name required')
3850 charset = m.group('charset') or 'UTF8'
3851 driver_args.update(dsn='%s/%s:%s' % (host,port,db),
3852 user = credential_decoder(user),
3853 password = credential_decoder(password),
3854 charset = charset)
3855
3856 def connector(driver_args=driver_args):
3857 return self.driver.connect(**driver_args)
3858 self.connector = connector
3859 if do_connect: self.reconnect()
3860
3869
3874
3877 drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
3878
3879 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$')
3880
3881 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3882 credential_decoder=IDENTITY, driver_args={},
3883 adapter_args={}, do_connect=True, after_connection=None):
3884 self.db = db
3885 self.dbengine = "firebird"
3886 self.uri = uri
3887 if do_connect: self.find_driver(adapter_args,uri)
3888 self.pool_size = pool_size
3889 self.folder = folder
3890 self.db_codec = db_codec
3891 self._after_connection = after_connection
3892 self.find_or_make_work_folder()
3893 ruri = uri.split('://',1)[1]
3894 m = self.REGEX_URI.match(ruri)
3895 if not m:
3896 raise SyntaxError(
3897 "Invalid URI string in DAL: %s" % self.uri)
3898 user = credential_decoder(m.group('user'))
3899 if not user:
3900 raise SyntaxError('User required')
3901 password = credential_decoder(m.group('password'))
3902 if not password:
3903 password = ''
3904 pathdb = m.group('path')
3905 if not pathdb:
3906 raise SyntaxError('Path required')
3907 charset = m.group('charset')
3908 if not charset:
3909 charset = 'UTF8'
3910 host = ''
3911 driver_args.update(host=host,
3912 database=pathdb,
3913 user=credential_decoder(user),
3914 password=credential_decoder(password),
3915 charset=charset)
3916
3917 def connector(driver_args=driver_args):
3918 return self.driver.connect(**driver_args)
3919 self.connector = connector
3920 if do_connect: self.reconnect()
3921
4027
4032
4035
4047
4049 drivers = ('pyodbc',)
4050
4051 types = {
4052 'boolean': 'CHAR(1)',
4053 'string': 'VARCHAR(%(length)s)',
4054 'text': 'CLOB',
4055 'json': 'CLOB',
4056 'password': 'VARCHAR(%(length)s)',
4057 'blob': 'BLOB',
4058 'upload': 'VARCHAR(%(length)s)',
4059 'integer': 'INT',
4060 'bigint': 'BIGINT',
4061 'float': 'REAL',
4062 'double': 'DOUBLE',
4063 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
4064 'date': 'DATE',
4065 'time': 'TIME',
4066 'datetime': 'TIMESTAMP',
4067 'id': 'INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
4068 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4069 'list:integer': 'CLOB',
4070 'list:string': 'CLOB',
4071 'list:reference': 'CLOB',
4072 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
4073 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4074 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4075 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
4076 }
4077
4079 return 'LEFT OUTER JOIN'
4080
4083
4084 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
4085 if limitby:
4086 (lmin, lmax) = limitby
4087 sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
4088 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
4089
4091 if fieldtype == 'blob':
4092 obj = base64.b64encode(str(obj))
4093 return "BLOB('%s')" % obj
4094 elif fieldtype == 'datetime':
4095 if isinstance(obj, datetime.datetime):
4096 obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
4097 elif isinstance(obj, datetime.date):
4098 obj = obj.isoformat()[:10]+'-00.00.00'
4099 return "'%s'" % obj
4100 return None
4101
4102 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
4103 credential_decoder=IDENTITY, driver_args={},
4104 adapter_args={}, do_connect=True, after_connection=None):
4117 self.connector = connector
4118 if do_connect: self.reconnect()
4119
4121 if command[-1:]==';':
4122 command = command[:-1]
4123 return self.log_execute(command)
4124
4126 self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
4127 return long(self.cursor.fetchone()[0])
4128
4129 - def rowslice(self,rows,minimum=0,maximum=None):
4130 if maximum is None:
4131 return rows[minimum:]
4132 return rows[minimum:maximum]
4133
4136 drivers = ('pyodbc',)
4137
4138 types = {
4139 'boolean': 'CHAR(1)',
4140 'string': 'VARCHAR(%(length)s)',
4141 'text': 'VARCHAR(2000)',
4142 'json': 'VARCHAR(4000)',
4143 'password': 'VARCHAR(%(length)s)',
4144 'blob': 'BLOB',
4145 'upload': 'VARCHAR(%(length)s)',
4146 'integer': 'INT',
4147 'bigint': 'BIGINT',
4148 'float': 'REAL',
4149 'double': 'DOUBLE',
4150 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
4151 'date': 'DATE',
4152 'time': 'TIME',
4153 'datetime': 'TIMESTAMP',
4154
4155
4156 'id': 'INT GENERATED ALWAYS AS IDENTITY',
4157 'reference': 'INT',
4158 'list:integer': 'VARCHAR(4000)',
4159 'list:string': 'VARCHAR(4000)',
4160 'list:reference': 'VARCHAR(4000)',
4161 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY',
4162 'big-reference': 'BIGINT',
4163 'reference FK': ' REFERENCES %(foreign_key)s',
4164 'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
4165 }
4166
4167 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
4168 credential_decoder=IDENTITY, driver_args={},
4169 adapter_args={}, do_connect=True, after_connection=None):
4182 self.connector = connector
4183 if do_connect: self.reconnect()
4184
4185 - def close(self,action='commit',really=True):
4190
4192 return 'LEFT OUTER JOIN'
4193
4194
4195 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
4196 if limitby:
4197 (lmin, lmax) = limitby
4198 sql_s += ' TOP %i' % lmax
4199 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
4200
4202 tablename = table._tablename
4203 return ['DELETE FROM %s ALL;' % (tablename)]
4204
4205 INGRES_SEQNAME='ii***lineitemsequence'
4210 drivers = ('pyodbc',)
4211
4212 types = {
4213 'boolean': 'CHAR(1)',
4214 'string': 'VARCHAR(%(length)s)',
4215 'text': 'CLOB',
4216 'json': 'CLOB',
4217 'password': 'VARCHAR(%(length)s)',
4218 'blob': 'BLOB',
4219 'upload': 'VARCHAR(%(length)s)',
4220 'integer': 'INTEGER4',
4221 'bigint': 'BIGINT',
4222 'float': 'FLOAT',
4223 'double': 'FLOAT8',
4224 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
4225 'date': 'ANSIDATE',
4226 'time': 'TIME WITHOUT TIME ZONE',
4227 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
4228 'id': 'int not null unique with default next value for %s' % INGRES_SEQNAME,
4229 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4230 'list:integer': 'CLOB',
4231 'list:string': 'CLOB',
4232 'list:reference': 'CLOB',
4233 'big-id': 'bigint not null unique with default next value for %s' % INGRES_SEQNAME,
4234 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4235 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4236 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
4237 }
4238
4240 return 'LEFT OUTER JOIN'
4241
4244
4245 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
4246 if limitby:
4247 (lmin, lmax) = limitby
4248 fetch_amt = lmax - lmin
4249 if fetch_amt:
4250 sql_s += ' FIRST %d ' % (fetch_amt, )
4251 if lmin:
4252
4253 sql_o += ' OFFSET %d' % (lmin, )
4254 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
4255
4256 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
4257 credential_decoder=IDENTITY, driver_args={},
4258 adapter_args={}, do_connect=True, after_connection=None):
4259 self.db = db
4260 self.dbengine = "ingres"
4261 self._driver = pyodbc
4262 self.uri = uri
4263 if do_connect: self.find_driver(adapter_args,uri)
4264 self.pool_size = pool_size
4265 self.folder = folder
4266 self.db_codec = db_codec
4267 self._after_connection = after_connection
4268 self.find_or_make_work_folder()
4269 connstr = uri.split(':', 1)[1]
4270
4271 connstr = connstr.lstrip()
4272 while connstr.startswith('/'):
4273 connstr = connstr[1:]
4274 if '=' in connstr:
4275
4276 ruri = connstr
4277 else:
4278
4279 database_name = connstr
4280 default_driver_name = 'Ingres'
4281 vnode = '(local)'
4282 servertype = 'ingres'
4283 ruri = 'Driver={%s};Server=%s;Database=%s' % (default_driver_name, vnode, database_name)
4284 def connector(cnxn=ruri,driver_args=driver_args):
4285 return self.driver.connect(cnxn,**driver_args)
4286
4287 self.connector = connector
4288
4289
4290 if do_connect: self.reconnect()
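# Connection-string sketch (hypothetical values): a bare database name,
# e.g. 'ingres://mydb', is expanded by the code above to the ODBC string
#
#   'Driver={Ingres};Server=(local);Database=mydb'
#
# while any URI containing '=' (e.g. 'ingres://dsn=my_dsn') is passed to
# pyodbc verbatim as an ODBC connection string.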
4291
4293
4294
4295
4296 if hasattr(table,'_primarykey'):
4297 modify_tbl_sql = 'modify %s to btree unique on %s' % \
4298 (table._tablename,
4299 ', '.join(["'%s'" % x for x in table.primarykey]))
4300 self.execute(modify_tbl_sql)
4301 else:
4302 tmp_seqname='%s_iisq' % table._tablename
4303 query=query.replace(INGRES_SEQNAME, tmp_seqname)
4304 self.execute('create sequence %s' % tmp_seqname)
4305 self.execute(query)
4306 self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))
4307
4308
4310 tmp_seqname='%s_iisq' % table
4311 self.execute('select current value for %s' % tmp_seqname)
4312 return long(self.cursor.fetchone()[0])
4313
4316
4317 drivers = ('pyodbc',)
4318
4319 types = {
4320 'boolean': 'CHAR(1)',
4321 'string': 'NVARCHAR(%(length)s)',
4322 'text': 'NCLOB',
4323 'json': 'NCLOB',
4324 'password': 'NVARCHAR(%(length)s)',
4325 'blob': 'BLOB',
4326 'upload': 'VARCHAR(%(length)s)',
4327 'integer': 'INTEGER4',
4328 'bigint': 'BIGINT',
4329 'float': 'FLOAT',
4330 'double': 'FLOAT8',
4331 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
4332 'date': 'ANSIDATE',
4333 'time': 'TIME WITHOUT TIME ZONE',
4334 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
4335 'id': 'INTEGER4 not null unique with default next value for %s'% INGRES_SEQNAME,
4336 'reference': 'INTEGER4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4337 'list:integer': 'NCLOB',
4338 'list:string': 'NCLOB',
4339 'list:reference': 'NCLOB',
4340 'big-id': 'BIGINT not null unique with default next value for %s'% INGRES_SEQNAME,
4341 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4342 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4343 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
4344 }
4345
4347 drivers = ('sapdb',)
4348
4349 support_distributed_transaction = False
4350 types = {
4351 'boolean': 'CHAR(1)',
4352 'string': 'VARCHAR(%(length)s)',
4353 'text': 'LONG',
4354 'json': 'LONG',
4355 'password': 'VARCHAR(%(length)s)',
4356 'blob': 'LONG',
4357 'upload': 'VARCHAR(%(length)s)',
4358 'integer': 'INT',
4359 'bigint': 'BIGINT',
4360 'float': 'FLOAT',
4361 'double': 'DOUBLE PRECISION',
4362 'decimal': 'FIXED(%(precision)s,%(scale)s)',
4363 'date': 'DATE',
4364 'time': 'TIME',
4365 'datetime': 'TIMESTAMP',
4366 'id': 'INT PRIMARY KEY',
4367 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4368 'list:integer': 'LONG',
4369 'list:string': 'LONG',
4370 'list:reference': 'LONG',
4371 'big-id': 'BIGINT PRIMARY KEY',
4372 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
4373 }
4374
4377
4378 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
4379 if limitby:
4380 (lmin, lmax) = limitby
4381 if len(sql_w) > 1:
4382 sql_w_row = sql_w + ' AND w_row > %i' % lmin
4383 else:
4384 sql_w_row = 'WHERE w_row > %i' % lmin
4385 return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
4386 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
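# Illustrative sketch (assumed inputs): SAP DB has no LIMIT/OFFSET, so
# limitby=(10, 20) is emulated with a ROWNO subquery, producing roughly:
#
#   SELECT  name FROM (SELECT w_tmp.*, ROWNO w_row FROM
#       (SELECT name FROM person) w_tmp WHERE ROWNO=20) person
#       WHERE w_row > 10 ;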
4387
4389
4390 self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
4391 self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
4392 % (table._tablename, table._id.name, table._sequence_name))
4393 self.execute(query)
4394
4395 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
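# Illustrative match (hypothetical credentials):
#
#   'user:secret@dbhost/mydb' -> user='user', password='secret',
#                                host='dbhost', port=None, db='mydb'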
4396
4397
4398 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
4399 credential_decoder=IDENTITY, driver_args={},
4400 adapter_args={}, do_connect=True, after_connection=None):
4401 self.db = db
4402 self.dbengine = "sapdb"
4403 self.uri = uri
4404 if do_connect: self.find_driver(adapter_args,uri)
4405 self.pool_size = pool_size
4406 self.folder = folder
4407 self.db_codec = db_codec
4408 self._after_connection = after_connection
4409 self.find_or_make_work_folder()
4410 ruri = uri.split('://',1)[1]
4411 m = self.REGEX_URI.match(ruri)
4412 if not m:
4413 raise SyntaxError("Invalid URI string in DAL")
4414 user = credential_decoder(m.group('user'))
4415 if not user:
4416 raise SyntaxError('User required')
4417 password = credential_decoder(m.group('password'))
4418 if not password:
4419 password = ''
4420 host = m.group('host')
4421 if not host:
4422 raise SyntaxError('Host name required')
4423 db = m.group('db')
4424 if not db:
4425 raise SyntaxError('Database name required')
4426 def connector(user=user, password=password, database=db,
4427 host=host, driver_args=driver_args):
4428 return self.driver.Connection(user, password, database,
4429 host, **driver_args)
4430 self.connector = connector
4431 if do_connect: self.reconnect()
4432
4434 self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
4435 return long(self.cursor.fetchone()[0])
4436
4438 drivers = ('cubriddb',)
4439
4440 REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
4441
4442 - def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
4443 credential_decoder=IDENTITY, driver_args={},
4444 adapter_args={}, do_connect=True, after_connection=None):
4445 self.db = db
4446 self.dbengine = "cubrid"
4447 self.uri = uri
4448 if do_connect: self.find_driver(adapter_args,uri)
4449 self.pool_size = pool_size
4450 self.folder = folder
4451 self.db_codec = db_codec
4452 self._after_connection = after_connection
4453 self.find_or_make_work_folder()
4454 ruri = uri.split('://',1)[1]
4455 m = self.REGEX_URI.match(ruri)
4456 if not m:
4457 raise SyntaxError(
4458 "Invalid URI string in DAL: %s" % self.uri)
4459 user = credential_decoder(m.group('user'))
4460 if not user:
4461 raise SyntaxError('User required')
4462 password = credential_decoder(m.group('password'))
4463 if not password:
4464 password = ''
4465 host = m.group('host')
4466 if not host:
4467 raise SyntaxError('Host name required')
4468 db = m.group('db')
4469 if not db:
4470 raise SyntaxError('Database name required')
4471 port = int(m.group('port') or '30000')
4472 charset = m.group('charset') or 'utf8'
4475 def connector(host=host,port=port,db=db,
4476 user=user,passwd=password,driver_args=driver_args):
4477 return self.driver.connect(host,port,db,user,passwd,**driver_args)
4478 self.connector = connector
4479 if do_connect: self.reconnect()
4480
4482 self.execute('SET FOREIGN_KEY_CHECKS=1;')
4483 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
4484
4489
4490 web2py_filesystem = False
4491
4493 return self.db._adapter.escape(obj)
4494
4496 if not db._adapter.dbengine in ('mysql', 'postgres', 'sqlite'):
4497 raise RuntimeError("only MySQL/Postgres/SQLite can store metadata .table files in database for now")
4498 self.db = db
4499 self.filename = filename
4500 self.mode = mode
4501 if not self.web2py_filesystem:
4502 if db._adapter.dbengine == 'mysql':
4503 sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;"
4504 elif db._adapter.dbengine in ('postgres', 'sqlite'):
4505 sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content TEXT, PRIMARY KEY(path));"
4506 self.db.executesql(sql)
4507 DatabaseStoredFile.web2py_filesystem = True
4508 self.p=0
4509 self.data = ''
4510 if mode in ('r','rw','a'):
4511 query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
4512 % filename
4513 rows = self.db.executesql(query)
4514 if rows:
4515 self.data = rows[0][0]
4516 elif exists(filename):
4517 datafile = open(filename, 'r')
4518 try:
4519 self.data = datafile.read()
4520 finally:
4521 datafile.close()
4522 elif mode in ('r','rw'):
4523 raise RuntimeError("File %s does not exist" % filename)
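# Storage model sketch (inferred from the SQL above; the path shown is
# hypothetical): each "file" is a single row in the web2py_filesystem
# table, keyed by path, e.g.
#
#   path                        | content
#   ----------------------------+---------------------------
#   'databases/person.table'    | serialized migration data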
4524
4525 - def read(self, bytes):
4526 data = self.data[self.p:self.p+bytes]
4527 self.p += len(data)
4528 return data
4529
4531 i = self.data.find('\n',self.p)+1
4532 if i>0:
4533 data, self.p = self.data[self.p:i], i
4534 else:
4535 data, self.p = self.data[self.p:], len(self.data)
4536 return data
4537
4540
4542 if self.db is not None:
4543 self.db.executesql(
4544 "DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
4545 query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')"\
4546 % (self.filename, self.data.replace("'","''"))
4547 self.db.executesql(query)
4548 self.db.commit()
4549 self.db = None
4550
4553
4554 @staticmethod
4556 if exists(filename):
4557 return True
4558 query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
4559 try:
4560 if db.executesql(query):
4561 return True
4562 except Exception, e:
4563 if not (db._adapter.isOperationalError(e) or
4564 db._adapter.isProgrammingError(e)):
4565 raise
4566
4567 tb = traceback.format_exc()
4568 LOGGER.error("Could not retrieve %s\n%s" % (filename, tb))
4569 return False
4570
4573
4576
4577 - def file_open(self, filename, mode='rb', lock=True):
4579
4582
4584 query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
4585 self.db.executesql(query)
4586 self.db.commit()
4587
4589 uploads_in_blob = True
4590
4591 REGEX_URI = re.compile('^(?P<instance>.*)/(?P<db>.*)$')
4592
4593 - def __init__(self, db, uri='google:sql://realm:domain/database',
4594 pool_size=0, folder=None, db_codec='UTF-8',
4595 credential_decoder=IDENTITY, driver_args={},
4596 adapter_args={}, do_connect=True, after_connection=None):
4597
4598 self.db = db
4599 self.dbengine = "mysql"
4600 self.uri = uri
4601 self.pool_size = pool_size
4602 self.db_codec = db_codec
4603 self._after_connection = after_connection
4604 if do_connect: self.find_driver(adapter_args, uri)
4605 self.folder = folder or pjoin('$HOME',THREAD_LOCAL.folder.split(
4606 os.sep+'applications'+os.sep,1)[1])
4607 ruri = uri.split("://")[1]
4608 m = self.REGEX_URI.match(ruri)
4609 if not m:
4610 raise SyntaxError("Invalid URI string in SQLDB: %s" % self.uri)
4611 instance = credential_decoder(m.group('instance'))
4612 self.dbstring = db = credential_decoder(m.group('db'))
4613 driver_args['instance'] = instance
4614 if not 'charset' in driver_args:
4615 driver_args['charset'] = 'utf8'
4616 self.createdb = createdb = adapter_args.get('createdb',True)
4617 if not createdb:
4618 driver_args['database'] = db
4619 def connector(driver_args=driver_args):
4620 return rdbms.connect(**driver_args)
4621 self.connector = connector
4622 if do_connect: self.reconnect()
4623
4625 if self.createdb:
4626
4627 self.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbstring)
4628 self.execute('USE %s' % self.dbstring)
4629 self.execute("SET FOREIGN_KEY_CHECKS=1;")
4630 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
4631
4632 - def execute(self, command, *a, **b):
4634
4636 self.adapter_args = adapter_args
4637 self.driver = "google"
4638
4640 can_select_for_update = False
4641 QUOTE_TEMPLATE = '%s'
4642
4643 @staticmethod
4645 if isinstance(obj, str):
4646 return obj.decode('utf8')
4647 elif not isinstance(obj, unicode):
4648 return unicode(obj)
4649 return obj
4650
4652 return table._id > 0
4653
4655 field_is_type = fieldtype.startswith
4656 if isinstance(obj, CALLABLETYPES):
4657 obj = obj()
4658 if isinstance(fieldtype, SQLCustomType):
4659 return fieldtype.encoder(obj)
4660 if isinstance(obj, (Expression, Field)):
4661 raise SyntaxError("non supported on GAE")
4662 if self.dbengine == 'google:datastore':
4663 if isinstance(fieldtype, gae.Property):
4664 return obj
4665 is_string = isinstance(fieldtype,str)
4666 is_list = is_string and field_is_type('list:')
4667 if is_list:
4668 if not obj:
4669 obj = []
4670 if not isinstance(obj, (list, tuple)):
4671 obj = [obj]
4672 if obj == '' and not \
4673 (is_string and fieldtype[:2] in ['st','te', 'pa','up']):
4674 return None
4675 if not obj is None:
4676 if isinstance(obj, list) and not is_list:
4677 obj = [self.represent(o, fieldtype) for o in obj]
4678 elif fieldtype in ('integer','bigint','id'):
4679 obj = long(obj)
4680 elif fieldtype == 'double':
4681 obj = float(obj)
4682 elif is_string and field_is_type('reference'):
4683 if isinstance(obj, (Row, Reference)):
4684 obj = obj['id']
4685 obj = long(obj)
4686 elif fieldtype == 'boolean':
4687 if obj and not str(obj)[0].upper() in '0F':
4688 obj = True
4689 else:
4690 obj = False
4691 elif fieldtype == 'date':
4692 if not isinstance(obj, datetime.date):
4693 (y, m, d) = map(int,str(obj).strip().split('-'))
4694 obj = datetime.date(y, m, d)
4695 elif isinstance(obj,datetime.datetime):
4696 (y, m, d) = (obj.year, obj.month, obj.day)
4697 obj = datetime.date(y, m, d)
4698 elif fieldtype == 'time':
4699 if not isinstance(obj, datetime.time):
4700 time_items = map(int,str(obj).strip().split(':')[:3])
4701 if len(time_items) == 3:
4702 (h, mi, s) = time_items
4703 else:
4704 (h, mi, s) = time_items + [0]
4705 obj = datetime.time(h, mi, s)
4706 elif fieldtype == 'datetime':
4707 if not isinstance(obj, datetime.datetime):
4708 (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
4709 time_items = map(int,str(obj)[11:].strip().split(':')[:3])
4710 while len(time_items)<3:
4711 time_items.append(0)
4712 (h, mi, s) = time_items
4713 obj = datetime.datetime(y, m, d, h, mi, s)
4714 elif fieldtype == 'blob':
4715 pass
4716 elif fieldtype == 'json':
4717 if isinstance(obj, basestring):
4718 obj = self.to_unicode(obj)
4719 if have_serializers:
4720 obj = serializers.loads_json(obj)
4721 elif simplejson:
4722 obj = simplejson.loads(obj)
4723 else:
4724 raise RuntimeError("missing simplejson")
4725 elif is_string and field_is_type('list:string'):
4726 return map(self.to_unicode,obj)
4727 elif is_list:
4728 return map(int,obj)
4729 else:
4730 obj = self.to_unicode(obj)
4731 return obj
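# Illustrative conversions performed above (hypothetical values):
#
#   represent('1', 'integer')       # -> 1L
#   represent('t', 'boolean')       # -> True (first char not in '0F')
#   represent('2014-01-02', 'date') # -> datetime.date(2014, 1, 2)
#   represent('10:30', 'time')      # -> datetime.time(10, 30, 0)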
4732
4734 return 'insert %s in %s' % (fields, table)
4735
4736 - def _count(self,query,distinct=None):
4737 return 'count %s' % repr(query)
4738
4739 - def _select(self,query,fields,attributes):
4740 return 'select %s where %s' % (repr(fields), repr(query))
4741
4742 - def _delete(self,tablename, query):
4743 return 'delete %s where %s' % (repr(tablename),repr(query))
4744
4745 - def _update(self,tablename,query,fields):
4746 return 'update %s (%s) where %s' % (repr(tablename),
4747 repr(fields),repr(query))
4748
4750 """
4751 remember: no transactions on many NoSQL
4752 """
4753 pass
4754
4756 """
4757 remember: no transactions on many NoSQL
4758 """
4759 pass
4760
4762 """
4763 remember: no transactions on many NoSQL
4764 """
4765 pass
4766
4767
4768
4769 - def OR(self,first,second): raise SyntaxError("Not supported")
4770 - def AND(self,first,second): raise SyntaxError("Not supported")
4771 - def AS(self,first,second): raise SyntaxError("Not supported")
4772 - def ON(self,first,second): raise SyntaxError("Not supported")
4773 - def STARTSWITH(self,first,second=None): raise SyntaxError("Not supported")
4774 - def ENDSWITH(self,first,second=None): raise SyntaxError("Not supported")
4775 - def ADD(self,first,second): raise SyntaxError("Not supported")
4776 - def SUB(self,first,second): raise SyntaxError("Not supported")
4777 - def MUL(self,first,second): raise SyntaxError("Not supported")
4778 - def DIV(self,first,second): raise SyntaxError("Not supported")
4779 - def LOWER(self,first): raise SyntaxError("Not supported")
4780 - def UPPER(self,first): raise SyntaxError("Not supported")
4782 - def LENGTH(self, first): raise SyntaxError("Not supported")
4783 - def AGGREGATE(self,first,what): raise SyntaxError("Not supported")
4784 - def LEFT_JOIN(self): raise SyntaxError("Not supported")
4785 - def RANDOM(self): raise SyntaxError("Not supported")
4786 - def SUBSTRING(self,field,parameters): raise SyntaxError("Not supported")
4787 - def PRIMARY_KEY(self,key): raise SyntaxError("Not supported")
4788 - def ILIKE(self,first,second): raise SyntaxError("Not supported")
4789 - def drop(self,table,mode): raise SyntaxError("Not supported")
4790 - def alias(self,table,alias): raise SyntaxError("Not supported")
4791 - def migrate_table(self,*a,**b): raise SyntaxError("Not supported")
4793 - def prepare(self,key): raise SyntaxError("Not supported")
4796 - def concat_add(self,table): raise SyntaxError("Not supported")
4797 - def constraint_name(self, table, fieldname): raise SyntaxError("Not supported")
4799 - def log_execute(self,*a,**b): raise SyntaxError("Not supported")
4800 - def execute(self,*a,**b): raise SyntaxError("Not supported")
4802 - def lastrowid(self,table): raise SyntaxError("Not supported")
4803 - def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError("Not supported")
4804
4805
4806 -class GAEF(object):
4807 - def __init__(self,name,op,value,apply):
4808 self.name=name=='id' and '__key__' or name
4809 self.op=op
4810 self.value=value
4811 self.apply=apply
4813 return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
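# Illustrative sketch (assumed 'person' table): a query like
# person.id > 0 expands to a list with a single GAEF filter triple,
#
#   [GAEF('__key__', '>', 0L, lambda a,b: a>b)]
#
# whose repr() is "(__key__ > 0L:<type 'long'>)".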
4814
4816 """
4817 NDB:
4818
4819 You can enable NDB by using adapter_args:
4820
4821 db = DAL('google:datastore', adapter_args={'ndb_settings':ndb_settings, 'use_ndb':True})
4822
4823 ndb_settings is optional and can be used for per model caching settings.
4824 It must be a dict in this form:
4825 ndb_settings = {<table_name>:{<variable_name>:<variable_value>}}
4826 See: https://developers.google.com/appengine/docs/python/ndb/cache
4827 """
4828
4829 uploads_in_blob = True
4830 types = {}
4831
4832 reconnect = lambda *args, **kwargs: None
4833
4835 - def file_open(self, filename, mode='rb', lock=True): pass
4837
4838 REGEX_NAMESPACE = re.compile('.*://(?P<namespace>.+)')
4839
4840 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
4841 credential_decoder=IDENTITY, driver_args={},
4842 adapter_args={}, do_connect=True, after_connection=None):
4843 self.use_ndb = ('use_ndb' in adapter_args) and adapter_args['use_ndb']
4844 if self.use_ndb is True:
4845 self.types.update({
4846 'boolean': ndb.BooleanProperty,
4847 'string': (lambda **kwargs: ndb.StringProperty(**kwargs)),
4848 'text': ndb.TextProperty,
4849 'json': ndb.TextProperty,
4850 'password': ndb.StringProperty,
4851 'blob': ndb.BlobProperty,
4852 'upload': ndb.StringProperty,
4853 'integer': ndb.IntegerProperty,
4854 'bigint': ndb.IntegerProperty,
4855 'float': ndb.FloatProperty,
4856 'double': ndb.FloatProperty,
4857 'decimal': NDBDecimalProperty,
4858 'date': ndb.DateProperty,
4859 'time': ndb.TimeProperty,
4860 'datetime': ndb.DateTimeProperty,
4861 'id': None,
4862 'reference': ndb.IntegerProperty,
4863 'list:string': (lambda **kwargs: ndb.StringProperty(repeated=True,default=None, **kwargs)),
4864 'list:integer': (lambda **kwargs: ndb.IntegerProperty(repeated=True,default=None, **kwargs)),
4865 'list:reference': (lambda **kwargs: ndb.IntegerProperty(repeated=True,default=None, **kwargs)),
4866 })
4867 else:
4868 self.types.update({
4869 'boolean': gae.BooleanProperty,
4870 'string': (lambda **kwargs: gae.StringProperty(multiline=True, **kwargs)),
4871 'text': gae.TextProperty,
4872 'json': gae.TextProperty,
4873 'password': gae.StringProperty,
4874 'blob': gae.BlobProperty,
4875 'upload': gae.StringProperty,
4876 'integer': gae.IntegerProperty,
4877 'bigint': gae.IntegerProperty,
4878 'float': gae.FloatProperty,
4879 'double': gae.FloatProperty,
4880 'decimal': GAEDecimalProperty,
4881 'date': gae.DateProperty,
4882 'time': gae.TimeProperty,
4883 'datetime': gae.DateTimeProperty,
4884 'id': None,
4885 'reference': gae.IntegerProperty,
4886 'list:string': (lambda **kwargs: gae.StringListProperty(default=None, **kwargs)),
4887 'list:integer': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
4888 'list:reference': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
4889 })
4890 self.db = db
4891 self.uri = uri
4892 self.dbengine = 'google:datastore'
4893 self.folder = folder
4894 db['_lastsql'] = ''
4895 self.db_codec = 'UTF-8'
4896 self._after_connection = after_connection
4897 self.pool_size = 0
4898 match = self.REGEX_NAMESPACE.match(uri)
4899 if match:
4900 namespace_manager.set_namespace(match.group('namespace'))
4901 self.keyfunc = (self.use_ndb and ndb.Key) or Key.from_path
4902
4903 self.ndb_settings = None
4904 if 'ndb_settings' in adapter_args:
4905 self.ndb_settings = adapter_args['ndb_settings']
4906
4907 - def parse_id(self, value, field_type):
4909
4910 - def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
4911 myfields = {}
4912 for field in table:
4913 if isinstance(polymodel,Table) and field.name in polymodel.fields():
4914 continue
4915 attr = {}
4916 if isinstance(field.custom_qualifier, dict):
4917
4918 attr = field.custom_qualifier
4919 field_type = field.type
4920 if isinstance(field_type, SQLCustomType):
4921 ftype = self.types[field_type.native or field_type.type](**attr)
4922 elif isinstance(field_type, ((self.use_ndb and ndb.Property) or gae.Property)):
4923 ftype = field_type
4924 elif field_type.startswith('id'):
4925 continue
4926 elif field_type.startswith('decimal'):
4927 precision, scale = field_type[7:].strip('()').split(',')
4928 precision = int(precision)
4929 scale = int(scale)
4930 dec_cls = (self.use_ndb and NDBDecimalProperty) or GAEDecimalProperty
4931 ftype = dec_cls(precision, scale, **attr)
4932 elif field_type.startswith('reference'):
4933 if field.notnull:
4934 attr = dict(required=True)
4935 ftype = self.types[field_type[:9]](**attr)
4936 elif field_type.startswith('list:reference'):
4937 if field.notnull:
4938 attr['required'] = True
4939 ftype = self.types[field_type[:14]](**attr)
4940 elif field_type.startswith('list:'):
4941 ftype = self.types[field_type](**attr)
4942 elif not field_type in self.types\
4943 or not self.types[field_type]:
4944 raise SyntaxError('Field: unknown field type: %s' % field_type)
4945 else:
4946 ftype = self.types[field_type](**attr)
4947 myfields[field.name] = ftype
4948 if not polymodel:
4949 model_cls = (self.use_ndb and ndb.Model) or gae.Model
4950 table._tableobj = classobj(table._tablename, (model_cls, ), myfields)
4951 if self.use_ndb:
4952
4953 if self.ndb_settings and (table._tablename in self.ndb_settings):
4954 for k, v in self.ndb_settings[table._tablename].iteritems():
4955 setattr(table._tableobj, k, v)
4956 elif polymodel==True:
4957 pm_cls = (self.use_ndb and NDBPolyModel) or PolyModel
4958 table._tableobj = classobj(table._tablename, (pm_cls, ), myfields)
4959 elif isinstance(polymodel,Table):
4960 table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
4961 else:
4962 raise SyntaxError("polymodel must be None, True, a table or a tablename")
4963 return None
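# Sketch (an assumption, not the literal generated code): for
# db.define_table('person', Field('name')) the classobj call above builds
# the runtime equivalent of:
#
#   class person(gae.Model):  # or ndb.Model when use_ndb is enabled
#       name = gae.StringProperty(multiline=True)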
4964
4965 - def expand(self,expression,field_type=None):
4966 if isinstance(expression,Field):
4967 if expression.type in ('text', 'blob', 'json'):
4968 raise SyntaxError('AppEngine does not index by: %s' % expression.type)
4969 return expression.name
4970 elif isinstance(expression, (Expression, Query)):
4971 if not expression.second is None:
4972 return expression.op(expression.first, expression.second)
4973 elif not expression.first is None:
4974 return expression.op(expression.first)
4975 else:
4976 return expression.op()
4977 elif field_type:
4978 return self.represent(expression,field_type)
4979 elif isinstance(expression,(list,tuple)):
4980 return ','.join([self.represent(item,field_type) for item in expression])
4981 else:
4982 return str(expression)
4983
4984
4985 - def AND(self,first,second):
4991
4992 - def EQ(self,first,second=None):
4993 if isinstance(second, Key):
4994 return [GAEF(first.name,'=',second,lambda a,b:a==b)]
4995 return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]
4996
4997 - def NE(self,first,second=None):
4998 if first.type != 'id':
4999 return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
5000 else:
5001 if not second is None:
5002 second = Key.from_path(first._tablename, long(second))
5003 return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]
5004
5005 - def LT(self,first,second=None):
5006 if first.type != 'id':
5007 return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
5008 else:
5009 second = Key.from_path(first._tablename, long(second))
5010 return [GAEF(first.name,'<',second,lambda a,b:a<b)]
5011
5012 - def LE(self,first,second=None):
5013 if first.type != 'id':
5014 return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
5015 else:
5016 second = Key.from_path(first._tablename, long(second))
5017 return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]
5018
5019 - def GT(self,first,second=None):
5020 if first.type != 'id' or second==0 or second == '0':
5021 return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
5022 else:
5023 second = Key.from_path(first._tablename, long(second))
5024 return [GAEF(first.name,'>',second,lambda a,b:a>b)]
5025
5026 - def GE(self,first,second=None):
5027 if first.type != 'id':
5028 return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
5029 else:
5030 second = Key.from_path(first._tablename, long(second))
5031 return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
5032
5035
5036 - def COMMA(self,first,second):
5038
5039 - def BELONGS(self,first,second=None):
5040 if not isinstance(second,(list, tuple, set)):
5041 raise SyntaxError("Not supported")
5042 if not self.use_ndb:
5043 if isinstance(second,set):
5044 second = list(second)
5045 if first.type == 'id':
5046 second = [Key.from_path(first._tablename, int(i)) for i in second]
5047 return [GAEF(first.name,'in',second,lambda a,b:a in b)]
5048
5049 - def CONTAINS(self,first,second,case_sensitive=False):
5054
5055 - def NOT(self,first):
5056 nops = { self.EQ: self.NE,
5057 self.NE: self.EQ,
5058 self.LT: self.GE,
5059 self.GT: self.LE,
5060 self.LE: self.GT,
5061 self.GE: self.LT}
5062 if not isinstance(first,Query):
5063 raise SyntaxError("Not suported")
5064 nop = nops.get(first.op,None)
5065 if not nop:
5066 raise SyntaxError("Not suported %s" % first.op.__name__)
5067 first.op = nop
5068 return self.expand(first)
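# Example: the datastore has no native NOT, so negation is folded into
# the comparison itself via the nops table above, e.g.
#
#   ~(person.id > 5)   is evaluated as   person.id <= 5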
5069
5072
5073 GAE_FILTER_OPTIONS = {
5074 '=': lambda q, t, p, v: q.filter(getattr(t,p) == v),
5075 '>': lambda q, t, p, v: q.filter(getattr(t,p) > v),
5076 '<': lambda q, t, p, v: q.filter(getattr(t,p) < v),
5077 '<=': lambda q, t, p, v: q.filter(getattr(t,p) <= v),
5078 '>=': lambda q, t, p, v: q.filter(getattr(t,p) >= v),
5079 '!=': lambda q, t, p, v: q.filter(getattr(t,p) != v),
5080 'in': lambda q, t, p, v: q.filter(getattr(t,p).IN(v)),
5081 }
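# Illustrative dispatch (hypothetical property 'age'): an NDB filter for
# age >= 18 goes through the table above as
#
#   GAE_FILTER_OPTIONS['>='](query, tableobj, 'age', 18)
#   # equivalent to: query.filter(tableobj.age >= 18)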
5082
5083 - def filter(self, query, tableobj, prop, op, value):
5085
5086 - def select_raw(self,query,fields=None,attributes=None):
5087 db = self.db
5088 fields = fields or []
5089 attributes = attributes or {}
5090 args_get = attributes.get
5091 new_fields = []
5092 for item in fields:
5093 if isinstance(item,SQLALL):
5094 new_fields += item._table
5095 else:
5096 new_fields.append(item)
5097 fields = new_fields
5098 if query:
5099 tablename = self.get_table(query)
5100 elif fields:
5101 tablename = fields[0].tablename
5102 query = db._adapter.id_query(fields[0].table)
5103 else:
5104 raise SyntaxError("Unable to determine a tablename")
5105
5106 if query:
5107 if use_common_filters(query):
5108 query = self.common_filter(query,[tablename])
5109
5110
5111 tableobj = db[tablename]._tableobj
5112 filters = self.expand(query)
5113
5114 projection = None
5115 if len(db[tablename].fields) == len(fields):
5116
5117 projection = None
5118 elif args_get('projection') == True:
5119 projection = []
5120 for f in fields:
5121 if f.type in ['text', 'blob', 'json']:
5122 raise SyntaxError(
5123 "text and blob field types not allowed in projection queries")
5124 else:
5125 projection.append(f.name)
5126 elif args_get('filterfields') == True:
5127 projection = []
5128 for f in fields:
5129 projection.append(f.name)
5130
5131
5132
5133 query_projection = [
5134 p for p in projection if \
5135 p != db[tablename]._id.name] if projection and \
5136 args_get('projection') == True\
5137 else None
5138
5139 cursor = None
5140 if isinstance(args_get('reusecursor'), str):
5141 cursor = args_get('reusecursor')
5142 if self.use_ndb:
5143 qo = ndb.QueryOptions(projection=query_projection, cursor=cursor)
5144 items = tableobj.query(default_options=qo)
5145 else:
5146 items = gae.Query(tableobj, projection=query_projection,
5147 cursor=cursor)
5148
5149 for filter in filters:
5150 if args_get('projection') == True and \
5151 filter.name in query_projection and \
5152 filter.op in ['=', '<=', '>=']:
5153 raise SyntaxError(
5154 "projection fields cannot have equality filters")
5155 if filter.name=='__key__' and filter.op=='>' and filter.value==0:
5156 continue
5157 elif filter.name=='__key__' and filter.op=='=':
5158 if filter.value==0:
5159 items = []
5160 elif isinstance(filter.value, (self.use_ndb and ndb.Key) or Key):
5161
5162
5163
5164 item = filter.value.get() if self.use_ndb else tableobj.get(filter.value)
5165 items = (item and [item]) or []
5166 else:
5167
5168
5169
5170 item = tableobj.get_by_id(filter.value)
5171 items = (item and [item]) or []
5172 elif isinstance(items,list):
5173 items = [i for i in items if filter.apply(
5174 getattr(i,filter.name),filter.value)]
5175 else:
5176 if filter.name=='__key__' and filter.op != 'in':
5177 if self.use_ndb:
5178 items.order(tableobj._key)
5179 else:
5180 items.order('__key__')
5181 items = self.filter(items, tableobj, filter.name,
5182 filter.op, filter.value) \
5183 if self.use_ndb else \
5184 items.filter('%s %s' % (filter.name,filter.op),
5185 filter.value)
5186
5187 if not isinstance(items,list):
5188 if args_get('left', None):
5189 raise SyntaxError('Set: no left join in appengine')
5190 if args_get('groupby', None):
5191 raise SyntaxError('Set: no groupby in appengine')
5192 orderby = args_get('orderby', False)
5193 if orderby:
5194
5195 if isinstance(orderby, (list, tuple)):
5196 orderby = xorify(orderby)
5197 if isinstance(orderby,Expression):
5198 orderby = self.expand(orderby)
5199 orders = orderby.split(', ')
5200 for order in orders:
5201 if self.use_ndb:
5202
5203 def make_order(o):
5204 s = str(o)
5205 desc = s[0] == '-'
5206 s = (desc and s[1:]) or s
5207 return (desc and -getattr(tableobj, s)) or getattr(tableobj, s)
5208 _order = {'-id':-tableobj._key,'id':tableobj._key}.get(order)
5209 if _order is None:
5210 _order = make_order(order)
5211 items = items.order(_order)
5212 else:
5213 order={'-id':'-__key__','id':'__key__'}.get(order,order)
5214 items = items.order(order)
5215 if args_get('limitby', None):
5216 (lmin, lmax) = attributes['limitby']
5217 (limit, offset) = (lmax - lmin, lmin)
5218 if self.use_ndb:
5219 rows, cursor, more = items.fetch_page(limit,offset=offset)
5220 else:
5221 rows = items.fetch(limit,offset=offset)
5222
5223
5224 if args_get('reusecursor'):
5225 db['_lastcursor'] = cursor if self.use_ndb else items.cursor()
5226 items = rows
5227 return (items, tablename, projection or db[tablename].fields)
5228
5229 - def select(self,query,fields,attributes):
5230 """
5231 This is the GAE version of select. Some notes to consider:
5232 - db['_lastsql'] is not set because there is no SQL statement string
5233 for a GAE query
5234 - 'nativeRef' is a magical fieldname used for self references on GAE
5235 - optional attribute 'projection' when set to True will trigger
5236 use of the GAE projection queries. note that there are rules for
5237 what is accepted imposed by GAE: each field must be indexed,
5238 projection queries cannot contain blob or text fields, and you
5239 cannot use == and also select that same field. see https://developers.google.com/appengine/docs/python/datastore/queries#Query_Projection
5240 - optional attribute 'filterfields' when set to True web2py will only
5241 parse the explicitly listed fields into the Rows object, even though
5242 all fields are returned in the query. This can be used to reduce
5243 memory usage in cases where true projection queries are not
5244 usable.
5245 - optional attribute 'reusecursor' allows use of cursor with queries
5246 that have the limitby attribute. Set the attribute to True for the
5247 first query, set it to the value of db['_lastcursor'] to continue
5248 a previous query. The user must save the cursor value between
5249 requests, and the filters must be identical. It is up to the user
5250 to follow google's limitations: https://developers.google.com/appengine/docs/python/datastore/queries#Query_Cursors
5251 """
5252
5253 (items, tablename, fields) = self.select_raw(query,fields,attributes)
5254
5255 rows = [[(t==self.db[tablename]._id.name and item) or \
5256 (t=='nativeRef' and item) or getattr(item, t) \
5257 for t in fields] for item in items]
5258 colnames = ['%s.%s' % (tablename, t) for t in fields]
5259 processor = attributes.get('processor',self.parse)
5260 return processor(rows,fields,colnames,False)
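# Usage sketch (hedged; 'person' is a hypothetical table):
#
#   rows = db(db.person.id > 0).select(db.person.id, db.person.name,
#                                      projection=True)
#   # paging with a reusable cursor:
#   rows = db(db.person).select(limitby=(0, 20), reusecursor=True)
#   more = db(db.person).select(limitby=(0, 20),
#                               reusecursor=db['_lastcursor'])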
5261
5263 return value[:] if self.use_ndb else value
5264
5266 return value[:] if self.use_ndb else value
5267
5268 - def count(self,query,distinct=None,limit=None):
5269 if distinct:
5270 raise RuntimeError("COUNT DISTINCT not supported")
5271 (items, tablename, fields) = self.select_raw(query)
5272
5273 try:
5274 return len(items)
5275 except TypeError:
5276 return items.count(limit=limit)
5277
5278 - def delete(self,tablename, query):
5279 """
5280 This function was changed on 2010-05-04 because according to
5281 http://code.google.com/p/googleappengine/issues/detail?id=3119
5282 GAE no longer supports deleting more than 1000 records.
5283 """
5284
5285 (items, tablename, fields) = self.select_raw(query)
5286
5287 if not isinstance(items,list):
5288
5289
5290 leftitems = items.fetch(1000, keys_only=True)
5291 counter = 0
5292 while len(leftitems):
5293 counter += len(leftitems)
5294 if self.use_ndb:
5295 ndb.delete_multi(leftitems)
5296 else:
5297 gae.delete(leftitems)
5298 leftitems = items.fetch(1000, keys_only=True)
5299 else:
5300 counter = len(items)
5301 if self.use_ndb:
5302 ndb.delete_multi([item.key for item in items])
5303 else:
5304 gae.delete(items)
5305 return counter
5306
5307 - def update(self,tablename,query,update_fields):
5308
5309 (items, tablename, fields) = self.select_raw(query)
5310 counter = 0
5311 for item in items:
5312 for field, value in update_fields:
5313 setattr(item, field.name, self.represent(value,field.type))
5314 item.put()
5315 counter += 1
5316 LOGGER.info(str(counter))
5317 return counter
5318
5319 - def insert(self,table,fields):
5320 dfields=dict((f.name,self.represent(v,f.type)) for f,v in fields)
5321
5322 tmp = table._tableobj(**dfields)
5323 tmp.put()
5324 key = tmp.key if self.use_ndb else tmp.key()
5325 rid = Reference(key.id())
5326 (rid._table, rid._record, rid._gaekey) = (table, None, key)
5327 return rid
5328
5330 parsed_items = []
5331 for item in items:
5332 dfields=dict((f.name,self.represent(v,f.type)) for f,v in item)
5333 parsed_items.append(table._tableobj(**dfields))
5334 if self.use_ndb:
5335 ndb.put_multi(parsed_items)
5336 else:
5337 gae.put(parsed_items)
5338 return True
5339
5341 return uuid.UUID(uuidv).int
5342
5344 return str(uuid.UUID(int=n))
5345
5347 drivers = ('couchdb',)
5348
5349 uploads_in_blob = True
5350 types = {
5351 'boolean': bool,
5352 'string': str,
5353 'text': str,
5354 'json': str,
5355 'password': str,
5356 'blob': str,
5357 'upload': str,
5358 'integer': long,
5359 'bigint': long,
5360 'float': float,
5361 'double': float,
5362 'date': datetime.date,
5363 'time': datetime.time,
5364 'datetime': datetime.datetime,
5365 'id': long,
5366 'reference': long,
5367 'list:string': list,
5368 'list:integer': list,
5369 'list:reference': list,
5370 }
5371
5373 - def file_open(self, filename, mode='rb', lock=True): pass
5375
5376 - def expand(self,expression,field_type=None):
5377 if isinstance(expression,Field):
5378 if expression.type=='id':
5379 return "%s._id" % expression.tablename
5380 return BaseAdapter.expand(self,expression,field_type)
5381
5382 - def AND(self,first,second):
5384
5385 - def OR(self,first,second):
5387
5388 - def EQ(self,first,second):
5392
5393 - def NE(self,first,second):
5397
5398 - def COMMA(self,first,second):
5400
5402 value = NoSQLAdapter.represent(self, obj, fieldtype)
5403 if fieldtype=='id':
5404 return repr(str(long(value)))
5405 elif fieldtype in ('date','time','datetime','boolean'):
5406 return serializers.json(value)
5407 return repr(not isinstance(value,unicode) and value \
5408 or value and value.encode('utf8'))
5409
5410 - def __init__(self,db,uri='couchdb://127.0.0.1:5984',
5411 pool_size=0,folder=None,db_codec ='UTF-8',
5412 credential_decoder=IDENTITY, driver_args={},
5413 adapter_args={}, do_connect=True, after_connection=None):
5414 self.db = db
5415 self.uri = uri
5416 if do_connect: self.find_driver(adapter_args)
5417 self.dbengine = 'couchdb'
5418 self.folder = folder
5419 db['_lastsql'] = ''
5420 self.db_codec = 'UTF-8'
5421 self._after_connection = after_connection
5422 self.pool_size = pool_size
5423
5424 url='http://'+uri[10:]
5425 def connector(url=url,driver_args=driver_args):
5426 return self.driver.Server(url,**driver_args)
5427 self.reconnect(connector,cursor=False)
5428
5429 - def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
5430 if migrate:
5431 try:
5432 self.connection.create(table._tablename)
5433 except:
5434 pass
5435
5436 - def insert(self,table,fields):
5443
5444 - def _select(self,query,fields,attributes):
5445 if not isinstance(query,Query):
5446 raise SyntaxError("Not Supported")
5447 for key in set(attributes.keys())-SELECT_ARGS:
5448 raise SyntaxError('invalid select attribute: %s' % key)
5449 new_fields=[]
5450 for item in fields:
5451 if isinstance(item,SQLALL):
5452 new_fields += item._table
5453 else:
5454 new_fields.append(item)
5455 def uid(fd):
5456 return fd=='id' and '_id' or fd
5457 def get(row,fd):
5458 return fd=='id' and long(row['_id']) or row.get(fd,None)
5459 fields = new_fields
5460 tablename = self.get_table(query)
5461 fieldnames = [f.name for f in (fields or self.db[tablename])]
5462 colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
5463 fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
5464 fn="(function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);})" %\
5465 dict(t=tablename,
5466 query=self.expand(query),
5467 order='%s._id' % tablename,
5468 fields=fields)
5469 return fn, colnames
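# Illustrative output (assumed 'person' table with a 'name' field): the
# generated CouchDB map function string is roughly
#
#   (function(person){if(person._id>0)
#       emit(person._id,[person._id,person.name]);})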
5470
5471 - def select(self,query,fields,attributes):
5472 if not isinstance(query,Query):
5473 raise SyntaxError("Not Supported")
5474 fn, colnames = self._select(query,fields,attributes)
5475 tablename = colnames[0].split('.')[0]
5476 ctable = self.connection[tablename]
5477 rows = [cols['value'] for cols in ctable.query(fn)]
5478 processor = attributes.get('processor',self.parse)
5479 return processor(rows,fields,colnames,False)
5480
5481 - def delete(self,tablename,query):
5482 if not isinstance(query,Query):
5483 raise SyntaxError("Not Supported")
5484 if query.first.type=='id' and query.op==self.EQ:
5485 id = query.second
5486 tablename = query.first.tablename
5487 assert(tablename == query.first.tablename)
5488 ctable = self.connection[tablename]
5489 try:
5490 del ctable[str(id)]
5491 return 1
5492 except couchdb.http.ResourceNotFound:
5493 return 0
5494 else:
5495 tablename = self.get_table(query)
5496 rows = self.select(query,[self.db[tablename]._id],{})
5497 ctable = self.connection[tablename]
5498 for row in rows:
5499 del ctable[str(row.id)]
5500 return len(rows)
5501
5502 - def update(self,tablename,query,fields):
5503 if not isinstance(query,Query):
5504 raise SyntaxError("Not Supported")
5505 if query.first.type=='id' and query.op==self.EQ:
5506 id = query.second
5507 tablename = query.first.tablename
5508 ctable = self.connection[tablename]
5509 try:
5510 doc = ctable[str(id)]
5511 for key,value in fields:
5512 doc[key.name] = self.represent(value,self.db[tablename][key.name].type)
5513 ctable.save(doc)
5514 return 1
5515 except couchdb.http.ResourceNotFound:
5516 return 0
5517 else:
5518 tablename = self.get_table(query)
5519 rows = self.select(query,[self.db[tablename]._id],{})
5520 ctable = self.connection[tablename]
5521 table = self.db[tablename]
5522 for row in rows:
5523 doc = ctable[str(row.id)]
5524 for key,value in fields:
5525 doc[key.name] = self.represent(value,table[key.name].type)
5526 ctable.save(doc)
5527 return len(rows)
5528
5529 - def count(self,query,distinct=None):
5530 if distinct:
5531 raise RuntimeError("COUNT DISTINCT not supported")
5532 if not isinstance(query,Query):
5533 raise SyntaxError("Not Supported")
5534 tablename = self.get_table(query)
5535 rows = self.select(query,[self.db[tablename]._id],{})
5536 return len(rows)
5537
5539 """
5540 validates that the given text is clean: only contains [0-9a-zA-Z_]
5541 """
5542
5543
5544 return text
5545
5547 native_json = True
5548 drivers = ('pymongo',)
5549
5550 uploads_in_blob = False
5551
5552 types = {
5553 'boolean': bool,
5554 'string': str,
5555 'text': str,
5556 'json': str,
5557 'password': str,
5558 'blob': str,
5559 'upload': str,
5560 'integer': long,
5561 'bigint': long,
5562 'float': float,
5563 'double': float,
5564 'date': datetime.date,
5565 'time': datetime.time,
5566 'datetime': datetime.datetime,
5567 'id': long,
5568 'reference': long,
5569 'list:string': list,
5570 'list:integer': list,
5571 'list:reference': list,
5572 }
5573
5574 error_messages = {"javascript_needed": "This must still be replaced" +
5575 " with javascript in order to work."}
5576
5577 - def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
5578 pool_size=0, folder=None, db_codec ='UTF-8',
5579 credential_decoder=IDENTITY, driver_args={},
5580 adapter_args={}, do_connect=True, after_connection=None):
5581
5582 self.db = db
5583 self.uri = uri
5584 if do_connect: self.find_driver(adapter_args)
5585 import random
5586 from bson.objectid import ObjectId
5587 from bson.son import SON
5588 import pymongo.uri_parser
5589
5590 m = pymongo.uri_parser.parse_uri(uri)
5591
5592 self.SON = SON
5593 self.ObjectId = ObjectId
5594 self.random = random
5595
5596 self.dbengine = 'mongodb'
5597 self.folder = folder
5598 db['_lastsql'] = ''
5599 self.db_codec = 'UTF-8'
5600 self._after_connection = after_connection
5601 self.pool_size = pool_size
5602
5603
5604 self.minimumreplication = adapter_args.get('minimumreplication',0)
5605
5606
5607
5608
5609 self.safe = adapter_args.get('safe',True)
5610
5611 self.uploads_in_blob = adapter_args.get('uploads_in_blob', False)
5612
5613 if isinstance(m,tuple):
5614 m = {"database" : m[1]}
5615 if m.get('database')==None:
5616 raise SyntaxError("Database is required!")
5617
5618 def connector(uri=self.uri,m=m):
5619
5620 if hasattr(self.driver, "MongoClient"):
5621 Connection = self.driver.MongoClient
5622 else:
5623 Connection = self.driver.Connection
5624 return Connection(uri)[m.get('database')]
5625
5626 self.reconnect(connector,cursor=False)
5627
5629 """ Convert input to a valid Mongodb ObjectId instance
5630
5631 self.object_id("<random>") -> ObjectId (not unique) instance """
5632 if not arg:
5633 arg = 0
5634 if isinstance(arg, basestring):
5635
5636 rawhex = len(arg.replace("0x", "").replace("L", "")) == 24
5637 if arg.isdigit() and (not rawhex):
5638 arg = int(arg)
5639 elif arg == "<random>":
5640 arg = int("0x%sL" % \
5641 "".join([self.random.choice("0123456789abcdef") \
5642 for x in range(24)]), 0)
5643 elif arg.isalnum():
5644 if not arg.startswith("0x"):
5645 arg = "0x%s" % arg
5646 try:
5647 arg = int(arg, 0)
5648 except ValueError, e:
5649 raise ValueError(
5650 "invalid objectid argument string: %s" % e)
5651 else:
5652 raise ValueError("Invalid objectid argument string. " +
5653 "Requires an integer or base 16 value")
5654 elif isinstance(arg, self.ObjectId):
5655 return arg
5656
5657 if not isinstance(arg, (int, long)):
5658 raise TypeError("object_id argument must be of type " +
5659 "ObjectId or an objectid representable integer")
5660 hexvalue = hex(arg)[2:].rstrip('L').zfill(24)
5661 return self.ObjectId(hexvalue)
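# Illustrative conversions (hypothetical values):
#
#   object_id(1)          # -> ObjectId('000000000000000000000001')
#   object_id('0x1')      # -> same as above
#   object_id('<random>') # -> a pseudo-random (non-unique) ObjectId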
5662
5664
5665 if isinstance(value, self.ObjectId):
5666 value = long(str(value), 16)
5667 return super(MongoDBAdapter,
5668 self).parse_reference(value, field_type)
5669
5670 - def parse_id(self, value, field_type):
5671 if isinstance(value, self.ObjectId):
5672 value = long(str(value), 16)
5673 return super(MongoDBAdapter,
5674 self).parse_id(value, field_type)
5675
5677
5678 if isinstance(obj, self.ObjectId):
5679 value = obj
5680 else:
5681 value = NoSQLAdapter.represent(self, obj, fieldtype)
5682
5683 if fieldtype =='date':
5684 if value == None:
5685 return value
5686
5687 t = datetime.time(0, 0, 0)
5688
5689
5690 return datetime.datetime.combine(value, t)
5691 elif fieldtype == 'time':
5692 if value == None:
5693 return value
5694
5695 d = datetime.date(2000, 1, 1)
5696
5697
5698 return datetime.datetime.combine(d, value)
5699 elif fieldtype == "blob":
5700 if value== None:
5701 return value
5702 from bson import Binary
5703 if not isinstance(value, Binary):
5704 if not isinstance(value, basestring):
5705 return Binary(str(value))
5706 return Binary(value)
5707 return value
5708 elif (isinstance(fieldtype, basestring) and
5709 fieldtype.startswith('list:')):
5710 if fieldtype.startswith('list:reference'):
5711 newval = []
5712 for v in value:
5713 newval.append(self.object_id(v))
5714 return newval
5715 return value
5716 elif ((isinstance(fieldtype, basestring) and
5717 fieldtype.startswith("reference")) or
5718 (isinstance(fieldtype, Table)) or fieldtype=="id"):
5719 value = self.object_id(value)
5720 return value
5721
5722 - def create_table(self, table, migrate=True, fake_migrate=False,
5723 polymodel=None, isCapped=False):
5724 if isCapped:
5725 raise RuntimeError("Not implemented")
5726
5727 - def count(self, query, distinct=None, snapshot=True):
5728 if distinct:
5729 raise RuntimeError("COUNT DISTINCT not supported")
5730 if not isinstance(query,Query):
5731 raise SyntaxError("Not Supported")
5732 tablename = self.get_table(query)
5733 return long(self.select(query,[self.db[tablename]._id], {},
5734 count=True,snapshot=snapshot)['count'])
5735
5736
5737
5738
5739
5740 - def expand(self, expression, field_type=None):
5741 if isinstance(expression, Query):
5742
5743
5744
5745
5746
5747 if isinstance(expression.first,Field) and \
5748 ((expression.first.type == 'id') or \
5749 ("reference" in expression.first.type)):
5750 if expression.first.type == 'id':
5751 expression.first.name = '_id'
5752
5753 if isinstance(expression.second, (tuple, list, set)):
5754 expression.second = [self.object_id(item) for
5755 item in expression.second]
5756 else:
5757 expression.second = self.object_id(expression.second)
5758 result = expression.op(expression.first, expression.second)
5759
5760 if isinstance(expression, Field):
5761 if expression.type=='id':
5762 result = "_id"
5763 else:
5764 result = expression.name
5765 elif isinstance(expression, (Expression, Query)):
5766 if not expression.second is None:
5767 result = expression.op(expression.first, expression.second)
5768 elif not expression.first is None:
5769 result = expression.op(expression.first)
5770 elif not isinstance(expression.op, str):
5771 result = expression.op()
5772 else:
5773 result = expression.op
5774 elif field_type:
5775 result = self.represent(expression,field_type)
5776 elif isinstance(expression,(list,tuple)):
5777 result = ','.join(self.represent(item,field_type) for
5778 item in expression)
5779 else:
5780 result = expression
5781 return result
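# Illustrative expansions (assumed 'person' table):
#
#   expand(db.person.name == 'Jim') # -> {'name': 'Jim'}
#   expand(db.person.age > 18)      # -> {'age': {'$gt': 18}}
#   expand(db.person.id == 1)
#   # -> {'_id': ObjectId('000000000000000000000001')}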
5782
5783 - def drop(self, table, mode=''):
5786
5787 - def truncate(self, table, mode, safe=None):
5788 if safe == None:
5789 safe=self.safe
5790 ctable = self.connection[table._tablename]
5791 ctable.remove(None, safe=True)
5792
5793 - def select(self, query, fields, attributes, count=False,
5794 snapshot=False):
5795 mongofields_dict = self.SON()
5796 mongoqry_dict = {}
5797 new_fields=[]
5798 mongosort_list = []
5799
5800 orderby = attributes.get('orderby', False)
5801 limitby = attributes.get('limitby', False)
5802
5803 if 'for_update' in attributes:
5804 logging.warn('mongodb does not support for_update')
5805 for key in set(attributes.keys())-set(('limitby',
5806 'orderby','for_update')):
5807 if attributes[key]!=None:
5808 logging.warn('select attribute not implemented: %s' % key)
5809 if limitby:
5810 limitby_skip, limitby_limit = limitby[0], int(limitby[1])
5811 else:
5812 limitby_skip = limitby_limit = 0
5813 if orderby:
5814 if isinstance(orderby, (list, tuple)):
5815 orderby = xorify(orderby)
5816
5817 for f in self.expand(orderby).split(','):
5818 if f.startswith('-'):
5819 mongosort_list.append((f[1:], -1))
5820 else:
5821 mongosort_list.append((f, 1))
5822 for item in fields:
5823 if isinstance(item, SQLALL):
5824 new_fields += item._table
5825 else:
5826 new_fields.append(item)
5827 fields = new_fields
5828 if isinstance(query,Query):
5829 tablename = self.get_table(query)
5830 elif len(fields) != 0:
5831 tablename = fields[0].tablename
5832 else:
5833 raise SyntaxError("The table name could not be found in " +
5834 "the query nor from the select statement.")
5835 mongoqry_dict = self.expand(query)
5836 fields = fields or self.db[tablename]
5837 for field in fields:
5838 mongofields_dict[field.name] = 1
5839 ctable = self.connection[tablename]
5840 if count:
5841 return {'count' : ctable.find(
5842 mongoqry_dict, mongofields_dict,
5843 skip=limitby_skip, limit=limitby_limit,
5844 sort=mongosort_list, snapshot=snapshot).count()}
5845 else:
5846
5847 mongo_list_dicts = ctable.find(mongoqry_dict,
5848 mongofields_dict, skip=limitby_skip,
5849 limit=limitby_limit, sort=mongosort_list,
5850 snapshot=snapshot)
5851 rows = []
5852
5853
5854 colnames = []
5855 newnames = []
5856 for field in fields:
5857 colname = str(field)
5858 colnames.append(colname)
5859 tablename, fieldname = colname.split(".")
5860 if fieldname == "_id":
5861
5862 field.name = "id"
5863 newnames.append(".".join((tablename, field.name)))
5864
5865 for record in mongo_list_dicts:
5866 row=[]
5867 for colname in colnames:
5868 tablename, fieldname = colname.split(".")
5869
5870
5871 if fieldname == "id": fieldname = "_id"
5872 if fieldname in record:
5873 value = record[fieldname]
5874 else:
5875 value = None
5876 row.append(value)
5877 rows.append(row)
5878 processor = attributes.get('processor', self.parse)
5879 result = processor(rows, fields, newnames, False)
5880 return result
5881
5882 - def insert(self, table, fields, safe=None):
5883 """Safe determines whether a asynchronious request is done or a
5884 synchronious action is done
5885 For safety, we use by default synchronous requests"""
5886
5887 values = dict()
5888 if safe==None:
5889 safe = self.safe
5890 ctable = self.connection[table._tablename]
5891 for k, v in fields:
5892 if not k.name in ["id", "safe"]:
5893 fieldname = k.name
5894 fieldtype = table[k.name].type
5895 values[fieldname] = self.represent(v, fieldtype)
5896
5897 ctable.insert(values, safe=safe)
5898 return long(str(values['_id']), 16)
5899
5900 - def update(self, tablename, query, fields, safe=None):
5901 if safe == None:
5902 safe = self.safe
5903
5904
5905 if not isinstance(query, Query):
5906 raise SyntaxError("Not Supported")
5907 amount = self.count(query, False)
5910 filter = None
5911 if query:
5912 filter = self.expand(query)
5913
5914 modify = {'$set': dict((k.name, self.represent(v, k.type)) for
5915 k, v in fields if (not k.name in ("_id", "id")))}
5916 try:
5917 result = self.connection[tablename].update(filter,
5918 modify, multi=True, safe=safe)
5919 if safe:
5920 try:
5921
5922 return result["n"]
5923 except (KeyError, AttributeError, TypeError):
5924 return amount
5925 else:
5926 return amount
5927 except Exception, e:
5928
5929 raise RuntimeError("uncaught exception when updating rows: %s" % e)
5930
5931 - def delete(self, tablename, query, safe=None):
5932 if safe is None:
5933 safe = self.safe
5934 if not isinstance(query, Query):
5935 raise RuntimeError("query type %s is not supported" % \
5936 type(query))
5937 amount = self.count(query, False)
5939 filter = self.expand(query)
5940 self.connection[tablename].remove(filter, safe=safe)
5941 return amount
5942
5944 return [self.insert(table,item) for item in items]
5945
5946
5950
5951
5952 - def NOT(self, first):
5954
5955 - def AND(self,first,second):
5958
5959 - def OR(self,first,second):
5962
5963 - def BELONGS(self, first, second):
5964 if isinstance(second, str):
5965 return {self.expand(first) : {"$in" : [ second[:-1]]} }
5966 elif second==[] or second==() or second==set():
5967 return {1:0}
5968 items = [self.expand(item, first.type) for item in second]
5969 return {self.expand(first) : {"$in" : items} }
5970
5971 - def EQ(self,first,second=None):
5972 result = {}
5973 result[self.expand(first)] = self.expand(second)
5974 return result
5975
5976 - def NE(self, first, second=None):
5977 result = {}
5978 result[self.expand(first)] = {'$ne': self.expand(second)}
5979 return result
5980
5981 - def LT(self,first,second=None):
5982 if second is None:
5983 raise RuntimeError("Cannot compare %s < None" % first)
5984 result = {}
5985 result[self.expand(first)] = {'$lt': self.expand(second)}
5986 return result
5987
5988 - def LE(self,first,second=None):
5989 if second is None:
5990 raise RuntimeError("Cannot compare %s <= None" % first)
5991 result = {}
5992 result[self.expand(first)] = {'$lte': self.expand(second)}
5993 return result
5994
5995 - def GT(self,first,second):
5996 result = {}
5997 result[self.expand(first)] = {'$gt': self.expand(second)}
5998 return result
5999
6000 - def GE(self,first,second=None):
6001 if second is None:
6002 raise RuntimeError("Cannot compare %s >= None" % first)
6003 result = {}
6004 result[self.expand(first)] = {'$gte': self.expand(second)}
6005 return result
6006
6007 - def ADD(self, first, second):
6011
6012 - def SUB(self, first, second):
6016
6017 - def MUL(self, first, second):
6021
6022 - def DIV(self, first, second):
6026
6027 - def MOD(self, first, second):
6031
6032 - def AS(self, first, second):
6033 raise NotImplementedError(self.error_messages["javascript_needed"])
6034 return '%s AS %s' % (self.expand(first), second)
6035
6036
6037
6038
6039 - def ON(self, first, second):
6040 raise NotImplementedError("This is not possible in NoSQL" +
6041 " but can be simulated with a wrapper.")
6042 return '%s ON %s' % (self.expand(first), self.expand(second))
6043
6044
6045
6046
6047 - def COMMA(self, first, second):
6049
6050 - def LIKE(self, first, second):
6054
6055 - def ILIKE(self, first, second):
6056 val = second if isinstance(second,self.ObjectId) else {
6057 '$regex': second.replace('%', ''), '$options': 'i'}
6058 return {self.expand(first): val}
6059
6061
6062 return {self.expand(first): ('/^%s/' % \
6063 self.expand(second, 'string'))}
6064
6066
6067 return {self.expand(first): ('/%s$/' % \
6068 self.expand(second, 'string'))}
6069
6070 - def CONTAINS(self, first, second, case_sensitive=False):
6071
6072
6073
6074 val = second if isinstance(second,self.ObjectId) else \
6075 {'$regex':".*" + re.escape(self.expand(second, 'string')) + ".*"}
6076 return {self.expand(first) : val}
6077
6078 - def LIKE(self, first, second):
6083
6084
6086
6087 import re
6088 return {self.expand(first): {'$regex' : '^' +
6089 re.escape(self.expand(second,
6090 'string'))}}
6091
6092
6094
6095
6096
6097
6098 import re
6099 return {self.expand(first): {'$regex': \
6100 re.escape(self.expand(second, 'string')) + '$'}}
6101
6102
6103 - def CONTAINS(self, first, second, case_sensitive=False):
6104
6105
6106
6107
6108 return {self.expand(first) : {'$regex': \
6109 ".*" + re.escape(self.expand(second, 'string')) + ".*"}}
6110
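# --- Editorial sketch (not part of the original source): the operator
# methods above translate DAL query expressions into MongoDB filter
# documents; the table and field names below are hypothetical.
#
# (db.person.name == 'James')           -> {'name': 'James'}                    # EQ
# (db.person.age > 18)                  -> {'age': {'$gt': 18}}                 # GT
# db.person.name.belongs(['Jim','Joe']) -> {'name': {'$in': ['Jim', 'Joe']}}    # BELONGS
# db.person.name.contains('am')         -> {'name': {'$regex': '.*am.*'}}       # CONTAINS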
6112 -class IMAPAdapter(NoSQLAdapter):
6113 drivers = ('imaplib',)
6114
6115 """ IMAP server adapter
6116
6117 This class is intended as an interface to
6118 email IMAP servers, allowing simple queries in the
6119 web2py DAL query syntax, so that email reading, searching and
6120 other related IMAP mail services (such as those implemented
6121 by providers like Google(r) and Yahoo!(r))
6122 can be managed from web2py applications.
6123
6124 The code uses examples by Yuji Tomita on this post:
6125 http://yuji.wordpress.com/2011/06/22/python-imaplib-imap-example-with-gmail/#comment-1137
6126 and is based on the docs for Python imaplib, Python email
6127 and the email-related IETF RFCs (i.e. RFC2060 and RFC3501).
6128
6129 This adapter was tested with a small set of operations against Gmail(r).
6130 Requests to other services could raise command syntax and response data issues.
6131
6132 It creates its table and field names "statically",
6133 meaning that the developer should leave the table and field
6134 definitions to the DAL instance by calling the adapter's
6135 .define_tables() method. The tables are defined with the
6136 IMAP server mailbox list information.
6137
6138 .define_tables() returns a dictionary mapping DAL tablenames
6139 to the server mailbox names with the following structure:
6140
6141 {<tablename>: str <server mailbox name>}
6142
6143 Here is a list of supported fields:
6144
6145 Field Type Description
6146 ################################################################
6147 uid string
6148 answered boolean Flag
6149 created date
6150 content list:string A list of dicts with text or html parts
6151 to string
6152 cc string
6153 bcc string
6154 size integer the number of octets of the message*
6155 deleted boolean Flag
6156 draft boolean Flag
6157 flagged boolean Flag
6158 sender string
6159 recent boolean Flag
6160 seen boolean Flag
6161 subject string
6162 mime string The mime header declaration
6163 email string The complete RFC822 message**
6164 attachments <type list> Each non-text part as a dict
6165 encoding string The main detected encoding
6166
6167 * On the application side it is measured as the length of the RFC822
6168 message string
6169
6170 WARNING: As row ids are mapped to email sequence numbers,
6171 make sure your web2py IMAP client app does not delete messages
6172 during select or update actions, to prevent
6173 updating or deleting the wrong messages.
6174 Sequence numbers change whenever the mailbox is updated.
6175 To avoid these sequence number issues, the use of uid fields
6176 in query references is recommended (although the rule of doing
6177 updates and deletes in separate actions still applies).
6178
6179 # This is the recommended code to enable imap support
6180 # in the app's model:
6181
6182 imapdb = DAL("imap://user:password@server:port", pool_size=1) # port 993 for ssl
6183 imapdb.define_tables()
6184
6185 Here is an (incomplete) list of possible imap commands:
6186
6187 # Count today's unseen messages
6188 # smaller than 6000 octets from the
6189 # inbox mailbox
6190
6191 q = imapdb.INBOX.seen == False
6192 q &= imapdb.INBOX.created == datetime.date.today()
6193 q &= imapdb.INBOX.size < 6000
6194 unread = imapdb(q).count()
6195
6196 # Fetch last query messages
6197 rows = imapdb(q).select()
6198
6199 # it is also possible to filter query select results with limitby and
6200 # sequences of mailbox fields
6201
6202 set.select(<fields sequence>, limitby=(<int>, <int>))
6203
6204 # Mark last query messages as seen
6205 messages = [row.uid for row in rows]
6206 seen = imapdb(imapdb.INBOX.uid.belongs(messages)).update(seen=True)
6207
6208 # Delete messages in the imap database that have mail from Mr. Gumby
6209
6210 deleted = 0
6211 for mailbox in imapdb.tables:
6212 deleted += imapdb(imapdb[mailbox].sender.contains("gumby")).delete()
6213
6214 # It is also possible to mark messages for deletion instead of erasing them
6215 # directly, with set.update(deleted=True)
6216
6217
6218 # This object gives access
6219 # to the adapter's auto mailbox
6220 # mapped names (which native
6221 # mailbox has which table name)
6222
6223 imapdb.mailboxes <dict> # tablename, server native name pairs
6224
6225 # To retrieve a table's native mailbox name use:
6226 imapdb.<table>.mailbox
6227
6228 ### New features v2.4.1:
6229
6230 # Declare mailboxes statically with tablename, name pairs
6231 # This avoids the extra server name retrieval
6232
6233 imapdb.define_tables({"inbox": "INBOX"})
6234
6235 # Selects without content/attachments/email columns will only
6236 # fetch header and flags
6237
6238 imapdb(q).select(imapdb.INBOX.sender, imapdb.INBOX.subject)
6239 """
6240
6241 types = {
6242 'string': str,
6243 'text': str,
6244 'date': datetime.date,
6245 'datetime': datetime.datetime,
6246 'id': long,
6247 'boolean': bool,
6248 'integer': int,
6249 'bigint': long,
6250 'blob': str,
6251 'list:string': str,
6252 }
6253
6254 dbengine = 'imap'
6255
6256 REGEX_URI = re.compile('^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?$')
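# Editorial example: REGEX_URI splits the portion of the URI after the
# "imap://" scheme (stripped in __init__ below) into credentials and
# endpoint; the URI shown here is hypothetical.
#
# >>> m = IMAPAdapter.REGEX_URI.match("user:secret@imap.example.com:993")
# >>> m.group('user'), m.group('password'), m.group('host'), m.group('port')
# ('user', 'secret', 'imap.example.com', '993')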
6257
6258 - def __init__(self,
6259 db,
6260 uri,
6261 pool_size=0,
6262 folder=None,
6263 db_codec ='UTF-8',
6264 credential_decoder=IDENTITY,
6265 driver_args={},
6266 adapter_args={},
6267 do_connect=True,
6268 after_connection=None):
6269
6270
6271
6272
6273 self.db = db
6274 self.uri = uri
6275 if do_connect: self.find_driver(adapter_args)
6276 self.pool_size=pool_size
6277 self.folder = folder
6278 self.db_codec = db_codec
6279 self._after_connection = after_connection
6280 self.credential_decoder = credential_decoder
6281 self.driver_args = driver_args
6282 self.adapter_args = adapter_args
6283 self.mailbox_size = None
6284 self.static_names = None
6285 self.charset = sys.getfilesystemencoding()
6286
6287 self.imap4 = None
6288 uri = uri.split("://")[1]
6289
6290 """ MESSAGE is an identifier for sequence number"""
6291
6292 self.flags = {'deleted': '\\Deleted', 'draft': '\\Draft',
6293 'flagged': '\\Flagged', 'recent': '\\Recent',
6294 'seen': '\\Seen', 'answered': '\\Answered'}
6295 self.search_fields = {
6296 'id': 'MESSAGE', 'created': 'DATE',
6297 'uid': 'UID', 'sender': 'FROM',
6298 'to': 'TO', 'cc': 'CC',
6299 'bcc': 'BCC', 'content': 'TEXT',
6300 'size': 'SIZE', 'deleted': '\\Deleted',
6301 'draft': '\\Draft', 'flagged': '\\Flagged',
6302 'recent': '\\Recent', 'seen': '\\Seen',
6303 'subject': 'SUBJECT', 'answered': '\\Answered',
6304 'mime': None, 'email': None,
6305 'attachments': None
6306 }
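# Editorial sketch: search_fields maps DAL field names to IMAP SEARCH keys;
# fields mapped to None (mime, email, attachments) cannot be queried.
# For example (hypothetical 'INBOX' table, using the CONTAINS and EQ
# operators defined further below):
#
# >>> str(imapdb.INBOX.sender.contains('gumby'))
# 'FROM "gumby"'
# >>> str(imapdb.INBOX.seen == False)
# 'NOT SEEN'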
6307
6308 db['_lastsql'] = ''
6309
6310 m = self.REGEX_URI.match(uri)
6311 user = m.group('user')
6312 password = m.group('password')
6313 host = m.group('host')
6314 port = int(m.group('port'))
6315 over_ssl = False
6316 if port==993:
6317 over_ssl = True
6318
6319 driver_args.update(host=host,port=port, password=password, user=user)
6320 def connector(driver_args=driver_args):
6321
6322
6323 if over_ssl:
6324 self.imap4 = self.driver.IMAP4_SSL
6325 else:
6326 self.imap4 = self.driver.IMAP4
6327 connection = self.imap4(driver_args["host"], driver_args["port"])
6328 data = connection.login(driver_args["user"], driver_args["password"])
6329
6330
6331 connection.mailbox_names = None
6332
6333
6334 connection.cursor = lambda : True
6335
6336 return connection
6337
6338 self.db.define_tables = self.define_tables
6339 self.connector = connector
6340 if do_connect: self.reconnect()
6341
6386
6387 - def get_last_message(self, tablename):
6388 last_message = None
6389
6390 if not isinstance(self.connection.mailbox_names, dict):
6391 self.get_mailboxes()
6392 try:
6393 result = self.connection.select(
6394 self.connection.mailbox_names[tablename])
6395 last_message = int(result[1][0])
6396
6397 if last_message == 0:
6398 last_message = 1
6399 except (IndexError, ValueError, TypeError, KeyError):
6400 e = sys.exc_info()[1]
6401 LOGGER.debug("Error retrieving the last mailbox" +
6402 " sequence number. %s" % str(e))
6403 return last_message
6404
6405 - def get_uid_bounds(self, tablename):
6406 if not isinstance(self.connection.mailbox_names, dict):
6407 self.get_mailboxes()
6408
6409
6410 last_message = self.get_last_message(tablename)
6411 result, data = self.connection.uid("search", None, "(ALL)")
6412 uid_list = data[0].strip().split()
6413 if len(uid_list) <= 0:
6414 return None
6415 else:
6416 return (uid_list[0], uid_list[-1])
6417
6418 - def convert_date(self, date, add=None, imf=False):
6419 if add is None:
6420 add = datetime.timedelta()
6421 """ Convert a date object to a string
6422 in d-Mon-Y style for IMAP, or do the
6423 inverse conversion
6424
6425 add <timedelta> is added to the date object
6426 """
6427 months = [None, "JAN","FEB","MAR","APR","MAY","JUN",
6428 "JUL", "AUG","SEP","OCT","NOV","DEC"]
6429 if isinstance(date, basestring):
6430
6431 try:
6432 if "," in date:
6433 dayname, datestring = date.split(",")
6434 else:
6435 dayname, datestring = None, date
6436 date_list = datestring.strip().split()
6437 year = int(date_list[2])
6438 month = months.index(date_list[1].upper())
6439 day = int(date_list[0])
6440 hms = map(int, date_list[3].split(":"))
6441 return datetime.datetime(year, month, day,
6442 hms[0], hms[1], hms[2]) + add
6443 except (ValueError, AttributeError, IndexError), e:
6444 LOGGER.error("Could not parse date text: %s. %s" %
6445 (date, e))
6446 return None
6447 elif isinstance(date, (datetime.date, datetime.datetime)):
6448 if imf: date_format = "%a, %d %b %Y %H:%M:%S %z"
6449 else: date_format = "%d-%b-%Y"
6450 return (date + add).strftime(date_format)
6451 else:
6452 return None
6453
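# Editorial examples of convert_date, derived from the parsing logic above
# (the dates themselves are hypothetical):
#
# >>> adapter.convert_date("Tue, 11 Jun 2013 15:30:00 +0000")
# datetime.datetime(2013, 6, 11, 15, 30)
# >>> adapter.convert_date(datetime.date(2013, 6, 11))
# '11-Jun-2013'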
6454 @staticmethod
6455 - def header_represent(f, r=None):
6456 from email.header import decode_header
6457 text, encoding = decode_header(f)[0]
6458 if encoding:
6459 text = text.decode(encoding).encode('utf-8')
6460 return text
6461
6462 - def encode_text(self, text, charset, errors="replace"):
6463 """ convert text for mail to unicode"""
6464 if text is None:
6465 text = ""
6466 else:
6467 if isinstance(text, str):
6468 if charset is None:
6469 text = unicode(text, "utf-8", errors)
6470 else:
6471 text = unicode(text, charset, errors)
6472 else:
6473 raise Exception("Unsupported mail text type %s" % type(text))
6474 return text.encode("utf-8")
6475
6476 - def get_charset(self, message):
6477 charset = message.get_content_charset()
6478 return charset
6479
6481 """ Query the mail database for mailbox names """
6482 if self.static_names:
6483
6484 self.connection.mailbox_names = self.static_names
6485 return self.static_names.keys()
6486
6487 mailboxes_list = self.connection.list()
6488 self.connection.mailbox_names = dict()
6489 mailboxes = list()
6490 x = 0
6491 for item in mailboxes_list[1]:
6492 x = x + 1
6493 item = item.strip()
6494 if not "NOSELECT" in item.upper():
6495 sub_items = item.split("\"")
6496 sub_items = [sub_item for sub_item in sub_items \
6497 if len(sub_item.strip()) > 0]
6498
6499 mailbox = sub_items[-1].strip()
6500
6501
6502 mailbox_name = re.sub('^[_0-9]*', '', re.sub('[^_\w]','',re.sub('[/ ]','_',mailbox)))
6503 mailboxes.append(mailbox_name)
6504 self.connection.mailbox_names[mailbox_name] = mailbox
6505
6506 return mailboxes
6507
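# Editorial sketch of the name sanitization above: the nested re.sub calls
# turn a native mailbox name into a valid DAL table name (the mailbox value
# is hypothetical).
#
# >>> mailbox = "[Gmail]/Sent Mail"
# >>> re.sub('^[_0-9]*', '', re.sub('[^_\w]', '', re.sub('[/ ]', '_', mailbox)))
# 'Gmail_Sent_Mail'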
6508 - def get_query_mailbox(self, query):
6509 nofield = True
6510 tablename = None
6511 attr = query
6512 while nofield:
6513 if hasattr(attr, "first"):
6514 attr = attr.first
6515 if isinstance(attr, Field):
6516 return attr.tablename
6517 elif isinstance(attr, Query):
6518 pass
6519 else:
6520 return None
6521 else:
6522 return None
6523 return tablename
6524
6525 - def is_flag(self, flag):
6526 if self.search_fields.get(flag, None) in self.flags.values():
6527 return True
6528 else:
6529 return False
6530
6532 """
6533 Auto create common IMAP fileds
6534
6535 This function creates fields definitions "statically"
6536 meaning that custom fields as in other adapters should
6537 not be supported and definitions handled on a service/mode
6538 basis (local syntax for Gmail(r), Ymail(r)
6539
6540 Returns a dictionary with tablename, server native mailbox name
6541 pairs.
6542 """
6543 if mailbox_names:
6544
6545 self.static_names = mailbox_names
6546 else:
6547 self.static_names = None
6548 if not isinstance(self.connection.mailbox_names, dict):
6549 self.get_mailboxes()
6550
6551 names = self.connection.mailbox_names.keys()
6552
6553 for name in names:
6554 self.db.define_table("%s" % name,
6555 Field("uid", writable=False),
6556 Field("created", "datetime", writable=False),
6557 Field("content", "text", writable=False),
6558 Field("to", writable=False),
6559 Field("cc", writable=False),
6560 Field("bcc", writable=False),
6561 Field("sender", writable=False),
6562 Field("size", "integer", writable=False),
6563 Field("subject", writable=False),
6564 Field("mime", writable=False),
6565 Field("email", "text", writable=False, readable=False),
6566 Field("attachments", "text", writable=False, readable=False),
6567 Field("encoding", writable=False),
6568 Field("answered", "boolean"),
6569 Field("deleted", "boolean"),
6570 Field("draft", "boolean"),
6571 Field("flagged", "boolean"),
6572 Field("recent", "boolean", writable=False),
6573 Field("seen", "boolean")
6574 )
6575
6576
6577
6578 self.db[name].mailbox = \
6579 self.connection.mailbox_names[name]
6580
6581
6582 self.db[name].to.represent = self.db[name].cc.represent = \
6583 self.db[name].bcc.represent = self.db[name].sender.represent = \
6584 self.db[name].subject.represent = self.header_represent
6585
6586
6587 self.db.mailboxes = self.connection.mailbox_names
6588 return self.db.mailboxes
6589
6594
6595 - def select(self, query, fields, attributes):
6596 """ Search and Fetch records and return web2py rows
6597 """
6598
6599 if use_common_filters(query):
6600 query = self.common_filter(query, [self.get_query_mailbox(query),])
6601
6602 import email
6603
6604
6605 tablename = None
6606 fetch_results = list()
6607
6608 if isinstance(query, Query):
6609 tablename = self.get_table(query)
6610 mailbox = self.connection.mailbox_names.get(tablename, None)
6611 if mailbox is None:
6612 raise ValueError("Mailbox name not found: %s" % tablename)
6613 else:
6614
6615 result, selected = self.connection.select(mailbox, True)
6616 if result != "OK":
6617 raise Exception("IMAP error: %s" % selected)
6618 self.mailbox_size = int(selected[0])
6619 search_query = "(%s)" % str(query).strip()
6620 search_result = self.connection.uid("search", None, search_query)
6621
6622 if search_result[0] == "OK":
6623
6624
6625
6626
6627 limitby = attributes.get('limitby', None)
6628 messages_set = search_result[1][0].split()
6629
6630 messages_set.reverse()
6631 if limitby is not None:
6632
6633 messages_set = messages_set[int(limitby[0]):int(limitby[1])]
6634
6635
6636 if any([(field.name in ["content", "size",
6637 "attachments", "email"]) for
6638 field in fields]):
6639 imap_fields = "(RFC822 FLAGS)"
6640 else:
6641 imap_fields = "(RFC822.HEADER FLAGS)"
6642
6643 if len(messages_set) > 0:
6644
6645
6646
6647
6648 for uid in messages_set:
6649
6650 typ, data = self.connection.uid("fetch", uid, imap_fields)
6651 if typ == "OK":
6652 fr = {"message": int(data[0][0].split()[0]),
6653 "uid": long(uid),
6654 "email": email.message_from_string(data[0][1]),
6655 "raw_message": data[0][1]}
6656 fr["multipart"] = fr["email"].is_multipart()
6657
6658 fr["flags"] = self.driver.ParseFlags(data[1])
6659 fetch_results.append(fr)
6660 else:
6661
6662 raise Exception("IMAP error retrieving the body: %s" % data)
6663 else:
6664 raise Exception("IMAP search error: %s" % search_result[1])
6665 elif isinstance(query, (Expression, basestring)):
6666 raise NotImplementedError()
6667 else:
6668 raise TypeError("Unexpected query type")
6669
6670 imapqry_dict = {}
6671 imapfields_dict = {}
6672
6673 if len(fields) == 1 and isinstance(fields[0], SQLALL):
6674 allfields = True
6675 elif len(fields) == 0:
6676 allfields = True
6677 else:
6678 allfields = False
6679 if allfields:
6680 colnames = ["%s.%s" % (tablename, field) for field in self.search_fields.keys()]
6681 else:
6682 colnames = ["%s.%s" % (tablename, field.name) for field in fields]
6683
6684 for k in colnames:
6685 imapfields_dict[k] = k
6686
6687 imapqry_list = list()
6688 imapqry_array = list()
6689 for fr in fetch_results:
6690 attachments = []
6691 content = []
6692 size = 0
6693 n = int(fr["message"])
6694 item_dict = dict()
6695 message = fr["email"]
6696 uid = fr["uid"]
6697 charset = self.get_charset(message)
6698 flags = fr["flags"]
6699 raw_message = fr["raw_message"]
6700
6701
6702
6703
6704
6705
6706
6707
6708
6709
6710 if "%s.id" % tablename in colnames:
6711 item_dict["%s.id" % tablename] = n
6712 if "%s.created" % tablename in colnames:
6713 item_dict["%s.created" % tablename] = self.convert_date(message["Date"])
6714 if "%s.uid" % tablename in colnames:
6715 item_dict["%s.uid" % tablename] = uid
6716 if "%s.sender" % tablename in colnames:
6717
6718
6719
6720 item_dict["%s.sender" % tablename] = message["From"]
6721 if "%s.to" % tablename in colnames:
6722 item_dict["%s.to" % tablename] = message["To"]
6723 if "%s.cc" % tablename in colnames:
6724 if "Cc" in message.keys():
6725 item_dict["%s.cc" % tablename] = message["Cc"]
6726 else:
6727 item_dict["%s.cc" % tablename] = ""
6728 if "%s.bcc" % tablename in colnames:
6729 if "Bcc" in message.keys():
6730 item_dict["%s.bcc" % tablename] = message["Bcc"]
6731 else:
6732 item_dict["%s.bcc" % tablename] = ""
6733 if "%s.deleted" % tablename in colnames:
6734 item_dict["%s.deleted" % tablename] = "\\Deleted" in flags
6735 if "%s.draft" % tablename in colnames:
6736 item_dict["%s.draft" % tablename] = "\\Draft" in flags
6737 if "%s.flagged" % tablename in colnames:
6738 item_dict["%s.flagged" % tablename] = "\\Flagged" in flags
6739 if "%s.recent" % tablename in colnames:
6740 item_dict["%s.recent" % tablename] = "\\Recent" in flags
6741 if "%s.seen" % tablename in colnames:
6742 item_dict["%s.seen" % tablename] = "\\Seen" in flags
6743 if "%s.subject" % tablename in colnames:
6744 item_dict["%s.subject" % tablename] = message["Subject"]
6745 if "%s.answered" % tablename in colnames:
6746 item_dict["%s.answered" % tablename] = "\\Answered" in flags
6747 if "%s.mime" % tablename in colnames:
6748 item_dict["%s.mime" % tablename] = message.get_content_type()
6749 if "%s.encoding" % tablename in colnames:
6750 item_dict["%s.encoding" % tablename] = charset
6751
6752
6753
6754
6755
6756
6757 if "%s.email" % tablename in colnames:
6758
6759 item_dict["%s.email" % tablename] = raw_message
6760
6761
6762
6763
6764
6765
6766 for part in message.walk():
6767 maintype = part.get_content_maintype()
6768 if ("%s.attachments" % tablename in colnames) or \
6769 ("%s.content" % tablename in colnames):
6770 payload = part.get_payload(decode=True)
6771 if payload:
6772 filename = part.get_filename()
6773 values = {"mime": part.get_content_type()}
6774 if ((filename or not "text" in maintype) and
6775 ("%s.attachments" % tablename in colnames)):
6776 values.update({"payload": payload,
6777 "filename": filename,
6778 "encoding": part.get_content_charset(),
6779 "disposition": part["Content-Disposition"]})
6780 attachments.append(values)
6781 elif (("text" in maintype) and
6782 ("%s.content" % tablename in colnames)):
6783 values.update({"text": self.encode_text(payload,
6784 self.get_charset(part))})
6785 content.append(values)
6786
6787 if "%s.size" % tablename in colnames:
6788 if part is not None:
6789 size += len(str(part))
6790 item_dict["%s.content" % tablename] = content
6791 item_dict["%s.attachments" % tablename] = attachments
6792 item_dict["%s.size" % tablename] = size
6793 imapqry_list.append(item_dict)
6794
6795
6796
6797 for item_dict in imapqry_list:
6798 imapqry_array_item = list()
6799 for fieldname in colnames:
6800 imapqry_array_item.append(item_dict[fieldname])
6801 imapqry_array.append(imapqry_array_item)
6802
6803
6804 colnames = colnames
6805 processor = attributes.get('processor',self.parse)
6806 return processor(imapqry_array, fields, colnames)
6807
6808 - def insert(self, table, fields):
6809 def add_payload(message, obj):
6810 payload = Message()
6811 encoding = obj.get("encoding", "utf-8")
6812 if encoding and (encoding.upper() in
6813 ("BASE64", "7BIT", "8BIT", "BINARY")):
6814 payload.add_header("Content-Transfer-Encoding", encoding)
6815 else:
6816 payload.set_charset(encoding)
6817 mime = obj.get("mime", None)
6818 if mime:
6819 payload.set_type(mime)
6820 if "text" in obj:
6821 payload.set_payload(obj["text"])
6822 elif "payload" in obj:
6823 payload.set_payload(obj["payload"])
6824 if "filename" in obj and obj["filename"]:
6825 payload.add_header("Content-Disposition",
6826 "attachment", filename=obj["filename"])
6827 message.attach(payload)
6828
6829 mailbox = table.mailbox
6830 d = dict(((k.name, v) for k, v in fields))
6831 date_time = d.get("created") or datetime.datetime.now()
6832 struct_time = date_time.timetuple()
6833 if len(d) > 0:
6834 message = d.get("email", None)
6835 attachments = d.get("attachments", [])
6836 content = d.get("content", [])
6837 flags = " ".join(["\\%s" % flag.capitalize() for flag in
6838 ("answered", "deleted", "draft", "flagged",
6839 "recent", "seen") if d.get(flag, False)])
6840 if not message:
6841 from email.message import Message
6842 mime = d.get("mime", None)
6843 charset = d.get("encoding", None)
6844 message = Message()
6845 message["from"] = d.get("sender", "")
6846 message["subject"] = d.get("subject", "")
6847 message["date"] = self.convert_date(date_time, imf=True)
6848
6849 if mime:
6850 message.set_type(mime)
6851 if charset:
6852 message.set_charset(charset)
6853 for item in ("to", "cc", "bcc"):
6854 value = d.get(item, "")
6855 if isinstance(value, basestring):
6856 message[item] = value
6857 else:
6858 message[item] = ";".join([i for i in
6859 value])
6860 if (not message.is_multipart() and
6861 (not message.get_content_type().startswith(
6862 "multipart"))):
6863 if isinstance(content, basestring):
6864 message.set_payload(content)
6865 elif len(content) > 0:
6866 message.set_payload(content[0]["text"])
6867 else:
6868 [add_payload(message, c) for c in content]
6869 [add_payload(message, a) for a in attachments]
6870 message = message.as_string()
6871
6872 result, data = self.connection.append(mailbox, flags, struct_time, message)
6873 if result == "OK":
6874 uid = int(re.findall("\d+", str(data))[-1])
6875 return self.db(table.uid==uid).select(table.id).first().id
6876 else:
6877 raise Exception("IMAP message append failed: %s" % data)
6878 else:
6879 raise NotImplementedError("IMAP empty insert is not implemented")
6880
6881 - def update(self, tablename, query, fields):
6882
6883 commands = list()
6884 rowcount = 0
6885 if use_common_filters(query):
6886 query = self.common_filter(query, [tablename,])
6887 mark = []
6888 unmark = []
6889 if query:
6890 for item in fields:
6891 field = item[0]
6892 name = field.name
6893 value = item[1]
6894 if self.is_flag(name):
6895 flag = self.search_fields[name]
6896 if (value is not None) and (flag != "\\Recent"):
6897 if value:
6898 mark.append(flag)
6899 else:
6900 unmark.append(flag)
6901 result, data = self.connection.select(
6902 self.connection.mailbox_names[tablename])
6903 string_query = "(%s)" % query
6904 result, data = self.connection.search(None, string_query)
6905 store_list = [item.strip() for item in data[0].split()
6906 if item.strip().isdigit()]
6907
6908 for number in store_list:
6909 result = None
6910 if len(mark) > 0:
6911 commands.append((number, "+FLAGS", "(%s)" % " ".join(mark)))
6912 if len(unmark) > 0:
6913 commands.append((number, "-FLAGS", "(%s)" % " ".join(unmark)))
6914
6915 for command in commands:
6916 result, data = self.connection.store(*command)
6917 if result == "OK":
6918 rowcount += 1
6919 else:
6920 raise Exception("IMAP storing error: %s" % data)
6921 return rowcount
6922
6923 - def count(self,query,distinct=None):
6935
6936 - def delete(self, tablename, query):
6937 counter = 0
6938 if query:
6939 if use_common_filters(query):
6940 query = self.common_filter(query, [tablename,])
6941 result, data = self.connection.select(self.connection.mailbox_names[tablename])
6942 string_query = "(%s)" % query
6943 result, data = self.connection.search(None, string_query)
6944 store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
6945 for number in store_list:
6946 result, data = self.connection.store(number, "+FLAGS", "(\\Deleted)")
6947 if result == "OK":
6948 counter += 1
6949 else:
6950 raise Exception("IMAP store error: %s" % data)
6951 if counter > 0:
6952 result, data = self.connection.expunge()
6953 return counter
6954
6955 - def BELONGS(self, first, second):
6956 result = None
6957 name = self.search_fields[first.name]
6958 if name == "MESSAGE":
6959 values = [str(val) for val in second if str(val).isdigit()]
6960 result = "%s" % ",".join(values).strip()
6961
6962 elif name == "UID":
6963 values = [str(val) for val in second if str(val).isdigit()]
6964 result = "UID %s" % ",".join(values).strip()
6965
6966 else:
6967 raise Exception("Operation not supported")
6968
6969 return result
6970
6971 - def CONTAINS(self, first, second, case_sensitive=False):
6972
6973 result = None
6974 name = self.search_fields[first.name]
6975
6976 if name in ("FROM", "TO", "SUBJECT", "TEXT"):
6977 result = "%s \"%s\"" % (name, self.expand(second))
6978 else:
6979 if first.name in ("cc", "bcc"):
6980 result = "%s \"%s\"" % (first.name.upper(), self.expand(second))
6981 elif first.name == "mime":
6982 result = "HEADER Content-Type \"%s\"" % self.expand(second)
6983 else:
6984 raise Exception("Operation not supported")
6985 return result
6986
6987 - def GT(self, first, second):
6988 result = None
6989 name = self.search_fields[first.name]
6990 if name == "MESSAGE":
6991 last_message = self.get_last_message(first.tablename)
6992 result = "%d:%d" % (int(self.expand(second)) + 1, last_message)
6993 elif name == "UID":
6994
6995
6996
6997 try:
6998 pedestal, threshold = self.get_uid_bounds(first.tablename)
6999 except TypeError:
7000 e = sys.exc_info()[1]
7001 LOGGER.debug("Error requesting uid bounds: %s", str(e))
7002 return ""
7003 try:
7004 lower_limit = int(self.expand(second)) + 1
7005 except (ValueError, TypeError):
7006 e = sys.exc_info()[1]
7007 raise Exception("Operation not supported (non integer UID)")
7008 result = "UID %s:%s" % (lower_limit, threshold)
7009 elif name == "DATE":
7010 result = "SINCE %s" % self.convert_date(second, add=datetime.timedelta(1))
7011 elif name == "SIZE":
7012 result = "LARGER %s" % self.expand(second)
7013 else:
7014 raise Exception("Operation not supported")
7015 return result
7016
7017 - def GE(self, first, second):
7018 result = None
7019 name = self.search_fields[first.name]
7020 if name == "MESSAGE":
7021 last_message = self.get_last_message(first.tablename)
7022 result = "%s:%s" % (self.expand(second), last_message)
7023 elif name == "UID":
7024
7025
7026
7027 try:
7028 pedestal, threshold = self.get_uid_bounds(first.tablename)
7029 except TypeError:
7030 e = sys.exc_info()[1]
7031 LOGGER.debug("Error requesting uid bounds: %s", str(e))
7032 return ""
7033 lower_limit = self.expand(second)
7034 result = "UID %s:%s" % (lower_limit, threshold)
7035 elif name == "DATE":
7036 result = "SINCE %s" % self.convert_date(second)
7037 else:
7038 raise Exception("Operation not supported")
7039 return result
7040
7041 - def LT(self, first, second):
7042 result = None
7043 name = self.search_fields[first.name]
7044 if name == "MESSAGE":
7045 result = "%s:%s" % (1, int(self.expand(second)) - 1)
7046 elif name == "UID":
7047 try:
7048 pedestal, threshold = self.get_uid_bounds(first.tablename)
7049 except TypeError:
7050 e = sys.exc_info()[1]
7051 LOGGER.debug("Error requesting uid bounds: %s", str(e))
7052 return ""
7053 try:
7054 upper_limit = int(self.expand(second)) - 1
7055 except (ValueError, TypeError):
7056 e = sys.exc_info()[1]
7057 raise Exception("Operation not supported (non integer UID)")
7058 result = "UID %s:%s" % (pedestal, upper_limit)
7059 elif name == "DATE":
7060 result = "BEFORE %s" % self.convert_date(second)
7061 elif name == "SIZE":
7062 result = "SMALLER %s" % self.expand(second)
7063 else:
7064 raise Exception("Operation not supported")
7065 return result
7066
7067 - def LE(self, first, second):
7068 result = None
7069 name = self.search_fields[first.name]
7070 if name == "MESSAGE":
7071 result = "%s:%s" % (1, self.expand(second))
7072 elif name == "UID":
7073 try:
7074 pedestal, threshold = self.get_uid_bounds(first.tablename)
7075 except TypeError:
7076 e = sys.exc_info()[1]
7077 LOGGER.debug("Error requesting uid bounds: %s", str(e))
7078 return ""
7079 upper_limit = int(self.expand(second))
7080 result = "UID %s:%s" % (pedestal, upper_limit)
7081 elif name == "DATE":
7082 result = "BEFORE %s" % self.convert_date(second, add=datetime.timedelta(1))
7083 else:
7084 raise Exception("Operation not supported")
7085 return result
7086
7087 - def NE(self, first, second=None):
7088 if (second is None) and isinstance(first, Field):
7089
7090 if first.type == "id":
7091 return self.GE(first, 1)
7092 result = self.NOT(self.EQ(first, second))
7093 result = result.replace("NOT NOT", "").strip()
7094 return result
7095
7096 - def EQ(self,first,second):
7097 name = self.search_fields[first.name]
7098 result = None
7099 if name is not None:
7100 if name == "MESSAGE":
7101
7102 result = "%s" % self.expand(second)
7103 elif name == "UID":
7104 result = "UID %s" % self.expand(second)
7105 elif name == "DATE":
7106 result = "ON %s" % self.convert_date(second)
7107
7108 elif name in self.flags.values():
7109 if second:
7110 result = "%s" % (name.upper()[1:])
7111 else:
7112 result = "NOT %s" % (name.upper()[1:])
7113 else:
7114 raise Exception("Operation not supported")
7115 else:
7116 raise Exception("Operation not supported")
7117 return result
7118
7119 - def AND(self, first, second):
7122
7123 - def OR(self, first, second):
7126
7127 - def NOT(self, first):
7128 result = "NOT %s" % self.expand(first)
7129 return result
7130
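# Editorial sketch: the operator methods above compile a DAL query into an
# IMAP SEARCH criterion, which select() then wraps in parentheses before
# sending it to the server. Assuming AND joins its operands with a space,
# a query like the following (hypothetical 'INBOX' table):
#
# q = (imapdb.INBOX.seen == False) & (imapdb.INBOX.size < 6000)
#
# should expand to something like "NOT SEEN SMALLER 6000", sent as
# "(NOT SEEN SMALLER 6000)".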
7131
7132
7133
7134
7135 ADAPTERS = {
7136 'sqlite': SQLiteAdapter,
7137 'spatialite': SpatiaLiteAdapter,
7138 'sqlite:memory': SQLiteAdapter,
7139 'spatialite:memory': SpatiaLiteAdapter,
7140 'mysql': MySQLAdapter,
7141 'postgres': PostgreSQLAdapter,
7142 'postgres:psycopg2': PostgreSQLAdapter,
7143 'postgres:pg8000': PostgreSQLAdapter,
7144 'postgres2:psycopg2': NewPostgreSQLAdapter,
7145 'postgres2:pg8000': NewPostgreSQLAdapter,
7146 'oracle': OracleAdapter,
7147 'mssql': MSSQLAdapter,
7148 'mssql2': MSSQL2Adapter,
7149 'mssql3': MSSQL3Adapter,
7150 'mssql4' : MSSQL4Adapter,
7151 'vertica': VerticaAdapter,
7152 'sybase': SybaseAdapter,
7153 'db2': DB2Adapter,
7154 'teradata': TeradataAdapter,
7155 'informix': InformixAdapter,
7156 'informix-se': InformixSEAdapter,
7157 'firebird': FireBirdAdapter,
7158 'firebird_embedded': FireBirdAdapter,
7159 'ingres': IngresAdapter,
7160 'ingresu': IngresUnicodeAdapter,
7161 'sapdb': SAPDBAdapter,
7162 'cubrid': CubridAdapter,
7163 'jdbc:sqlite': JDBCSQLiteAdapter,
7164 'jdbc:sqlite:memory': JDBCSQLiteAdapter,
7165 'jdbc:postgres': JDBCPostgreSQLAdapter,
7166 'gae': GoogleDatastoreAdapter,
7167 'google:datastore': GoogleDatastoreAdapter,
7168 'google:sql': GoogleSQLAdapter,
7169 'couchdb': CouchDBAdapter,
7170 'mongodb': MongoDBAdapter,
7171 'imap': IMAPAdapter
7172 }
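# Editorial sketch: DAL.__init__ (below) extracts the URI scheme with
# REGEX_DBNAME and uses it as the key into ADAPTERS, roughly equivalent to:
#
# uri = 'postgres:psycopg2://user:pw@localhost/test'    # hypothetical URI
# dbname = uri.split('://')[0]                          # 'postgres:psycopg2'
# adapter = ADAPTERS[dbname](db=db, uri=uri, ...)       # a PostgreSQLAdapter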
7175 """
7176 Field type validation, using web2py's validators mechanism.
7177
7178 Makes sure the content of a field is in line with the declared
7179 field type.
7180 """
7181 db = field.db
7182 try:
7183 from gluon import validators
7184 except ImportError:
7185 return []
7186 field_type, field_length = field.type, field.length
7187 if isinstance(field_type, SQLCustomType):
7188 if hasattr(field_type, 'validator'):
7189 return field_type.validator
7190 else:
7191 field_type = field_type.type
7192 elif not isinstance(field_type,str):
7193 return []
7194 requires=[]
7195 def ff(r,id):
7196 row=r(id)
7197 if not row:
7198 return id
7199 elif hasattr(r, '_format') and isinstance(r._format,str):
7200 return r._format % row
7201 elif hasattr(r, '_format') and callable(r._format):
7202 return r._format(row)
7203 else:
7204 return id
7205 if field_type in ('string', 'text', 'password'):
7206 requires.append(validators.IS_LENGTH(field_length))
7207 elif field_type == 'json':
7208 requires.append(validators.IS_EMPTY_OR(validators.IS_JSON(native_json=field.db._adapter.native_json)))
7209 elif field_type == 'double' or field_type == 'float':
7210 requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
7211 elif field_type == 'integer':
7212 requires.append(validators.IS_INT_IN_RANGE(-2**31, 2**31))
7213 elif field_type == 'bigint':
7214 requires.append(validators.IS_INT_IN_RANGE(-2**63, 2**63))
7215 elif field_type.startswith('decimal'):
7216 requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
7217 elif field_type == 'date':
7218 requires.append(validators.IS_DATE())
7219 elif field_type == 'time':
7220 requires.append(validators.IS_TIME())
7221 elif field_type == 'datetime':
7222 requires.append(validators.IS_DATETIME())
7223 elif db and field_type.startswith('reference') and \
7224 field_type.find('.') < 0 and \
7225 field_type[10:] in db.tables:
7226 referenced = db[field_type[10:]]
7227 def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
7228 field.represent = field.represent or repr_ref
7229 if hasattr(referenced, '_format') and referenced._format:
7230 requires = validators.IS_IN_DB(db,referenced._id,
7231 referenced._format)
7232 if field.unique:
7233 requires._and = validators.IS_NOT_IN_DB(db,field)
7234 if field.tablename == field_type[10:]:
7235 return validators.IS_EMPTY_OR(requires)
7236 return requires
7237 elif db and field_type.startswith('list:reference') and \
7238 field_type.find('.') < 0 and \
7239 field_type[15:] in db.tables:
7240 referenced = db[field_type[15:]]
7241 def list_ref_repr(ids, row=None, r=referenced, f=ff):
7242 if not ids:
7243 return None
7244 refs = None
7245 db, id = r._db, r._id
7246 if isinstance(db._adapter, GoogleDatastoreAdapter):
7247 def count(values): return db(id.belongs(values)).select(id)
7248 rx = range(0, len(ids), 30)
7249 refs = reduce(lambda a,b:a&b, [count(ids[i:i+30]) for i in rx])
7250 else:
7251 refs = db(id.belongs(ids)).select(id)
7252 return (refs and ', '.join(f(r,x.id) for x in refs) or '')
7253 field.represent = field.represent or list_ref_repr
7254 if hasattr(referenced, '_format') and referenced._format:
7255 requires = validators.IS_IN_DB(db,referenced._id,
7256 referenced._format,multiple=True)
7257 else:
7258 requires = validators.IS_IN_DB(db,referenced._id,
7259 multiple=True)
7260 if field.unique:
7261 requires._and = validators.IS_NOT_IN_DB(db,field)
7262 if not field.notnull:
7263 requires = validators.IS_EMPTY_OR(requires)
7264 return requires
7265 elif field_type.startswith('list:'):
7266 def repr_list(values, row=None): return ', '.join(str(v) for v in (values or []))
7267 field.represent = field.represent or repr_list
7268 if field.unique:
7269 requires.insert(0,validators.IS_NOT_IN_DB(db,field))
7270 sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
7271 if field.notnull and not field_type[:2] in sff:
7272 requires.insert(0, validators.IS_NOT_EMPTY())
7273 elif not field.notnull and field_type[:2] in sff and requires:
7274 requires[-1] = validators.IS_EMPTY_OR(requires[-1])
7275 return requires
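# Editorial sketch of typical return values (assuming the gluon validators
# module is importable):
#
# Field('name', 'string', length=32) -> [IS_LENGTH(32)]
# Field('age', 'integer')            -> [IS_INT_IN_RANGE(-2**31, 2**31)]
# Field('owner', 'reference person') -> IS_IN_DB(db, db.person.id, ...)
#                                       when the referenced table has a _format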
7276
7278 - def bar_escape(item):
7279 return str(item).replace('|', '||')
7280
7283
7284 - def bar_decode_integer(value):
7285 if not hasattr(value,'split') and hasattr(value,'read'):
7286 value = value.read()
7287 return [long(x) for x in value.split('|') if x.strip()]
7288
7292
7293
7294 -class Row(object):
7295
7296 """
7297 a dictionary that lets you do d['a'] as well as d.a
7298 this is only used to store a Row
7299 """
7300
7301 __init__ = lambda self,*args,**kwargs: self.__dict__.update(*args,**kwargs)
7302
7303 - def __getitem__(self, k):
7304 if isinstance(k, Table):
7305 try:
7306 return ogetattr(self, k._tablename)
7307 except (KeyError,AttributeError,TypeError):
7308 pass
7309 elif isinstance(k, Field):
7310 try:
7311 return ogetattr(self, k.name)
7312 except (KeyError,AttributeError,TypeError):
7313 pass
7314 try:
7315 return ogetattr(ogetattr(self, k.tablename), k.name)
7316 except (KeyError,AttributeError,TypeError):
7317 pass
7318
7319 key=str(k)
7320 _extra = ogetattr(self, '__dict__').get('_extra', None)
7321 if _extra is not None:
7322 v = _extra.get(key, DEFAULT)
7323 if v != DEFAULT:
7324 return v
7325 try:
7326 return ogetattr(self, key)
7327 except (KeyError,AttributeError,TypeError):
7328 pass
7329
7330 m = REGEX_TABLE_DOT_FIELD.match(key)
7331 if m:
7332 try:
7333 return ogetattr(self, m.group(1))[m.group(2)]
7334 except (KeyError,AttributeError,TypeError):
7335 key = m.group(2)
7336 try:
7337 return ogetattr(self, key)
7338 except (KeyError,AttributeError,TypeError), ae:
7339 try:
7340 self[key] = ogetattr(self,'__get_lazy_reference__')(key)
7341 return self[key]
7342 except:
7343 raise ae
7344
7345 __setitem__ = lambda self, key, value: setattr(self, str(key), value)
7346
7347 __delitem__ = object.__delattr__
7348
7349 __copy__ = lambda self: Row(self)
7350
7351 __call__ = __getitem__
7352
7353
7354 - def get(self, key, default=None):
7355 try:
7356 return self.__getitem__(key)
7357 except(KeyError, AttributeError, TypeError):
7358 return self.__dict__.get(key,default)
7359
7360 has_key = __contains__ = lambda self, key: key in self.__dict__
7361
7362 __nonzero__ = lambda self: len(self.__dict__)>0
7363
7364 update = lambda self, *args, **kwargs: self.__dict__.update(*args, **kwargs)
7365
7366 keys = lambda self: self.__dict__.keys()
7367
7368 items = lambda self: self.__dict__.items()
7369
7370 values = lambda self: self.__dict__.values()
7371
7372 __iter__ = lambda self: self.__dict__.__iter__()
7373
7374 iteritems = lambda self: self.__dict__.iteritems()
7375
7376 __str__ = __repr__ = lambda self: '<Row %s>' % self.as_dict()
7377
7378 __int__ = lambda self: object.__getattribute__(self,'id')
7379
7380 __long__ = lambda self: long(object.__getattribute__(self,'id'))
7381
7382 __getattr__ = __getitem__
7383
7384
7385
7386
7387
7388
7389
7390
7391
7392
7393 - def __eq__(self, other):
7394 try:
7395 return self.as_dict() == other.as_dict()
7396 except AttributeError:
7397 return False
7398
7399 - def __ne__(self, other):
7400 return not (self == other)
7401
7403 return Row(dict(self))
7404
7405 - def as_dict(self, datetime_to_str=False, custom_types=None):
7406 SERIALIZABLE_TYPES = [str, unicode, int, long, float, bool, list, dict]
7407 if isinstance(custom_types,(list,tuple,set)):
7408 SERIALIZABLE_TYPES += custom_types
7409 elif custom_types:
7410 SERIALIZABLE_TYPES.append(custom_types)
7411 d = dict(self)
7412 for k in copy.copy(d.keys()):
7413 v=d[k]
7414 if d[k] is None:
7415 continue
7416 elif isinstance(v,Row):
7417 d[k]=v.as_dict()
7418 elif isinstance(v,Reference):
7419 d[k]=long(v)
7420 elif isinstance(v,decimal.Decimal):
7421 d[k]=float(v)
7422 elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
7423 if datetime_to_str:
7424 d[k] = v.isoformat().replace('T',' ')[:19]
7425 elif not isinstance(v,tuple(SERIALIZABLE_TYPES)):
7426 del d[k]
7427 return d
7428
7429 - def as_xml(self, row_name="row", colnames=None, indent=' '):
7430 def f(row,field,indent=' '):
7431 if isinstance(row,Row):
7432 spc = indent+' \n'
7433 items = [f(row[x],x,indent+' ') for x in row]
7434 return '%s<%s>\n%s\n%s</%s>' % (
7435 indent,
7436 field,
7437 spc.join(item for item in items if item),
7438 indent,
7439 field)
7440 elif not callable(row):
7441 if REGEX_ALPHANUMERIC.match(field):
7442 return '%s<%s>%s</%s>' % (indent,field,row,field)
7443 else:
7444 return '%s<extra name="%s">%s</extra>' % \
7445 (indent,field,row)
7446 else:
7447 return None
7448 return f(self, row_name, indent=indent)
7449
7450 - def as_json(self, mode="object", default=None, colnames=None,
7451 serialize=True, **kwargs):
7452 """
7453 serializes the row to a JSON object
7454 kwargs are passed to .as_dict method
7455 only "object" mode supported
7456
7457 serialize = False used by Rows.as_json
7458 TODO: return array mode with query column order
7459
7460 mode and colnames are not implemented
7461 """
7462
7463 item = self.as_dict(**kwargs)
7464 if serialize:
7465 if have_serializers:
7466 return serializers.json(item,
7467 default=default or
7468 serializers.custom_json)
7469 elif simplejson:
7470 return simplejson.dumps(item)
7471 else:
7472 raise RuntimeError("missing simplejson")
7473 else:
7474 return item
7475
7485
7486 - def smart_query(fields, text):
7487 if not isinstance(fields,(list,tuple)):
7488 fields = [fields]
7489 new_fields = []
7490 for field in fields:
7491 if isinstance(field,Field):
7492 new_fields.append(field)
7493 elif isinstance(field,Table):
7494 for ofield in field:
7495 new_fields.append(ofield)
7496 else:
7497 raise RuntimeError("fields must be a list of fields")
7498 fields = new_fields
7499 field_map = {}
7500 for field in fields:
7501 n = field.name.lower()
7502 if not n in field_map:
7503 field_map[n] = field
7504 n = str(field).lower()
7505 if not n in field_map:
7506 field_map[n] = field
7507 constants = {}
7508 i = 0
7509 while True:
7510 m = REGEX_CONST_STRING.search(text)
7511 if not m: break
7512 text = text[:m.start()]+('#%i' % i)+text[m.end():]
7513 constants[str(i)] = m.group()[1:-1]
7514 i+=1
7515 text = re.sub('\s+',' ',text).lower()
7516 for a,b in [('&','and'),
7517 ('|','or'),
7518 ('~','not'),
7519 ('==','='),
7520 ('<','<'),
7521 ('>','>'),
7522 ('<=','<='),
7523 ('>=','>='),
7524 ('<>','!='),
7525 ('=<','<='),
7526 ('=>','>='),
7527 ('=','='),
7528 (' less or equal than ','<='),
7529 (' greater or equal than ','>='),
7530 (' equal or less than ','<='),
7531 (' equal or greater than ','>='),
7532 (' less or equal ','<='),
7533 (' greater or equal ','>='),
7534 (' equal or less ','<='),
7535 (' equal or greater ','>='),
7536 (' not equal to ','!='),
7537 (' not equal ','!='),
7538 (' equal to ','='),
7539 (' equal ','='),
7540 (' equals ','='),
7541 (' less than ','<'),
7542 (' greater than ','>'),
7543 (' starts with ','startswith'),
7544 (' ends with ','endswith'),
7545 (' not in ' , 'notbelongs'),
7546 (' in ' , 'belongs'),
7547 (' is ','=')]:
7548 if a[0]==' ':
7549 text = text.replace(' is'+a,' %s ' % b)
7550 text = text.replace(a,' %s ' % b)
7551 text = re.sub('\s+',' ',text).lower()
7552 text = re.sub('(?P<a>[\<\>\!\=])\s+(?P<b>[\<\>\!\=])','\g<a>\g<b>',text)
7553 query = field = neg = op = logic = None
7554 for item in text.split():
7555 if field is None:
7556 if item == 'not':
7557 neg = True
7558 elif not neg and not logic and item in ('and','or'):
7559 logic = item
7560 elif item in field_map:
7561 field = field_map[item]
7562 else:
7563 raise RuntimeError("Invalid syntax")
7564 elif not field is None and op is None:
7565 op = item
7566 elif not op is None:
7567 if item.startswith('#'):
7568 if not item[1:] in constants:
7569 raise RuntimeError("Invalid syntax")
7570 value = constants[item[1:]]
7571 else:
7572 value = item
7573 if field.type in ('text', 'string', 'json'):
7574 if op == '=': op = 'like'
7575 if op == '=': new_query = field==value
7576 elif op == '<': new_query = field<value
7577 elif op == '>': new_query = field>value
7578 elif op == '<=': new_query = field<=value
7579 elif op == '>=': new_query = field>=value
7580 elif op == '!=': new_query = field!=value
7581 elif op == 'belongs': new_query = field.belongs(value.split(','))
7582 elif op == 'notbelongs': new_query = ~field.belongs(value.split(','))
7583 elif field.type in ('text', 'string', 'json'):
7584 if op == 'contains': new_query = field.contains(value)
7585 elif op == 'like': new_query = field.like(value)
7586 elif op == 'startswith': new_query = field.startswith(value)
7587 elif op == 'endswith': new_query = field.endswith(value)
7588 else: raise RuntimeError("Invalid operation")
7589 elif field._db._adapter.dbengine=='google:datastore' and \
7590 field.type in ('list:integer', 'list:string', 'list:reference'):
7591 if op == 'contains': new_query = field.contains(value)
7592 else: raise RuntimeError("Invalid operation")
7593 else: raise RuntimeError("Invalid operation")
7594 if neg: new_query = ~new_query
7595 if query is None:
7596 query = new_query
7597 elif logic == 'and':
7598 query &= new_query
7599 elif logic == 'or':
7600 query |= new_query
7601 field = op = neg = logic = None
7602 return query
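# Editorial sketch of smart_query usage (hypothetical table):
#
# >>> q = smart_query([db.person], "person.name starts with 'J'")
#
# db(q).select() is then equivalent to
# db(db.person.name.startswith('J')).select()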
7603
7604 -class DAL(object):
7605
7606 """
7607 an instance of this class represents a database connection
7608
7609 Example::
7610
7611 db = DAL('sqlite://test.db')
7612
7613 or
7614
7615 db = DAL(**{"uri": ..., "tables": [...]...}) # experimental
7616
7617 db.define_table('tablename', Field('fieldname1'),
7618 Field('fieldname2'))
7619 """
7620
7621 - def __new__(cls, uri='sqlite://dummy.db', *args, **kwargs):
7648
7649 @staticmethod
7651 """
7652 # ## this allows gluon to set a folder for this thread
7653 # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
7654 """
7655 BaseAdapter.set_folder(folder)
7656
7657 @staticmethod
7659 """
7660 Returns a dictionary with uri as key with timings and defined tables
7661 {'sqlite://storage.sqlite': {
7662 'dbstats': [(select auth_user.email from auth_user, 0.02009)],
7663 'dbtables': {
7664 'defined': ['auth_cas', 'auth_event', 'auth_group',
7665 'auth_membership', 'auth_permission', 'auth_user'],
7666 'lazy': '[]'
7667 }
7668 }
7669 }
7670 """
7671 dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
7672 infos = {}
7673 for db_uid, db_group in dbs:
7674 for db in db_group:
7675 if not db._uri:
7676 continue
7677 k = hide_password(db._adapter.uri)
7678 infos[k] = dict(
7679 dbstats = [(row[0], row[1]) for row in db._timings],
7680 dbtables = {'defined': sorted(
7681 list(set(db.tables)-set(db._LAZY_TABLES.keys()))),
7682 'lazy': sorted(db._LAZY_TABLES.keys())})
7683 return infos
7684
7685 @staticmethod
7698
7699 @staticmethod
7721
7722 - def __init__(self, uri=DEFAULT_URI,
7723 pool_size=0, folder=None,
7724 db_codec='UTF-8', check_reserved=None,
7725 migrate=True, fake_migrate=False,
7726 migrate_enabled=True, fake_migrate_all=False,
7727 decode_credentials=False, driver_args=None,
7728 adapter_args=None, attempts=5, auto_import=False,
7729 bigint_id=False, debug=False, lazy_tables=False,
7730 db_uid=None, do_connect=True,
7731 after_connection=None, tables=None, ignore_field_case=True,
7732 entity_quoting=False):
7733 """
7734 Creates a new Database Abstraction Layer instance.
7735
7736 Keyword arguments:
7737
7738 :uri: string that contains information for connecting to a database.
7739 (default: 'sqlite://dummy.db')
7740
7741 experimental: you can specify a dictionary as uri
7742 parameter i.e. with
7743 db = DAL({"uri": "sqlite://storage.sqlite",
7744 "tables": {...}, ...})
7745
7746 for an example of dict input you can check the output
7747 of the scaffolding db model with
7748
7749 db.as_dict()
7750
7751 Note that for compatibility with Python older than
7752 version 2.6.5 you should cast your dict input keys
7753 to str due to a syntax limitation on kwarg names.
7754 for proper DAL dictionary input you can use one of:
7755
7756 obj = serializers.cast_keys(dict, [encoding="utf-8"])
7757
7758 or else (for parsing json input)
7759
7760 obj = serializers.loads_json(data, unicode_keys=False)
7761
7762 :pool_size: How many open connections to make to the database object.
7763 :folder: where .table files will be created.
7764 automatically set within web2py
7765 use an explicit path when using DAL outside web2py
7766 :db_codec: string encoding of the database (default: 'UTF-8')
7767 :check_reserved: list of adapters to check tablenames and column names
7768 against sql/nosql reserved keywords. (Default None)
7769
7770 * 'common' List of sql keywords that are common to all database types
7771 such as "SELECT, INSERT". (recommended)
7772 * 'all' Checks against all known SQL keywords. (not recommended)
7773 * '<adaptername>' Checks against the specific adapter's list of keywords
7774 (recommended)
7775 * '<adaptername>_nonreserved' Checks against the specific adapters
7776 list of nonreserved keywords. (if available)
7777 :migrate: (defaults to True) sets default migrate behavior for all tables
7778 :fake_migrate: (defaults to False) sets default fake_migrate behavior for all tables
7779 :migrate_enabled: (defaults to True). If set to False disables ALL migrations
7780 :fake_migrate_all: (defaults to False). If set to True fake migrates ALL tables
7781 :attempts: (defaults to 5). Number of times to attempt connecting
7782 :auto_import: (defaults to False). If set, automatically import table definitions from the
7783 databases folder
7784 :bigint_id: (defaults to False): If set, use bigint instead of int for id fields
7785 :lazy_tables: (defaults to False): delay table definitions until table access
7786 :after_connection: (defaults to None): a callable that will be executed after the connection
7787 """
7788 if uri == '<zombie>' and db_uid is not None: return
7789 if not decode_credentials:
7790 credential_decoder = lambda cred: cred
7791 else:
7792 credential_decoder = lambda cred: urllib.unquote(cred)
7793 self._folder = folder
7794 if folder:
7795 self.set_folder(folder)
7796 self._uri = uri
7797 self._pool_size = pool_size
7798 self._db_codec = db_codec
7799 self._lastsql = ''
7800 self._timings = []
7801 self._pending_references = {}
7802 self._request_tenant = 'request_tenant'
7803 self._common_fields = []
7804 self._referee_name = '%(table)s'
7805 self._bigint_id = bigint_id
7806 self._debug = debug
7807 self._migrated = []
7808 self._LAZY_TABLES = {}
7809 self._lazy_tables = lazy_tables
7810 self._tables = SQLCallableList()
7811 self._driver_args = driver_args
7812 self._adapter_args = adapter_args
7813 self._check_reserved = check_reserved
7814 self._decode_credentials = decode_credentials
7815 self._attempts = attempts
7816 self._do_connect = do_connect
7817 self._ignore_field_case = ignore_field_case
7818
7819 if not str(attempts).isdigit() or attempts < 0:
7820 attempts = 5
7821 if uri:
7822 uris = isinstance(uri,(list,tuple)) and uri or [uri]
7823 error = ''
7824 connected = False
7825 for k in range(attempts):
7826 for uri in uris:
7827 try:
7828 if is_jdbc and not uri.startswith('jdbc:'):
7829 uri = 'jdbc:'+uri
7830 self._dbname = REGEX_DBNAME.match(uri).group()
7831 if not self._dbname in ADAPTERS:
7832 raise SyntaxError("Error in URI '%s' or database not supported" % self._dbname)
7833
7834
7835 kwargs = dict(db=self,uri=uri,
7836 pool_size=pool_size,
7837 folder=folder,
7838 db_codec=db_codec,
7839 credential_decoder=credential_decoder,
7840 driver_args=driver_args or {},
7841 adapter_args=adapter_args or {},
7842 do_connect=do_connect,
7843 after_connection=after_connection,
7844 entity_quoting=entity_quoting)
7845 self._adapter = ADAPTERS[self._dbname](**kwargs)
7846 types = ADAPTERS[self._dbname].types
7847
7848 self._adapter.types = copy.copy(types)
7849 self._adapter.build_parsemap()
7850 self._adapter.ignore_field_case = ignore_field_case
7851 if bigint_id:
7852 if 'big-id' in types and 'reference' in types:
7853 self._adapter.types['id'] = types['big-id']
7854 self._adapter.types['reference'] = types['big-reference']
7855 connected = True
7856 break
7857 except SyntaxError:
7858 raise
7859 except Exception:
7860 tb = traceback.format_exc()
7861 LOGGER.debug('DEBUG: connect attempt %i, connection error:\n%s' % (k, tb))
7862 if connected:
7863 break
7864 else:
7865 time.sleep(1)
7866 if not connected:
7867 raise RuntimeError("Failure to connect, tried %d times:\n%s" % (attempts, tb))
7868 else:
7869 self._adapter = BaseAdapter(db=self,pool_size=0,
7870 uri='None',folder=folder,
7871 db_codec=db_codec, after_connection=after_connection,
7872 entity_quoting=entity_quoting)
7873 migrate = fake_migrate = False
7874 adapter = self._adapter
7875 self._uri_hash = hashlib_md5(adapter.uri).hexdigest()
7876 self.check_reserved = check_reserved
7877 if self.check_reserved:
7878 from reserved_sql_keywords import ADAPTERS as RSK
7879 self.RSK = RSK
7880 self._migrate = migrate
7881 self._fake_migrate = fake_migrate
7882 self._migrate_enabled = migrate_enabled
7883 self._fake_migrate_all = fake_migrate_all
7884 if auto_import or tables:
7885 self.import_table_definitions(adapter.folder,
7886 tables=tables)
7887
7888 @property
7891
7893 - def import_table_definitions(self, path, migrate=False, fake_migrate=False, tables=None):
7894 if tables:
7895 for table in tables:
7896 self.define_table(**table)
7897 else:
7898 pattern = pjoin(path,self._uri_hash+'_*.table')
7899 for filename in glob.glob(pattern):
7900 tfile = self._adapter.file_open(filename, 'r')
7901 try:
7902 sql_fields = pickle.load(tfile)
7903 name = filename[len(pattern)-7:-6]
7904 mf = [(value['sortable'],
7905 Field(key,
7906 type=value['type'],
7907 length=value.get('length',None),
7908 notnull=value.get('notnull',False),
7909 unique=value.get('unique',False))) \
7910 for key, value in sql_fields.iteritems()]
7911 mf.sort(lambda a,b: cmp(a[0],b[0]))
7912 self.define_table(name,*[item[1] for item in mf],
7913 **dict(migrate=migrate,
7914 fake_migrate=fake_migrate))
7915 finally:
7916 self._adapter.file_close(tfile)
7917
7919 """
7920 Validates ``name`` against SQL keywords
7921 Uses self.check_reserve which is a list of
7922 operators to use.
7923 self.check_reserved
7924 ['common', 'postgres', 'mysql']
7925 self.check_reserved
7926 ['all']
7927 """
7928 for backend in self.check_reserved:
7929 if name.upper() in self.RSK[backend]:
7930 raise SyntaxError(
7931 'invalid table/column name "%s" is a "%s" reserved SQL/NOSQL keyword' % (name, backend.upper()))
7932
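# Editorial example: with DAL(..., check_reserved=['common']), defining a
# table or field named 'select' raises
# SyntaxError: invalid table/column name "select" is a "COMMON" reserved SQL/NOSQL keyword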
7933 - def parse_as_rest(self,patterns,args,vars,queries=None,nested_select=True):
7934 """
7935 EXAMPLE:
7936
7937 db.define_table('person',Field('name'),Field('info'))
7938 db.define_table('pet',Field('ownedby',db.person),Field('name'),Field('info'))
7939
7940 @request.restful()
7941 def index():
7942 def GET(*args,**vars):
7943 patterns = [
7944 "/friends[person]",
7945 "/{person.name}/:field",
7946 "/{person.name}/pets[pet.ownedby]",
7947 "/{person.name}/pets[pet.ownedby]/{pet.name}",
7948 "/{person.name}/pets[pet.ownedby]/{pet.name}/:field",
7949 ("/dogs[pet]", db.pet.info=='dog'),
7950 ("/dogs[pet]/{pet.name.startswith}", db.pet.info=='dog'),
7951 ]
7952 parser = db.parse_as_rest(patterns,args,vars)
7953 if parser.status == 200:
7954 return dict(content=parser.response)
7955 else:
7956 raise HTTP(parser.status,parser.error)
7957
7958 def POST(table_name,**vars):
7959 if table_name == 'person':
7960 return db.person.validate_and_insert(**vars)
7961 elif table_name == 'pet':
7962 return db.pet.validate_and_insert(**vars)
7963 else:
7964 raise HTTP(400)
7965 return locals()
7966 """
7967
7968 db = self
7969 re1 = REGEX_SEARCH_PATTERN
7970 re2 = REGEX_SQUARE_BRACKETS
7971
7972 def auto_table(table,base='',depth=0):
7973 patterns = []
7974 for field in db[table].fields:
7975 if base:
7976 tag = '%s/%s' % (base,field.replace('_','-'))
7977 else:
7978 tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
7979 f = db[table][field]
7980 if not f.readable: continue
7981 if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
7982 tag += '/{%s.%s}' % (table,field)
7983 patterns.append(tag)
7984 patterns.append(tag+'/:field')
7985 elif f.type.startswith('boolean'):
7986 tag += '/{%s.%s}' % (table,field)
7987 patterns.append(tag)
7988 patterns.append(tag+'/:field')
7989 elif f.type in ('float','double','integer','bigint'):
7990 tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
7991 patterns.append(tag)
7992 patterns.append(tag+'/:field')
7993 elif f.type.startswith('list:'):
7994 tag += '/{%s.%s.contains}' % (table,field)
7995 patterns.append(tag)
7996 patterns.append(tag+'/:field')
7997 elif f.type in ('date','datetime'):
7998 tag+= '/{%s.%s.year}' % (table,field)
7999 patterns.append(tag)
8000 patterns.append(tag+'/:field')
8001 tag+='/{%s.%s.month}' % (table,field)
8002 patterns.append(tag)
8003 patterns.append(tag+'/:field')
8004 tag+='/{%s.%s.day}' % (table,field)
8005 patterns.append(tag)
8006 patterns.append(tag+'/:field')
8007 if f.type in ('datetime','time'):
8008 tag+= '/{%s.%s.hour}' % (table,field)
8009 patterns.append(tag)
8010 patterns.append(tag+'/:field')
8011 tag+='/{%s.%s.minute}' % (table,field)
8012 patterns.append(tag)
8013 patterns.append(tag+'/:field')
8014 tag+='/{%s.%s.second}' % (table,field)
8015 patterns.append(tag)
8016 patterns.append(tag+'/:field')
8017 if depth>0:
8018 for f in db[table]._referenced_by:
8019 tag+='/%s[%s.%s]' % (table,f.tablename,f.name)
8020 patterns.append(tag)
8021 patterns += auto_table(table,base=tag,depth=depth-1)
8022 return patterns
8023
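# Editorial sketch: for a table 'person', the 'id' field alone makes
# auto_table generate patterns such as:
#
# '/person/id/{person.id}'
# '/person/id/{person.id}/:field'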
8024 if patterns == 'auto':
8025 patterns=[]
8026 for table in db.tables:
8027 if not table.startswith('auth_'):
8028 patterns.append('/%s[%s]' % (table,table))
8029 patterns += auto_table(table,base='',depth=1)
8030 else:
8031 i = 0
8032 while i<len(patterns):
8033 pattern = patterns[i]
8034 if not isinstance(pattern,str):
8035 pattern = pattern[0]
8036 tokens = pattern.split('/')
8037 if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
8038 new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
8039 '/'.join(tokens[:-1]))
8040 patterns = patterns[:i]+new_patterns+patterns[i+1:]
8041 i += len(new_patterns)
8042 else:
8043 i += 1
8044 if '/'.join(args) == 'patterns':
8045 return Row({'status':200,'pattern':'list',
8046 'error':None,'response':patterns})
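# try each pattern in turn: the first pattern whose tags all match
# the positional args is used to build the response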
8047 for pattern in patterns:
8048 basequery, exposedfields = None, []
8049 if isinstance(pattern,tuple):
8050 if len(pattern)==2:
8051 pattern, basequery = pattern
8052 elif len(pattern)>2:
8053 pattern, basequery, exposedfields = pattern[0:3]
8054 otable=table=None
8055 if not isinstance(queries,dict):
8056 dbset=db(queries)
8057 if basequery is not None:
8058 dbset = dbset(basequery)
8059 i=0
8060 tags = pattern[1:].split('/')
8061 if len(tags)!=len(args):
8062 continue
8063 for tag in tags:
8064 if re1.match(tag):
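# a '{table.field.op}' tag: narrow the set by comparing the field
# against the current URL arg with the requested operator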
8065
8066 tokens = tag[1:-1].split('.')
8067 table, field = tokens[0], tokens[1]
8068 if not otable or table == otable:
8069 if len(tokens)==2 or tokens[2]=='eq':
8070 query = db[table][field]==args[i]
8071 elif tokens[2]=='ne':
8072 query = db[table][field]!=args[i]
8073 elif tokens[2]=='lt':
8074 query = db[table][field]<args[i]
8075 elif tokens[2]=='gt':
8076 query = db[table][field]>args[i]
8077 elif tokens[2]=='ge':
8078 query = db[table][field]>=args[i]
8079 elif tokens[2]=='le':
8080 query = db[table][field]<=args[i]
8081 elif tokens[2]=='year':
8082 query = db[table][field].year()==args[i]
8083 elif tokens[2]=='month':
8084 query = db[table][field].month()==args[i]
8085 elif tokens[2]=='day':
8086 query = db[table][field].day()==args[i]
8087 elif tokens[2]=='hour':
8088 query = db[table][field].hour()==args[i]
8089 elif tokens[2]=='minute':
8090 query = db[table][field].minutes()==args[i]
8091 elif tokens[2]=='second':
8092 query = db[table][field].seconds()==args[i]
8093 elif tokens[2]=='startswith':
8094 query = db[table][field].startswith(args[i])
8095 elif tokens[2]=='contains':
8096 query = db[table][field].contains(args[i])
8097 else:
8098 raise RuntimeError("invalid pattern: %s" % pattern)
8099 if len(tokens)==4 and tokens[3]=='not':
8100 query = ~query
8101 elif len(tokens)>=4:
8102 raise RuntimeError("invalid pattern: %s" % pattern)
8103 if not otable and isinstance(queries,dict):
8104 dbset = db(queries[table])
8105 if basequery is not None:
8106 dbset = dbset(basequery)
8107 dbset=dbset(query)
8108 else:
8109 raise RuntimeError("missing relation in pattern: %s" % pattern)
8110 elif re2.match(tag) and args[i]==tag[:tag.find('[')]:
8111 ref = tag[tag.find('[')+1:-1]
8112 if '.' in ref and otable:
8113 table,field = ref.split('.')
8114 selfld = '_id'
8115 if db[table][field].type.startswith('reference '):
8116 refs = [ x.name for x in db[otable] if x.type == db[table][field].type ]
8117 else:
8118 refs = [ x.name for x in db[table]._referenced_by if x.tablename==otable ]
8119 if refs:
8120 selfld = refs[0]
8121 if nested_select:
8122 try:
8123 dbset=db(db[table][field].belongs(dbset._select(db[otable][selfld])))
8124 except ValueError:
8125 return Row({'status':400,'pattern':pattern,
8126 'error':'invalid path','response':None})
8127 else:
8128 items = [item.id for item in dbset.select(db[otable][selfld])]
8129 dbset=db(db[table][field].belongs(items))
8130 else:
8131 table = ref
8132 if not otable and isinstance(queries,dict):
8133 dbset = db(queries[table])
8134 dbset=dbset(db[table])
8135 elif tag==':field' and table:
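# a ':field' tag: return the values of a single readable column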
8136
8137 field = args[i]
8138 if not field in db[table]: break
8139
8140 if not db[table][field].readable:
8141 return Row({'status':418,'pattern':pattern,
8142 'error':'I\'m a teapot','response':None})
8143 try:
8144 distinct = vars.get('distinct', False) == 'True'
8145 offset = long(vars.get('offset',None) or 0)
8146 limits = (offset,long(vars.get('limit',None) or 1000)+offset)
8147 except ValueError:
8148 return Row({'status':400,'error':'invalid limits','response':None})
8149 items = dbset.select(db[table][field], distinct=distinct, limitby=limits)
8150 if items:
8151 return Row({'status':200,'response':items,
8152 'pattern':pattern})
8153 else:
8154 return Row({'status':404,'pattern':pattern,
8155 'error':'no record found','response':None})
8156 elif tag != args[i]:
8157 break
8158 otable = table
8159 i += 1
8160 if i == len(tags) and table:
8161 if hasattr(db[table], '_id'):
8162 ofields = vars.get('order', db[table]._id.name).split('|')
8163 else:
8164 ofields = vars.get('order', db[table]._primarykey[0]).split('|')
8165 try:
8166 orderby = [db[table][f] if not f.startswith('~') else ~db[table][f[1:]] for f in ofields]
8167 except (KeyError, AttributeError):
8168 return Row({'status':400,'error':'invalid orderby','response':None})
8169 if exposedfields:
8170 fields = [field for field in db[table] if str(field).split('.')[-1] in exposedfields and field.readable]
8171 else:
8172 fields = [field for field in db[table] if field.readable]
8173 count = dbset.count()
8174 try:
8175 offset = long(vars.get('offset',None) or 0)
8176 limits = (offset,long(vars.get('limit',None) or 1000)+offset)
8177 except ValueError:
8178 return Row({'status':400,'error':'invalid limits','response':None})
8179
8180
8181 try:
8182 response = dbset.select(limitby=limits,orderby=orderby,*fields)
8183 except ValueError:
8184 return Row({'status':400,'pattern':pattern,
8185 'error':'invalid path','response':None})
8186 return Row({'status':200,'response':response,
8187 'pattern':pattern,'count':count})
8188 return Row({'status':400,'error':'no matching pattern','response':None})
8189
8190 - def define_table(
8191 self,
8192 tablename,
8193 *fields,
8194 **args
8195 ):
8196 if not fields and 'fields' in args:
8197 fields = args.get('fields',())
8198 if not isinstance(tablename, str):
8199 if isinstance(tablename, unicode):
8200 try:
8201 tablename = str(tablename)
8202 except UnicodeEncodeError:
8203 raise SyntaxError("invalid unicode table name")
8204 else:
8205 raise SyntaxError("missing table name")
8206 elif hasattr(self,tablename) or tablename in self.tables:
8207 if not args.get('redefine',False):
8208 raise SyntaxError('table already defined: %s' % tablename)
8209 elif tablename.startswith('_') or hasattr(self,tablename) or \
8210 REGEX_PYTHON_KEYWORDS.match(tablename):
8211 raise SyntaxError('invalid table name: %s' % tablename)
8212 elif self.check_reserved:
8213 self.check_reserved_keyword(tablename)
8214 else:
8215 invalid_args = set(args)-TABLE_ARGS
8216 if invalid_args:
8217 raise SyntaxError('invalid table "%s" attributes: %s' \
8218 % (tablename,invalid_args))
8219 if self._lazy_tables and not tablename in self._LAZY_TABLES:
8220 self._LAZY_TABLES[tablename] = (tablename,fields,args)
8221 table = None
8222 else:
8223 table = self.lazy_define_table(tablename,*fields,**args)
8224 if not tablename in self.tables:
8225 self.tables.append(tablename)
8226 return table
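# Usage sketch: with DAL(..., lazy_tables=True) a call such as
# db.define_table('thing', Field('name')) (hypothetical table) only
# records the definition and returns None; the actual Table object is
# built on first attribute access, e.g. db.thing, via __getattr__.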
8227
8228 - def lazy_define_table(
8229 self,
8230 tablename,
8231 *fields,
8232 **args
8233 ):
8234 args_get = args.get
8235 common_fields = self._common_fields
8236 if common_fields:
8237 fields = list(fields) + list(common_fields)
8238
8239 table_class = args_get('table_class',Table)
8240 table = table_class(self, tablename, *fields, **args)
8241 table._actual = True
8242 self[tablename] = table
8243
8244 table._create_references()
8245 for field in table:
8246 if field.requires == DEFAULT:
8247 field.requires = sqlhtml_validators(field)
8248
8249 migrate = self._migrate_enabled and args_get('migrate',self._migrate)
8250 if migrate and not self._uri in (None,'None') \
8251 or self._adapter.dbengine=='google:datastore':
8252 fake_migrate = self._fake_migrate_all or \
8253 args_get('fake_migrate',self._fake_migrate)
8254 polymodel = args_get('polymodel',None)
8255 try:
8256 GLOBAL_LOCKER.acquire()
8257 self._lastsql = self._adapter.create_table(
8258 table,migrate=migrate,
8259 fake_migrate=fake_migrate,
8260 polymodel=polymodel)
8261 finally:
8262 GLOBAL_LOCKER.release()
8263 else:
8264 table._dbt = None
8265 on_define = args_get('on_define',None)
8266 if on_define: on_define(table)
8267 return table
8268
8269 - def as_dict(self, flat=False, sanitize=True):
8270 db_uid = uri = None
8271 if not sanitize:
8272 uri, db_uid = (self._uri, self._db_uid)
8273 db_as_dict = dict(tables=[], uri=uri, db_uid=db_uid,
8274 **dict([(k, getattr(self, "_" + k, None))
8275 for k in 'pool_size','folder','db_codec',
8276 'check_reserved','migrate','fake_migrate',
8277 'migrate_enabled','fake_migrate_all',
8278 'decode_credentials','driver_args',
8279 'adapter_args', 'attempts',
8280 'bigint_id','debug','lazy_tables',
8281 'do_connect']))
8282 for table in self:
8283 db_as_dict["tables"].append(table.as_dict(flat=flat,
8284 sanitize=sanitize))
8285 return db_as_dict
8286
8287 - def as_xml(self, sanitize=True):
8292
8293 - def as_json(self, sanitize=True):
8298
8299 - def as_yaml(self, sanitize=True):
8304
8305 - def __contains__(self, tablename):
8306 try:
8307 return tablename in self.tables
8308 except AttributeError:
8309
8310 return False
8311
8312 has_key = __contains__
8313
8314 - def get(self,key,default=None):
8315 return self.__dict__.get(key,default)
8316
8317 - def __iter__(self):
8318 for tablename in self.tables:
8319 yield self[tablename]
8320
8321 - def __getitem__(self, key):
8322 return self.__getattr__(str(key))
8323
8324 - def __getattr__(self, key):
8325 if ogetattr(self,'_lazy_tables') and \
8326 key in ogetattr(self,'_LAZY_TABLES'):
8327 tablename, fields, args = self._LAZY_TABLES.pop(key)
8328 return self.lazy_define_table(tablename,*fields,**args)
8329 return ogetattr(self, key)
8330
8331 - def __setitem__(self, key, value):
8332 osetattr(self, str(key), value)
8333
8334 - def __setattr__(self, key, value):
8335 if key[:1]!='_' and key in self:
8336 raise SyntaxError(
8337 'Object %s exists and cannot be redefined' % key)
8338 osetattr(self,key,value)
8339
8340 __delitem__ = object.__delattr__
8341
8342 - def __repr__(self):
8343 if hasattr(self,'_uri'):
8344 return '<DAL uri="%s">' % hide_password(self._adapter.uri)
8345 else:
8346 return '<DAL db_uid="%s">' % self._db_uid
8347
8348 - def smart_query(self, fields, text):
8349 return Set(self, smart_query(fields, text))
8350
8351 - def __call__(self, query=None, ignore_common_filters=None):
8352 if isinstance(query,Table):
8353 query = self._adapter.id_query(query)
8354 elif isinstance(query,Field):
8355 query = query!=None
8356 elif isinstance(query, dict):
8357 icf = query.get("ignore_common_filters")
8358 if icf: ignore_common_filters = icf
8359 return Set(self, query, ignore_common_filters=ignore_common_filters)
8360
8361 - def commit(self):
8362 self._adapter.commit()
8363
8364 - def rollback(self):
8365 self._adapter.rollback()
8366
8367 - def close(self):
8368 self._adapter.close()
8369 if self._db_uid in THREAD_LOCAL.db_instances:
8370 db_group = THREAD_LOCAL.db_instances[self._db_uid]
8371 db_group.remove(self)
8372 if not db_group:
8373 del THREAD_LOCAL.db_instances[self._db_uid]
8374
8375 - def executesql(self, query, placeholders=None, as_dict=False,
8376 fields=None, colnames=None, as_ordered_dict=False):
8377 """
8378 placeholders is optional and defaults to None.
8379 If using raw SQL with placeholders, placeholders may be
8380 a sequence of values to be substituted in,
8381 or (if supported by the DB driver) a dictionary with keys
8382 matching named placeholders in your SQL.
8383
8384 Added 2009-12-05 "as_dict" optional argument. It defaults to
8385 False; when using raw SQL it can be set to True, and
8386 the results cursor returned by the DB driver will be converted
8387 to a sequence of dictionaries keyed with the db field
8388 names. Tested with SQLite but should work with any database
8389 since the cursor.description used to get field names is part
8390 of the Python DB-API 2.0 spec. Results returned with
8391 as_dict=True are the same as those returned when applying
8392 .to_list() to a DAL query. If "as_ordered_dict"=True the
8393 behaviour is the same as when "as_dict"=True, with the keys
8394 (field names) guaranteed to be in the same order as returned
8395 by the select statement executed on the database.
8396
8397 [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
8398
8399 Added 2012-08-24 "fields" and "colnames" optional arguments. If either
8400 is provided, the results cursor returned by the DB driver will be
8401 converted to a DAL Rows object using the db._adapter.parse() method.
8402
8403 The "fields" argument is a list of DAL Field objects that match the
8404 fields returned from the DB. The Field objects should be part of one or
8405 more Table objects defined on the DAL object. The "fields" list can
8406 include one or more DAL Table objects in addition to or instead of
8407 including Field objects, or it can be just a single table (not in a
8408 list). In that case, the Field objects will be extracted from the
8409 table(s).
8410
8411 Instead of specifying the "fields" argument, the "colnames" argument
8412 can be specified as a list of field names in tablename.fieldname format.
8413 Again, these should represent tables and fields defined on the DAL
8414 object.
8415
8416 It is also possible to specify both "fields" and the associated
8417 "colnames". In that case, "fields" can also include DAL Expression
8418 objects in addition to Field objects. For Field objects in "fields",
8419 the associated "colnames" must still be in tablename.fieldname format.
8420 For Expression objects in "fields", the associated "colnames" can
8421 be any arbitrary labels.
8422
8423 Note, the DAL Table objects referred to by "fields" or "colnames" can
8424 be dummy tables and do not have to represent any real tables in the
8425 database. Also, note that the "fields" and "colnames" must be in the
8426 same order as the fields in the results cursor returned from the DB.
8427
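Example (a sketch, assuming the 'person' table defined at the top
of this file):

>>> # a list of dictionaries, one per matching row
>>> people = db.executesql('SELECT id, name FROM person;', as_dict=True)
>>> # the same cursor parsed into a DAL Rows object
>>> rows = db.executesql('SELECT person.id, person.name FROM person;',
... fields=[db.person.id, db.person.name])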
8428 """
8429 adapter = self._adapter
8430 if placeholders:
8431 adapter.execute(query, placeholders)
8432 else:
8433 adapter.execute(query)
8434 if as_dict or as_ordered_dict:
8435 if not hasattr(adapter.cursor,'description'):
8436 raise RuntimeError("database does not support executesql(...,as_dict=True)")
8437
8438
8439
8440 columns = adapter.cursor.description
8441
8442 fields = colnames or [f[0] for f in columns]
8443 if len(fields) != len(set(fields)):
8444 raise RuntimeError("Result set includes duplicate column names. Specify unique column names using the 'colnames' argument")
8445
8446
8447 data = adapter._fetchall()
8448
8449
8450 if as_ordered_dict:
8451 _dict = OrderedDict
8452 else:
8453 _dict = dict
8454 return [_dict(zip(fields,row)) for row in data]
8455 try:
8456 data = adapter._fetchall()
8457 except:
8458 return None
8459 if fields or colnames:
8460 fields = [] if fields is None else fields
8461 if not isinstance(fields, list):
8462 fields = [fields]
8463 extracted_fields = []
8464 for field in fields:
8465 if isinstance(field, Table):
8466 extracted_fields.extend([f for f in field])
8467 else:
8468 extracted_fields.append(field)
8469 if not colnames:
8470 colnames = ['%s.%s' % (f.tablename, f.name)
8471 for f in extracted_fields]
8472 data = adapter.parse(
8473 data, fields=extracted_fields, colnames=colnames)
8474 return data
8475
8476 - def _remove_references_to(self, thistable):
8477 for table in self:
8478 table._referenced_by = [field for field in table._referenced_by
8479 if not field.table==thistable]
8480
8481 - def export_to_csv_file(self, ofile, *args, **kwargs):
8482 step = long(kwargs.get('max_fetch_rows',500))
8483 write_colnames = kwargs['write_colnames'] = \
8484 kwargs.get("write_colnames", True)
8485 for table in self.tables:
8486 ofile.write('TABLE %s\r\n' % table)
8487 query = self._adapter.id_query(self[table])
8488 nrows = self(query).count()
8489 kwargs['write_colnames'] = write_colnames
8490 for k in range(0,nrows,step):
8491 self(query).select(limitby=(k,k+step)).export_to_csv_file(
8492 ofile, *args, **kwargs)
8493 kwargs['write_colnames'] = False
8494 ofile.write('\r\n\r\n')
8495 ofile.write('END')
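# The dump format produced above is: a 'TABLE <name>' header per table,
# its rows as CSV, a blank line between tables and a final 'END' marker;
# import_from_csv_file below consumes the same format.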
8496
8497 - def import_from_csv_file(self, ifile, id_map=None, null='<NULL>',
8498 unique='uuid', map_tablenames=None,
8499 ignore_missing_tables=False,
8500 *args, **kwargs):
8501
8502 id_offset = {}
8503 map_tablenames = map_tablenames or {}
8504 for line in ifile:
8505 line = line.strip()
8506 if not line:
8507 continue
8508 elif line == 'END':
8509 return
8510 elif not line.startswith('TABLE ') or \
8511 not line[6:] in self.tables:
8512 raise SyntaxError('invalid file format')
8513 else:
8514 tablename = line[6:]
8515 tablename = map_tablenames.get(tablename,tablename)
8516 if tablename is not None and tablename in self.tables:
8517 self[tablename].import_from_csv_file(
8518 ifile, id_map, null, unique, id_offset,
8519 *args, **kwargs)
8520 elif tablename is None or ignore_missing_tables:
8521
8522 for line in ifile:
8523 if not line.strip():
8524 break
8525 else:
8526 raise RuntimeError("Unable to import table that does not exist.\nTry db.import_from_csv_file(..., map_tablenames={'table':'othertable'},ignore_missing_tables=True)")
8527
8529 - def DAL_unpickler(db_uid):
8530 return DAL('<zombie>',db_uid=db_uid)
8531
8534
8535 copyreg.pickle(DAL, DAL_pickler, DAL_unpickler)
8538 """
8539 Helper class providing a comma-separated string having all the field names
8540 (prefixed by table name and '.')
8541
8542 normally only called from within gluon.sql
8543 """
8544
8545 - def __init__(self, table):
8546 self._table = table
8547
8548 - def __str__(self):
8549 return ', '.join([str(field) for field in self._table])
8550
8552 -class Reference(long):
8553
8554 - def __allocate(self):
8555 if not self._record:
8556 self._record = self._table[long(self)]
8557 if not self._record:
8558 raise RuntimeError(
8559 "Using a recursive select but encountered a broken reference: %s %d"%(self._table, long(self)))
8560
8561 - def __getattr__(self, key, default=None):
8562 if key == 'id':
8563 return long(self)
8564 if key in self._table:
8565 self.__allocate()
8566 if self._record:
8567 return self._record.get(key,None)
8568 else:
8569 return None
8570
8571 - def get(self, key, default=None):
8572 return self.__getattr__(key, default)
8573
8574 - def __setattr__(self, key, value):
8575 if key.startswith('_'):
8576 long.__setattr__(self, key, value)
8577 return
8578 self.__allocate()
8579 self._record[key] = value
8580
8581 - def __getitem__(self, key):
8582 if key == 'id':
8583 return long(self)
8584 self.__allocate()
8585 return self._record.get(key, None)
8586
8587 - def __setitem__(self, key, value):
8588 self.__allocate()
8589 self._record[key] = value
8590
8592 - def Reference_unpickler(data):
8593 return marshal.loads(data)
8594
8595 - def Reference_pickler(data):
8596 try:
8597 marshal_dump = marshal.dumps(long(data))
8598 except AttributeError:
8599 marshal_dump = 'i%s' % struct.pack('<i', long(data))
8600 return (Reference_unpickler, (marshal_dump,))
8601
8602 copyreg.pickle(Reference, Reference_pickler, Reference_unpickler)
8604 -class MethodAdder(object):
8605 - def __init__(self, table):
8606 self.table = table
8607 - def __call__(self):
8608 return self.register()
8609 - def __getattr__(self, method_name):
8610 return self.register(method_name)
8611 - def register(self, method_name=None):
8612 def _decorated(f):
8613 instance = self.table
8614 import types
8615 method = types.MethodType(f, instance, instance.__class__)
8616 name = method_name or f.func_name
8617 setattr(instance, name, method)
8618 return f
8619 return _decorated
8620
8621 -class Table(object):
8622
8623 """
8624 an instance of this class represents a database table
8625
8626 Example::
8627
8628 db = DAL(...)
8629 db.define_table('users', Field('name'))
8630 db.users.insert(name='me') # print db.users._insert(...) to see SQL
8631 db.users.drop()
8632 """
8633
8634 - def __init__(
8635 self,
8636 db,
8637 tablename,
8638 *fields,
8639 **args):
8640 """
8641 Initializes the table and performs checking on the provided fields.
8642
8643 Each table will automatically have an 'id' field.
8644
8645 If a field is of type Table, the fields (excluding 'id') from that table
8646 will be used instead.
8647
8648 :raises SyntaxError: when a supplied field is of incorrect type.
8649 """
8650 self._actual = False
8651 self._tablename = tablename
8652 if (not isinstance(tablename, str) or tablename[0] == '_'
8653 or hasattr(DAL, tablename) or '.' in tablename
8654 or REGEX_PYTHON_KEYWORDS.match(tablename)
8655 ):
8656 raise SyntaxError('Field: invalid table name: %s, '
8657 'use rname for "funny" names' % tablename)
8658 self._ot = None
8659 self._rname = args.get('rname')
8660 self._sequence_name = (args.get('sequence_name') or
8661 db and db._adapter.sequence_name(self._rname
8662 or tablename))
8663 self._trigger_name = (args.get('trigger_name') or
8664 db and db._adapter.trigger_name(tablename))
8665 self._common_filter = args.get('common_filter')
8666 self._format = args.get('format')
8667 self._singular = args.get(
8668 'singular', tablename.replace('_', ' ').capitalize())
8669 self._plural = args.get(
8670 'plural', pluralize(self._singular.lower()).capitalize())
8671
8672 if 'primarykey' in args and args['primarykey'] is not None:
8673 self._primarykey = args.get('primarykey')
8674
8675 self._before_insert = []
8676 self._before_update = [Set.delete_uploaded_files]
8677 self._before_delete = [Set.delete_uploaded_files]
8678 self._after_insert = []
8679 self._after_update = []
8680 self._after_delete = []
8681
8682 self.add_method = MethodAdder(self)
8683
8684 fieldnames, newfields = set(), []
8685 _primarykey = getattr(self, '_primarykey', None)
8686 if _primarykey is not None:
8687 if not isinstance(_primarykey, list):
8688 raise SyntaxError(
8689 "primarykey must be a list of fields from table '%s'"
8690 % tablename)
8691 if len(_primarykey) == 1:
8692 self._id = [f for f in fields if isinstance(f, Field)
8693 and f.name ==_primarykey[0]][0]
8694 elif not [f for f in fields if (isinstance(f, Field) and
8695 f.type == 'id') or (isinstance(f, dict) and
8696 f.get("type", None) == "id")]:
8697 field = Field('id', 'id')
8698 newfields.append(field)
8699 fieldnames.add('id')
8700 self._id = field
8701 virtual_fields = []
8702
8703 def include_new(field):
8704 newfields.append(field)
8705 fieldnames.add(field.name)
8706 if field.type == 'id':
8707 self._id = field
8708 for field in fields:
8709 if isinstance(field, (FieldMethod, FieldVirtual)):
8710 virtual_fields.append(field)
8711 elif isinstance(field, Field) and not field.name in fieldnames:
8712 if field.db is not None:
8713 field = copy.copy(field)
8714 include_new(field)
8715 elif isinstance(field, dict) and not field['fieldname'] in fieldnames:
8716 include_new(Field(**field))
8717 elif isinstance(field, Table):
8718 table = field
8719 for field in table:
8720 if not field.name in fieldnames and not field.type == 'id':
8721 t2 = not table._actual and self._tablename
8722 include_new(field.clone(point_self_references_to=t2))
8723 elif not isinstance(field, (Field, Table)):
8724 raise SyntaxError(
8725 'define_table argument is not a Field or Table: %s' % field)
8726 fields = newfields
8727 self._db = db
8728 tablename = tablename
8729 self._fields = SQLCallableList()
8730 self.virtualfields = []
8731 fields = list(fields)
8732
8733 if db and db._adapter.uploads_in_blob is True:
8734 uploadfields = [f.name for f in fields if f.type == 'blob']
8735 for field in fields:
8736 fn = field.uploadfield
8737 if isinstance(field, Field) and field.type == 'upload'\
8738 and fn is True:
8739 fn = field.uploadfield = '%s_blob' % field.name
8740 if isinstance(fn, str) and not fn in uploadfields:
8741 fields.append(Field(fn, 'blob', default='',
8742 writable=False, readable=False))
8743
8744 fieldnames_set = set()
8745 reserved = dir(Table) + ['fields']
8746 if (db and db.check_reserved):
8747 check_reserved = db.check_reserved_keyword
8748 else:
8749 def check_reserved(field_name):
8750 if field_name in reserved:
8751 raise SyntaxError("field name %s not allowed" % field_name)
8752 for field in fields:
8753 field_name = field.name
8754 check_reserved(field_name)
8755 if db and db._ignore_field_case:
8756 fname_item = field_name.lower()
8757 else:
8758 fname_item = field_name
8759 if fname_item in fieldnames_set:
8760 raise SyntaxError("duplicate field %s in table %s" %
8761 (field_name, tablename))
8762 else:
8763 fieldnames_set.add(fname_item)
8764
8765 self.fields.append(field_name)
8766 self[field_name] = field
8767 if field.type == 'id':
8768 self['id'] = field
8769 field.tablename = field._tablename = tablename
8770 field.table = field._table = self
8771 field.db = field._db = db
8772 self.ALL = SQLALL(self)
8773
8774 if _primarykey is not None:
8775 for k in _primarykey:
8776 if k not in self.fields:
8777 raise SyntaxError(
8778 "primarykey must be a list of fields from table '%s " %
8779 tablename)
8780 else:
8781 self[k].notnull = True
8782 for field in virtual_fields:
8783 self[field.name] = field
8784
8785 @property
8786 - def fields(self):
8787 return self._fields
8788
8789 - def update(self, *args, **kwargs):
8790 raise RuntimeError("Syntax Not Supported")
8791
8792 - def _enable_record_versioning(self,
8793 archive_db=None,
8794 archive_name='%(tablename)s_archive',
8795 is_active='is_active',
8796 current_record='current_record',
8797 current_record_label=None):
8798 db = self._db
8799 archive_db = archive_db or db
8800 archive_name = archive_name % dict(tablename=self._tablename)
8801 if archive_name in archive_db.tables():
8802 return
8803 fieldnames = self.fields()
8804 same_db = archive_db is db
8805 field_type = self if same_db else 'bigint'
8806 clones = []
8807 for field in self:
8808 nfk = same_db or not field.type.startswith('reference')
8809 clones.append(
8810 field.clone(unique=False, type=field.type if nfk else 'bigint')
8811 )
8812 archive_db.define_table(
8813 archive_name,
8814 Field(current_record, field_type, label=current_record_label),
8815 *clones, **dict(format=self._format))
8816
8817 self._before_update.append(
8818 lambda qset, fs, db=archive_db, an=archive_name, cn=current_record:
8819 archive_record(qset, fs, db[an], cn))
8820 if is_active and is_active in fieldnames:
8821 self._before_delete.append(
8822 lambda qset: qset.update(is_active=False))
8823 newquery = lambda query, t=self, name=self._tablename: \
8824 reduce(AND, [db[tn].is_active == True
8825 for tn in db._adapter.tables(query)
8826 if tn == name or getattr(db[tn],'_ot',None)==name])
8827 query = self._common_filter
8828 if query:
8829 newquery = query & newquery
8830 self._common_filter = newquery
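# Usage sketch (hypothetical 'thing' table): calling
# db.thing._enable_record_versioning() defines a 'thing_archive' table,
# copies each affected row into it before every update and, if the table
# has an 'is_active' field, turns deletes into is_active=False updates
# that a common filter then hides from normal queries.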
8831
8832 - def _validate(self, **vars):
8833 errors = Row()
8834 for key, value in vars.iteritems():
8835 value, error = self[key].validate(value)
8836 if error:
8837 errors[key] = error
8838 return errors
8839
8840 - def _create_references(self):
8841 db = self._db
8842 pr = db._pending_references
8843 self._referenced_by = []
8844 self._references = []
8845 for field in self:
8846
8847 field_type = field.type
8848 if isinstance(field_type, str) and field_type[:10] == 'reference ':
8849 ref = field_type[10:].strip()
8850 if not ref:
8851 raise SyntaxError('Table: reference to nothing: %s' % ref)
8852 if '.' in ref:
8853 rtablename, throw_it, rfieldname = ref.partition('.')
8854 else:
8855 rtablename, rfieldname = ref, None
8856 if not rtablename in db:
8857 pr[rtablename] = pr.get(rtablename, []) + [field]
8858 continue
8859 rtable = db[rtablename]
8860 if rfieldname:
8861 if not hasattr(rtable, '_primarykey'):
8862 raise SyntaxError(
8863 'keyed tables can only reference other keyed tables (for now)')
8864 if rfieldname not in rtable.fields:
8865 raise SyntaxError(
8866 "invalid field '%s' for referenced table '%s'"
8867 " in table '%s'" % (rfieldname, rtablename, self._tablename)
8868 )
8869 rfield = rtable[rfieldname]
8870 else:
8871 rfield = rtable._id
8872 rtable._referenced_by.append(field)
8873 field.referent = rfield
8874 self._references.append(field)
8875 else:
8876 field.referent = None
8877 if self._tablename in pr:
8878 referees = pr.pop(self._tablename)
8879 for referee in referees:
8880 self._referenced_by.append(referee)
8881
8882 - def _filter_fields(self, record, id=False):
8883 return dict([(k, v) for (k, v) in record.iteritems() if k
8884 in self.fields and (self[k].type!='id' or id)])
8885
8886 - def _build_query(self, key):
8887 """ for keyed table only """
8888 query = None
8889 for k,v in key.iteritems():
8890 if k in self._primarykey:
8891 if query:
8892 query = query & (self[k] == v)
8893 else:
8894 query = (self[k] == v)
8895 else:
8896 raise SyntaxError(
8897 'Field %s is not part of the primary key of %s' %
8898 (k,self._tablename)
8899 )
8900 return query
8901
8902 - def __getitem__(self, key):
8903 if not key:
8904 return None
8905 elif isinstance(key, dict):
8906 """ for keyed table """
8907 query = self._build_query(key)
8908 return self._db(query).select(limitby=(0, 1), orderby_on_limitby=False).first()
8909 elif str(key).isdigit() or 'google' in DRIVERS and isinstance(key, Key):
8910 return self._db(self._id == key).select(limitby=(0, 1), orderby_on_limitby=False).first()
8911 elif key:
8912 return ogetattr(self, str(key))
8913
8914 - def __call__(self, key=DEFAULT, **kwargs):
8915 for_update = kwargs.get('_for_update', False)
8916 if '_for_update' in kwargs:
8917 del kwargs['_for_update']
8918
8919 orderby = kwargs.get('_orderby', None)
8920 if '_orderby' in kwargs:
8921 del kwargs['_orderby']
8922
8923 if not key is DEFAULT:
8924 if isinstance(key, Query):
8925 record = self._db(key).select(
8926 limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
8927 elif not str(key).isdigit():
8928 record = None
8929 else:
8930 record = self._db(self._id == key).select(
8931 limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
8932 if record:
8933 for k,v in kwargs.iteritems():
8934 if record[k]!=v: return None
8935 return record
8936 elif kwargs:
8937 query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.iteritems()])
8938 return self._db(query).select(limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
8939 else:
8940 return None
8941
8942 - def __setitem__(self, key, value):
8943 if isinstance(key, dict) and isinstance(value, dict):
8944 """ option for keyed table """
8945 if set(key.keys()) == set(self._primarykey):
8946 value = self._filter_fields(value)
8947 kv = {}
8948 kv.update(value)
8949 kv.update(key)
8950 if not self.insert(**kv):
8951 query = self._build_query(key)
8952 self._db(query).update(**self._filter_fields(value))
8953 else:
8954 raise SyntaxError(
8955 'key must have all fields from primary key: %s'%
8956 (self._primarykey))
8957 elif str(key).isdigit():
8958 if key == 0:
8959 self.insert(**self._filter_fields(value))
8960 elif self._db(self._id == key)\
8961 .update(**self._filter_fields(value)) is None:
8962 raise SyntaxError('No such record: %s' % key)
8963 else:
8964 if isinstance(key, dict):
8965 raise SyntaxError(
8966 'value must be a dictionary: %s' % value)
8967 osetattr(self, str(key), value)
8968
8969 __getattr__ = __getitem__
8970
8971 - def __setattr__(self, key, value):
8972 if key[:1]!='_' and key in self:
8973 raise SyntaxError('Object exists and cannot be redefined: %s' % key)
8974 osetattr(self,key,value)
8975
8976 - def __delitem__(self, key):
8977 if isinstance(key, dict):
8978 query = self._build_query(key)
8979 if not self._db(query).delete():
8980 raise SyntaxError('No such record: %s' % key)
8981 elif not str(key).isdigit() or \
8982 not self._db(self._id == key).delete():
8983 raise SyntaxError('No such record: %s' % key)
8984
8985 - def __contains__(self, key):
8986 return hasattr(self, key)
8987
8988 has_key = __contains__
8989
8990 - def items(self):
8991 return self.__dict__.items()
8992
8993 - def __iter__(self):
8994 for fieldname in self.fields:
8995 yield self[fieldname]
8996
8997 - def iteritems(self):
8998 return self.__dict__.iteritems()
8999
9000 - def __repr__(self):
9001 return '<Table %s (%s)>' % (self._tablename, ','.join(self.fields()))
9002
9003 - def __str__(self):
9004 if self._ot is not None:
9005 ot = self._ot
9006 if 'Oracle' in str(type(self._db._adapter)):
9007 return '%s %s' % (ot, self._tablename)
9008 return '%s AS %s' % (ot, self._tablename)
9009
9010 return self._tablename
9011
9012 @property
9013 - def sqlsafe(self):
9014 rname = self._rname
9015 if rname: return rname
9016 return self._db._adapter.sqlsafe_table(self._tablename)
9017
9018 @property
9019 - def sqlsafe_alias(self):
9020 rname = self._rname
9021 ot = self._ot
9022 if rname and not ot: return rname
9023 return self._db._adapter.sqlsafe_table(self._tablename, self._ot)
9024
9025
9026 - def _drop(self, mode=''):
9027 return self._db._adapter._drop(self, mode)
9028
9029 - def drop(self, mode=''):
9030 return self._db._adapter.drop(self,mode)
9031
9032 - def _listify(self,fields,update=False):
9033 new_fields = {}
9034
9035
9036 for name in fields:
9037 if not name in self.fields:
9038 if name != 'id':
9039 raise SyntaxError(
9040 'Field %s does not belong to the table' % name)
9041 else:
9042 field = self[name]
9043 value = fields[name]
9044 if field.filter_in:
9045 value = field.filter_in(value)
9046 new_fields[name] = (field, value)
9047
9048
9049 to_compute = []
9050 for ofield in self:
9051 name = ofield.name
9052 if not name in new_fields:
9053
9054 if ofield.compute:
9055 to_compute.append((name, ofield))
9056
9057 elif not update and not ofield.default is None:
9058 value = ofield.default
9059 fields[name] = value
9060 new_fields[name] = (ofield, value)
9061
9062 elif update and not ofield.update is None:
9063 value = ofield.update
9064 fields[name] = value
9065 new_fields[name] = (ofield, value)
9066
9067 elif not update and ofield.required:
9068 raise RuntimeError(
9069 'Table: missing required field: %s' % name)
9070
9071 if to_compute:
9072 row = Row(fields)
9073 for name, ofield in to_compute:
9074
9075 try:
9076 row[name] = new_value = ofield.compute(row)
9077 new_fields[name] = (ofield, new_value)
9078 except (KeyError, AttributeError):
9079
9080 if ofield.required:
9081 raise SyntaxError('unable to compute field: %s' % name)
9082 return new_fields.values()
9083
9084 - def _attempt_upload(self, fields):
9085 for field in self:
9086 if field.type == 'upload' and field.name in fields:
9087 value = fields[field.name]
9088 if value is not None and not isinstance(value, str):
9089 if hasattr(value, 'file') and hasattr(value, 'filename'):
9090 new_name = field.store(value.file, filename=value.filename)
9091 elif hasattr(value, 'read') and hasattr(value, 'name'):
9092 new_name = field.store(value, filename=value.name)
9093 else:
9094 raise RuntimeError("Unable to handle upload")
9095 fields[field.name] = new_name
9096
9097 - def _defaults(self, fields):
9098 "If there are no fields/values specified, return table defaults"
9099 if not fields:
9100 fields = {}
9101 for field in self:
9102 if field.type != "id":
9103 fields[field.name] = field.default
9104 return fields
9105
9105 - def _insert(self, **fields):
9106 return self._db._adapter._insert(self, self._listify(fields))
9107
9108 - def insert(self, **fields):
9109 self._attempt_upload(fields)
9110 if any(f(fields) for f in self._before_insert): return 0
9111 ret = self._db._adapter.insert(self, self._listify(fields))
9112 if ret and self._after_insert:
9113 fields = Row(fields)
9114 [f(fields, ret) for f in self._after_insert]
9115 return ret
9116
9117 - def validate_and_insert(self, **fields):
9118 response = Row()
9119 response.errors = Row()
9120 new_fields = copy.copy(fields)
9121 for key, value in fields.iteritems():
9122 value, error = self[key].validate(value)
9123 if error:
9124 response.errors[key] = "%s" % error
9125 else:
9126 new_fields[key] = value
9127 if not response.errors:
9128 response.id = self.insert(**new_fields)
9129 else:
9130 response.id = None
9131 return response
9132
9133 - def validate_and_update(self, _key=DEFAULT, **fields):
9137 response = Row()
9138 response.errors = Row()
9139 new_fields = copy.copy(fields)
9140
9141 for key, value in fields.iteritems():
9142 value, error = self[key].validate(value)
9143 if error:
9144 response.errors[key] = "%s" % error
9145 else:
9146 new_fields[key] = value
9147
9148 if _key is DEFAULT:
9149 record = self(**fields)
9150 elif isinstance(_key, dict):
9151 record = self(**_key)
9152 else:
9153 record = self(_key)
9154
9155 if not response.errors and record:
9156 if '_id' in self:
9157 myset = self._db(self._id == record[self._id.name])
9158 else:
9159 query = None
9160 for key, value in _key.iteritems():
9161 if query is None:
9162 query = getattr(self, key) == value
9163 else:
9164 query = query & (getattr(self, key) == value)
9165 myset = self._db(query)
9166 response.id = myset.update(**new_fields)
9167 else:
9168 response.id = None
9169 return response
9170
9171 - def update_or_insert(self, _key=DEFAULT, **values):
9172 if _key is DEFAULT:
9173 record = self(**values)
9174 elif isinstance(_key, dict):
9175 record = self(**_key)
9176 else:
9177 record = self(_key)
9178 if record:
9179 record.update_record(**values)
9180 newid = None
9181 else:
9182 newid = self.insert(**values)
9183 return newid
9184
9185 - def validate_and_update_or_insert(self, _key=DEFAULT, **fields):
9186 if _key is DEFAULT or _key == '':
9187 primary_keys = {}
9188 for key, value in fields.iteritems():
9189 if key in self._primarykey:
9190 primary_keys[key] = value
9191 if primary_keys != {}:
9192 record = self(**primary_keys)
9193 _key = primary_keys
9194 else:
9195 required_keys = {}
9196 for key, value in fields.iteritems():
9197 if getattr(self, key).required:
9198 required_keys[key] = value
9199 record = self(**required_keys)
9200 _key = required_keys
9201 elif isinstance(_key, dict):
9202 record = self(**_key)
9203 else:
9204 record = self(_key)
9205
9206 if record:
9207 response = self.validate_and_update(_key, **fields)
9208 primary_keys = {}
9209 for key in self._primarykey:
9210 primary_keys[key] = getattr(record, key)
9211 response.id = primary_keys
9212 else:
9213 response = self.validate_and_insert(**fields)
9214 return response
9215
9216 - def bulk_insert(self, items):
9217 """
9218 here items is a list of dictionaries
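Example (a sketch): db.person.bulk_insert([dict(name='Alex'), dict(name='Bo')])
returns the new ids, or 0 if a _before_insert callback vetoes the operation.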
9219 """
9220 items = [self._listify(item) for item in items]
9221 if any(f(item) for item in items for f in self._before_insert):return 0
9222 ret = self._db._adapter.bulk_insert(self,items)
9223 ret and [[f(item,ret[k]) for k,item in enumerate(items)] for f in self._after_insert]
9224 return ret
9225
9226 - def _truncate(self, mode=None):
9227 return self._db._adapter._truncate(self, mode)
9228
9229 - def truncate(self, mode=None):
9230 return self._db._adapter.truncate(self, mode)
9231
9232 - def import_from_csv_file(
9233 self,
9234 csvfile,
9235 id_map=None,
9236 null='<NULL>',
9237 unique='uuid',
9238 id_offset=None,
9239 *args, **kwargs
9240 ):
9241 """
9242 Import records from csv file.
9243 Column headers must have same names as table fields.
9244 Field 'id' is ignored.
9245 If column names read 'table.file' the 'table.' prefix is ignored.
9246 'unique' argument is a field which must be unique
9247 (typically a uuid field)
9248 'restore' argument is default False;
9249 if set True will remove old values in table first.
9250 'id_map' if set to None will not map ids.
9251 The import will keep the id numbers in the restored table.
9252 This assumes that there is an field of type id that
9253 is integer and in incrementing order.
9254 Will keep the id numbers in restored table.
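Example (a sketch, assuming a people.csv file whose header row
includes a 'name' column):

>>> db.person.import_from_csv_file(open('people.csv', 'rb'))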
9255 """
9256
9257 delimiter = kwargs.get('delimiter', ',')
9258 quotechar = kwargs.get('quotechar', '"')
9259 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
9260 restore = kwargs.get('restore', False)
9261 if restore:
9262 self._db[self].truncate()
9263
9264 reader = csv.reader(csvfile, delimiter=delimiter,
9265 quotechar=quotechar, quoting=quoting)
9266 colnames = None
9267 if isinstance(id_map, dict):
9268 if not self._tablename in id_map:
9269 id_map[self._tablename] = {}
9270 id_map_self = id_map[self._tablename]
9271
9272 def fix(field, value, id_map, id_offset):
9273 list_reference_s='list:reference'
9274 if value == null:
9275 value = None
9276 elif field.type=='blob':
9277 value = base64.b64decode(value)
9278 elif field.type=='double' or field.type=='float':
9279 if not value.strip():
9280 value = None
9281 else:
9282 value = float(value)
9283 elif field.type in ('integer','bigint'):
9284 if not value.strip():
9285 value = None
9286 else:
9287 value = long(value)
9288 elif field.type.startswith('list:string'):
9289 value = bar_decode_string(value)
9290 elif field.type.startswith(list_reference_s):
9291 ref_table = field.type[len(list_reference_s):].strip()
9292 if id_map is not None:
9293 value = [id_map[ref_table][long(v)] \
9294 for v in bar_decode_string(value)]
9295 else:
9296 value = [v for v in bar_decode_string(value)]
9297 elif field.type.startswith('list:'):
9298 value = bar_decode_integer(value)
9299 elif id_map and field.type.startswith('reference'):
9300 try:
9301 value = id_map[field.type[9:].strip()][long(value)]
9302 except KeyError:
9303 pass
9304 elif id_offset and field.type.startswith('reference'):
9305 try:
9306 value = id_offset[field.type[9:].strip()]+long(value)
9307 except KeyError:
9308 pass
9309 return (field.name, value)
9310
9311 def is_id(colname):
9312 if colname in self:
9313 return self[colname].type == 'id'
9314 else:
9315 return False
9316
9317 first = True
9318 unique_idx = None
9319 for lineno, line in enumerate(reader):
9320 if not line:
9321 break
9322 if not colnames:
9323
9324 colnames = [x.split('.',1)[-1] for x in line]
9325 cols, cid = [], None
9326 for i,colname in enumerate(colnames):
9327 if is_id(colname):
9328 cid = i
9329 elif colname in self.fields:
9330 cols.append((i,self[colname]))
9331 if colname == unique:
9332 unique_idx = i
9333 else:
9334
9335 items = []
9336 for i, field in cols:
9337 try:
9338 items.append(fix(field, line[i], id_map, id_offset))
9339 except ValueError:
9340 raise RuntimeError("Unable to parse line:%s field:%s value:'%s'"
9341 % (lineno+1,field,line[i]))
9342
9343 if not (id_map or cid is None or id_offset is None or unique_idx):
9344 csv_id = long(line[cid])
9345 curr_id = self.insert(**dict(items))
9346 if first:
9347 first = False
9348
9349
9350
9351 id_offset[self._tablename] = (curr_id-csv_id) \
9352 if curr_id>csv_id else 0
9353
9354 while curr_id<csv_id+id_offset[self._tablename]:
9355 self._db(self._db[self][colnames[cid]] == curr_id).delete()
9356 curr_id = self.insert(**dict(items))
9357
9358
9359 elif not unique_idx:
9360 new_id = self.insert(**dict(items))
9361 else:
9362 unique_value = line[unique_idx]
9363 query = self._db[self][unique] == unique_value
9364 record = self._db(query).select().first()
9365 if record:
9366 record.update_record(**dict(items))
9367 new_id = record[self._id.name]
9368 else:
9369 new_id = self.insert(**dict(items))
9370 if id_map and cid is not None:
9371 id_map_self[long(line[cid])] = new_id
9372
9373 - def as_dict(self, flat=False, sanitize=True):
9374 table_as_dict = dict(
9375 tablename=str(self),
9376 fields=[],
9377 sequence_name=self._sequence_name,
9378 trigger_name=self._trigger_name,
9379 common_filter=self._common_filter,
9380 format=self._format,
9381 singular=self._singular,
9382 plural=self._plural)
9383
9384 for field in self:
9385 if (field.readable or field.writable) or (not sanitize):
9386 table_as_dict["fields"].append(field.as_dict(
9387 flat=flat, sanitize=sanitize))
9388 return table_as_dict
9389
9390 - def as_xml(self, sanitize=True):
9395
9396 - def as_json(self, sanitize=True):
9401
9402 - def as_yaml(self, sanitize=True):
9407
9410
9411 - def on(self, query):
9412 return Expression(self._db, self._db._adapter.ON, self, query)
9413
9415 - def archive_record(qset, fs, archive_table, current_record):
9416 tablenames = qset.db._adapter.tables(qset.query)
9417 if len(tablenames) != 1:
9418 raise RuntimeError("cannot update join")
9419 for row in qset.select():
9420 fields = archive_table._filter_fields(row)
9421 fields[current_record] = row.id
9422 archive_table.insert(**fields)
9423 return False
9424
9426 -class Expression(object):
9427
9428 - def __init__(
9429 self,
9430 db,
9431 op,
9432 first=None,
9433 second=None,
9434 type=None,
9435 **optional_args
9436 ):
9437
9438 self.db = db
9439 self.op = op
9440 self.first = first
9441 self.second = second
9442 self._table = getattr(first,'_table',None)
9443
9444 if not type and first and hasattr(first,'type'):
9445 self.type = first.type
9446 else:
9447 self.type = type
9448 self.optional_args = optional_args
9449
9453
9457
9461
9465
9469
9473
9477
9481
9485
9489
9493
9497
9501
9505
9509
9513
9517
9521
9522 - def __getslice__(self, start, stop):
9523 db = self.db
9524 if start < 0:
9525 pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
9526 else:
9527 pos0 = start + 1
9528
9529 if stop < 0:
9530 length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
9531 elif stop == sys.maxint:
9532 length = self.len()
9533 else:
9534 length = '(%s - %s)' % (stop + 1, pos0)
9535 return Expression(db, db._adapter.SUBSTRING,
9536 self, (pos0, length), self.type)
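# Usage sketch: db.person.name[:3] builds a SQL substring expression
# over the column; negative bounds are rewritten in terms of the
# column length and evaluated by the database at query time.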
9537
9538 - def __getitem__(self, i):
9539 return self[i:i + 1]
9540
9541 - def __str__(self):
9542 return self.db._adapter.expand(self, self.type)
9543
9544 - def __or__(self, other): # for use in sortby
9545 db = self.db
9546 return Expression(db, db._adapter.COMMA, self, other, self.type)
9547
9553
9557
9558 - def __sub__(self, other):
9559 db = self.db
9560 if self.type in ('integer', 'bigint'):
9561 result_type = 'integer'
9562 elif self.type in ['date','time','datetime','double','float']:
9563 result_type = 'double'
9564 elif self.type.startswith('decimal('):
9565 result_type = self.type
9566 else:
9567 raise SyntaxError("subtraction operation not supported for type")
9568 return Expression(db,db._adapter.SUB,self,other,result_type)
9569
9573
9577
9581
9585
9589
9593
9597
9601
9605
9606 - def like(self, value, case_sensitive=False):
9607 db = self.db
9608 op = case_sensitive and db._adapter.LIKE or db._adapter.ILIKE
9609 return Query(db, op, self, value)
9610
9614
9615 - def belongs(self, *value, **kwattr):
9616 """
9617 Accepts the following inputs:
9618 field.belongs(1,2)
9619 field.belongs((1,2))
9620 field.belongs(query)
9621
9622 Does NOT accept:
9623 field.belongs(1)
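Example of a nested select (a sketch, reusing the person/pet tables
from the parse_as_rest example):

>>> pets = db(db.pet.ownedby.belongs(db.person.name=='James')).select()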
9624 """
9625 db = self.db
9626 if len(value) == 1:
9627 value = value[0]
9628 if isinstance(value,Query):
9629 value = db(value)._select(value.first._table._id)
9630 elif not isinstance(value, basestring):
9631 value = set(value)
9632 if kwattr.get('null') and None in value:
9633 value.remove(None)
9634 return (self == None) | Query(db, db._adapter.BELONGS, self, value)
9635 return Query(db, db._adapter.BELONGS, self, value)
9636
9637 - def startswith(self, value):
9638 db = self.db
9639 if not self.type in ('string', 'text', 'json', 'upload'):
9640 raise SyntaxError("startswith used with incompatible field type")
9641 return Query(db, db._adapter.STARTSWITH, self, value)
9642
9643 - def endswith(self, value):
9644 db = self.db
9645 if not self.type in ('string', 'text', 'json', 'upload'):
9646 raise SyntaxError("endswith used with incompatible field type")
9647 return Query(db, db._adapter.ENDSWITH, self, value)
9648
9649 - def contains(self, value, all=False, case_sensitive=False):
9650 """
9651 The case_sensitive parameter is only useful for PostgreSQL.
9652 For other RDBMSs it is ignored and contains is always case-insensitive.
9653 For MongoDB and GAE contains is always case sensitive.
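Example (a sketch):

>>> db(db.person.name.contains('am')).select()
>>> db(db.person.name.contains(['am','il'], all=True)).select()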
9654 """
9655 db = self.db
9656 if isinstance(value,(list, tuple)):
9657 subqueries = [self.contains(str(v).strip(),case_sensitive=case_sensitive)
9658 for v in value if str(v).strip()]
9659 if not subqueries:
9660 return self.contains('')
9661 else:
9662 return reduce(all and AND or OR,subqueries)
9663 if not self.type in ('string', 'text', 'json', 'upload') and not self.type.startswith('list:'):
9664 raise SyntaxError("contains used with incompatible field type")
9665 return Query(db, db._adapter.CONTAINS, self, value, case_sensitive=case_sensitive)
9666
9670
9671
9672
9673 - def st_asgeojson(self, precision=15, options=0, version=1):
9677
9678 - def st_astext(self):
9679 db = self.db
9680 return Expression(db, db._adapter.ST_ASTEXT, self, type='string')
9681
9685
9689
9693
9697
9698
9699
9703
9707
9711
9715
9719
9723
9727
9732 """
9733 allows the definition of custom SQL types
9734
9735 Example::
9736
9737 decimal = SQLCustomType(
9738 type ='double',
9739 native ='integer',
9740 encoder =(lambda x: int(float(x) * 100)),
9741 decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
9742 )
9743
9744 db.define_table(
9745 'example',
9746 Field('value', type=decimal)
9747 )
9748
9749 :param type: the web2py type (default = 'string')
9750 :param native: the backend type
9751 :param encoder: how to encode the value to store it in the backend
9752 :param decoder: how to decode the value retrieved from the backend
9753 :param validator: what validators to use ( default = None, will use the
9754 default validator for type)
9755 """
9756
9757 - def __init__(
9758 self,
9759 type='string',
9760 native=None,
9761 encoder=None,
9762 decoder=None,
9763 validator=None,
9764 _class=None,
9765 ):
9766
9767 self.type = type
9768 self.native = native
9769 self.encoder = encoder or (lambda x: x)
9770 self.decoder = decoder or (lambda x: x)
9771 self.validator = validator
9772 self._class = _class or type
9773
9774 - def startswith(self, text=None):
9775 try:
9776 return self.type.startswith(text)
9777 except TypeError:
9778 return False
9779
9780 - def endswith(self, text=None):
9781 try:
9782 return self.type.endswith(text)
9783 except TypeError:
9784 return False
9785
9788
9791
9794
9795 -class FieldVirtual(object):
9796 - def __init__(self, name, f=None, ftype='string',label=None,table_name=None):
9811 return '%s.%s' % (self.tablename, self.name)
9812
9814 -class FieldMethod(object):
9815 - def __init__(self, name, f=None, handler=None):
9819
9821 - def list_represent(x, r=None):
9822 return ', '.join(str(y) for y in x or [])
9823
9824
9825 -class Field(Expression):
9826
9827 Virtual = FieldVirtual
9828 Method = FieldMethod
9829 Lazy = FieldMethod
9830
9831 """
9832 an instance of this class represents a database field
9833
9834 example::
9835
9836 a = Field(name, 'string', length=32, default=None, required=False,
9837 requires=IS_NOT_EMPTY(), ondelete='CASCADE',
9838 notnull=False, unique=False,
9839 widget=None, label=None, comment=None,
9840 uploadfield=True, # True means store on disk,
9841 # 'a_field_name' means store in this field in db
9842 # False means file content will be discarded.
9843 writable=True, readable=True, update=None, authorize=None,
9844 autodelete=False, represent=None, uploadfolder=None,
9845 uploadseparate=False # upload to separate directories by uuid_keys
9846 # first 2 character and tablename.fieldname
9847 # False - old behavior
9848 # True - put uploaded file in
9849 # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
9850 # directory)
9851 uploadfs=None # a pyfilesystem where to store upload
9852
9853 to be used as argument of DAL.define_table
9854
9855 allowed field types:
9856 string, boolean, integer, double, text, blob,
9857 date, time, datetime, upload, password
9858
9859 """
9860
9861 - def __init__(
9862 self,
9863 fieldname,
9864 type='string',
9865 length=None,
9866 default=DEFAULT,
9867 required=False,
9868 requires=DEFAULT,
9869 ondelete='CASCADE',
9870 notnull=False,
9871 unique=False,
9872 uploadfield=True,
9873 widget=None,
9874 label=None,
9875 comment=None,
9876 writable=True,
9877 readable=True,
9878 update=None,
9879 authorize=None,
9880 autodelete=False,
9881 represent=None,
9882 uploadfolder=None,
9883 uploadseparate=False,
9884 uploadfs=None,
9885 compute=None,
9886 custom_store=None,
9887 custom_retrieve=None,
9888 custom_retrieve_file_properties=None,
9889 custom_delete=None,
9890 filter_in=None,
9891 filter_out=None,
9892 custom_qualifier=None,
9893 map_none=None,
9894 rname=None
9895 ):
9896 self._db = self.db = None
9897 self.op = None
9898 self.first = None
9899 self.second = None
9900 if isinstance(fieldname, unicode):
9901 try:
9902 fieldname = str(fieldname)
9903 except UnicodeEncodeError:
9904 raise SyntaxError('Field: invalid unicode field name')
9905 self.name = fieldname = cleanup(fieldname)
9906 if not isinstance(fieldname, str) or hasattr(Table, fieldname) or \
9907 fieldname[0] == '_' or '.' in fieldname or \
9908 REGEX_PYTHON_KEYWORDS.match(fieldname):
9909 raise SyntaxError('Field: invalid field name: %s, '
9910 'use rname for "funny" names' % fieldname)
9911
9912 if not isinstance(type, (Table, Field)):
9913 self.type = type
9914 else:
9915 self.type = 'reference %s' % type
9916
9917 self.length = length if not length is None else DEFAULTLENGTH.get(self.type, 512)
9918 self.default = default if default != DEFAULT else (update or None)
9919 self.required = required
9920 self.ondelete = ondelete.upper()
9921 self.notnull = notnull
9922 self.unique = unique
9923 self.uploadfield = uploadfield
9924 self.uploadfolder = uploadfolder
9925 self.uploadseparate = uploadseparate
9926 self.uploadfs = uploadfs
9927 self.widget = widget
9928 self.comment = comment
9929 self.writable = writable
9930 self.readable = readable
9931 self.update = update
9932 self.authorize = authorize
9933 self.autodelete = autodelete
9934 self.represent = (list_represent if represent is None and
9935 type in ('list:integer', 'list:string') else represent)
9936 self.compute = compute
9937 self.isattachment = True
9938 self.custom_store = custom_store
9939 self.custom_retrieve = custom_retrieve
9940 self.custom_retrieve_file_properties = custom_retrieve_file_properties
9941 self.custom_delete = custom_delete
9942 self.filter_in = filter_in
9943 self.filter_out = filter_out
9944 self.custom_qualifier = custom_qualifier
9945 self.label = (label if label is not None else
9946 fieldname.replace('_', ' ').title())
9947 self.requires = requires if requires is not None else []
9948 self.map_none = map_none
9949 self._rname = rname
9950
9951 - def set_attributes(self, *args, **attributes):
9952 self.__dict__.update(*args, **attributes)
9953
9954 - def clone(self, point_self_references_to=False, **args):
9955 field = copy.copy(self)
9956 if point_self_references_to and \
9957 field.type == 'reference %s' % field._tablename:
9958 field.type = 'reference %s' % point_self_references_to
9959 field.__dict__.update(args)
9960 return field
9961
9962 - def store(self, file, filename=None, path=None):
9963 if self.custom_store:
9964 return self.custom_store(file, filename, path)
9965 if isinstance(file, cgi.FieldStorage):
9966 filename = filename or file.filename
9967 file = file.file
9968 elif not filename:
9969 filename = file.name
9970 filename = os.path.basename(filename.replace('/', os.sep).replace('\\', os.sep))
9971 m = REGEX_STORE_PATTERN.search(filename)
9972 extension = m and m.group('e') or 'txt'
9973 uuid_key = web2py_uuid().replace('-', '')[-16:]
9974 encoded_filename = base64.b16encode(filename).lower()
9975 newfilename = '%s.%s.%s.%s' % \
9976 (self._tablename, self.name, uuid_key, encoded_filename)
9977 newfilename = newfilename[:(self.length - 1 - len(extension))] + '.' + extension
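# the stored name encodes table, field, a uuid and the b16-encoded
# original filename ('<table>.<field>.<uuid16>.<b16name>.<ext>'),
# truncated above so that it still fits within this field's length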
9978 self_uploadfield = self.uploadfield
9979 if isinstance(self_uploadfield, Field):
9980 blob_uploadfield_name = self_uploadfield.uploadfield
9981 keys = {self_uploadfield.name: newfilename,
9982 blob_uploadfield_name: file.read()}
9983 self_uploadfield.table.insert(**keys)
9984 elif self_uploadfield is True:
9985 if path:
9986 pass
9987 elif self.uploadfolder:
9988 path = self.uploadfolder
9989 elif self.db._adapter.folder:
9990 path = pjoin(self.db._adapter.folder, '..', 'uploads')
9991 else:
9992 raise RuntimeError(
9993 "you must specify a Field(...,uploadfolder=...)")
9994 if self.uploadseparate:
9995 if self.uploadfs:
9996 raise RuntimeError("not supported")
9997 path = pjoin(path, "%s.%s" % (
9998 self._tablename, self.name), uuid_key[:2]
9999 )
10000 if not exists(path):
10001 os.makedirs(path)
10002 pathfilename = pjoin(path, newfilename)
10003 if self.uploadfs:
10004 dest_file = self.uploadfs.open(newfilename, 'wb')
10005 else:
10006 dest_file = open(pathfilename, 'wb')
10007 try:
10008 shutil.copyfileobj(file, dest_file)
10009 except IOError:
10010 raise IOError(
10011 'Unable to store file "%s" because invalid permissions, '
10012 'readonly file system, or filename too long' % pathfilename)
10013 dest_file.close()
10014 return newfilename
10015
10016 - def retrieve(self, name, path=None, nameonly=False):
10017 """
10018 if nameonly==True return (filename, fullfilename) instead of
10019 (filename, stream)
10020 """
10021 self_uploadfield = self.uploadfield
10022 if self.custom_retrieve:
10023 return self.custom_retrieve(name, path)
10024 import http
10025 if self.authorize or isinstance(self_uploadfield, str):
10026 row = self.db(self == name).select().first()
10027 if not row:
10028 raise http.HTTP(404)
10029 if self.authorize and not self.authorize(row):
10030 raise http.HTTP(403)
10031 file_properties = self.retrieve_file_properties(name, path)
10032 filename = file_properties['filename']
10033 if isinstance(self_uploadfield, str):
10034 stream = StringIO.StringIO(row[self_uploadfield] or '')
10035 elif isinstance(self_uploadfield, Field):
10036 blob_uploadfield_name = self_uploadfield.uploadfield
10037 query = self_uploadfield == name
10038 data = self_uploadfield.table(query)[blob_uploadfield_name]
10039 stream = StringIO.StringIO(data)
10040 elif self.uploadfs:
10041
10042 stream = self.uploadfs.open(name, 'rb')
10043 else:
10044
10045
10046
10047 fullname = pjoin(file_properties['path'], name)
10048 if nameonly:
10049 return (filename, fullname)
10050 stream = open(fullname, 'rb')
10051 return (filename, stream)
10052
10054 m = REGEX_UPLOAD_PATTERN.match(name)
10055 if not m or not self.isattachment:
10056 raise TypeError('Can\'t retrieve %s file properties' % name)
10057 self_uploadfield = self.uploadfield
10058 if self.custom_retrieve_file_properties:
10059 return self.custom_retrieve_file_properties(name, path)
10060 if m.group('name'):
10061 try:
10062 filename = base64.b16decode(m.group('name'), True)
10063 filename = REGEX_CLEANUP_FN.sub('_', filename)
10064 except (TypeError, AttributeError):
10065 filename = name
10066 else:
10067 filename = name
10068
10069 if isinstance(self_uploadfield, (str, Field)):
10070 return dict(path=None, filename=filename)
10071
10072 if not path:
10073 if self.uploadfolder:
10074 path = self.uploadfolder
10075 else:
10076 path = pjoin(self.db._adapter.folder, '..', 'uploads')
10077 if self.uploadseparate:
10078 t = m.group('table')
10079 f = m.group('field')
10080 u = m.group('uuidkey')
10081 path = pjoin(path, "%s.%s" % (t, f), u[:2])
10082 return dict(path=path, filename=filename)
10083
10099
10111
10112 - def count(self, distinct=None):
10114
10115 - def as_dict(self, flat=False, sanitize=True):
10116 attrs = (
10117 'name', 'authorize', 'represent', 'ondelete',
10118 'custom_store', 'autodelete', 'custom_retrieve',
10119 'filter_out', 'uploadseparate', 'widget', 'uploadfs',
10120 'update', 'custom_delete', 'uploadfield', 'uploadfolder',
10121 'custom_qualifier', 'unique', 'writable', 'compute',
10122 'map_none', 'default', 'type', 'required', 'readable',
10123 'requires', 'comment', 'label', 'length', 'notnull',
10124 'custom_retrieve_file_properties', 'filter_in')
10125 serializable = (int, long, basestring, float, tuple,
10126 bool, type(None))
10127
10128 def flatten(obj):
10129 if isinstance(obj, dict):
10130 return dict((flatten(k), flatten(v)) for k, v in obj.items())
10131 elif isinstance(obj, (tuple, list, set)):
10132 return [flatten(v) for v in obj]
10133 elif isinstance(obj, serializable):
10134 return obj
10135 elif isinstance(obj, (datetime.datetime,
10136 datetime.date, datetime.time)):
10137 return str(obj)
10138 else:
10139 return None
10140
10141 d = dict()
10142 if not (sanitize and not (self.readable or self.writable)):
10143 for attr in attrs:
10144 if flat:
10145 d.update({attr: flatten(getattr(self, attr))})
10146 else:
10147 d.update({attr: getattr(self, attr)})
10148 d["fieldname"] = d.pop("name")
10149 return d
10150
10151 - def as_xml(self, sanitize=True):
10158
10159 - def as_json(self, sanitize=True):
10166
10167 - def as_yaml(self, sanitize=True):
10173
10176
10177 - def __str__(self):
10178 try:
10179 return '%s.%s' % (self.tablename, self.name)
10180 except:
10181 return '<no table>.%s' % self.name
10182
10183 @property
10184 - def sqlsafe(self):
10185 if self._table:
10186 return self._table.sqlsafe + '.' + \
10187 (self._rname or self._db._adapter.sqlsafe_field(self.name))
10188 return '<no table>.%s' % self.name
10189
10190 @property
10193

class Query(object):

    """
    A query object necessary to define a set.
    It can be stored or can be passed to DAL.__call__() to obtain a Set

    Example::

        query = db.users.name=='Max'
        set = db(query)
        records = set.select()

    """

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        ignore_common_filters=False,
        **optional_args
        ):
        self.db = self._db = db
        self.op = op
        self.first = first
        self.second = second
        self.ignore_common_filters = ignore_common_filters
        self.optional_args = optional_args

    def __str__(self):
        return str(self.db._adapter.expand(self))

    def __and__(self, other):
        return Query(self.db, self.db._adapter.AND, self, other)

    __rand__ = __and__

    def __or__(self, other):
        return Query(self.db, self.db._adapter.OR, self, other)

    __ror__ = __or__

    def __invert__(self):
        if self.op == self.db._adapter.NOT:
            return self.first
        return Query(self.db, self.db._adapter.NOT, self)

    def __eq__(self, other):
        return repr(self) == repr(other)

    def __ne__(self, other):
        return not (self == other)

    def case(self, t=1, f=0):
        return self.db._adapter.CASE(self, t, f)
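
    # A hedged usage sketch: case() turns a query into a selectable
    # expression; the 'person' table and the labels are assumptions:
    #
    #   is_max = (db.person.name == 'Max').case('max', 'other')
    #   for row in db(db.person).select(is_max):
    #       print row(is_max)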

    def as_dict(self, flat=False, sanitize=True):
        """Experimental method

        Returns a plain dictionary with the basic query representation.
        Can be used with json/xml services for client-side db I/O.

        Example:
        >>> q = db.auth_user.id != 0
        >>> q.as_dict(flat=True)
        {"op": "NE", "first":{"tablename": "auth_user",
                              "fieldname": "id"},
                     "second":0}
        """

        SERIALIZABLE_TYPES = (tuple, dict, set, list, int, long, float,
                              basestring, type(None), bool)

        def loop(d):
            newd = dict()
            for k, v in d.items():
                if k in ("first", "second"):
                    if isinstance(v, self.__class__):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, Field):
                        newd[k] = {"tablename": v._tablename,
                                   "fieldname": v.name}
                    elif isinstance(v, Expression):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, SERIALIZABLE_TYPES):
                        newd[k] = v
                    elif isinstance(v, (datetime.date,
                                        datetime.time,
                                        datetime.datetime)):
                        newd[k] = unicode(v)
                elif k == "op":
                    if callable(v):
                        newd[k] = v.__name__
                    elif isinstance(v, basestring):
                        newd[k] = v
                elif isinstance(v, SERIALIZABLE_TYPES):
                    if isinstance(v, dict):
                        newd[k] = loop(v)
                    else:
                        newd[k] = v
            return newd

        if flat:
            return loop(self.__dict__)
        else:
            return self.__dict__

    def as_xml(self, sanitize=True):
        if not have_serializers:
            raise ImportError("No xml serializers available")
        return serializers.xml(self.as_dict(flat=True, sanitize=sanitize))

    def as_json(self, sanitize=True):
        if not have_serializers:
            raise ImportError("No json serializers available")
        return serializers.json(self.as_dict(flat=True, sanitize=sanitize))


def xorify(orderby):
    if not orderby:
        return None
    orderby2 = orderby[0]
    for item in orderby[1:]:
        orderby2 = orderby2 | item
    return orderby2
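

# A hedged usage sketch: xorify() folds a list of orderby expressions
# into a single expression joined with '|' (the 'person' table is an
# assumption):
#
#   order = xorify([db.person.name, ~db.person.birth])
#   rows = db(db.person).select(orderby=order)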


def use_common_filters(query):
    return (query and hasattr(query, 'ignore_common_filters') and
            not query.ignore_common_filters)

class Set(object):

    """
    A Set represents a set of records in the database;
    the records are identified by the query=Query(...) object.
    Normally the Set is generated by DAL.__call__(Query(...))

    Given a set, for example
        set = db(db.users.name=='Max')
    you can:
        set.update(name='Massimo')
        set.delete() # all elements in the set
        set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
    and take subsets:
        subset = set(db.users.id<5)
    """

    def __init__(self, db, query, ignore_common_filters=None):
        self.db = db
        self._db = db
        self.dquery = None

        # if query is a dict, parse it into a Query object
        if isinstance(query, dict):
            query = self.parse(query)

        if not ignore_common_filters is None and \
                use_common_filters(query) == ignore_common_filters:
            query = copy.copy(query)
            query.ignore_common_filters = ignore_common_filters
        self.query = query

    def __call__(self, query, ignore_common_filters=False):
        if query is None:
            return self
        elif isinstance(query, Table):
            query = self.db._adapter.id_query(query)
        elif isinstance(query, str):
            query = Expression(self.db, query)
        elif isinstance(query, Field):
            query = query != None
        if self.query:
            return Set(self.db, self.query & query,
                       ignore_common_filters=ignore_common_filters)
        else:
            return Set(self.db, query,
                       ignore_common_filters=ignore_common_filters)
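
    # A hedged usage sketch: calling a Set narrows it with an AND;
    # the 'person' table is an assumption:
    #
    #   adults = db(db.person.birth != None)
    #   named_max = adults(db.person.name == 'Max')   # both conditions apply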

    def _count(self, distinct=None):
        return self.db._adapter._count(self.query, distinct)

    def _select(self, *fields, **attributes):
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join', None),
                                    attributes.get('left', None),
                                    attributes.get('orderby', None),
                                    attributes.get('groupby', None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter._select(self.query, fields, attributes)

    def _delete(self):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        return db._adapter._delete(tablename, self.query)

    def _update(self, **update_fields):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        fields = db[tablename]._listify(update_fields, update=True)
        return db._adapter._update(tablename, self.query, fields)

    def as_dict(self, flat=False, sanitize=True):
        if flat:
            uid = dbname = uri = None
            codec = self.db._db_codec
            if not sanitize:
                uri, dbname, uid = (self.db._dbname, str(self.db),
                                    self.db._db_uid)
            d = {"query": self.query.as_dict(flat=flat)}
            d["db"] = {"uid": uid, "codec": codec,
                       "name": dbname, "uri": uri}
            return d
        else:
            return self.__dict__

    def as_xml(self, sanitize=True):
        if not have_serializers:
            raise ImportError("No xml serializers available")
        return serializers.xml(self.as_dict(flat=True, sanitize=sanitize))

    def as_json(self, sanitize=True):
        if not have_serializers:
            raise ImportError("No json serializers available")
        return serializers.json(self.as_dict(flat=True, sanitize=sanitize))

    def parse(self, dquery):
        "Experimental: Turn a dictionary into a Query object"
        self.dquery = dquery
        return self.build(self.dquery)

    def build(self, d):
        "Experimental: see .parse()"
        op, first, second = (d["op"], d["first"],
                             d.get("second", None))
        left = right = built = None

        if op in ("AND", "OR"):
            if not (type(first), type(second)) == (dict, dict):
                raise SyntaxError("Invalid AND/OR query")
            if op == "AND":
                built = self.build(first) & self.build(second)
            else:
                built = self.build(first) | self.build(second)

        elif op == "NOT":
            if first is None:
                raise SyntaxError("Invalid NOT query")
            built = ~self.build(first)
        else:
            # normal binary/unary operators: resolve the operands first
            for k, v in {"left": first, "right": second}.items():
                if isinstance(v, dict) and v.get("op"):
                    v = self.build(v)
                if isinstance(v, dict) and ("tablename" in v):
                    v = self.db[v["tablename"]][v["fieldname"]]
                if k == "left":
                    left = v
                else:
                    right = v

            if hasattr(self.db._adapter, op):
                opm = getattr(self.db._adapter, op)

            if op == "EQ":
                built = left == right
            elif op == "NE":
                built = left != right
            elif op == "GT":
                built = left > right
            elif op == "GE":
                built = left >= right
            elif op == "LT":
                built = left < right
            elif op == "LE":
                built = left <= right
            elif op in ("JOIN", "LEFT_JOIN", "RANDOM", "ALLOW_NULL"):
                built = Expression(self.db, opm)
            elif op in ("LOWER", "UPPER", "EPOCH", "PRIMARY_KEY",
                        "COALESCE_ZERO", "RAW", "INVERT"):
                built = Expression(self.db, opm, left)
            elif op in ("COUNT", "EXTRACT", "AGGREGATE", "SUBSTRING",
                        "REGEXP", "LIKE", "ILIKE", "STARTSWITH",
                        "ENDSWITH", "ADD", "SUB", "MUL", "DIV",
                        "MOD", "AS", "ON", "COMMA", "NOT_NULL",
                        "COALESCE", "CONTAINS", "BELONGS"):
                built = Expression(self.db, opm, left, right)
            elif not (left or right):
                built = Expression(self.db, op)
            else:
                raise SyntaxError("Operator not supported: %s" % op)

        return built
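
    # A hedged usage sketch: a dict in the shape produced by
    # Query.as_dict(flat=True) can be turned back into a Query
    # (the 'person' table is an assumption):
    #
    #   dquery = {"op": "EQ",
    #             "first": {"tablename": "person", "fieldname": "name"},
    #             "second": "Max"}
    #   rows = db(dquery).select()   # Set.__init__ parses dict queries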

    def isempty(self):
        return not self.select(limitby=(0, 1), orderby_on_limitby=False)

    def count(self, distinct=None, cache=None):
        db = self.db
        if cache:
            cache_model, time_expire = cache
            sql = self._count(distinct=distinct)
            key = db._uri + '/' + sql
            if len(key) > 200:
                key = hashlib_md5(key).hexdigest()
            return cache_model(
                key,
                (lambda self=self, distinct=distinct:
                    db._adapter.count(self.query, distinct)),
                time_expire)
        return db._adapter.count(self.query, distinct)
    def select(self, *fields, **attributes):
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join', None),
                                    attributes.get('left', None),
                                    attributes.get('orderby', None),
                                    attributes.get('groupby', None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter.select(self.query, fields, attributes)


    def delete(self):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        if any(f(self) for f in table._before_delete):
            return 0
        ret = db._adapter.delete(tablename, self.query)
        ret and [f(self) for f in table._after_delete]
        return ret

    def update(self, **update_fields):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        table._attempt_upload(update_fields)
        if any(f(self, update_fields) for f in table._before_update):
            return 0
        fields = table._listify(update_fields, update=True)
        if not fields:
            raise SyntaxError("No fields to update")
        ret = db._adapter.update("%s" % table._tablename, self.query, fields)
        ret and [f(self, update_fields) for f in table._after_update]
        return ret

    def update_naive(self, **update_fields):
        """
        Same as update but does not call table._before_update and _after_update
        """
        tablename = self.db._adapter.get_table(self.query)
        table = self.db[tablename]
        fields = table._listify(update_fields, update=True)
        if not fields:
            raise SyntaxError("No fields to update")
        ret = self.db._adapter.update("%s" % table, self.query, fields)
        return ret

    def validate_and_update(self, **update_fields):
        tablename = self.db._adapter.get_table(self.query)
        response = Row()
        response.errors = Row()
        new_fields = copy.copy(update_fields)
        for key, value in update_fields.iteritems():
            value, error = self.db[tablename][key].validate(value)
            if error:
                response.errors[key] = error
            else:
                new_fields[key] = value
        table = self.db[tablename]
        if response.errors:
            response.updated = None
        else:
            if not any(f(self, new_fields) for f in table._before_update):
                fields = table._listify(new_fields, update=True)
                if not fields:
                    raise SyntaxError("No fields to update")
                ret = self.db._adapter.update(tablename, self.query, fields)
                ret and [f(self, new_fields) for f in table._after_update]
            else:
                ret = 0
            response.updated = ret
        return response
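
    # A hedged usage sketch (assumes db.person.name carries validators):
    #
    #   result = db(db.person.id == 1).validate_and_update(name='Max')
    #   if result.errors:
    #       print result.errors
    #   else:
    #       print result.updated   # number of records updated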

    def delete_uploaded_files(self, upload_fields=None):
        table = self.db[self.db._adapter.tables(self.query)[0]]
        if upload_fields:
            fields = upload_fields.keys()
        else:
            fields = table.fields
        # consider only upload fields stored on disk with autodelete enabled
        fields = [f for f in fields if table[f].type == 'upload'
                  and table[f].uploadfield == True
                  and table[f].autodelete]
        if not fields:
            return False
        for record in self.select(*[table[f] for f in fields]):
            for fieldname in fields:
                field = table[fieldname]
                oldname = record.get(fieldname, None)
                if not oldname:
                    continue
                if upload_fields and oldname == upload_fields[fieldname]:
                    continue
                if field.custom_delete:
                    field.custom_delete(oldname)
                else:
                    uploadfolder = field.uploadfolder
                    if not uploadfolder:
                        uploadfolder = pjoin(
                            self.db._adapter.folder, '..', 'uploads')
                    if field.uploadseparate:
                        items = oldname.split('.')
                        uploadfolder = pjoin(
                            uploadfolder,
                            "%s.%s" % (items[0], items[1]),
                            items[2][:2])
                    oldpath = pjoin(uploadfolder, oldname)
                    if exists(oldpath):
                        os.unlink(oldpath)
        return False


class RecordUpdater(object):

    def __init__(self, colset, table, id):
        self.colset, self.db, self.tablename, self.id = \
            colset, table._db, table._tablename, id

    def __call__(self, **fields):
        colset, db, tablename, id = self.colset, self.db, self.tablename, self.id
        table = db[tablename]
        newfields = fields or dict(colset)
        for fieldname in newfields.keys():
            if not fieldname in table.fields or table[fieldname].type == 'id':
                del newfields[fieldname]
        table._db(table._id == id, ignore_common_filters=True).update(**newfields)
        colset.update(newfields)
        return colset


class RecordDeleter(object):

    def __init__(self, table, id):
        self.db, self.tablename, self.id = table._db, table._tablename, id

    def __call__(self):
        return self.db(self.db[self.tablename]._id == self.id).delete()


class LazyReferenceGetter(object):

    def __init__(self, table, id):
        self.db, self.tablename, self.id = table._db, table._tablename, id

    def __call__(self, other_tablename):
        if self.db._lazy_tables is False:
            raise AttributeError()
        table = self.db[self.tablename]
        other_table = self.db[other_tablename]
        for rfield in table._referenced_by:
            if rfield.table == other_table:
                return LazySet(rfield, self.id)
        raise AttributeError()


class LazySet(object):

    def __init__(self, field, id):
        self.db, self.tablename, self.fieldname, self.id = \
            field.db, field._tablename, field.name, id

    def _getset(self):
        query = self.db[self.tablename][self.fieldname] == self.id
        return Set(self.db, query)

    def __call__(self, query, ignore_common_filters=False):
        return self._getset()(query, ignore_common_filters)

    def _count(self, distinct=None):
        return self._getset()._count(distinct)
    def _select(self, *fields, **attributes):
        return self._getset()._select(*fields, **attributes)
    def _update(self, **update_fields):
        return self._getset()._update(**update_fields)
    def count(self, distinct=None, cache=None):
        return self._getset().count(distinct, cache)
    def select(self, *fields, **attributes):
        return self._getset().select(*fields, **attributes)
    def update(self, **update_fields):
        return self._getset().update(**update_fields)


class VirtualCommand(object):
    def __init__(self, method, row):
        self.method = method
        self.row = row

    def __call__(self, *args, **kwargs):
        return self.method(self.row, *args, **kwargs)


def lazy_virtualfield(f):
    f.__lazy__ = True
    return f

class Rows(object):

    """
    A wrapper for the return value of a select. It basically represents a table.
    It has an iterator and each row is represented as a dictionary.
    """

    def __init__(
        self,
        db=None,
        records=[],
        colnames=[],
        compact=True,
        rawrows=None
        ):
        self.db = db
        self.records = records
        self.colnames = colnames
        self.compact = compact
        self.response = rawrows

    def __repr__(self):
        return '<Rows (%s)>' % len(self.records)

10740 """
10741 db.define_table('x',Field('number','integer'))
10742 if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]
10743
10744 from gluon.dal import lazy_virtualfield
10745
10746 class MyVirtualFields(object):
10747 # normal virtual field (backward compatible, discouraged)
10748 def normal_shift(self): return self.x.number+1
10749 # lazy virtual field (because of @staticmethod)
10750 @lazy_virtualfield
10751 def lazy_shift(instance,row,delta=4): return row.x.number+delta
10752 db.x.virtualfields.append(MyVirtualFields())
10753
10754 for row in db(db.x).select():
10755 print row.number, row.normal_shift, row.lazy_shift(delta=7)
10756 """
10757 if not keyed_virtualfields:
10758 return self
10759 for row in self.records:
10760 for (tablename,virtualfields) in keyed_virtualfields.iteritems():
10761 attributes = dir(virtualfields)
10762 if not tablename in row:
10763 box = row[tablename] = Row()
10764 else:
10765 box = row[tablename]
10766 updated = False
10767 for attribute in attributes:
10768 if attribute[0] != '_':
10769 method = getattr(virtualfields,attribute)
10770 if hasattr(method,'__lazy__'):
10771 box[attribute]=VirtualCommand(method,row)
10772 elif type(method)==types.MethodType:
10773 if not updated:
10774 virtualfields.__dict__.update(row)
10775 updated = True
10776 box[attribute]=method()
10777 return self

    def __and__(self, other):
        if self.colnames != other.colnames:
            raise Exception('Cannot & incompatible Rows objects')
        records = self.records + other.records
        return Rows(self.db, records, self.colnames,
                    compact=self.compact or other.compact)

    def __or__(self, other):
        if self.colnames != other.colnames:
            raise Exception('Cannot | incompatible Rows objects')
        records = [record for record in other.records
                   if not record in self.records]
        records = self.records + records
        return Rows(self.db, records, self.colnames,
                    compact=self.compact or other.compact)

    def __nonzero__(self):
        if len(self.records):
            return 1
        return 0

    def __len__(self):
        return len(self.records)

    def __getslice__(self, a, b):
        return Rows(self.db, self.records[a:b], self.colnames,
                    compact=self.compact)

    def __getitem__(self, i):
        row = self.records[i]
        keys = row.keys()
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[keys[0]]
        return row

    def __iter__(self):
        """
        iterator over records
        """
        for i in xrange(len(self)):
            yield self[i]

    def __str__(self):
        """
        serializes the table into a csv file
        """
        s = StringIO.StringIO()
        self.export_to_csv_file(s)
        return s.getvalue()

    def first(self):
        if not self.records:
            return None
        return self[0]

    def last(self):
        if not self.records:
            return None
        return self[-1]

    def find(self, f, limitby=None):
        """
        returns a new Rows object, a subset of the original object,
        filtered by the function f
        """
        if not self:
            return Rows(self.db, [], self.colnames, compact=self.compact)
        records = []
        if limitby:
            a, b = limitby
        else:
            a, b = 0, len(self)
        k = 0
        for i, row in enumerate(self):
            if f(row):
                if a <= k:
                    records.append(self.records[i])
                k += 1
                if k == b:
                    break
        return Rows(self.db, records, self.colnames, compact=self.compact)

    def exclude(self, f):
        """
        removes elements from the calling Rows object, filtered by the
        function f, and returns a new Rows object containing the removed
        elements
        """
        if not self.records:
            return Rows(self.db, [], self.colnames, compact=self.compact)
        removed = []
        i = 0
        while i < len(self):
            row = self[i]
            if f(row):
                removed.append(self.records[i])
                del self.records[i]
            else:
                i += 1
        return Rows(self.db, removed, self.colnames, compact=self.compact)
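
    # A hedged usage sketch (the 'person' table is an assumption):
    #
    #   rows = db(db.person).select()
    #   js = rows.exclude(lambda row: row.name.startswith('J'))
    #   # js now holds the removed rows; rows holds the others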

    def sort(self, f, reverse=False):
        """
        returns a list of sorted elements (not sorted in place)
        """
        rows = Rows(self.db, [], self.colnames, compact=self.compact)
        # sort the raw records and the (possibly compacted) rows together,
        # keyed on the value f extracts from each compacted row
        rows.records = [r for (r, s) in sorted(zip(self.records, self),
                                               key=lambda r: f(r[1]),
                                               reverse=reverse)]
        return rows
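
    # A hedged usage sketch (the 'person' table is an assumption):
    #
    #   for row in db(db.person).select().sort(lambda row: row.name):
    #       print row.name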

    def group_by_value(self, *fields, **args):
        """
        regroups the rows, by one of the fields
        """
        one_result = False
        if 'one_result' in args:
            one_result = args['one_result']

        def build_fields_struct(row, fields, num, groups):
            """
            helper function recursively nesting the rows by field value
            """
            if num > len(fields) - 1:
                if one_result:
                    return row
                else:
                    return [row]

            key = fields[num]
            value = row[key]

            if value not in groups:
                groups[value] = build_fields_struct(row, fields, num + 1, {})
            else:
                struct = build_fields_struct(row, fields, num + 1,
                                             groups[value])
                # still more grouping to do: groups[value] was already
                # updated in place by the recursive call
                if type(struct) == type(dict()):
                    groups[value].update()
                # no more grouping, one_result is off
                elif type(struct) == type(list()):
                    groups[value] += struct
                # no more grouping, one_result is on
                else:
                    groups[value] = struct

            return groups

        if len(fields) == 0:
            return self

        # there is no sense in grouping an empty Rows
        if not self.records:
            return {}

        grouped_row_group = dict()

        for row in self:
            build_fields_struct(row, fields, 0, grouped_row_group)

        return grouped_row_group
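
    # A hedged usage sketch (the 'person' table is an assumption):
    #
    #   rows = db(db.person).select()
    #   by_name = rows.group_by_value('name')
    #   # {'Jim': [<Row...>, ...], 'Max': [<Row...>], ...}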

    def render(self, i=None, fields=None):
        """
        Takes an index and returns a copy of the indexed row with values
        transformed via the "represent" attributes of the associated fields.

        If no index is specified, a generator is returned for iteration
        over all the rows.

        fields -- a list of fields to transform (if None, all fields with
                  "represent" attributes will be transformed)
        """
        if i is None:
            return (self.render(i, fields=fields) for i in range(len(self)))
        import sqlhtml
        row = copy.deepcopy(self.records[i])
        keys = row.keys()
        tables = [f.tablename for f in fields] if fields \
            else [k for k in keys if k != '_extra']
        for table in tables:
            repr_fields = [f.name for f in fields if f.tablename == table] \
                if fields else [k for k in row[table].keys()
                                if (hasattr(self.db[table], k) and
                                    isinstance(self.db[table][k], Field)
                                    and self.db[table][k].represent)]
            for field in repr_fields:
                row[table][field] = sqlhtml.represent(
                    self.db[table][field], row[table][field], row[table])
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[keys[0]]
        return row

    def as_list(self,
                compact=True,
                storage_to_dict=True,
                datetime_to_str=False,
                custom_types=None):
        """
        returns the data as a list of rows
        :param storage_to_dict: when True each row is converted to a plain
            dict (default True)
        :param datetime_to_str: convert datetime fields to strings
            (default False)
        """
        (oc, self.compact) = (self.compact, compact)
        if storage_to_dict:
            items = [item.as_dict(datetime_to_str, custom_types)
                     for item in self]
        else:
            items = [item for item in self]
        self.compact = oc
        return items

    def as_dict(self,
                key='id',
                compact=True,
                storage_to_dict=True,
                datetime_to_str=False,
                custom_types=None):
        """
        returns the data as a dictionary of dictionaries (storage_to_dict=True)
        or records (False)

        :param key: the name of the field to be used as dict key, normally
            the id
        :param compact: passed through to as_list (default True)
        :param storage_to_dict: when True returns a dict, otherwise a list
            (default True)
        :param datetime_to_str: convert datetime fields to strings
            (default False)
        """

        # test for multiple rows per key
        multi = False
        f = self.first()
        if f and isinstance(key, basestring):
            multi = any([isinstance(v, f.__class__) for v in f.values()])
            if (not "." in key) and multi:
                # no usable key provided, default to int indices
                def new_key():
                    i = 0
                    while True:
                        yield i
                        i += 1
                key_generator = new_key()
                key = lambda r: key_generator.next()

        rows = self.as_list(compact, storage_to_dict, datetime_to_str,
                            custom_types)
        if isinstance(key, str) and key.count('.') == 1:
            (table, field) = key.split('.')
            return dict([(r[table][field], r) for r in rows])
        elif isinstance(key, str):
            return dict([(r[key], r) for r in rows])
        else:
            return dict([(key(r), r) for r in rows])
    def as_trees(self, parent_name='parent_id', children_name='children'):
        roots = []
        drows = {}
        for row in self:
            drows[row.id] = row
            row[children_name] = []
        for row in self:
            parent = row[parent_name]
            if parent is None:
                roots.append(row)
            else:
                drows[parent][children_name].append(row)
        return roots
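
    # A hedged usage sketch (the self-referencing 'category' table is an
    # assumption):
    #
    #   db.define_table('category',
    #                   Field('name'),
    #                   Field('parent_id', 'reference category'))
    #   roots = db(db.category).select().as_trees()
    #   # each root Row now carries its descendants in row.children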

    def export_to_csv_file(self, ofile, null='<NULL>', *args, **kwargs):
        """
        export data to csv, the first line contains the column names

        :param ofile: where the csv must be exported to
        :param null: how null values must be represented (default '<NULL>')
        :param delimiter: delimiter to separate values (default ',')
        :param quotechar: character to use to quote string values (default '"')
        :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
        :param represent: use the fields .represent value (default False)
        :param colnames: list of column names to use (default self.colnames)

        This will only work when exporting Rows objects;
        do NOT use this with db.export_to_csv()
        """
        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
        represent = kwargs.get('represent', False)
        writer = csv.writer(ofile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)

        def unquote_colnames(colnames):
            unq_colnames = []
            for col in colnames:
                m = self.db._adapter.REGEX_TABLE_DOT_FIELD.match(col)
                if not m:
                    unq_colnames.append(col)
                else:
                    unq_colnames.append('.'.join(m.groups()))
            return unq_colnames

        colnames = kwargs.get('colnames', self.colnames)
        write_colnames = kwargs.get('write_colnames', True)
        # a proper csv starts with the column names
        if write_colnames:
            writer.writerow(unquote_colnames(colnames))

        def none_exception(value):
            """
            returns a cleaned up value that can be used for csv export:
            - unicode text is encoded as utf8
            - None values are replaced with the given representation
              (default <NULL>)
            """
            if value is None:
                return null
            elif isinstance(value, unicode):
                return value.encode('utf8')
            elif isinstance(value, Reference):
                return long(value)
            elif hasattr(value, 'isoformat'):
                return value.isoformat()[:19].replace('T', ' ')
            elif isinstance(value, (list, tuple)):
                return bar_encode(value)
            return value

        for record in self:
            row = []
            for col in colnames:
                m = self.db._adapter.REGEX_TABLE_DOT_FIELD.match(col)
                if not m:
                    row.append(record._extra[col])
                else:
                    (t, f) = m.groups()
                    field = self.db[t][f]
                    if isinstance(record.get(t, None), (Row, dict)):
                        value = record[t][f]
                    else:
                        value = record[f]
                    if field.type == 'blob' and not value is None:
                        value = base64.b64encode(value)
                    elif represent and field.represent:
                        value = field.represent(value, record)
                    row.append(none_exception(value))
            writer.writerow(row)
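
    # A hedged usage sketch (the 'person' table is an assumption):
    #
    #   ofile = open('people.csv', 'wb')
    #   db(db.person).select().export_to_csv_file(ofile, represent=True)
    #   ofile.close()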

    def xml(self, strict=False, row_name='row', rows_name='rows'):
        """
        serializes the table using sqlhtml.SQLTABLE (if present)
        """
        if strict:
            return '<%s>\n%s\n</%s>' % (rows_name,
                '\n'.join(row.as_xml(row_name=row_name,
                                     colnames=self.colnames) for
                          row in self), rows_name)

        import sqlhtml
        return sqlhtml.SQLTABLE(self).xml()

    def as_xml(self, row_name='row', rows_name='rows'):
        return self.xml(strict=True, row_name=row_name, rows_name=rows_name)

    def as_json(self, mode='object', default=None):
        """
        serializes the rows to a JSON list or object with objects
        mode='object' is not implemented (should return a nested
        object structure)
        """
        items = [record.as_json(mode=mode, default=default,
                                serialize=False,
                                colnames=self.colnames) for
                 record in self]

        if have_serializers:
            return serializers.json(items,
                                    default=default or
                                    serializers.custom_json)
        elif simplejson:
            return simplejson.dumps(items)
        else:
            raise RuntimeError("missing simplejson")

    # backward compatibility aliases
    as_csv = __str__
    json = as_json


def test_all():
    """

    >>> if len(sys.argv)<2: db = DAL("sqlite://test.db")
    >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
    >>> tmp = db.define_table('users',\
              Field('stringf', 'string', length=32, required=True),\
              Field('booleanf', 'boolean', default=False),\
              Field('passwordf', 'password', notnull=True),\
              Field('uploadf', 'upload'),\
              Field('blobf', 'blob'),\
              Field('integerf', 'integer', unique=True),\
              Field('doublef', 'double', unique=True,notnull=True),\
              Field('jsonf', 'json'),\
              Field('datef', 'date', default=datetime.date.today()),\
              Field('timef', 'time'),\
              Field('datetimef', 'datetime'),\
              migrate='test_user.table')

    Insert a record

    >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
                        uploadf=None, integerf=5, doublef=3.14,\
                        jsonf={"j": True},\
                        datef=datetime.date(2001, 1, 1),\
                        timef=datetime.time(12, 30, 15),\
                        datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
    1

    Drop the table

    >>> db.users.drop()

    Examples of insert, select, update, delete

    >>> tmp = db.define_table('person',\
              Field('name'),\
              Field('birth','date'),\
              migrate='test_person.table')
    >>> person_id = db.person.insert(name='Marco',birth='2005-06-22')
    >>> person_id = db.person.insert(name='Massimo',birth='1971-12-21')

    commented len(db().select(db.person.ALL))
    commented 2

    >>> me = db(db.person.id==person_id).select()[0] # test select
    >>> me.name
    'Massimo'
    >>> db.person[2].name
    'Massimo'
    >>> db.person(2).name
    'Massimo'
    >>> db.person(name='Massimo').name
    'Massimo'
    >>> db.person(db.person.name=='Massimo').name
    'Massimo'
    >>> row = db.person[2]
    >>> row.name == row['name'] == row['person.name'] == row('person.name')
    True
    >>> db(db.person.name=='Massimo').update(name='massimo') # test update
    1
    >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
    1

    Update a single record

    >>> me.update_record(name="Max")
    <Row {'name': 'Max', 'birth': datetime.date(1971, 12, 21), 'id': 2}>
    >>> me.name
    'Max'

    Examples of complex search conditions

    >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
    1
    >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
    1
    >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
    1
    >>> me = db(db.person.id==person_id).select(db.person.name)[0]
    >>> me.name
    'Max'

    Examples of search conditions using extract from date/datetime/time

    >>> len(db(db.person.birth.month()==12).select())
    1
    >>> len(db(db.person.birth.year()>1900).select())
    1

    Example of usage of NULL

    >>> len(db(db.person.birth==None).select()) ### test NULL
    0
    >>> len(db(db.person.birth!=None).select()) ### test NULL
    1

    Examples of search conditions using lower, upper, and like

    >>> len(db(db.person.name.upper()=='MAX').select())
    1
    >>> len(db(db.person.name.like('%ax')).select())
    1
    >>> len(db(db.person.name.upper().like('%AX')).select())
    1
    >>> len(db(~db.person.name.upper().like('%AX')).select())
    0

    orderby, groupby and limitby

    >>> people = db().select(db.person.name, orderby=db.person.name)
    >>> order = db.person.name|~db.person.birth
    >>> people = db().select(db.person.name, orderby=order)

    >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)

    >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))

    Example of a one-to-many relation

    >>> tmp = db.define_table('dog',\
              Field('name'),\
              Field('birth','date'),\
              Field('owner',db.person),\
              migrate='test_dog.table')
    >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
    1

    A simple JOIN

    >>> len(db(db.dog.owner==db.person.id).select())
    1

    >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
    1

    Drop tables

    >>> db.dog.drop()
    >>> db.person.drop()

    Example of a many-to-many relation and Set

    >>> tmp = db.define_table('author', Field('name'),\
              migrate='test_author.table')
    >>> tmp = db.define_table('paper', Field('title'),\
              migrate='test_paper.table')
    >>> tmp = db.define_table('authorship',\
              Field('author_id', db.author),\
              Field('paper_id', db.paper),\
              migrate='test_authorship.table')
    >>> aid = db.author.insert(name='Massimo')
    >>> pid = db.paper.insert(title='QCD')
    >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)

    Define a Set

    >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
    >>> rows = authored_papers.select(db.author.name, db.paper.title)
    >>> for row in rows: print row.author.name, row.paper.title
    Massimo QCD

    Example of search condition using belongs

    >>> set = (1, 2, 3)
    >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of search condition using a nested select

    >>> nested_select = db()._select(db.authorship.paper_id)
    >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of expressions

    >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
    >>> db(mynumber).delete()
    0
    >>> for i in range(10): tmp = mynumber.insert(x=i)
    >>> db(mynumber).select(mynumber.x.sum())[0](mynumber.x.sum())
    45

    >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
    5

    Output in csv

    >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
    author.name,paper.title\r
    Massimo,QCD

    Delete all leftover tables

    >>> DAL.distributed_transaction_commit(db)

    >>> db.mynumber.drop()
    >>> db.authorship.drop()
    >>> db.author.drop()
    >>> db.paper.drop()
    """


# ## backward compatibility aliases

SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
DAL.Field = Field
DAL.Table = Table


# ## geo spatial helpers

def geoPoint(x, y):
    return "POINT (%f %f)" % (x, y)


def geoLine(*line):
    return "LINESTRING (%s)" % ','.join("%f %f" % item for item in line)


def geoPolygon(*line):
    return "POLYGON ((%s))" % ','.join("%f %f" % item for item in line)


if __name__ == '__main__':
    import doctest
    doctest.testmod()