

"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Denes
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases,
including:
- SQLite
- MySQL
- Postgres
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it does not exist)
>>> db=DAL(('mysql://a:b@localhost/x','sqlite://storage.sqlite'),folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James')&(person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower()=='jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,groupby=person.name,limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name=='James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload blob time date datetime
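
For example, a table mixing several of these types (a sketch, not part of
the doctest above):

    db.define_table('example',
        Field('name', 'string'),
        Field('born', 'date'),
        Field('resume', 'text'),
        Field('score', 'double'))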

Supported DAL URI strings:
'sqlite://test.db'
'sqlite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:none@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
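
For example, opening a pooled connection (a sketch; any URI above works the
same way, and pool_size is optional):

    db = DAL('postgres://mdipierro:none@localhost/test', pool_size=10)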

For more info:
help(DAL)
help(Field)
"""


__all__ = ['DAL', 'Field']
MAXCHARLENGTH = 512
INFINITY = 32768

import re
import sys
import locale
import os
import types
import cPickle
import datetime
import threading
import time
import cStringIO
import csv
import copy
import socket
import logging
import copy_reg
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob

CALLABLETYPES = (types.LambdaType, types.FunctionType, types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

try:
    from utils import web2py_uuid
except ImportError:
    import uuid
    web2py_uuid = lambda: str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    import serializers
    have_serializers = True
except ImportError:
    have_serializers = False

try:
    import validators
    have_validators = True
except ImportError:
    have_validators = False

logger = logging.getLogger("web2py.dal")
DEFAULT = lambda: 0

sql_locker = threading.RLock()
thread = threading.local()

regex_dbname = re.compile('^(\w+)(\:\w+)*')
table_field = re.compile('^[\w_]+\.[\w_]+$')
regex_content = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
regex_cleanup_fn = re.compile('[\'"\s;]+')
string_unpack = re.compile('(?<!\|)\|(?!\|)')
regex_python_keywords = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')

drivers = []

try:
    from pysqlite2 import dbapi2 as sqlite3
    drivers.append('pysqlite2')
except ImportError:
    try:
        from sqlite3 import dbapi2 as sqlite3
        drivers.append('SQLite3')
    except ImportError:
        logger.debug('no sqlite3 or pysqlite2.dbapi2 driver')

try:
    import contrib.pymysql as pymysql
    drivers.append('pymysql')
except ImportError:
    logger.debug('no pymysql driver')

try:
    import psycopg2
    drivers.append('PostgreSQL')
except ImportError:
    logger.debug('no psycopg2 driver')

try:
    import cx_Oracle
    drivers.append('Oracle')
except ImportError:
    logger.debug('no cx_Oracle driver')

try:
    import pyodbc
    drivers.append('MSSQL/DB2')
except ImportError:
    logger.debug('no MSSQL/DB2 driver')

try:
    import kinterbasdb
    drivers.append('Interbase')
except ImportError:
    logger.debug('no kinterbasdb driver')

try:
    import firebirdsql
    drivers.append('Firebird')
except ImportError:
    logger.debug('no Firebird driver')

try:
    import informixdb
    drivers.append('Informix')
    logger.warning('Informix support is experimental')
except ImportError:
    logger.debug('no informixdb driver')

try:
    import sapdb
    drivers.append('SAPDB')
    logger.warning('SAPDB support is experimental')
except ImportError:
    logger.debug('no sapdb driver')

try:
    import cubriddb
    drivers.append('Cubrid')
    logger.warning('Cubrid support is experimental')
except ImportError:
    logger.debug('no cubriddb driver')

try:
    from com.ziclix.python.sql import zxJDBC
    import java.sql
    from org.sqlite import JDBC
    drivers.append('zxJDBC')
    logger.warning('zxJDBC support is experimental')
    is_jdbc = True
except ImportError:
    logger.debug('no zxJDBC driver')
    is_jdbc = False

try:
    import ingresdbi
    drivers.append('Ingres')
except ImportError:
    logger.debug('no Ingres driver')


try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key
    from google.appengine.ext.db.polymodel import PolyModel

    drivers.append('google')

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(self, **kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def get_value_for_datastore(self, model_instance):
            value = super(GAEDecimalProperty, self).get_value_for_datastore(model_instance)
            if value:
                return str(value)
            else:
                return None

        def make_value_from_datastore(self, value):
            if value:
                return decimal.Decimal(value).quantize(self.round)
            else:
                return None

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string." % self.name)

except ImportError:
    pass


class ConnectionPool(object):

    pools = {}

    @staticmethod
    def set_folder(folder):
        thread.folder = folder

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        if not hasattr(thread,'instances'):
            return
        while thread.instances:
            instance = thread.instances.pop()
            getattr(instance,action)()

            really = True
            if instance.pool_size:
                sql_locker.acquire()
                pool = ConnectionPool.pools[instance.uri]
                if len(pool) < instance.pool_size:
                    pool.append(instance.connection)
                    really = False
                sql_locker.release()
            if really:
                getattr(instance,'close')()
        return

    def find_or_make_work_folder(self):
        """ this actually does not make the folder; it has to be there already """
        if hasattr(thread,'folder'):
            self.folder = thread.folder
        else:
            self.folder = thread.folder = ''

        if False and self.folder and not os.path.exists(self.folder):
            os.mkdir(self.folder)


class BaseAdapter(ConnectionPool):

    maxcharlength = INFINITY
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return os.path.exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        # simple implementation: open the file and, when portalocker is
        # available, lock it (shared for reads, exclusive for writes)
        fileobj = open(filename, mode)
        if have_portalocker and lock:
            if mode in ('r', 'rb'):
                portalocker.lock(fileobj, portalocker.LOCK_SH)
            else:
                portalocker.lock(fileobj, portalocker.LOCK_EX)
        return fileobj

    def file_close(self, fileobj, unlock=True):
        if fileobj:
            if have_portalocker and unlock:
                portalocker.unlock(fileobj)
            fileobj.close()

    def file_delete(self, filename):
        os.unlink(filename)

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self,tablename):
        return '%s_sequence' % tablename

    def trigger_name(self,tablename):
        return '%s_sequence' % tablename

    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        for field in table:
            sortable += 1
            k = field.name
            if isinstance(field.type,SQLCustomType):
                ftype = field.type.native or field.type.type
            elif field.type.startswith('reference'):
                referenced = field.type[10:].strip()
                constraint_name = self.constraint_name(tablename, field.name)
                if hasattr(table,'_primarykey'):
                    rtablename,rfieldname = referenced.split('.')
                    rtable = table._db[rtablename]
                    rfield = rtable[rfieldname]

                    if rfieldname in rtable._primarykey or rfield.unique:
                        ftype = self.types[rfield.type[:9]] % dict(length=rfield.length)

                        if not rfield.unique and len(rtable._primarykey)>1:

                            if rtablename not in TFK:
                                TFK[rtablename] = {}
                            TFK[rtablename][rfieldname] = field.name
                        else:
                            ftype = ftype + \
                                self.types['reference FK'] % dict(
                                    constraint_name=constraint_name,
                                    table_name=tablename,
                                    field_name=field.name,
                                    foreign_key='%s (%s)' % (rtablename, rfieldname),
                                    on_delete_action=field.ondelete)
                else:

                    id_fieldname = referenced in table._db and table._db[referenced]._id.name or 'id'
                    ftype = self.types[field.type[:9]] \
                        % dict(table_name=tablename,
                               field_name=field.name,
                               constraint_name=constraint_name,
                               foreign_key=referenced + ('(%s)' % id_fieldname),
                               on_delete_action=field.ondelete)
            elif field.type.startswith('list:reference'):
                ftype = self.types[field.type[:14]]
            elif field.type.startswith('decimal'):
                precision, scale = [int(x) for x in field.type[8:-1].split(',')]
                ftype = self.types[field.type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif not field.type in self.types:
                raise SyntaxError, 'Field: unknown field type: %s for %s' % \
                    (field.type, field.name)
            else:
                ftype = self.types[field.type] \
                    % dict(length=field.length)
            if not field.type.startswith('id') and not field.type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'

            sql_fields[field.name] = dict(sortable=sortable,
                                          type=str(field.type),
                                          sql=ftype)

            if isinstance(field.default,(str,int,float)):
                # a changed default value should not trigger a migration by
                # itself, so sql_fields (used to detect migrations) and
                # sql_fields_aux (used for CREATE TABLE) differ here
                not_null = self.NOT_NULL(field.default,field.type)
                ftype = ftype.replace('NOT NULL',not_null)
            sql_fields_aux[field.name] = dict(sql=ftype)

            fields.append('%s %s' % (field.name, ftype))
        other = ';'

        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = table._db[rtablename]._primarykey
            fkeys = [rfields[k] for k in pkeys]
            fields = fields + ',\n    ' + \
                self.types['reference TFK'] % \
                dict(table_name=tablename,
                     field_name=', '.join(fkeys),
                     foreign_table=rtablename,
                     foreign_key=', '.join(pkeys),
                     on_delete_action=field.ondelete)

        if hasattr(table,'_primarykey'):
            query = '''CREATE TABLE %s(\n    %s,\n    %s) %s''' % \
                (tablename, fields, self.PRIMARY_KEY(', '.join(table._primarykey)), other)
        else:
            query = '''CREATE TABLE %s(\n    %s\n)%s''' % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///'):
            path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')].decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = os.path.join(dbpath, migrate)
        else:
            table._dbt = os.path.join(dbpath, '%s_%s.table' \
                % (table._db._uri_hash, tablename))
        if table._dbt:
            table._loggername = os.path.join(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = cPickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError, 'File %s appears corrupted' % table._dbt
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query

    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):
        tablename = table._tablename
        def fix(item):
            k,v = item
            if not isinstance(v,dict):
                v = dict(type='unknown',sql=v)
            return k.lower(),v

        sql_fields = dict(fix(v) for v in sql_fields.items())
        sql_fields_old = dict(fix(v) for v in sql_fields_old.items())
        sql_fields_aux = dict(fix(v) for v in sql_fields_aux.items())

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        if self.dbengine == 'mssql':
            new_add = '; ALTER TABLE %s ADD ' % tablename
        else:
            new_add = ', ADD '

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                query = ['ALTER TABLE %s ADD %s %s;' % \
                         (tablename, key,
                          sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine == 'sqlite':
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP COLUMN %s;' % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                    and not isinstance(table[key].type, SQLCustomType) \
                    and not (table[key].type.startswith('reference') and \
                        sql_fields[key]['sql'].startswith('INT,') and \
                        sql_fields_old[key]['sql'].startswith('INT NOT NULL,')):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
                else:
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                table._db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    logfile.write(sub_query + '\n')
                    if not fake_migrate:
                        self.execute(sub_query)
                        # mysql/oracle/firebird do not allow multiple ALTERs in
                        # one transaction: commit and save metadata after each
                        if table._db._adapter.commit_on_alter_table:
                            table._db.commit()
                            tfile = self.file_open(table._dbt, 'w')
                            cPickle.dump(sql_fields_current, tfile)
                            self.file_close(tfile)
                            logfile.write('success!\n')
                    else:
                        logfile.write('faked!\n')
            elif metadata_change:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields_current, tfile)
                self.file_close(tfile)

        if metadata_change and \
                not (query and self.dbengine in ('mysql','oracle','firebird')):
            table._db.commit()
            tfile = self.file_open(table._dbt, 'w')
            cPickle.dump(sql_fields_current, tfile)
            self.file_close(tfile)

    def LOWER(self,first):
        return 'LOWER(%s)' % self.expand(first)

    def UPPER(self,first):
        return 'UPPER(%s)' % self.expand(first)

760 return "EXTRACT(%s FROM %s)" % (what, self.expand(first))
761
764
767
770
773
775 return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)
776
779
782
784 return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
785
787 return 'PRIMARY KEY(%s)' % key
788
    def _drop(self,table,mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                logfile.write(query + '\n')
            self.execute(query)
        table._db.commit()
        del table._db[table._tablename]
        del table._db.tables[table._db.tables.index(table._tablename)]
        table._db._update_referenced_by(table._tablename)
        if table._dbt:
            self.file_delete(table._dbt)
            logfile.write('success!\n')

    def _insert(self,table,fields):
        keys = ','.join(f.name for f,v in fields)
        values = ','.join(self.expand(v,f.type) for f,v in fields)
        return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)

    def insert(self,table,fields):
        query = self._insert(table,fields)
        try:
            self.execute(query)
        except Exception, e:
            if isinstance(e,self.integrity_error_class()):
                return None
            raise e
        if hasattr(table,'_primarykey'):
            return dict([(k,fields[k]) for k in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id,int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self,table,items):
        return [self.insert(table,item) for item in items]

    def NOT(self,first):
        return '(NOT %s)' % self.expand(first)

    def AND(self,first,second):
        return '(%s AND %s)' % (self.expand(first),self.expand(second))

    def OR(self,first,second):
        return '(%s OR %s)' % (self.expand(first),self.expand(second))

    def BELONGS(self,first,second):
        if isinstance(second,str):
            return '(%s IN (%s))' % (self.expand(first),second[:-1])
        return '(%s IN (%s))' % (self.expand(first),
                                 ','.join(self.expand(item,first.type) for item in second))

    def LIKE(self,first,second):
        return '(%s LIKE %s)' % (self.expand(first),self.expand(second,'string'))

    def STARTSWITH(self,first,second):
        return '(%s LIKE %s)' % (self.expand(first),self.expand(second+'%','string'))

    def ENDSWITH(self,first,second):
        return '(%s LIKE %s)' % (self.expand(first),self.expand('%'+second,'string'))

    def CONTAINS(self,first,second):
        if first.type in ('string','text'):
            key = '%'+str(second).replace('%','%%')+'%'
        elif first.type.startswith('list:'):
            key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
        return '(%s LIKE %s)' % (self.expand(first),self.expand(key,'string'))

    def EQ(self,first,second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first),self.expand(second,first.type))

    def NE(self,first,second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first),self.expand(second,first.type))

    def LT(self,first,second=None):
        return '(%s < %s)' % (self.expand(first),self.expand(second,first.type))

    def LE(self,first,second=None):
        return '(%s <= %s)' % (self.expand(first),self.expand(second,first.type))

    def GT(self,first,second=None):
        return '(%s > %s)' % (self.expand(first),self.expand(second,first.type))

    def GE(self,first,second=None):
        return '(%s >= %s)' % (self.expand(first),self.expand(second,first.type))

    def ADD(self,first,second):
        return '(%s + %s)' % (self.expand(first),self.expand(second,first.type))

    def SUB(self,first,second):
        return '(%s - %s)' % (self.expand(first),self.expand(second,first.type))

    def MUL(self,first,second):
        return '(%s * %s)' % (self.expand(first),self.expand(second,first.type))

    def DIV(self,first,second):
        return '(%s / %s)' % (self.expand(first),self.expand(second,first.type))

    def MOD(self,first,second):
        return '(%s %% %s)' % (self.expand(first),self.expand(second,first.type))

    def AS(self,first,second):
        return '%s AS %s' % (self.expand(first),second)

    def ON(self,first,second):
        return '%s ON %s' % (self.expand(first),self.expand(second))

    def INVERT(self,first):
        return '%s DESC' % self.expand(first)

    def COMMA(self,first,second):
        return '%s, %s' % (self.expand(first),self.expand(second))

    def expand(self,expression,field_type=None):
        if isinstance(expression,Field):
            return str(expression)
        elif isinstance(expression, (Expression, Query)):
            if not expression.second is None:
                return expression.op(expression.first, expression.second)
            elif not expression.first is None:
                return expression.op(expression.first)
            else:
                return expression.op()
        elif field_type:
            return self.represent(expression,field_type)
        elif isinstance(expression,(list,tuple)):
            return ','.join([self.represent(item,field_type) for item in expression])
        else:
            return str(expression)

    def alias(self,table,alias):
        """
        given a table object, makes a new table object
        with an alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._tablename
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = table
        return other

    def _truncate(self,table,mode = ''):
        tablename = table._tablename
        return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]

    def truncate(self,table,mode = ''):
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        queries = table._db._adapter._truncate(table, mode)
        for query in queries:
            if table._dbt:
                logfile.write(query + '\n')
            self.execute(query)
        table._db.commit()
        if table._dbt:
            logfile.write('success!\n')

    def _update(self,tablename,query,fields):
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.name, self.expand(value,field.type)) for (field,value) in fields])
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)

    def update(self,tablename,query,fields):
        sql = self._update(tablename,query,fields)
        self.execute(sql)
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self,tablename, query):
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        return 'DELETE FROM %s%s;' % (tablename, sql_w)

    def delete(self,tablename,query):
        sql = self._delete(tablename,query)

        db = self.db
        table = db[tablename]
        if self.dbengine=='sqlite' and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]

        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None

        if self.dbengine=='sqlite' and counter:
            for tablename,fieldname in table._referenced_by:
                f = db[tablename][fieldname]
                if f.type=='reference '+table._tablename and f.ondelete=='CASCADE':
                    db(db[tablename][fieldname].belongs(deleted)).delete()

        return counter

    def get_table(self,query):
        tablenames = self.tables(query)
        if len(tablenames)==1:
            return tablenames[0]
        elif len(tablenames)<1:
            raise RuntimeError, "No table selected"
        else:
            raise RuntimeError, "Too many tables selected"

    def _select(self, query, fields, attributes):
        for key in set(attributes.keys())-set(('orderby','groupby','limitby',
                                               'required','cache','left',
                                               'distinct','having', 'join')):
            raise SyntaxError, 'invalid select attribute: %s' % key

        new_fields = []
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item.table
            else:
                new_fields.append(item)
        fields = new_fields
        tablenames = self.tables(query)
        query = self.filter_tenant(query,tablenames)
        if not fields:
            for table in tablenames:
                for field in self.db[table]:
                    fields.append(field)
        else:
            for field in fields:
                if isinstance(field,basestring) and table_field.match(field):
                    tn,fn = field.split('.')
                    field = self.db[tn][fn]
                for tablename in self.tables(field):
                    if not tablename in tablenames:
                        tablenames.append(tablename)
        if len(tablenames) < 1:
            raise SyntaxError, 'Set: no tables selected'
        sql_f = ', '.join([self.expand(f) for f in fields])
        self._colnames = [c.strip() for c in sql_f.split(', ')]
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_o = ''
        sql_s = ''
        left = attributes.get('left', False)
        inner_join = attributes.get('join', False)
        distinct = attributes.get('distinct', False)
        groupby = attributes.get('groupby', False)
        orderby = attributes.get('orderby', False)
        having = attributes.get('having', False)
        limitby = attributes.get('limitby', False)
        if distinct is True:
            sql_s += 'DISTINCT'
        elif distinct:
            sql_s += 'DISTINCT ON (%s)' % distinct
        if inner_join:
            icommand = self.JOIN()
            if not isinstance(inner_join, (tuple, list)):
                inner_join = [inner_join]
            ijoint = [t._tablename for t in inner_join if not isinstance(t,Expression)]
            ijoinon = [t for t in inner_join if isinstance(t, Expression)]
            ijoinont = [t.first._tablename for t in ijoinon]
            iexcluded = [t for t in tablenames if not t in ijoint + ijoinont]
        if left:
            join = attributes['left']
            command = self.LEFT_JOIN()
            if not isinstance(join, (tuple, list)):
                join = [join]
            joint = [t._tablename for t in join if not isinstance(t,Expression)]
            joinon = [t for t in join if isinstance(t, Expression)]

            tables_to_merge = {}
            [tables_to_merge.update(dict.fromkeys(self.tables(t))) for t in joinon]
            joinont = [t.first._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames if not t in important_tablenames]
        if inner_join and not left:
            sql_t = ', '.join(iexcluded)
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
        elif not inner_join and left:
            sql_t = ', '.join([t for t in excluded + tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        elif inner_join and left:
            sql_t = ','.join([t for t in excluded + tables_to_merge.keys() if t in iexcluded])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        else:
            sql_t = ', '.join(tablenames)
        if groupby:
            if isinstance(groupby, (list, tuple)):
                groupby = xorify(groupby)
            sql_o += ' GROUP BY %s' % self.expand(groupby)
            if having:
                sql_o += ' HAVING %s' % attributes['having']
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self.RANDOM()
            else:
                sql_o += ' ORDER BY %s' % self.expand(orderby)
        if limitby:
            if not orderby and tablenames:
                sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for t in tablenames for x in ((hasattr(self.db[t],'_primarykey') and self.db[t]._primarykey) or [self.db[t]._id.name])])

        return self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
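    # e.g. limitby=(10,30) renders as '... LIMIT 20 OFFSET 10' on engines with
    # LIMIT/OFFSET support; adapters for other engines override select_limitby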

    def select(self,query,fields,attributes):
        """
        Always returns a Rows object, even if it may be empty
        """
        def response(sql):
            self.execute(sql)
            return self.cursor.fetchall()
        sql = self._select(query,fields,attributes)
        if attributes.get('cache', None):
            (cache_model, time_expire) = attributes['cache']
            del attributes['cache']
            key = self.uri + '/' + sql
            key = (len(key)<=200) and key or hashlib.md5(key).hexdigest()
            rows = cache_model(key, lambda: response(sql), time_expire)
        else:
            rows = response(sql)
        if isinstance(rows,tuple):
            rows = list(rows)
        limitby = attributes.get('limitby',None) or (0,)
        rows = self.rowslice(rows,limitby[0],None)
        return self.parse(rows,self._colnames)

    def _count(self,query,distinct=None):
        tablenames = self.tables(query)
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_t = ','.join(tablenames)
        if distinct:
            if isinstance(distinct,(list,tuple)):
                distinct = xorify(distinct)
            sql_d = self.expand(distinct)
            return 'SELECT count(DISTINCT %s) FROM %s%s' % (sql_d, sql_t, sql_w)
        return 'SELECT count(*) FROM %s%s' % (sql_t, sql_w)

    def count(self,query,distinct=None):
        self.execute(self._count(query,distinct))
        return self.cursor.fetchone()[0]

    def tables(self,query):
        tables = set()
        if isinstance(query, Field):
            tables.add(query.tablename)
        elif isinstance(query, (Expression, Query)):
            if query.first!=None:
                tables = tables.union(self.tables(query.first))
            if query.second!=None:
                tables = tables.union(self.tables(query.second))
        return list(tables)

    def commit(self):
        return self.connection.commit()

    def rollback(self):
        return self.connection.rollback()

    def close(self):
        return self.connection.close()

    def constraint_name(self, table, fieldname):
        return '%s_%s__constraint' % (table,fieldname)

    def log_execute(self, *a, **b):
        self.db._lastsql = a[0]
        t0 = time.time()
        ret = self.cursor.execute(*a,**b)
        self.db._timings.append((a[0],time.time()-t0))
        return ret

    def execute(self,*a,**b):
        return self.log_execute(*a, **b)

    def represent(self, obj, fieldtype):
        if isinstance(obj,CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            return fieldtype.encoder(obj)
        if isinstance(obj, (Expression, Field)):
            return str(obj)
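        # list:* values are serialized as pipe-delimited strings, e.g. [1, 2]
        # becomes '|1|2|' (bar_encode), so CONTAINS can match with '%|item|%'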
        if fieldtype.startswith('list:'):
            if not obj:
                obj = []
            if not isinstance(obj, (list, tuple)):
                obj = [obj]
        if isinstance(obj, (list, tuple)):
            obj = bar_encode(obj)
        if obj is None:
            return 'NULL'
        if obj == '' and not fieldtype[:2] in ['st', 'te', 'pa', 'up']:
            return 'NULL'
        r = self.represent_exceptions(obj,fieldtype)
        if r != None:
            return r
        if fieldtype == 'boolean':
            if obj and not str(obj)[:1].upper() in ['F', '0']:
                return "'T'"
            else:
                return "'F'"
        if fieldtype == 'id' or fieldtype == 'integer':
            return str(int(obj))
        if fieldtype.startswith('decimal'):
            return str(obj)
        elif fieldtype.startswith('reference'):
            if fieldtype.find('.')>0:
                return repr(obj)
            elif isinstance(obj, (Row, Reference)):
                return str(obj['id'])
            return str(int(obj))
        elif fieldtype == 'double':
            return repr(float(obj))
        if isinstance(obj, unicode):
            obj = obj.encode(self.db_codec)
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T',' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+' 00:00:00'
            else:
                obj = str(obj)
        elif fieldtype == 'time':
            if isinstance(obj, datetime.time):
                obj = obj.isoformat()[:8]
            else:
                obj = str(obj)
        if not isinstance(obj,str):
            obj = str(obj)
        try:
            obj.decode(self.db_codec)
        except:
            obj = obj.decode('latin1').encode(self.db_codec)
        return "'%s'" % obj.replace("'", "''")

    def represent_exceptions(self, obj, fieldtype):
        return None

    def lastrowid(self,table):
        return None

    def integrity_error_class(self):
        return type(None)

    def rowslice(self,rows,minimum=0,maximum=None):
        """ by default this function does nothing; overload it when the db does not do slicing """
        return rows

    def parse(self, rows, colnames, blob_decode=True):
        db = self.db
        virtualtables = []
        new_rows = []
        for (i,row) in enumerate(rows):
            new_row = Row()
            for j,colname in enumerate(colnames):
                value = row[j]
                if not table_field.match(colnames[j]):
                    if not '_extra' in new_row:
                        new_row['_extra'] = Row()
                    new_row['_extra'][colnames[j]] = value
                    select_as_parser = re.compile("\s+AS\s+(\S+)")
                    new_column_name = select_as_parser.search(colnames[j])
                    if not new_column_name is None:
                        column_name = new_column_name.groups(0)
                        setattr(new_row,column_name[0],value)
                    continue
                (tablename, fieldname) = colname.split('.')
                table = db[tablename]
                field = table[fieldname]
                field_type = field.type
                if field.type != 'blob' and isinstance(value, str):
                    try:
                        value = value.decode(db._db_codec)
                    except Exception:
                        pass
                if isinstance(value, unicode):
                    value = value.encode('utf-8')
                if not tablename in new_row:
                    colset = new_row[tablename] = Row()
                    virtualtables.append(tablename)
                else:
                    colset = new_row[tablename]

                if isinstance(field_type, SQLCustomType):
                    colset[fieldname] = field_type.decoder(value)
                elif not isinstance(field_type, str) or value is None:
                    colset[fieldname] = value
                elif isinstance(field_type, str) and \
                        field_type.startswith('reference'):
                    referee = field_type[10:].strip()
                    if not '.' in referee:
                        colset[fieldname] = rid = Reference(value)
                        (rid._table, rid._record) = (db[referee], None)
                    else:
                        colset[fieldname] = value
                elif field_type == 'boolean':
                    if value == True or str(value)[:1].lower() == 't':
                        colset[fieldname] = True
                    else:
                        colset[fieldname] = False
                elif field_type == 'date' \
                        and (not isinstance(value, datetime.date) \
                        or isinstance(value, datetime.datetime)):
                    (y, m, d) = [int(x) for x in
                                 str(value)[:10].strip().split('-')]
                    colset[fieldname] = datetime.date(y, m, d)
                elif field_type == 'time' \
                        and not isinstance(value, datetime.time):
                    time_items = [int(x) for x in
                                  str(value)[:8].strip().split(':')[:3]]
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    colset[fieldname] = datetime.time(h, mi, s)
                elif field_type == 'datetime' \
                        and not isinstance(value, datetime.datetime):
                    (y, m, d) = [int(x) for x in
                                 str(value)[:10].strip().split('-')]
                    time_items = [int(x) for x in
                                  str(value)[11:19].strip().split(':')[:3]]
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    colset[fieldname] = datetime.datetime(y, m, d, h, mi, s)
                elif field_type == 'blob' and blob_decode:
                    colset[fieldname] = base64.b64decode(str(value))
                elif field_type.startswith('decimal'):
                    decimals = [int(x) for x in field_type[8:-1].split(',')][-1]
                    if self.dbengine == 'sqlite':
                        value = ('%.' + str(decimals) + 'f') % value
                    if not isinstance(value, decimal.Decimal):
                        value = decimal.Decimal(str(value))
                    colset[fieldname] = value
                elif field_type.startswith('list:integer'):
                    if not self.dbengine=='google:datastore':
                        colset[fieldname] = bar_decode_integer(value)
                    else:
                        colset[fieldname] = value
                elif field_type.startswith('list:reference'):
                    if not self.dbengine=='google:datastore':
                        colset[fieldname] = bar_decode_integer(value)
                    else:
                        colset[fieldname] = value
                elif field_type.startswith('list:string'):
                    if not self.dbengine=='google:datastore':
                        colset[fieldname] = bar_decode_string(value)
                    else:
                        colset[fieldname] = value
                else:
                    colset[fieldname] = value
                if field_type == 'id':
                    id = colset[field.name]
                    colset.update_record = lambda _ = (colset, table, id), **a: update_record(_, a)
                    colset.delete_record = lambda t = table, i = id: t._db(t._id==i).delete()
                    for (referee_table, referee_name) in \
                            table._referenced_by:
                        s = db[referee_table][referee_name]
                        if not referee_table in colset:
                            colset[referee_table] = Set(db, s == id)
                    colset['id'] = id
            new_rows.append(new_row)
        rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
        for tablename in virtualtables:
            for item in db[tablename].virtualfields:
                try:
                    rowsobj = rowsobj.setvirtualfields(**{tablename:item})
                except KeyError:
                    # ignore partial selects that lack the fields
                    # required by this table's virtualfields
                    pass
        return rowsobj

    def filter_tenant(self,query,tablenames):
        fieldname = self.db._request_tenant
        for tablename in tablenames:
            table = self.db[tablename]
            if fieldname in table:
                default = table[fieldname].default
                if default!=None:
                    query = query&(table[fieldname]==default)
        return query


class SQLiteAdapter(BaseAdapter):

    def EXTRACT(self,field,what):
        return "web2py_extract('%s',%s)" % (what,self.expand(field))

    @staticmethod
    def web2py_extract(lookup, s):
        table = {
            'year': (0, 4),
            'month': (5, 7),
            'day': (8, 10),
            'hour': (11, 13),
            'minute': (14, 16),
            'second': (17, 19),
            }
        try:
            (i, j) = table[lookup]
            return int(s[i:j])
        except:
            return None
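    # e.g. web2py_extract('year', '2011-12-25 10:30:15') -> 2011; registered
    # below as the SQL function web2py_extract on each sqlite connection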

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1]
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://')[1]
            if dbpath[0] != '/':
                dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'),dbpath)
        if not 'check_same_thread' in driver_args:
            driver_args['check_same_thread'] = False
        def connect(dbpath=dbpath, driver_args=driver_args):
            return sqlite3.Connection(dbpath, **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)

    def _truncate(self,table,mode = ''):
        tablename = table._tablename
        return ['DELETE FROM %s;' % tablename,
                "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]

    def lastrowid(self,table):
        return self.cursor.lastrowid


class JDBCSQLiteAdapter(SQLiteAdapter):

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1]
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://')[1]
            if dbpath[0] != '/':
                dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'),dbpath)
        def connect(dbpath=dbpath,driver_args=driver_args):
            return zxJDBC.connect(java.sql.DriverManager.getConnection('jdbc:sqlite:'+dbpath),**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)


class MySQLAdapter(BaseAdapter):

    driver = globals().get('pymysql',None)
    maxcharlength = 255
    commit_on_alter_table = True
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONGTEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONGBLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'INT AUTO_INCREMENT NOT NULL',
        'reference': 'INT, INDEX %(field_name)s__idx (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONGTEXT',
        'list:string': 'LONGTEXT',
        'list:reference': 'LONGTEXT',
        }

    def RANDOM(self):
        return 'RAND()'

    def SUBSTRING(self,field,parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def _drop(self,table,mode):
        # MySQL needs foreign key checks off while dropping a referenced table
        return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table,'SET FOREIGN_KEY_CHECKS=1;']

    def distributed_transaction_begin(self,key):
        self.execute('XA START;')

    def prepare(self,key):
        self.execute("XA END;")
        self.execute("XA PREPARE;")

    def commit_prepared(self,key):
        self.execute("XA COMMIT;")

    def rollback_prepared(self,key):
        self.execute("XA ROLLBACK;")

    def concat_add(self,table):
        return '; ALTER TABLE %s ADD ' % table

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = int(m.group('port') or '3306')
        charset = m.group('charset') or 'utf8'
        driver_args.update(dict(db=db,
                                user=credential_decoder(user),
                                passwd=credential_decoder(password),
                                host=host,
                                port=port,
                                charset=charset))
        def connect(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")

    def lastrowid(self,table):
        self.execute('select last_insert_id();')
        return int(self.cursor.fetchone()[0])

class PostgreSQLAdapter(BaseAdapter):

    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def sequence_name(self,table):
        return '%s_id_Seq' % table

    def RANDOM(self):
        return 'RANDOM()'

    def distributed_transaction_begin(self,key):
        return

    def prepare(self,key):
        self.execute("PREPARE TRANSACTION '%s';" % key)

    def commit_prepared(self,key):
        self.execute("COMMIT PREPARED '%s';" % key)

    def rollback_prepared(self,key):
        self.execute("ROLLBACK PREPARED '%s';" % key)

    def create_sequence_and_triggers(self, query, table, **args):
        # PostgreSQL 'SERIAL PRIMARY KEY' creates its own sequence,
        # so only the CREATE TABLE query needs to be executed
        self.execute(query)

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = m.group('port') or '5432'
        sslmode = m.group('sslmode')
        if sslmode:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s' sslmode='%s'") \
                   % (db, user, host, port, password, sslmode)
        else:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s'") \
                   % (db, user, host, port, password)
        def connect(msg=msg,driver_args=driver_args):
            return psycopg2.connect(msg,**driver_args)
        self.pool_connection(connect)
        self.connection.set_client_encoding('UTF8')
        self.cursor = self.connection.cursor()
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
        self.execute("SET standard_conforming_strings=on;")

    def lastrowid(self,table):
        self.execute("select currval('%s')" % table._sequence_name)
        return int(self.cursor.fetchone()[0])

    def LIKE(self,first,second):
        return '(%s ILIKE %s)' % (self.expand(first),self.expand(second,'string'))

    def STARTSWITH(self,first,second):
        return '(%s ILIKE %s)' % (self.expand(first),self.expand(second+'%','string'))

    def ENDSWITH(self,first,second):
        return '(%s ILIKE %s)' % (self.expand(first),self.expand('%'+second,'string'))

    def CONTAINS(self,first,second):
        if first.type in ('string','text'):
            key = '%'+str(second).replace('%','%%')+'%'
        elif first.type.startswith('list:'):
            key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
        return '(%s ILIKE %s)' % (self.expand(first),self.expand(key,'string'))

class JDBCPostgreSQLAdapter(PostgreSQLAdapter):

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = m.group('port') or '5432'
        msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
        def connect(msg=msg,driver_args=driver_args):
            return zxJDBC.connect(*msg,**driver_args)
        self.pool_connection(connect)
        self.connection.set_client_encoding('UTF8')
        self.cursor = self.connection.cursor()
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")

class OracleAdapter(BaseAdapter):

    commit_on_alter_table = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR2(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR2(%(length)s)',
        'blob': 'CLOB',
        'upload': 'VARCHAR2(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATE',
        'id': 'NUMBER PRIMARY KEY',
        'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def sequence_name(self,tablename):
        return '%s_sequence' % tablename

    def trigger_name(self,tablename):
        return '%s_trigger' % tablename

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'dbms_random.value'

    def NOT_NULL(self,default,field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)

    def _drop(self,table,mode):
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table, mode), 'DROP SEQUENCE %s;' % sequence_name]

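    # Oracle has no native LIMIT/OFFSET: select_limitby emulates limitby by
    # wrapping the query and filtering on ROWNUM (aliased as w_row)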
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return '%s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def constraint_name(self, tablename, fieldname):
        constraint_name = BaseAdapter.constraint_name(self, tablename, fieldname)
        if len(constraint_name)>30:
            constraint_name = '%s_%s__constraint' % (tablename[:10], fieldname[:7])
        return constraint_name

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return ":CLOB('%s')" % obj
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T',' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10]+' 00:00:00'
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
        return None

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "oracle"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        if not 'threaded' in driver_args:
            driver_args['threaded'] = True
        def connect(uri=uri,driver_args=driver_args):
            return cx_Oracle.connect(uri,**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
        self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")

    oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")

    def execute(self, command):
        args = []
        i = 1
        while True:
            m = self.oracle_fix.match(command)
            if not m:
                break
            command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
            args.append(m.group('clob')[6:-2].replace("''", "'"))
            i += 1
        return self.log_execute(command[:-1], args)

    def create_sequence_and_triggers(self, query, table, **args):
        tablename = table._tablename
        sequence_name = table._sequence_name
        trigger_name = table._trigger_name
        self.execute(query)
        self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE;' % sequence_name)
        self.execute('CREATE OR REPLACE TRIGGER %s BEFORE INSERT ON %s FOR EACH ROW BEGIN SELECT %s.nextval INTO :NEW.id FROM DUAL; END;\n' % (trigger_name, tablename, sequence_name))

    def lastrowid(self,table):
        sequence_name = table._sequence_name
        self.execute('SELECT %s.currval FROM dual;' % sequence_name)
        return int(self.cursor.fetchone()[0])


class MSSQLAdapter(BaseAdapter):

    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def EXTRACT(self,field,what):
        return "DATEPART('%s' FROM %s)" % (what, self.expand(field))

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'NEWID()'

    def ALLOW_NULL(self):
        return ' NULL'

    def SUBSTRING(self,field,parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self,key):
        return 'PRIMARY KEY CLUSTERED (%s)' % key

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'boolean':
            if obj and not str(obj)[0].upper() == 'F':
                return '1'
            else:
                return '0'
        return None

    def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}, fake_connect=False):
        self.db = db
        self.dbengine = "mssql"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()

        uri = uri.split('://')[1]
        if '@' not in uri:
            try:
                m = re.compile('^(?P<dsn>.+)$').match(uri)
                if not m:
                    raise SyntaxError, \
                        'Parsing uri string(%s) has no result' % self.uri
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError, 'DSN required'
            except SyntaxError, e:
                logger.error('NdGpatch error')
                raise e
            cnxn = 'DSN=%s' % dsn
        else:
            m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$').match(uri)
            if not m:
                raise SyntaxError, \
                    "Invalid URI string in DAL: %s" % uri
            user = credential_decoder(m.group('user'))
            if not user:
                raise SyntaxError, 'User required'
            password = credential_decoder(m.group('password'))
            if not password:
                password = ''
            host = m.group('host')
            if not host:
                raise SyntaxError, 'Host name required'
            db = m.group('db')
            if not db:
                raise SyntaxError, 'Database name required'
            port = m.group('port') or '1433'

            argsdict = { 'DRIVER':'{SQL Server}' }
            urlargs = m.group('urlargs') or ''
            argpattern = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
            for argmatch in argpattern.finditer(urlargs):
                argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
            urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.items()])
            cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
                % (host, port, db, user, password, urlargs)
        def connect(cnxn=cnxn,driver_args=driver_args):
            return pyodbc.connect(cnxn,**driver_args)
        if not fake_connect:
            self.pool_connection(connect)
            self.cursor = self.connection.cursor()

    def lastrowid(self,table):
        self.execute('SELECT SCOPE_IDENTITY();')
        return int(self.cursor.fetchone()[0])

    def integrity_error_class(self):
        return pyodbc.IntegrityError

    def rowslice(self,rows,minimum=0,maximum=None):
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]


class MSSQL2Adapter(MSSQLAdapter):

2036 types = {
2037 'boolean': 'CHAR(1)',
2038 'string': 'NVARCHAR(%(length)s)',
2039 'text': 'NTEXT',
2040 'password': 'NVARCHAR(%(length)s)',
2041 'blob': 'IMAGE',
2042 'upload': 'NVARCHAR(%(length)s)',
2043 'integer': 'INT',
2044 'double': 'FLOAT',
2045 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2046 'date': 'DATETIME',
2047 'time': 'CHAR(8)',
2048 'datetime': 'DATETIME',
2049 'id': 'INT IDENTITY PRIMARY KEY',
2050 'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2051 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2052 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2053 'list:integer': 'NTEXT',
2054 'list:string': 'NTEXT',
2055 'list:reference': 'NTEXT',
2056 }
2057
2059 value = BaseAdapter.represent(self, obj, fieldtype)
2060 if fieldtype == 'string' or fieldtype == 'text' and value[:1]=="'":
2061 value = 'N'+value
2062 return value
2063
2066
2067

class FireBirdAdapter(BaseAdapter):

    commit_on_alter_table = False
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'BLOB SUB_TYPE 1',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB SUB_TYPE 0',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'DOUBLE PRECISION',
        'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'BLOB SUB_TYPE 1',
        'list:string': 'BLOB SUB_TYPE 1',
        'list:reference': 'BLOB SUB_TYPE 1',
        }

    def sequence_name(self, tablename):
        return 'genid_%s' % tablename

    def trigger_name(self, tablename):
        return 'trg_id_%s' % tablename

    def NOT_NULL(self, default, field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default, field_type)

    def SUBSTRING(self, field, parameters):
        return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])

    def _drop(self, table, mode):
        # dropping a table must also drop the generator that feeds its id
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table, mode),
                'DROP GENERATOR %s;' % sequence_name]

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' FIRST %i SKIP %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
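    # e.g. limitby=(20,30) renders as 'SELECT FIRST 10 SKIP 20 ...':
    # Firebird expresses both the page size (lmax - lmin) and the offset
    # (lmin) directly in the select list, so no client-side slicing is needed.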

    def _truncate(self, table, mode=''):
        return ['DELETE FROM %s;' % table._tablename,
                'SET GENERATOR %s TO 0;' % table._sequence_name]

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL: %s" % uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        port = int(m.group('port') or 3050)
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        charset = m.group('charset') or 'UTF8'
        driver_args.update(dict(dsn='%s/%s:%s' % (host, port, db),
                                user=credential_decoder(user),
                                password=credential_decoder(password),
                                charset=charset))

        def connect(driver_args=driver_args, adapter_args=adapter_args):
            # default to kinterbasdb; the original left conn unset when
            # adapter_args carried no driver_name
            if adapter_args.get('driver_name') == 'firebirdsql':
                return firebirdsql.connect(**driver_args)
            return kinterbasdb.connect(**driver_args)

        self.pool_connection(connect)
        self.cursor = self.connection.cursor()


class FireBirdEmbeddedAdapter(FireBirdAdapter):

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "firebird"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        pathdb = m.group('path')
        if not pathdb:
            raise SyntaxError, 'Path required'
        charset = m.group('charset')
        if not charset:
            charset = 'UTF8'
        host = ''
        driver_args.update(dict(host=host,
                                database=pathdb,
                                user=credential_decoder(user),
                                password=credential_decoder(password),
                                charset=charset))

        def connect(driver_args=driver_args, adapter_args=adapter_args):
            # same default as FireBirdAdapter: fall back to kinterbasdb
            if adapter_args.get('driver_name') == 'firebirdsql':
                return firebirdsql.connect(**driver_args)
            return kinterbasdb.connect(**driver_args)

        self.pool_connection(connect)
        self.cursor = self.connection.cursor()



class DB2Adapter(BaseAdapter):

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
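    # e.g. limitby=(20,30) appends 'FETCH FIRST 30 ROWS ONLY'; no offset is
    # emitted here, so as with MSSQL the first lmin rows are discarded
    # client-side by rowslice() below.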

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return "BLOB('%s')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10] + '-00.00.00'
            return "'%s'" % obj
        return None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "db2"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        cnxn = uri.split('://', 1)[1]
        def connect(cnxn=cnxn, driver_args=driver_args):
            return pyodbc.connect(cnxn, **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()

    def execute(self, command):
        # DB2 via pyodbc rejects a trailing semicolon
        if command[-1:] == ';':
            command = command[:-1]
        return self.log_execute(command)

    def lastrowid(self, table):
        self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
        return int(self.cursor.fetchone()[0])

    def rowslice(self, rows, minimum=0, maximum=None):
        if maximum is None:
            return rows[minimum:]
        return rows[minimum:maximum]


INGRES_SEQNAME = 'ii***lineitemsequence'  # NOTE: not a valid database object
                                          # name; ANSI SQL would require this
                                          # form to be a delimited identifier


class IngresAdapter(BaseAdapter):

    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER4',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'integer4 not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            fetch_amt = lmax - lmin
            if fetch_amt:
                sql_s += ' FIRST %d ' % (fetch_amt, )
            if lmin:
                # OFFSET requires Ingres 9.2+
                sql_o += ' OFFSET %d' % (lmin, )
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "ingres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        connstr = self.uri.split(':', 1)[1]
        # simple URI processing: strip leading slashes and assume a (local)
        # database name is all that remains
        connstr = connstr.lstrip()
        while connstr.startswith('/'):
            connstr = connstr[1:]
        database_name = connstr
        vnode = '(local)'
        servertype = 'ingres'
        trace = (0, None)
        driver_args.update(dict(database=database_name,
                                vnode=vnode,
                                servertype=servertype,
                                trace=trace))
        def connect(driver_args=driver_args):
            return ingresdbi.connect(**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()

    def create_sequence_and_triggers(self, query, table, **args):
        # post-create auto-increment setup: modify the table to btree for
        # performance and attach a sequence to feed the id column
        if hasattr(table, '_primarykey'):
            modify_tbl_sql = 'modify %s to btree unique on %s' % \
                (table._tablename,
                 ', '.join(["'%s'" % x for x in table._primarykey]))
            self.execute(modify_tbl_sql)
        else:
            tmp_seqname = '%s_iisq' % table._tablename
            query = query.replace(INGRES_SEQNAME, tmp_seqname)
            self.execute('create sequence %s' % tmp_seqname)
            self.execute(query)
            self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))

    def lastrowid(self, table):
        tmp_seqname = '%s_iisq' % table
        self.execute('select current value for %s' % tmp_seqname)
        return int(self.cursor.fetchone()[0])


class IngresUnicodeAdapter(IngresAdapter):

    types = {
        'boolean': 'CHAR(1)',
        'string': 'NVARCHAR(%(length)s)',
        'text': 'NCLOB',
        'password': 'NVARCHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER4',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'ANSIDATE',
        'time': 'TIME WITHOUT TIME ZONE',
        'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
        'id': 'integer4 not null unique with default next value for %s' % INGRES_SEQNAME,
        'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'NCLOB',
        'list:string': 'NCLOB',
        'list:reference': 'NCLOB',
        }


class SAPDBAdapter(BaseAdapter):

    support_distributed_transaction = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONG',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONG',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'FIXED(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INT PRIMARY KEY',
        'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONG',
        'list:string': 'LONG',
        'list:reference': 'LONG',
        }

    def sequence_name(self, table):
        return '%s_id_Seq' % table

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            # emulate offset/limit with a nested ROWNO subquery
            return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def create_sequence_and_triggers(self, query, table, **args):
        # the id column defaults to the next sequence value
        self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
        self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
                         % (table._tablename, table._id.name, table._sequence_name))
        self.execute(query)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "sapdb"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        def connect(user=user, password=password, database=db,
                    host=host, driver_args=driver_args):
            return sapdb.Connection(user, password, database, host, **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()

    def lastrowid(self, table):
        self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
        return int(self.cursor.fetchone()[0])

class CubridAdapter(MySQLAdapter):

    driver = globals().get('cubriddb', None)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "cubrid"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = int(m.group('port') or '30000')
        charset = m.group('charset') or 'utf8'
        user = credential_decoder(user)
        passwd = credential_decoder(password)
        # bind the connection parameters as defaults so the pool can call
        # connect() without arguments (the original also left stray trailing
        # commas that turned user/passwd into tuples)
        def connect(host=host, port=port, db=db, user=user, passwd=passwd,
                    driver_args=driver_args):
            return self.driver.connect(host, port, db, user, passwd, **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")


class DatabaseStoredFile:

    web2py_filesystem = False

    def __init__(self, db, filename, mode):
        if db._adapter.dbengine != 'mysql':
            raise RuntimeError, "only MySQL can store metadata .table files in database for now"
        self.db = db
        self.filename = filename
        self.mode = mode
        if not self.web2py_filesystem:
            self.db.executesql("CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(512), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;")
            DatabaseStoredFile.web2py_filesystem = True
        self.p = 0
        self.data = ''
        if mode in ('r', 'rw', 'a'):
            query = "SELECT content FROM web2py_filesystem WHERE path='%s'" % filename
            rows = self.db.executesql(query)
            if rows:
                self.data = rows[0][0]
            elif os.path.exists(filename):
                self.data = open(filename, 'r').read()
            elif mode in ('r', 'rw'):
                raise RuntimeError, "File %s does not exist" % filename

    def read(self, bytes):
        data = self.data[self.p:self.p + bytes]
        self.p += len(data)
        return data

    def readline(self):
        i = self.data.find('\n', self.p) + 1
        if i > 0:
            data, self.p = self.data[self.p:i], i
        else:
            data, self.p = self.data[self.p:], len(self.data)
        return data

    def write(self, data):
        # writes are buffered in memory and flushed on close()
        self.data += data

    def close(self):
        self.db.executesql("DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
        query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')" % \
            (self.filename, self.data.replace("'", "''"))
        self.db.executesql(query)
        self.db.commit()

    @staticmethod
    def exists(db, filename):
        if os.path.exists(filename):
            return True
        query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
        if db.executesql(query):
            return True
        return False
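# Hedged usage sketch: GoogleSQLAdapter below mixes in UseDatabaseStoredFile
# so that migration metadata normally kept in <folder>/*.table files lives in
# the web2py_filesystem table instead:
#   f = DatabaseStoredFile(db, 'myapp_person.table', 'w')
#   f.write(contents)   # buffered in memory
#   f.close()           # persisted via INSERT INTO web2py_filesystem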


class UseDatabaseStoredFile:

    def file_exists(self, filename):
        return DatabaseStoredFile.exists(self.db, filename)

    def file_open(self, filename, mode='rb', lock=True):
        return DatabaseStoredFile(self.db, filename, mode)

    def file_close(self, fileobj, unlock=True):
        fileobj.close()

    def file_delete(self, filename):
        query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
        self.db.executesql(query)
        self.db.commit()

class GoogleSQLAdapter(UseDatabaseStoredFile, MySQLAdapter):

    def __init__(self, db, uri='google:sql://realm:domain/database', pool_size=0,
                 folder=None, db_codec='UTF-8', check_reserved=None,
                 migrate=True, fake_migrate=False,
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):

        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        self.pool_size = pool_size
        self.db_codec = db_codec
        self.folder = folder or '$HOME/' + thread.folder.split('/applications/', 1)[1]

        m = re.compile('^(?P<instance>.*)/(?P<db>.*)$').match(self.uri[len('google:sql://'):])
        if not m:
            raise SyntaxError, "Invalid URI string in SQLDB: %s" % self.uri
        instance = credential_decoder(m.group('instance'))
        db = credential_decoder(m.group('db'))
        driver_args['instance'] = instance
        if not migrate:
            driver_args['database'] = db
        def connect(driver_args=driver_args):
            return rdbms.connect(**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        if migrate:
            # the database is created on first use when migrations are on
            self.execute('CREATE DATABASE IF NOT EXISTS %s' % db)
            self.execute('USE %s' % db)
        self.execute("SET FOREIGN_KEY_CHECKS=1;")
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")


class NoSQLAdapter(BaseAdapter):

    def represent(self, obj, fieldtype):
        if isinstance(obj, CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            return fieldtype.encoder(obj)
        if isinstance(obj, (Expression, Field)):
            raise SyntaxError, "non supported on GAE"
        if self.dbengine == 'google:datastore':
            if isinstance(fieldtype, gae.Property):
                return obj
        if fieldtype.startswith('list:'):
            if not obj:
                obj = []
            if not isinstance(obj, (list, tuple)):
                obj = [obj]
        if obj == '' and not fieldtype[:2] in ['st', 'te', 'pa', 'up']:
            return None
        if obj is not None:
            if isinstance(obj, list) and not fieldtype.startswith('list'):
                obj = [self.represent(o, fieldtype) for o in obj]
            elif fieldtype in ('integer', 'id'):
                obj = long(obj)
            elif fieldtype == 'double':
                obj = float(obj)
            elif fieldtype.startswith('reference'):
                if isinstance(obj, (Row, Reference)):
                    obj = obj['id']
                obj = long(obj)
            elif fieldtype == 'boolean':
                if obj and not str(obj)[0].upper() == 'F':
                    obj = True
                else:
                    obj = False
            elif fieldtype == 'date':
                if not isinstance(obj, datetime.date):
                    (y, m, d) = [int(x) for x in str(obj).strip().split('-')]
                    obj = datetime.date(y, m, d)
                elif isinstance(obj, datetime.datetime):
                    (y, m, d) = (obj.year, obj.month, obj.day)
                    obj = datetime.date(y, m, d)
            elif fieldtype == 'time':
                if not isinstance(obj, datetime.time):
                    time_items = [int(x) for x in str(obj).strip().split(':')[:3]]
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    obj = datetime.time(h, mi, s)
            elif fieldtype == 'datetime':
                if not isinstance(obj, datetime.datetime):
                    (y, m, d) = [int(x) for x in str(obj)[:10].strip().split('-')]
                    time_items = [int(x) for x in str(obj)[11:].strip().split(':')[:3]]
                    while len(time_items) < 3:
                        time_items.append(0)
                    (h, mi, s) = time_items
                    obj = datetime.datetime(y, m, d, h, mi, s)
            elif fieldtype == 'blob':
                pass
            elif fieldtype.startswith('list:string'):
                if obj is not None and not isinstance(obj, (list, tuple)):
                    obj = [obj]
                return [str(x) for x in obj]
            elif fieldtype.startswith('list:'):
                if obj is not None and not isinstance(obj, (list, tuple)):
                    obj = [obj]
                return [int(x) for x in obj]
            elif isinstance(obj, str):
                obj = obj.decode('utf8')
            elif not isinstance(obj, unicode):
                obj = unicode(obj)
        return obj

    def _insert(self, table, fields):
        return 'insert %s in %s' % (fields, table)

    def _count(self, query, distinct=None):
        return 'count %s' % repr(query)

    def _select(self, query, fields, attributes):
        return 'select %s where %s' % (repr(fields), repr(query))

    def _delete(self, tablename, query):
        return 'delete %s where %s' % (repr(tablename), repr(query))

    def _update(self, tablename, query, fields):
        return 'update %s (%s) where %s' % (repr(tablename),
                                            repr(fields), repr(query))

    def commit(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    def rollback(self):
        """
        remember: no transactions on many NoSQL
        """
        pass

    def close(self):
        """
        remember: no transactions on many NoSQL
        """
        pass


    # these methods should never be called on a NoSQL adapter

    def OR(self, first, second): raise SyntaxError, "Not supported"
    def AND(self, first, second): raise SyntaxError, "Not supported"
    def AS(self, first, second): raise SyntaxError, "Not supported"
    def ON(self, first, second): raise SyntaxError, "Not supported"
    def STARTSWITH(self, first, second=None): raise SyntaxError, "Not supported"
    def ENDSWITH(self, first, second=None): raise SyntaxError, "Not supported"
    def ADD(self, first, second): raise SyntaxError, "Not supported"
    def SUB(self, first, second): raise SyntaxError, "Not supported"
    def MUL(self, first, second): raise SyntaxError, "Not supported"
    def DIV(self, first, second): raise SyntaxError, "Not supported"
    def LOWER(self, first): raise SyntaxError, "Not supported"
    def UPPER(self, first): raise SyntaxError, "Not supported"
    def AGGREGATE(self, first, what): raise SyntaxError, "Not supported"
    def LEFT_JOIN(self): raise SyntaxError, "Not supported"
    def RANDOM(self): raise SyntaxError, "Not supported"
    def SUBSTRING(self, field, parameters): raise SyntaxError, "Not supported"
    def PRIMARY_KEY(self, key): raise SyntaxError, "Not supported"
    def LIKE(self, first, second): raise SyntaxError, "Not supported"
    def drop(self, table, mode): raise SyntaxError, "Not supported"
    def alias(self, table, alias): raise SyntaxError, "Not supported"
    def migrate_table(self, *a, **b): raise SyntaxError, "Not supported"
    def prepare(self, key): raise SyntaxError, "Not supported"
    def concat_add(self, table): raise SyntaxError, "Not supported"
    def constraint_name(self, table, fieldname): raise SyntaxError, "Not supported"
    def log_execute(self, *a, **b): raise SyntaxError, "Not supported"
    def execute(self, *a, **b): raise SyntaxError, "Not supported"
    def lastrowid(self, table): raise SyntaxError, "Not supported"
    def rowslice(self, rows, minimum=0, maximum=None): raise SyntaxError, "Not supported"


class GAEF(object):
    def __init__(self, name, op, value, apply):
        self.name = name == 'id' and '__key__' or name
        self.op = op
        self.value = value
        self.apply = apply
    def __repr__(self):
        return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))

class GoogleDatastoreAdapter(NoSQLAdapter):
    uploads_in_blob = True
    types = {}

    def file_open(self, filename, mode='rb', lock=True): pass
    def file_close(self, fileobj, unlock=True): pass

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.types.update({
            'boolean': gae.BooleanProperty,
            'string': (lambda: gae.StringProperty(multiline=True)),
            'text': gae.TextProperty,
            'password': gae.StringProperty,
            'blob': gae.BlobProperty,
            'upload': gae.StringProperty,
            'integer': gae.IntegerProperty,
            'double': gae.FloatProperty,
            'decimal': GAEDecimalProperty,
            'date': gae.DateProperty,
            'time': gae.TimeProperty,
            'datetime': gae.DateTimeProperty,
            'id': None,
            'reference': gae.IntegerProperty,
            'list:string': (lambda: gae.StringListProperty(default=None)),
            'list:integer': (lambda: gae.ListProperty(int, default=None)),
            'list:reference': (lambda: gae.ListProperty(int, default=None)),
            })
        self.db = db
        self.uri = uri
        self.dbengine = 'google:datastore'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self.pool_size = 0
        match = re.compile('.*://(?P<namespace>.+)').match(uri)
        if match:
            namespace_manager.set_namespace(match.group('namespace'))

    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        myfields = {}
        for k in table.fields:
            if isinstance(polymodel, Table) and k in polymodel.fields():
                continue
            field = table[k]
            attr = {}
            if isinstance(field.type, SQLCustomType):
                ftype = self.types[field.type.native or field.type.type](**attr)
            elif isinstance(field.type, gae.Property):
                ftype = field.type
            elif field.type.startswith('id'):
                continue
            elif field.type.startswith('decimal'):
                precision, scale = field.type[7:].strip('()').split(',')
                precision = int(precision)
                scale = int(scale)
                ftype = GAEDecimalProperty(precision, scale, **attr)
            elif field.type.startswith('reference'):
                if field.notnull:
                    attr = dict(required=True)
                referenced = field.type[10:].strip()
                ftype = self.types[field.type[:9]](table._db[referenced])
            elif field.type.startswith('list:reference'):
                if field.notnull:
                    attr = dict(required=True)
                referenced = field.type[15:].strip()
                ftype = self.types[field.type[:14]](**attr)
            elif field.type.startswith('list:'):
                ftype = self.types[field.type](**attr)
            elif not field.type in self.types \
                    or not self.types[field.type]:
                raise SyntaxError, 'Field: unknown field type: %s' % field.type
            else:
                ftype = self.types[field.type](**attr)
            myfields[field.name] = ftype
        if not polymodel:
            table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
        elif polymodel == True:
            table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
        elif isinstance(polymodel, Table):
            table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
        else:
            raise SyntaxError, "polymodel must be None, True, a table or a tablename"
        return None

    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            if expression.type in ('text', 'blob'):
                raise SyntaxError, 'AppEngine does not index by: %s' % expression.type
            return expression.name
        elif isinstance(expression, (Expression, Query)):
            if not expression.second is None:
                return expression.op(expression.first, expression.second)
            elif not expression.first is None:
                return expression.op(expression.first)
            else:
                return expression.op()
        elif field_type:
            return self.represent(expression, field_type)
        elif isinstance(expression, (list, tuple)):
            return ','.join([self.represent(item, field_type) for item in expression])
        else:
            return str(expression)

    def AND(self, first, second):
        # the body of this method was lost in extraction; this is the
        # standard implementation: concatenate the two GAEF filter lists,
        # putting any __key__ filter first
        a = self.expand(first)
        b = self.expand(second)
        if b[0].name == '__key__' and a[0].name != '__key__':
            return b + a
        return a + b

    def EQ(self, first, second=None):
        if isinstance(second, Key):
            return [GAEF(first.name, '=', second, lambda a, b: a == b)]
        return [GAEF(first.name, '=', self.represent(second, first.type), lambda a, b: a == b)]

    def NE(self, first, second=None):
        if first.type != 'id':
            return [GAEF(first.name, '!=', self.represent(second, first.type), lambda a, b: a != b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name, '!=', second, lambda a, b: a != b)]

    def LT(self, first, second=None):
        if first.type != 'id':
            return [GAEF(first.name, '<', self.represent(second, first.type), lambda a, b: a < b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name, '<', second, lambda a, b: a < b)]

    def LE(self, first, second=None):
        if first.type != 'id':
            return [GAEF(first.name, '<=', self.represent(second, first.type), lambda a, b: a <= b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name, '<=', second, lambda a, b: a <= b)]

    def GT(self, first, second=None):
        if first.type != 'id' or second == 0 or second == '0':
            return [GAEF(first.name, '>', self.represent(second, first.type), lambda a, b: a > b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name, '>', second, lambda a, b: a > b)]

    def GE(self, first, second=None):
        if first.type != 'id':
            return [GAEF(first.name, '>=', self.represent(second, first.type), lambda a, b: a >= b)]
        else:
            second = Key.from_path(first._tablename, long(second))
            return [GAEF(first.name, '>=', second, lambda a, b: a >= b)]

    def COMMA(self, first, second):
        return self.expand(first) + self.expand(second)

    def BELONGS(self, first, second=None):
        if not isinstance(second, (list, tuple)):
            raise SyntaxError, "Not supported"
        if first.type != 'id':
            return [GAEF(first.name, 'in', self.represent(second, first.type), lambda a, b: a in b)]
        else:
            second = [Key.from_path(first._tablename, i) for i in second]
            return [GAEF(first.name, 'in', second, lambda a, b: a in b)]

    def NOT(self, first):
        # negation is implemented by swapping each operator for its inverse
        nops = { self.EQ: self.NE,
                 self.NE: self.EQ,
                 self.LT: self.GE,
                 self.GT: self.LE,
                 self.LE: self.GT,
                 self.GE: self.LT}
        if not isinstance(first, Query):
            raise SyntaxError, "Not supported"
        nop = nops.get(first.op, None)
        if not nop:
            raise SyntaxError, "Not supported %s" % first.op.__name__
        first.op = nop
        return self.expand(first)

    def truncate(self, table, mode):
        self.db(table.id > 0).delete()

    def select_raw(self, query, fields=[], attributes={}):
        new_fields = []
        for item in fields:
            if isinstance(item, SQLALL):
                new_fields += item.table
            else:
                new_fields.append(item)
        fields = new_fields
        if query:
            tablename = self.get_table(query)
        elif fields:
            tablename = fields[0].tablename
            query = fields[0].table._id > 0
        else:
            raise SyntaxError, "Unable to determine a tablename"
        query = self.filter_tenant(query, [tablename])
        tableobj = self.db[tablename]._tableobj
        items = tableobj.all()
        filters = self.expand(query)
        for filter in filters:
            if filter.name == '__key__' and filter.op == '>' and filter.value == 0:
                continue
            elif filter.name == '__key__' and filter.op == '=':
                if filter.value == 0:
                    items = []
                elif isinstance(filter.value, Key):
                    item = tableobj.get(filter.value)
                    items = (item and [item]) or []
                else:
                    item = tableobj.get_by_id(filter.value)
                    items = (item and [item]) or []
            elif isinstance(items, list):
                # items was materialized by a key lookup above: apply the
                # remaining filters in Python
                items = [i for i in items if filter.apply(getattr(i, filter.name),
                                                          filter.value)]
            else:
                if filter.name == '__key__': items.order('__key__')
                items = items.filter('%s %s' % (filter.name, filter.op), filter.value)
        if not isinstance(items, list):
            if attributes.get('left', None):
                raise SyntaxError, 'Set: no left join in appengine'
            if attributes.get('groupby', None):
                raise SyntaxError, 'Set: no groupby in appengine'
            orderby = attributes.get('orderby', False)
            if orderby:
                if isinstance(orderby, (list, tuple)):
                    orderby = xorify(orderby)
                if isinstance(orderby, Expression):
                    orderby = self.expand(orderby)
                orders = orderby.split(', ')
                for order in orders:
                    order = {'-id': '-__key__', 'id': '__key__'}.get(order, order)
                    items = items.order(order)
            if attributes.get('limitby', None):
                (lmin, lmax) = attributes['limitby']
                (limit, offset) = (lmax - lmin, lmin)
                items = items.fetch(limit, offset=offset)
        fields = self.db[tablename].fields
        return (items, tablename, fields)

    def select(self, query, fields, attributes):
        (items, tablename, fields) = self.select_raw(query, fields, attributes)
        rows = [
            [t == 'id' and int(item.key().id()) or getattr(item, t) for t in fields]
            for item in items]
        colnames = ['%s.%s' % (tablename, t) for t in fields]
        return self.parse(rows, colnames, False)

    def count(self, query, distinct=None):
        if distinct:
            raise RuntimeError, "COUNT DISTINCT not supported"
        (items, tablename, fields) = self.select_raw(query)
        try:
            return len(items)
        except TypeError:
            return items.count(limit=None)

    def delete(self, tablename, query):
        """
        This function was changed on 2010-05-04 because according to
        http://code.google.com/p/googleappengine/issues/detail?id=3119
        GAE no longer supports deleting more than 1000 records.
        """
        (items, tablename, fields) = self.select_raw(query)
        # items is either a list (from a key lookup) or a lazy GAE query
        if not isinstance(items, list):
            counter = items.count(limit=None)
            leftitems = items.fetch(1000)
            while len(leftitems):
                gae.delete(leftitems)
                leftitems = items.fetch(1000)
        else:
            counter = len(items)
            gae.delete(items)
        return counter

    def update(self, tablename, query, update_fields):
        (items, tablename, fields) = self.select_raw(query)
        counter = 0
        for item in items:
            for field, value in update_fields:
                setattr(item, field.name, self.represent(value, field.type))
            item.put()
            counter += 1
        logger.info(str(counter))
        return counter

    def insert(self, table, fields):
        dfields = dict((f.name, self.represent(v, f.type)) for f, v in fields)
        tmp = table._tableobj(**dfields)
        tmp.put()
        rid = Reference(tmp.key().id())
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self, table, items):
        parsed_items = []
        for item in items:
            dfields = dict((f.name, self.represent(v, f.type)) for f, v in item)
            parsed_items.append(table._tableobj(**dfields))
        gae.put(parsed_items)
        return True


try:
    import couchdb
    drivers.append('CouchDB')
except ImportError:
    logger.debug('no couchdb driver')

def uuid2int(uuidv):
    return uuid.UUID(uuidv).int

def int2uuid(n):
    return str(uuid.UUID(int=n))

class CouchDBAdapter(NoSQLAdapter):
    uploads_in_blob = True
    types = {
        'boolean': bool,
        'string': str,
        'text': str,
        'password': str,
        'blob': str,
        'upload': str,
        'integer': long,
        'double': float,
        'date': datetime.date,
        'time': datetime.time,
        'datetime': datetime.datetime,
        'id': long,
        'reference': long,
        'list:string': list,
        'list:integer': list,
        'list:reference': list,
        }

    def file_open(self, filename, mode='rb', lock=True): pass
    def file_close(self, fileobj, unlock=True): pass

    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            if expression.type == 'id':
                return "%s._id" % expression.tablename
        return BaseAdapter.expand(self, expression, field_type)

    # the bodies of the next five operators were lost in extraction; the
    # reconstructions below emit the JavaScript tests used inside the
    # couchdb map function built by _select()

    def AND(self, first, second):
        return '(%s && %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s || %s)' % (self.expand(first), self.expand(second))

    def EQ(self, first, second):
        if second is None:
            return '(%s==null)' % self.expand(first)
        return '(%s==%s)' % (self.expand(first), self.expand(second, first.type))

    def NE(self, first, second):
        if second is None:
            return '(%s!=null)' % self.expand(first)
        return '(%s!=%s)' % (self.expand(first), self.expand(second, first.type))

    def COMMA(self, first, second):
        return '%s+%s' % (self.expand(first), self.expand(second))

    def represent(self, obj, fieldtype):
        value = NoSQLAdapter.represent(self, obj, fieldtype)
        if fieldtype == 'id':
            return repr(str(int(value)))
        return repr(not isinstance(value, unicode) and value or value.encode('utf8'))

    def __init__(self, db, uri='couchdb://127.0.0.1:5984',
                 pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.uri = uri
        self.dbengine = 'couchdb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self.pool_size = pool_size

        url = 'http://' + uri[10:]
        def connect(url=url, driver_args=driver_args):
            return couchdb.Server(url, **driver_args)
        self.pool_connection(connect)

    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        if migrate:
            try:
                self.connection.create(table._tablename)
            except:
                # the database may already exist
                pass

    def insert(self, table, fields):
        # reconstructed body (the original was lost in extraction): assign a
        # uuid-derived integer id and save the document; web2py_uuid comes
        # from gluon.utils
        id = uuid2int(web2py_uuid())
        ctable = self.connection[table._tablename]
        values = dict((k.name, self.represent(v, k.type)) for k, v in fields)
        values['_id'] = str(id)
        ctable.save(values)
        return id

    def _select(self, query, fields, attributes):
        if not isinstance(query, Query):
            raise SyntaxError, "Not Supported"
        for key in set(attributes.keys()) - set(('orderby', 'groupby', 'limitby',
                                                 'required', 'cache', 'left',
                                                 'distinct', 'having')):
            raise SyntaxError, 'invalid select attribute: %s' % key
        new_fields = []
        for item in fields:
            if isinstance(item, SQLALL):
                new_fields += item.table
            else:
                new_fields.append(item)
        def uid(fd):
            return fd == 'id' and '_id' or fd
        def get(row, fd):
            return fd == 'id' and int(row['_id']) or row.get(fd, None)
        fields = new_fields
        tablename = self.get_table(query)
        fieldnames = [f.name for f in (fields or self.db[tablename])]
        colnames = ['%s.%s' % (tablename, k) for k in fieldnames]
        fields = ','.join(['%s.%s' % (tablename, uid(f)) for f in fieldnames])
        fn = "function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);}" % \
            dict(t=tablename,
                 query=self.expand(query),
                 order='%s._id' % tablename,
                 fields=fields)
        return fn, colnames

    def select(self, query, fields, attributes):
        if not isinstance(query, Query):
            raise SyntaxError, "Not Supported"
        fn, colnames = self._select(query, fields, attributes)
        tablename = colnames[0].split('.')[0]
        ctable = self.connection[tablename]
        rows = [cols['value'] for cols in ctable.query(fn)]
        return self.parse(rows, colnames, False)

    def delete(self, tablename, query):
        if not isinstance(query, Query):
            raise SyntaxError, "Not Supported"
        if query.first.type == 'id' and query.op == self.EQ:
            # fast path: delete a single document by its _id
            id = query.second
            tablename = query.first.tablename
            assert(tablename == query.first.tablename)
            ctable = self.connection[tablename]
            try:
                del ctable[str(id)]
                return 1
            except couchdb.http.ResourceNotFound:
                return 0
        else:
            tablename = self.get_table(query)
            rows = self.select(query, [self.db[tablename].id], {})
            ctable = self.connection[tablename]
            for row in rows:
                del ctable[str(row.id)]
            return len(rows)

    def update(self, tablename, query, fields):
        if not isinstance(query, Query):
            raise SyntaxError, "Not Supported"
        if query.first.type == 'id' and query.op == self.EQ:
            id = query.second
            tablename = query.first.tablename
            ctable = self.connection[tablename]
            try:
                doc = ctable[str(id)]
                for key, value in fields:
                    doc[key.name] = NoSQLAdapter.represent(self, value, self.db[tablename][key.name].type)
                ctable.save(doc)
                return 1
            except couchdb.http.ResourceNotFound:
                return 0
        else:
            tablename = self.get_table(query)
            rows = self.select(query, [self.db[tablename].id], {})
            ctable = self.connection[tablename]
            table = self.db[tablename]
            for row in rows:
                doc = ctable[str(row.id)]
                for key, value in fields:
                    doc[key.name] = NoSQLAdapter.represent(self, value, table[key.name].type)
                ctable.save(doc)
            return len(rows)

    def count(self, query, distinct=None):
        if distinct:
            raise RuntimeError, "COUNT DISTINCT not supported"
        if not isinstance(query, Query):
            raise SyntaxError, "Not Supported"
        tablename = self.get_table(query)
        rows = self.select(query, [self.db[tablename].id], {})
        return len(rows)

def cleanup(text):
    """
    Validates that the given text is clean: only [0-9a-zA-Z_] is allowed
    in table and field names.
    """
    if re.compile('[^0-9a-zA-Z_]').findall(text):
        raise SyntaxError, \
            'only [0-9a-zA-Z_] allowed in table and field names, received %s' \
            % text
    return text


try:
    import pymongo
    drivers.append('mongoDB')
except ImportError:
    logger.debug('no mongoDB driver')

class MongoDBAdapter(NoSQLAdapter):
    uploads_in_blob = True
    types = {
        'boolean': bool,
        'string': str,
        'text': str,
        'password': str,
        'blob': str,
        'upload': str,
        'integer': long,
        'double': float,
        'date': datetime.date,
        'time': datetime.time,
        'datetime': datetime.datetime,
        'id': long,
        'reference': long,
        'list:string': list,
        'list:integer': list,
        'list:reference': list,
        }

    def __init__(self, db, uri='mongodb://127.0.0.1:5984/db',
                 pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x:x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.uri = uri
        self.dbengine = 'mongodb'
        self.folder = folder
        db['_lastsql'] = ''
        self.db_codec = 'UTF-8'
        self.pool_size = pool_size

        m = re.compile('^(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(self.uri[10:])
        if not m:
            raise SyntaxError, "Invalid URI string in DAL: %s" % self.uri
        host = m.group('host')
        if not host:
            raise SyntaxError, 'mongodb: host name required'
        dbname = m.group('db')
        if not dbname:
            raise SyntaxError, 'mongodb: db name required'
        port = int(m.group('port') or 27017)
        driver_args.update(dict(host=host, port=port))
        def connect(dbname=dbname, driver_args=driver_args):
            return pymongo.Connection(**driver_args)[dbname]
        self.pool_connection(connect)

    def insert(self, table, fields):
        # minimal sketch (the original body was lost in extraction): store
        # the represented values; pymongo's insert returns the new _id
        ctable = self.connection[table._tablename]
        values = dict((k.name, self.represent(v, k.type)) for k, v in fields)
        return ctable.insert(values)

    def count(self, query):
        raise RuntimeError, "Not implemented"

    def select(self, query, fields, attributes):
        raise RuntimeError, "Not implemented"

    def delete(self, tablename, query):
        raise RuntimeError, "Not implemented"

    def update(self, tablename, query, fields):
        raise RuntimeError, "Not implemented"


3537
3538 ADAPTERS = {
3539 'sqlite': SQLiteAdapter,
3540 'sqlite:memory': SQLiteAdapter,
3541 'mysql': MySQLAdapter,
3542 'postgres': PostgreSQLAdapter,
3543 'oracle': OracleAdapter,
3544 'mssql': MSSQLAdapter,
3545 'mssql2': MSSQL2Adapter,
3546 'db2': DB2Adapter,
3547 'informix': InformixAdapter,
3548 'firebird': FireBirdAdapter,
3549 'firebird_embedded': FireBirdAdapter,
3550 'ingres': IngresAdapter,
3551 'ingresu': IngresUnicodeAdapter,
3552 'sapdb': SAPDBAdapter,
3553 'cubrid': CubridAdapter,
3554 'jdbc:sqlite': JDBCSQLiteAdapter,
3555 'jdbc:sqlite:memory': JDBCSQLiteAdapter,
3556 'jdbc:postgres': JDBCPostgreSQLAdapter,
3557 'gae': GoogleDatastoreAdapter,
3558 'google:datastore': GoogleDatastoreAdapter,
3559 'google:sql': GoogleSQLAdapter,
3560 'couchdb': CouchDBAdapter,
3561 'mongodb': MongoDBAdapter,
3562 }
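# How a URI selects its adapter (illustrative sketch, mirroring DAL.__init__
# below): regex_dbname extracts the scheme from the URI and the ADAPTERS
# table maps it to a class, e.g.
#   uri = 'mysql://root:none@localhost/test'
#   dbname = regex_dbname.match(uri).group()     # 'mysql'
#   adapter = ADAPTERS[dbname](db, uri, pool_size, folder, db_codec, ...)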


def sqlhtml_validators(field):
    """
    Field type validation, using web2py's validators mechanism.

    Makes sure the content of a field is in line with the declared
    fieldtype.
    """
    if not have_validators:
        return []
    field_type, field_length = field.type, field.length
    if isinstance(field_type, SQLCustomType):
        if hasattr(field_type, 'validator'):
            return field_type.validator
        else:
            field_type = field_type.type
    elif not isinstance(field_type, str):
        return []
    requires = []
    def ff(r, id):
        row = r(id)
        if not row:
            return id
        elif hasattr(r, '_format') and isinstance(r._format, str):
            return r._format % row
        elif hasattr(r, '_format') and callable(r._format):
            return r._format(row)
        else:
            return id
    if field_type == 'string':
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'text':
        requires.append(validators.IS_LENGTH(2 ** 16))
    elif field_type == 'password':
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'double':
        requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
    elif field_type == 'integer':
        requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
    elif field_type.startswith('decimal'):
        requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
    elif field_type == 'date':
        requires.append(validators.IS_DATE())
    elif field_type == 'time':
        requires.append(validators.IS_TIME())
    elif field_type == 'datetime':
        requires.append(validators.IS_DATETIME())
    elif field.db and field_type.startswith('reference') and \
            field_type.find('.') < 0 and \
            field_type[10:] in field.db.tables:
        referenced = field.db[field_type[10:]]
        def repr_ref(id, r=referenced, f=ff): return f(r, id)
        field.represent = field.represent or repr_ref
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(field.db, referenced.id,
                                           referenced._format)
            if field.unique:
                requires._and = validators.IS_NOT_IN_DB(field.db, field)
            if field.tablename == field_type[10:]:
                return validators.IS_EMPTY_OR(requires)
            return requires
    elif field.db and field_type.startswith('list:reference') and \
            field_type.find('.') < 0 and \
            field_type[15:] in field.db.tables:
        referenced = field.db[field_type[15:]]
        def list_ref_repr(ids, r=referenced, f=ff):
            if not ids:
                return None
            refs = r._db(r.id.belongs(ids)).select(r.id)
            return (refs and ', '.join(str(f(r, ref.id)) for ref in refs) or '')
        field.represent = field.represent or list_ref_repr
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(field.db, referenced.id,
                                           referenced._format, multiple=True)
        else:
            requires = validators.IS_IN_DB(field.db, referenced.id,
                                           multiple=True)
        if field.unique:
            requires._and = validators.IS_NOT_IN_DB(field.db, field)
        return requires
    elif field_type.startswith('list:'):
        def repr_list(values): return ', '.join(str(v) for v in (values or []))
        field.represent = field.represent or repr_list
    if field.unique:
        requires.insert(0, validators.IS_NOT_IN_DB(field.db, field))
    sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
    if field.notnull and not field_type[:2] in sff:
        requires.insert(0, validators.IS_NOT_EMPTY())
    elif not field.notnull and field_type[:2] in sff and requires:
        requires[-1] = validators.IS_EMPTY_OR(requires[-1])
    return requires
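# Illustrative examples of the defaults chosen above (hedged, assuming the
# standard web2py validators module):
#   Field('name', 'string', length=64)   -> requires = [IS_LENGTH(64)]
#   Field('name', 'string', notnull=True) additionally prepends IS_NOT_EMPTY()
#   Field('owner', 'reference person') with db.person._format set
#       -> requires = IS_IN_DB(db, db.person.id, db.person._format)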


def bar_escape(item):
    return str(item).replace('|', '||')

def bar_encode(items):
    # reconstructed one-liner: join the escaped items between '|' delimiters
    return '|%s|' % '|'.join(bar_escape(item) for item in items)

def bar_decode_integer(value):
    return [int(x) for x in value.split('|') if x.strip()]

def bar_decode_string(value):
    # reconstructed: split on single '|' only (a doubled '|' is an escaped
    # literal) and undo the escaping
    return [x.replace('||', '|') for x in
            re.compile('(?<!\|)\|(?!\|)').split(value[1:-1]) if x.strip()]

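# Example of the encoding used to store list: types in a single text column:
#   bar_encode([1, 2, 3])          == '|1|2|3|'
#   bar_decode_integer('|1|2|3|')  == [1, 2, 3]
#   bar_escape('a|b')              == 'a||b'   (a literal '|' is doubled)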

class Row(dict):

    """
    A dictionary that lets you do d['a'] as well as d.a;
    this is only used to store a Row.
    """

    def __getitem__(self, key):
        key = str(key)
        if key in self.get('_extra', {}):
            return self._extra[key]
        return dict.__getitem__(self, key)

    def __call__(self, key):
        return self.__getitem__(key)

    def __setitem__(self, key, value):
        dict.__setitem__(self, str(key), value)

    def __getattr__(self, key):
        return self[key]

    def __setattr__(self, key, value):
        self[key] = value

    def __repr__(self):
        return '<Row ' + dict.__repr__(self) + '>'

    def __int__(self):
        return dict.__getitem__(self, 'id')

    def __eq__(self, other):
        try:
            return self.as_dict() == other.as_dict()
        except AttributeError:
            return False

    def __ne__(self, other):
        return not (self == other)

    def __copy__(self):
        return Row(dict(self))

    def as_dict(self, datetime_to_str=False):
        SERIALIZABLE_TYPES = (str, unicode, int, long, float, bool, list)
        d = dict(self)
        for k in copy.copy(d.keys()):
            v = d[k]
            if d[k] is None:
                continue
            elif isinstance(v, Row):
                d[k] = v.as_dict()
            elif isinstance(v, Reference):
                d[k] = int(v)
            elif isinstance(v, decimal.Decimal):
                d[k] = float(v)
            elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
                if datetime_to_str:
                    d[k] = v.isoformat().replace('T', ' ')[:19]
            elif not isinstance(v, SERIALIZABLE_TYPES):
                del d[k]
        return d
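# Example (hedged sketch):
#   r = Row(id=1, name='James')
#   r.name == r['name'] == 'James'
#   r.as_dict() -> {'id': 1, 'name': 'James'}; values that cannot be
#   serialized are dropped, and dates/times are rendered as strings when
#   as_dict(datetime_to_str=True) is used.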

def Row_unpickler(data):
    return Row(cPickle.loads(data))

def Row_pickler(data):
    # reconstructed counterpart of Row_unpickler, as registered below
    return Row_unpickler, (cPickle.dumps(data.as_dict(datetime_to_str=False)),)

copy_reg.pickle(Row, Row_pickler, Row_unpickler)


class SQLCallableList(list):
    def __call__(self):
        return copy.copy(self)


class DAL(dict):

    """
    An instance of this class represents a database connection.

    Example::

        db = DAL('sqlite://test.db')
        db.define_table('tablename', Field('fieldname1'),
                                     Field('fieldname2'))
    """

    @staticmethod
    def set_folder(folder):
        """
        # ## this allows gluon to set a folder for this thread
        # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
        """
        BaseAdapter.set_folder(folder)


    def __init__(self, uri='sqlite://dummy.db', pool_size=0, folder=None,
                 db_codec='UTF-8', check_reserved=None,
                 migrate=True, fake_migrate=False,
                 migrate_enabled=True, fake_migrate_all=False,
                 decode_credentials=False, driver_args=None,
                 adapter_args={}, attempts=5, auto_import=False):
        """
        Creates a new Database Abstraction Layer instance.

        Keyword arguments:

        :uri: string that contains information for connecting to a database.
            (default: 'sqlite://dummy.db')
        :pool_size: how many open connections to keep in the connection pool.
        :folder: where .table migration metadata files are created
            (set automatically within web2py; set it explicitly otherwise).
        :db_codec: string encoding of the database (default: 'UTF-8')
        :check_reserved: list of adapters to check tablenames and column names
            against sql reserved keywords. (Default None)

            * 'common' List of sql keywords that are common to all database
              types such as "SELECT, INSERT". (recommended)
            * 'all' Checks against all known SQL keywords. (not recommended)
            * '<adaptername>' Checks against the specific adapter's list of
              keywords. (recommended)
            * '<adaptername>_nonreserved' Checks against the specific
              adapter's list of nonreserved keywords. (if available)
        :migrate: (defaults to True) sets default migrate behavior for all tables
        :fake_migrate: (defaults to False) sets default fake_migrate behavior for all tables
        :migrate_enabled: (defaults to True) if set to False disables ALL migrations
        :fake_migrate_all: (defaults to False) if set to True fake migrates ALL tables
        :attempts: (defaults to 5) number of times to attempt connecting
        """
        if not decode_credentials:
            credential_decoder = lambda cred: cred
        else:
            credential_decoder = lambda cred: urllib.unquote(cred)
        if folder:
            self.set_folder(folder)
        self._uri = uri
        self._pool_size = pool_size
        self._db_codec = db_codec
        self._lastsql = ''
        self._timings = []
        self._pending_references = {}
        self._request_tenant = 'request_tenant'
        self._common_fields = []
        if not str(attempts).isdigit() or attempts < 0:
            attempts = 5
        if uri:
            uris = isinstance(uri, (list, tuple)) and uri or [uri]
            error = ''
            connected = False
            for k in range(attempts):
                for uri in uris:
                    try:
                        if is_jdbc and not uri.startswith('jdbc:'):
                            uri = 'jdbc:' + uri
                        self._dbname = regex_dbname.match(uri).group()
                        if not self._dbname in ADAPTERS:
                            raise SyntaxError, "Error in URI '%s' or database not supported" % self._dbname
                        args = (self, uri, pool_size, folder, db_codec,
                                credential_decoder, driver_args or {}, adapter_args)
                        self._adapter = ADAPTERS[self._dbname](*args)
                        connected = True
                        break
                    except SyntaxError:
                        raise
                    except Exception, error:
                        pass
                if connected:
                    break
                else:
                    time.sleep(1)
            if not connected:
                raise RuntimeError, "Failure to connect, tried %d times:\n%s" % (attempts, error)
        else:
            args = (self, 'None', 0, folder, db_codec)
            self._adapter = BaseAdapter(*args)
            migrate = fake_migrate = False
        adapter = self._adapter
        self._uri_hash = hashlib.md5(adapter.uri).hexdigest()
        self.tables = SQLCallableList()
        self.check_reserved = check_reserved
        if self.check_reserved:
            from reserved_sql_keywords import ADAPTERS as RSK
            self.RSK = RSK
        self._migrate = migrate
        self._fake_migrate = fake_migrate
        self._migrate_enabled = migrate_enabled
        self._fake_migrate_all = fake_migrate_all
        if auto_import:
            self.import_table_definitions(adapter.folder)

    def import_table_definitions(self, path, migrate=False, fake_migrate=False):
        pattern = os.path.join(path, self._uri_hash + '_*.table')
        for filename in glob.glob(pattern):
            tfile = self._adapter.file_open(filename, 'r')
            sql_fields = cPickle.load(tfile)
            name = filename[len(pattern)-7:-6]
            mf = [(value['sortable'], Field(key, type=value['type'])) \
                      for key, value in sql_fields.items()]
            mf.sort()
            self.define_table(name, *[item[1] for item in mf],
                              **dict(migrate=migrate, fake_migrate=fake_migrate))

    def check_reserved_keyword(self, name):
        """
        Validates ``name`` against SQL keywords.
        Uses self.check_reserved, which is a list of operators to use, e.g.::

            self.check_reserved = ['common', 'postgres', 'mysql']
            self.check_reserved = ['all']
        """
        for backend in self.check_reserved:
            if name.upper() in self.RSK[backend]:
                raise SyntaxError, 'invalid table/column name "%s" is a "%s" reserved SQL keyword' % (name, backend.upper())

    def __contains__(self, tablename):
        return self.has_key(tablename)

    def parse_as_rest(self, patterns, args, vars, query=None, nested_select=True):
        """
        EXAMPLE:

        db.define_table('person',Field('name'),Field('info'))
        db.define_table('pet',Field('person',db.person),Field('name'),Field('info'))

        @request.restful()
        def index():
            def GET(*args,**vars):
                patterns = [
                    "/persons[person]",
                    "/{person.name.startswith}",
                    "/{person.name}/:field",
                    "/{person.name}/pets[pet.person]",
                    "/{person.name}/pet[pet.person]/{pet.name}",
                    "/{person.name}/pet[pet.person]/{pet.name}/:field"
                    ]
                parser = db.parse_as_rest(patterns,args,vars)
                if parser.status == 200:
                    return dict(content=parser.response)
                else:
                    raise HTTP(parser.status,parser.error)
            def POST(table_name,**vars):
                if table_name == 'person':
                    return db.person.validate_and_insert(**vars)
                elif table_name == 'pet':
                    return db.pet.validate_and_insert(**vars)
                else:
                    raise HTTP(400)
            return locals()
        """

        db = self
        re1 = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
        re2 = re.compile('^.+\[.+\]$')

        def auto_table(table, base='', depth=0):
            patterns = []
            for field in db[table].fields:
                if base:
                    tag = '%s/%s' % (base, field.replace('_', '-'))
                else:
                    tag = '/%s/%s' % (table.replace('_', '-'), field.replace('_', '-'))
                f = db[table][field]
                if not f.readable: continue
                if f.type == 'id' or 'slug' in field or f.type.startswith('reference'):
                    tag += '/{%s.%s}' % (table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
                elif f.type.startswith('boolean'):
                    tag += '/{%s.%s}' % (table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
                elif f.type.startswith('double') or f.type.startswith('integer'):
                    tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table, field, table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
                elif f.type.startswith('list:'):
                    tag += '/{%s.%s.contains}' % (table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
                elif f.type in ('date', 'datetime'):
                    tag += '/{%s.%s.year}' % (table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
                    tag += '/{%s.%s.month}' % (table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
                    tag += '/{%s.%s.day}' % (table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
                if f.type in ('datetime', 'time'):
                    tag += '/{%s.%s.hour}' % (table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
                    tag += '/{%s.%s.minute}' % (table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
                    tag += '/{%s.%s.second}' % (table, field)
                    patterns.append(tag)
                    patterns.append(tag + '/:field')
            if depth > 0:
                for rtable, rfield in db[table]._referenced_by:
                    tag += '/%s[%s.%s]' % (rtable, rtable, rfield)
                    patterns.append(tag)
                    patterns += auto_table(rtable, base=tag, depth=depth - 1)
            return patterns

        if patterns == 'auto':
            patterns = []
            for table in db.tables:
                if not table.startswith('auth_'):
                    patterns += auto_table(table, base='', depth=1)
        else:
            i = 0
            while i < len(patterns):
                pattern = patterns[i]
                tokens = pattern.split('/')
                if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
                    new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
                                              '/'.join(tokens[:-1]))
                    patterns = patterns[:i] + new_patterns + patterns[i+1:]
                    i += len(new_patterns)
                else:
                    i += 1
        if '/'.join(args) == 'patterns':
            return Row({'status': 200, 'pattern': 'list',
                        'error': None, 'response': patterns})
        for pattern in patterns:
            otable = table = None
            dbset = db(query)
            i = 0
            tags = pattern[1:].split('/')
            # only consider patterns with the same number of path elements
            if len(tags) != len(args):
                continue
            for tag in tags:
                if re1.match(tag):
                    # token syntax: {tablename.fieldname[.op][.not]}
                    tokens = tag[1:-1].split('.')
                    table, field = tokens[0], tokens[1]
                    if not otable or table == otable:
                        if len(tokens) == 2 or tokens[2] == 'eq':
                            query = db[table][field] == args[i]
                        elif tokens[2] == 'ne':
                            query = db[table][field] != args[i]
                        elif tokens[2] == 'lt':
                            query = db[table][field] < args[i]
                        elif tokens[2] == 'gt':
                            query = db[table][field] > args[i]
                        elif tokens[2] == 'ge':
                            query = db[table][field] >= args[i]
                        elif tokens[2] == 'le':
                            query = db[table][field] <= args[i]
                        elif tokens[2] == 'year':
                            query = db[table][field].year() == args[i]
                        elif tokens[2] == 'month':
                            query = db[table][field].month() == args[i]
                        elif tokens[2] == 'day':
                            query = db[table][field].day() == args[i]
                        elif tokens[2] == 'hour':
                            query = db[table][field].hour() == args[i]
                        elif tokens[2] == 'minute':
                            query = db[table][field].minutes() == args[i]
                        elif tokens[2] == 'second':
                            query = db[table][field].seconds() == args[i]
                        elif tokens[2] == 'startswith':
                            query = db[table][field].startswith(args[i])
                        elif tokens[2] == 'contains':
                            query = db[table][field].contains(args[i])
                        else:
                            raise RuntimeError, "invalid pattern: %s" % pattern
                        if len(tokens) == 4 and tokens[3] == 'not':
                            query = ~query
                        elif len(tokens) >= 4:
                            raise RuntimeError, "invalid pattern: %s" % pattern
                        dbset = dbset(query)
                    else:
                        raise RuntimeError, "missing relation in pattern: %s" % pattern
                elif otable and re2.match(tag) and args[i] == tag[:tag.find('[')]:
                    ref = tag[tag.find('[')+1:-1]
                    if '.' in ref:
4100 table,field = ref.split('.')
4101
4102 if nested_select:
4103 try:
4104 dbset=db(db[table][field].belongs(dbset._select(db[otable]._id)))
4105 except ValueError:
4106 return Row({'status':400,'pattern':pattern,
4107 'error':'invalid path','response':None})
4108 else:
4109 items = [item.id for item in dbset.select(db[otable]._id)]
4110 dbset=db(db[table][field].belongs(items))
4111 else:
4112 dbset=dbset(db[ref])
4113 elif tag==':field' and table:
4114
4115 field = args[i]
4116 if not field in db[table]: break
4117 try:
4118 item = dbset.select(db[table][field],limitby=(0,1)).first()
4119 except ValueError:
4120 return Row({'status':400,'pattern':pattern,
4121 'error':'invalid path','response':None})
4122 if not item:
4123 return Row({'status':404,'pattern':pattern,
4124 'error':'record not found','response':None})
4125 else:
4126 return Row({'status':200,'response':item[field],
4127 'pattern':pattern})
4128 elif tag != args[i]:
4129 break
4130 otable = table
4131 i += 1
4132 if i==len(tags) and table:
4133 otable,ofield = vars.get('order','%s.%s' % (table,field)).split('.',1)
4134 try:
4135 if otable[:1]=='~': orderby = ~db[otable[1:]][ofield]
4136 else: orderby = db[otable][ofield]
4137 except KeyError:
4138 return Row({'status':400,'error':'invalid orderby','response':None})
4139 fields = [field for field in db[table] if field.readable]
4140 count = dbset.count()
4141 try:
4142 limits = (int(vars.get('min',0)),int(vars.get('max',1000)))
4143 if limits[0]<0 or limits[1]<limits[0]: raise ValueError
4144 except ValueError:
4145 return Row({'status':400,'error':'invalid limits','response':None})
4146 if count > limits[1]-limits[0]:
4147 return Row({'status':400,'error':'too many records','response':None})
4148 try:
4149 response = dbset.select(limitby=limits,orderby=orderby,*fields)
4150 except ValueError:
4151 return Row({'status':400,'pattern':pattern,
4152 'error':'invalid path','response':None})
4153 return Row({'status':200,'response':response,'pattern':pattern})
4154 return Row({'status':400,'error':'no matching pattern','response':None})
4155
4156
4157 - def define_table(
4158 self,
4159 tablename,
4160 *fields,
4161 **args
4162 ):
4163
4164 for key in args:
4165 if key not in [
4166 'migrate',
4167 'primarykey',
4168 'fake_migrate',
4169 'format',
4170 'trigger_name',
4171 'sequence_name',
4172 'polymodel']:
4173 raise SyntaxError, 'invalid table "%s" attribute: %s' % (tablename, key)
4174 migrate = self._migrate_enabled and args.get('migrate',self._migrate)
4175 fake_migrate = self._fake_migrate_all or args.get('fake_migrate',self._fake_migrate)
4176 format = args.get('format',None)
4177 trigger_name = args.get('trigger_name', None)
4178 sequence_name = args.get('sequence_name', None)
4179 primarykey=args.get('primarykey',None)
4180 polymodel=args.get('polymodel',None)
4181 if not isinstance(tablename,str):
4182 raise SyntaxError, "missing table name"
4183 tablename = cleanup(tablename)
4184 lowertablename = tablename.lower()
4185
4186 if tablename.startswith('_') or hasattr(self,lowertablename) or \
4187 regex_python_keywords.match(tablename):
4188 raise SyntaxError, 'invalid table name: %s' % tablename
4189 elif lowertablename in self.tables:
4190 raise SyntaxError, 'table already defined: %s' % tablename
4191 elif self.check_reserved:
4192 self.check_reserved_keyword(tablename)
4193
4194 if self._common_fields:
4195 fields = [f for f in fields] + [f for f in self._common_fields]
4196
4197 t = self[tablename] = Table(self, tablename, *fields,
4198 **dict(primarykey=primarykey,
4199 trigger_name=trigger_name,
4200 sequence_name=sequence_name))
4201
4202 if self._uri in (None,'None'):
4203 return t
4204
4205 t._create_references()
4206
4207 if migrate or self._adapter.dbengine=='google:datastore':
4208 try:
4209 sql_locker.acquire()
4210 self._adapter.create_table(t,migrate=migrate,
4211 fake_migrate=fake_migrate,
4212 polymodel=polymodel)
4213 finally:
4214 sql_locker.release()
4215 else:
4216 t._dbt = None
4217 self.tables.append(tablename)
4218 t._format = format
4219 return t
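# A sketch of the per-table keyword arguments handled above (table and
# field names are hypothetical); 'migrate' may also name the .table
# metadata file instead of being a plain boolean.
#
#     db.define_table('thing', Field('name'),
#                     migrate='thing.table',  # migration metadata file
#                     fake_migrate=False,
#                     format='%(name)s')      # used to represent references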
4220
4221 - def __iter__(self):
4222 for tablename in self.tables:
4223 yield self[tablename]
4224
4225 - def __getitem__(self, key):
4226 return dict.__getitem__(self, str(key))
4227
4228 - def __setitem__(self, key, value):
4229 dict.__setitem__(self, str(key), value)
4230
4231 - def __getattr__(self, key):
4232 return self[key]
4233
4234 - def __setattr__(self, key, value):
4235 if key[:1]!='_' and key in self:
4236 raise SyntaxError, \
4237 'Object %s exists and cannot be redefined' % key
4238 self[key] = value
4239
4240 - def __repr__(self):
4241 return '<DAL ' + dict.__repr__(self) + '>'
4242
4243 - def __call__(self, query=None):
4244 if isinstance(query,Table):
4245 query = query._id>0
4246 elif isinstance(query,Field):
4247 query = query!=None
4248 return Set(self, query)
4249
4250 - def commit(self):
4251 self._adapter.commit()
4252
4253 - def rollback(self):
4254 self._adapter.rollback()
4255
4256 - def executesql(self, query, placeholders=None, as_dict=False):
4257 """
4258 placeholders is optional and will always be None when using DAL
4259 if using raw SQL with placeholders, placeholders may be
4260 a sequence of values to be substituted in
4261 or, *if supported by the DB driver*, a dictionary with keys
4262 matching named placeholders in your SQL.
4263
4264 The optional "as_dict" argument (added 2009-12-05) defaults to
4265 False. When using raw SQL it can be set to True,
4266 and the results cursor returned by the DB driver will be
4267 converted to a sequence of dictionaries keyed with the db
4268 field names. Tested with SQLite but should work with any database
4269 since the cursor.description used to get field names is part of the
4270 Python DB API 2.0 spec. Results returned with as_dict = True are
4271 the same as those returned when applying .to_list() to a DAL query.
4272
4273 [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
4274
4275 --bmeredyk
4276 """
4277 if placeholders:
4278 self._adapter.execute(query, placeholders)
4279 else:
4280 self._adapter.execute(query)
4281 if as_dict:
4282 if not hasattr(self._adapter.cursor,'description'):
4283 raise RuntimeError, "database does not support executesql(...,as_dict=True)"
4284
4285
4286
4287 columns = self._adapter.cursor.description
4288
4289 fields = [f[0] for f in columns]
4290
4291 data = self._adapter.cursor.fetchall()
4292
4293
4294 return [dict(zip(fields,row)) for row in data]
4295
4296 try:
4297 return self._adapter.cursor.fetchall()
4298 except:
4299 return None
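# A usage sketch (table and column names are hypothetical; the placeholder
# style, %s or ?, depends on the DB driver in use):
#
#     rows = db.executesql('SELECT * FROM person WHERE name = %s;',
#                          placeholders=('Max',), as_dict=True)
#     # rows -> [{'id': 1, 'name': 'Max'}, ...]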
4300
4302 for tablename in self.tables:
4303 by = self[tablename]._referenced_by
4304 by[:] = [item for item in by if not item[0] == other]
4305
4306 - def export_to_csv_file(self, ofile, *args, **kwargs):
4307 for table in self.tables:
4308 ofile.write('TABLE %s\r\n' % table)
4309 self[table].export_to_csv_file(ofile, *args, **kwargs)
4310 ofile.write('\r\n\r\n')
4311 ofile.write('END')
4312
4313 - def import_from_csv_file(self, ifile, id_map={}, null='<NULL>',
4314 unique='uuid', *args, **kwargs):
4315 for line in ifile:
4316 line = line.strip()
4317 if not line:
4318 continue
4319 elif line == 'END':
4320 return
4321 elif not line.startswith('TABLE ') or not line[6:] in self.tables:
4322 raise SyntaxError, 'invalid file format'
4323 else:
4324 tablename = line[6:]
4325 self[tablename].import_from_csv_file(ifile, id_map, null,
4326 unique, *args, **kwargs)
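# A restore sketch (the filename is hypothetical): the file must contain
# 'TABLE <tablename>' section headers and a final 'END' line, as produced
# by db.export_to_csv_file.
#
#     ifile = open('backup.csv', 'rb')
#     db.import_from_csv_file(ifile)
#     ifile.close()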
4327
4328
4329 -class SQLALL(object):
4330 """
4331 Helper class providing a comma-separated string having all the field names
4332 (prefixed by table name and '.')
4333
4334 normally only called from within gluon.sql
4335 """
4337 - def __init__(self, table):
4338 self.table = table
4336
4339
4341 return ', '.join([str(field) for field in self.table])
4342
4343
4344 -class Reference(int):
4345
4346 - def __allocate(self):
4347 if not self._record:
4348 self._record = self._table[int(self)]
4349 if not self._record:
4350 raise RuntimeError, "Using a recursive select but encountered a broken reference"
4351
4352 - def __getattr__(self, key):
4353 if key == 'id':
4354 return int(self)
4355 self.__allocate()
4356 return self._record.get(key, None)
4357
4358 - def __setattr__(self, key, value):
4359 if key.startswith('_'):
4360 int.__setattr__(self, key, value)
4361 return
4362 self.__allocate()
4363 self._record[key] = value
4364
4365 - def __getitem__(self, key):
4366 if key == 'id':
4367 return int(self)
4368 self.__allocate()
4369 return self._record.get(key, None)
4370
4371 - def __setitem__(self, key, value):
4372 self.__allocate()
4373 self._record[key] = value
4374
4375
4376 - def Reference_unpickler(data):
4377 return marshal.loads(data)
4378
4379 - def Reference_pickler(data):
4380 try:
4381 marshal_dump = marshal.dumps(int(data))
4382 except AttributeError:
4383 marshal_dump = 'i%s' % struct.pack('<i', int(data))
4384 return (Reference_unpickler, (marshal_dump,))
4385
4386 copy_reg.pickle(Reference, Reference_pickler, Reference_unpickler)
4387
4388
4389 -class Table(dict):
4390
4391 """
4392 an instance of this class represents a database table
4393
4394 Example::
4395
4396 db = DAL(...)
4397 db.define_table('users', Field('name'))
4398 db.users.insert(name='me') # print db.users._insert(...) to see SQL
4399 db.users.drop()
4400 """
4401
4402 - def __init__(
4403 self,
4404 db,
4405 tablename,
4406 *fields,
4407 **args
4408 ):
4409 """
4410 Initializes the table and performs checking on the provided fields.
4411
4412 Each table will automatically have an 'id' field.
4413
4414 If a field is of type Table, the fields (excluding 'id') from that table
4415 will be used instead.
4416
4417 :raises SyntaxError: when a supplied field is of incorrect type.
4418 """
4419 self._tablename = tablename
4420 self._sequence_name = args.get('sequence_name',None) or \
4421 db and db._adapter.sequence_name(tablename)
4422 self._trigger_name = args.get('trigger_name',None) or \
4423 db and db._adapter.trigger_name(tablename)
4424
4425 primarykey = args.get('primarykey', None)
4426 fieldnames,newfields=set(),[]
4427 if primarykey and not isinstance(primarykey,list):
4428 raise SyntaxError, "primarykey must be a list of fields from table '%s'" \
4429 % tablename
4430 elif primarykey:
4431 self._primarykey = primarykey
4432 elif not [f for f in fields if hasattr(f,'type') and f.type=='id']:
4433 field = Field('id', 'id')
4434 newfields.append(field)
4435 fieldnames.add('id')
4436 self._id = field
4437 for field in fields:
4438 if not isinstance(field, (Field, Table)):
4439 raise SyntaxError, \
4440 'define_table argument is not a Field or Table: %s' % field
4441 elif isinstance(field, Field) and not field.name in fieldnames:
4442 if hasattr(field, '_db'):
4443 field = copy.copy(field)
4444 newfields.append(field)
4445 fieldnames.add(field.name)
4446 if field.type=='id':
4447 self._id = field
4448 elif isinstance(field, Table):
4449 table = field
4450 for field in table:
4451 if not field.name in fieldnames and not field.type=='id':
4452 newfields.append(copy.copy(field))
4453 fieldnames.add(field.name)
4454 else:
4455
4456 pass
4457 fields = newfields
4458 self._db = db
4460 self.fields = SQLCallableList()
4461 self.virtualfields = []
4462 fields = list(fields)
4463
4464 if db and self._db._adapter.uploads_in_blob==True:
4465 for field in fields:
4466 if isinstance(field, Field) and field.type == 'upload'\
4467 and field.uploadfield is True:
4468 tmp = field.uploadfield = '%s_blob' % field.name
4469 fields.append(self._db.Field(tmp, 'blob', default=''))
4470
4471 lower_fieldnames = set()
4472 for field in fields:
4473 if db and db.check_reserved:
4474 db.check_reserved_keyword(field.name)
4475
4476 if field.name.lower() in lower_fieldnames:
4477 raise SyntaxError, "duplicate field %s in table %s" % (field.name, tablename)
4478 else:
4479 lower_fieldnames.add(field.name.lower())
4480
4481 self.fields.append(field.name)
4482 self[field.name] = field
4483 if field.type == 'id':
4484 self['id'] = field
4485 field.tablename = field._tablename = tablename
4486 field.table = field._table = self
4487 field.db = field._db = self._db
4488 field.length = min(field.length,self._db and self._db._adapter.maxcharlength or INFINITY)
4489 if field.requires == DEFAULT:
4490 field.requires = sqlhtml_validators(field)
4491 self.ALL = SQLALL(self)
4492
4493 if hasattr(self,'_primarykey'):
4494 for k in self._primarykey:
4495 if k not in self.fields:
4496 raise SyntaxError, \
4497 "primarykey must be a list of fields from table '%s " % tablename
4498 else:
4499 self[k].notnull = True
4500
4501 - def _validate(self, **vars):
4502 errors = Row()
4503 for key,value in vars.items():
4504 value,error = self[key].validate(value)
4505 if error:
4506 errors[key] = error
4507 return errors
4508
4509 - def _create_references(self):
4510 pr = self._db._pending_references
4511 self._referenced_by = []
4512 for fieldname in self.fields:
4513 field=self[fieldname]
4514 if isinstance(field.type,str) and field.type[:10] == 'reference ':
4515 ref = field.type[10:].strip()
4516 if not ref.split():
4517 raise SyntaxError, 'Table: reference to nothing: %s' %ref
4518 refs = ref.split('.')
4519 rtablename = refs[0]
4520 if not rtablename in self._db:
4521 pr[rtablename] = pr.get(rtablename,[]) + [field]
4522 continue
4523 rtable = self._db[rtablename]
4524 if len(refs)==2:
4525 rfieldname = refs[1]
4526 if not hasattr(rtable,'_primarykey'):
4527 raise SyntaxError,\
4528 'keyed tables can only reference other keyed tables (for now)'
4529 if rfieldname not in rtable.fields:
4530 raise SyntaxError,\
4531 "invalid field '%s' for referenced table '%s' in table '%s'" \
4532 % (rfieldname, rtablename, self._tablename)
4533 rtable._referenced_by.append((self._tablename, field.name))
4534 for referee in pr.get(self._tablename,[]):
4535 self._referenced_by.append((referee._tablename,referee.name))
4536
4537 - def _filter_fields(self, record, id=False):
4538 return dict([(k, v) for (k, v) in record.items() if k
4539 in self.fields and (self[k].type!='id' or id)])
4540
4542 """ for keyed table only """
4543 query = None
4544 for k,v in key.iteritems():
4545 if k in self._primarykey:
4546 if query:
4547 query = query & (self[k] == v)
4548 else:
4549 query = (self[k] == v)
4550 else:
4551 raise SyntaxError, \
4552 'Field %s is not part of the primary key of %s' % \
4553 (k,self._tablename)
4554 return query
4555
4556 - def __getitem__(self, key):
4557 if not key:
4558 return None
4559 elif isinstance(key, dict):
4560 """ for keyed table """
4561 query = self._build_query(key)
4562 rows = self._db(query).select()
4563 if rows:
4564 return rows[0]
4565 return None
4566 elif str(key).isdigit():
4567 return self._db(self.id == key).select(limitby=(0,1)).first()
4568 elif key:
4569 return dict.__getitem__(self, str(key))
4570
4571 - def __call__(self, key=DEFAULT, **kwargs):
4572 if key!=DEFAULT:
4573 if isinstance(key, Query):
4574 record = self._db(key).select(limitby=(0,1)).first()
4575 elif not str(key).isdigit():
4576 record = None
4577 else:
4578 record = self._db(self.id == key).select(limitby=(0,1)).first()
4579 if record:
4580 for k,v in kwargs.items():
4581 if record[k]!=v: return None
4582 return record
4583 elif kwargs:
4584 query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.items()])
4585 return self._db(query).select(limitby=(0,1)).first()
4586 else:
4587 return None
4588
4589 - def __setitem__(self, key, value):
4590 if isinstance(key, dict) and isinstance(value, dict):
4591 """ option for keyed table """
4592 if set(key.keys()) == set(self._primarykey):
4593 value = self._filter_fields(value)
4594 kv = {}
4595 kv.update(value)
4596 kv.update(key)
4597 if not self.insert(**kv):
4598 query = self._build_query(key)
4599 self._db(query).update(**self._filter_fields(value))
4600 else:
4601 raise SyntaxError,\
4602 'key must have all fields from primary key: %s'%\
4603 (self._primarykey)
4604 elif str(key).isdigit():
4605 if key == 0:
4606 self.insert(**self._filter_fields(value))
4607 elif not self._db(self.id == key)\
4608 .update(**self._filter_fields(value)):
4609 raise SyntaxError, 'No such record: %s' % key
4610 else:
4611 if isinstance(key, dict):
4612 raise SyntaxError,\
4613 'value must be a dictionary: %s' % value
4614 dict.__setitem__(self, str(key), value)
4615
4616 - def __delitem__(self, key):
4617 if isinstance(key, dict):
4618 query = self._build_query(key)
4619 if not self._db(query).delete():
4620 raise SyntaxError, 'No such record: %s' % key
4621 elif not str(key).isdigit() or not self._db(self.id == key).delete():
4622 raise SyntaxError, 'No such record: %s' % key
4623
4624 - def __getattr__(self, key):
4625 return self[key]
4626
4627 - def __setattr__(self, key, value):
4628 if key in self:
4629 raise SyntaxError, 'Object exists and cannot be redefined: %s' % key
4630 self[key] = value
4631
4632 - def __iter__(self):
4633 for fieldname in self.fields:
4634 yield self[fieldname]
4635
4636 - def __repr__(self):
4637 return '<Table ' + dict.__repr__(self) + '>'
4638
4639 - def __str__(self):
4640 if self.get('_ot', None):
4641 return '%s AS %s' % (self._ot, self._tablename)
4642 return self._tablename
4643
4644 - def _drop(self, mode = ''):
4645 return self._db._adapter._drop(self, mode)
4646
4647 - def drop(self, mode = ''):
4648 return self._db._adapter.drop(self,mode)
4649
4650 - def _listify(self,fields,update=False):
4651 new_fields = []
4652 new_fields_names = []
4653 for name in fields:
4654 if not name in self.fields:
4655 raise SyntaxError, 'Field %s does not belong to the table' % name
4656 new_fields.append((self[name],fields[name]))
4657 new_fields_names.append(name)
4658 for ofield in self:
4659 if not ofield.name in new_fields_names:
4660 if not update and ofield.default!=None:
4661 new_fields.append((ofield,ofield.default))
4662 elif update and ofield.update!=None:
4663 new_fields.append((ofield,ofield.update))
4664 for ofield in self:
4665 if not ofield.name in new_fields_names and ofield.compute:
4666 try:
4667 new_fields.append((ofield,ofield.compute(Row(fields))))
4668 except KeyError:
4669 pass
4670 if not update and ofield.required and not ofield.name in new_fields_names:
4671 raise SyntaxError,'Table: missing required field: %s' % ofield.name
4672 return new_fields
4673
4674 - def _insert(self, **fields):
4675 return self._db._adapter._insert(self, self._listify(fields))
4676
4677 - def insert(self, **fields):
4678 return self._db._adapter.insert(self, self._listify(fields))
4679
4680 - def validate_and_insert(self, **fields):
4681 response = Row()
4682 response.errors = self._validate(**fields)
4683 if not response.errors:
4684 response.id = self.insert(**fields)
4685 else:
4686 response.id = None
4687 return response
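# A usage sketch (table and field names are hypothetical): unlike insert,
# validate_and_insert runs the field validators first.
#
#     result = db.person.validate_and_insert(name='Max')
#     if result.errors:
#         pass  # result.errors maps field names to error messages
#     else:
#         new_id = result.id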
4688
4700
4707
4708 - def _truncate(self, mode=None):
4709 return self._db._adapter._truncate(self, mode)
4710
4711 - def truncate(self, mode=None):
4712 return self._db._adapter.truncate(self, mode)
4713
4714 - def import_from_csv_file(
4715 self,
4716 csvfile,
4717 id_map=None,
4718 null='<NULL>',
4719 unique='uuid',
4720 *args, **kwargs
4721 ):
4722 """
4723 import records from csv file. Column headers must have same names as
4724 table fields. field 'id' is ignored. If column names read 'table.file'
4725 the 'table.' prefix is ignored.
4726 'unique' argument is a field which must be unique
4727 (typically a uuid field)
4728 """
4729
4730 delimiter = kwargs.get('delimiter', ',')
4731 quotechar = kwargs.get('quotechar', '"')
4732 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
4733
4734 reader = csv.reader(csvfile, delimiter=delimiter, quotechar=quotechar, quoting=quoting)
4735 colnames = None
4736 if isinstance(id_map, dict):
4737 if not self._tablename in id_map:
4738 id_map[self._tablename] = {}
4739 id_map_self = id_map[self._tablename]
4740
4741 def fix(field, value, id_map):
4742 if value == null:
4743 value = None
4744 elif field.type in ('double','integer') and not value.strip():
4745 value = None # empty numeric values import as NULL
4746 elif field.type.startswith('list:string'):
4747 value = bar_decode_string(value)
4748 elif field.type.startswith('list:reference'):
4749 ref_table = field.type[14:].strip() # len('list:reference') == 14
4750 value = [id_map[ref_table][int(v)] \
4751 for v in bar_decode_string(value)]
4752 elif field.type.startswith('list:'):
4753 value = bar_decode_integer(value)
4754 elif id_map and field.type.startswith('reference'):
4755 try:
4756 value = id_map[field.type[9:].strip()][value]
4757 except KeyError:
4758 pass
4759 return (field.name, value)
4760
4761 def is_id(colname):
4762 if colname in self:
4763 return self[colname].type == 'id'
4764 else:
4765 return False
4766
4767 for line in reader:
4768 if not line:
4769 break
4770 if not colnames:
4771 colnames = [x.split('.',1)[-1] for x in line][:len(line)]
4772 cols, cid = [], []
4773 for i,colname in enumerate(colnames):
4774 if is_id(colname):
4775 cid = i
4776 else:
4777 cols.append(i)
4778 if colname == unique:
4779 unique_idx = i
4780 else:
4781 items = [fix(self[colnames[i]], line[i], id_map) \
4782 for i in cols if colnames[i] in self.fields]
4783
4784
4785 if not unique or unique not in colnames:
4786 new_id = self.insert(**dict(items))
4787 else:
4788 unique_value = line[unique_idx]
4789 query = self._db[self][unique] == unique_value
4790 record = self._db(query).select().first()
4791 if record:
4792 record.update_record(**dict(items))
4793 new_id = record[self._id.name]
4794 else:
4795 new_id = self.insert(**dict(items))
4796 if id_map and cid != []:
4797 id_map_self[line[cid]] = new_id
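# An import sketch (filenames and tables are hypothetical): passing the
# same id_map dict across imports lets reference fields be remapped from
# the old ids stored in the files to the newly inserted ids.
#
#     id_map = {}
#     db.person.import_from_csv_file(open('person.csv', 'rb'), id_map=id_map)
#     db.pet.import_from_csv_file(open('pet.csv', 'rb'), id_map=id_map)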
4798
4799 - def with_alias(self, alias):
4800 return self._db._adapter.alias(self, alias)
4801
4802 - def on(self, query):
4803 return Expression(self._db,self._db._adapter.ON,self,query)
4804
4805
4806
4807 -class Expression(object):
4808
4809 - def __init__(
4810 self,
4811 db,
4812 op,
4813 first=None,
4814 second=None,
4815 type=None,
4816 ):
4817
4818 self.db = db
4819 self.op = op
4820 self.first = first
4821 self.second = second
4822
4823 if not type and first and hasattr(first,'type'):
4824 self.type = first.type
4825 else:
4826 self.type = type
4827
4830
4833
4836
4839
4840 - def lower(self):
4841 return Expression(self.db, self.db._adapter.LOWER, self, None, self.type)
4842
4843 - def upper(self):
4844 return Expression(self.db, self.db._adapter.UPPER, self, None, self.type)
4845
4848
4851
4854
4857
4860
4863
4866
4867 - def __getslice__(self, start, stop):
4868 if start < 0:
4869 pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
4870 else:
4871 pos0 = start + 1
4872
4873 if stop < 0:
4874 length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
4875 elif stop == sys.maxint:
4876 length = self.len()
4877 else:
4878 length = '(%s - %s)' % (stop + 1, pos0)
4879 return Expression(self.db,self.db._adapter.SUBSTRING,
4880 self, (pos0, length), self.type)
4881
4882 - def __getitem__(self, i):
4883 return self[i:i + 1]
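# A slicing sketch (field name is hypothetical): slicing an Expression
# builds an SQL SUBSTRING, so it can be used inside queries; negative
# indices are translated into arithmetic on the column length.
#
#     rows = db(db.person.name[:2] == 'Ma').select()  # first two characters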
4884
4885 - def __str__(self):
4886 return self.db._adapter.expand(self,self.type)
4887
4888 - def __or__(self, other): # for use in sortby
4889 return Expression(self.db,self.db._adapter.COMMA,self,other,self.type)
4890
4891 - def __invert__(self):
4892 if self.op == self.db._adapter.INVERT: # double inversion cancels out
4893 return self.first
4894 return Expression(self.db,self.db._adapter.INVERT,self,type=self.type)
4895
4896 - def __add__(self, other):
4897 return Expression(self.db,self.db._adapter.ADD,self,other,self.type)
4898
4899 - def __sub__(self, other):
4900 if self.type == 'integer':
4901 result_type = 'integer'
4902 elif self.type in ['date','time','datetime','double']:
4903 result_type = 'double'
4904 else:
4905 raise SyntaxError, "subtraction operation not supported for type %s" % self.type
4906 return Expression(self.db,self.db._adapter.SUB,self,other,
4907 result_type)
4908 - def __mul__(self, other):
4909 return Expression(self.db,self.db._adapter.MUL,self,other,self.type)
4910
4911 - def __div__(self, other):
4912 return Expression(self.db,self.db._adapter.DIV,self,other,self.type)
4913
4914 - def __mod__(self, other):
4915 return Expression(self.db,self.db._adapter.MOD,self,other,self.type)
4916
4917 - def __eq__(self, value):
4918 return Query(self.db, self.db._adapter.EQ, self, value)
4919
4920 - def __ne__(self, value):
4921 return Query(self.db, self.db._adapter.NE, self, value)
4922
4923 - def __lt__(self, value):
4924 return Query(self.db, self.db._adapter.LT, self, value)
4925
4926 - def __le__(self, value):
4927 return Query(self.db, self.db._adapter.LE, self, value)
4928
4929 - def __gt__(self, value):
4930 return Query(self.db, self.db._adapter.GT, self, value)
4931
4932 - def __ge__(self, value):
4933 return Query(self.db, self.db._adapter.GE, self, value)
4934
4935 - def like(self, value):
4936 return Query(self.db, self.db._adapter.LIKE, self, value)
4937
4938 - def belongs(self, value):
4939 return Query(self.db, self.db._adapter.BELONGS, self, value)
4940
4941 - def startswith(self, value):
4942 if not self.type in ('string', 'text'):
4943 raise SyntaxError, "startswith used with incompatible field type"
4944 return Query(self.db, self.db._adapter.STARTSWITH, self, value)
4945
4946 - def endswith(self, value):
4947 if not self.type in ('string', 'text'):
4948 raise SyntaxError, "endswith used with incompatible field type"
4949 return Query(self.db, self.db._adapter.ENDSWITH, self, value)
4950
4951 - def contains(self, value):
4952 if not self.type in ('string', 'text') and not self.type.startswith('list:'):
4953 raise SyntaxError, "contains used with incompatible field type"
4954 return Query(self.db, self.db._adapter.CONTAINS, self, value)
4955
4958
4959
4960
4961
4962 -class SQLCustomType(object):
4963 """
4964 allows defining of custom SQL types
4965
4966 Example::
4967
4968 decimal = SQLCustomType(
4969 type ='double',
4970 native ='integer',
4971 encoder =(lambda x: int(float(x) * 100)),
4972 decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
4973 )
4974
4975 db.define_table(
4976 'example',
4977 Field('value', type=decimal)
4978 )
4979
4980 :param type: the web2py type (default = 'string')
4981 :param native: the backend type
4982 :param encoder: how to encode the value to store it in the backend
4983 :param decoder: how to decode the value retrieved from the backend
4984 :param validator: what validators to use ( default = None, will use the
4985 default validator for type)
4986 """
4987
4988 - def __init__(
4989 self,
4990 type='string',
4991 native=None,
4992 encoder=None,
4993 decoder=None,
4994 validator=None,
4995 _class=None,
4996 ):
4997
4998 self.type = type
4999 self.native = native
5000 self.encoder = encoder or (lambda x: x)
5001 self.decoder = decoder or (lambda x: x)
5002 self.validator = validator
5003 self._class = _class or type
5004
5007
5010
5013
5016
5017
5018 -class Field(Expression):
5019
5020 """
5021 an instance of this class represents a database field
5022
5023 example::
5024
5025 a = Field(name, 'string', length=32, default=None, required=False,
5026 requires=IS_NOT_EMPTY(), ondelete='CASCADE',
5027 notnull=False, unique=False,
5028 widget=None, label=None, comment=None,
5029 uploadfield=True, # True means store on disk,
5030 # 'a_field_name' means store in this field in db
5031 # False means file content will be discarded.
5032 writable=True, readable=True, update=None, authorize=None,
5033 autodelete=False, represent=None, uploadfolder=None,
5034 uploadseparate=False # upload to separate directories by uuid_keys
5035 # first 2 character and tablename.fieldname
5036 # False - old behavior
5037 # True - put uploaded file in
5038 # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
5039 # directory)
5040
5041 to be used as argument of DAL.define_table
5042
5043 allowed field types:
5044 string, boolean, integer, double, text, blob,
5045 date, time, datetime, upload, password
5046
5047 string fields default to a length of Adapter.maxcharlength (512, or 255 for MySQL)
5048 fields should have a default or they will be required in SQLFORMs
5049 the requires argument is used to validate the field input in SQLFORMs
5050
5051 """
5052
5053 - def __init__(
5054 self,
5055 fieldname,
5056 type='string',
5057 length=None,
5058 default=DEFAULT,
5059 required=False,
5060 requires=DEFAULT,
5061 ondelete='CASCADE',
5062 notnull=False,
5063 unique=False,
5064 uploadfield=True,
5065 widget=None,
5066 label=None,
5067 comment=None,
5068 writable=True,
5069 readable=True,
5070 update=None,
5071 authorize=None,
5072 autodelete=False,
5073 represent=None,
5074 uploadfolder=None,
5075 uploadseparate=False,
5076 compute=None,
5077 custom_store=None,
5078 custom_retrieve=None,
5079 ):
5080 self.db = None
5081 self.op = None
5082 self.first = None
5083 self.second = None
5084 if not isinstance(fieldname,str):
5085 raise SyntaxError, "missing field name"
5086 if fieldname.startswith(':'):
5087 fieldname,readable,writable=fieldname[1:],False,False
5088 elif fieldname.startswith('.'):
5089 fieldname,readable,writable=fieldname[1:],False,False
5090 if '=' in fieldname:
5091 fieldname,default = fieldname.split('=',1)
5092 self.name = fieldname = cleanup(fieldname)
5093 if hasattr(Table,fieldname) or fieldname[0] == '_' or \
5094 regex_python_keywords.match(fieldname):
5095 raise SyntaxError, 'Field: invalid field name: %s' % fieldname
5096 if isinstance(type, Table):
5097 type = 'reference ' + type._tablename
5098 self.type = type
5099 self.length = (length is None) and MAXCHARLENGTH or length
5100 if default==DEFAULT:
5101 self.default = update or None
5102 else:
5103 self.default = default
5104 self.required = required
5105 self.ondelete = ondelete.upper()
5106 self.notnull = notnull
5107 self.unique = unique
5108 self.uploadfield = uploadfield
5109 self.uploadfolder = uploadfolder
5110 self.uploadseparate = uploadseparate
5111 self.widget = widget
5112 self.label = label or ' '.join(item.capitalize() for item in fieldname.split('_'))
5113 self.comment = comment
5114 self.writable = writable
5115 self.readable = readable
5116 self.update = update
5117 self.authorize = authorize
5118 self.autodelete = autodelete
5119 if not represent and type in ('list:integer','list:string'):
5120 represent=lambda x: ', '.join(str(y) for y in x or [])
5121 self.represent = represent
5122 self.compute = compute
5123 self.isattachment = True
5124 self.custom_store = custom_store
5125 self.custom_retrieve = custom_retrieve
5129 if requires is None:
5130 self.requires = []
5131 else:
5132 self.requires = requires
5133
5134 - def store(self, file, filename=None, path=None):
5135 if self.custom_store:
5136 return self.custom_store(file,filename,path)
5137 if not filename:
5138 filename = file.name
5139 filename = os.path.basename(filename.replace('/', os.sep)\
5140 .replace('\\', os.sep))
5141 m = re.compile('\.(?P<e>\w{1,5})$').search(filename)
5142 extension = m and m.group('e') or 'txt'
5143 uuid_key = web2py_uuid().replace('-', '')[-16:]
5144 encoded_filename = base64.b16encode(filename).lower()
5145 newfilename = '%s.%s.%s.%s' % \
5146 (self._tablename, self.name, uuid_key, encoded_filename)
5147 newfilename = newfilename[:200] + '.' + extension
5148 if isinstance(self.uploadfield,Field):
5149 blob_uploadfield_name = self.uploadfield.uploadfield
5150 keys={self.uploadfield.name: newfilename,
5151 blob_uploadfield_name: file.read()}
5152 self.uploadfield.table.insert(**keys)
5153 elif self.uploadfield == True:
5154 if path:
5155 pass
5156 elif self.uploadfolder:
5157 path = self.uploadfolder
5158 elif self.db._adapter.folder:
5159 path = os.path.join(self.db._adapter.folder, '..', 'uploads')
5160 else:
5161 raise RuntimeError, "you must specify a Field(...,uploadfolder=...)"
5162 if self.uploadseparate:
5163 path = os.path.join(path,"%s.%s" % (self._tablename, self.name),uuid_key[:2])
5164 if not os.path.exists(path):
5165 os.makedirs(path)
5166 pathfilename = os.path.join(path, newfilename)
5167 dest_file = open(pathfilename, 'wb')
5168 shutil.copyfileobj(file, dest_file)
5169 dest_file.close()
5170 return newfilename
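# A usage sketch (table, field, and filename are hypothetical): store the
# stream under a safe generated name, then save that name in the record.
#
#     stream = open('photo.jpg', 'rb')
#     db.person.insert(photo=db.person.photo.store(stream, 'photo.jpg'))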
5171
5172 - def retrieve(self, name, path=None):
5173 if self.custom_retrieve:
5174 return self.custom_retrieve(name, path)
5175 import http
5176 if self.authorize or isinstance(self.uploadfield, str):
5177 row = self.db(self == name).select().first()
5178 if not row:
5179 raise http.HTTP(404)
5180 if self.authorize and not self.authorize(row):
5181 raise http.HTTP(403)
5182 try:
5183 m = regex_content.match(name)
5184 if not m or not self.isattachment:
5185 raise TypeError, 'Can\'t retrieve %s' % name
5186 filename = base64.b16decode(m.group('name'), True)
5187 filename = regex_cleanup_fn.sub('_', filename)
5188 except (TypeError, AttributeError):
5189 filename = name
5190 if isinstance(self.uploadfield, str):
5191 return (filename, cStringIO.StringIO(row[self.uploadfield] or ''))
5192 elif isinstance(self.uploadfield,Field):
5193 blob_uploadfield_name = self.uploadfield.uploadfield
5194 query = self.uploadfield == name
5195 data = self.uploadfield.table(query)[blob_uploadfield_name]
5196 return (filename, cStringIO.StringIO(data))
5197 else:
5198
5199 if path:
5200 pass
5201 elif self.uploadfolder:
5202 path = self.uploadfolder
5203 else:
5204 path = os.path.join(self.db._adapter.folder, '..', 'uploads')
5205 if self.uploadseparate:
5206 t = m.group('table')
5207 f = m.group('field')
5208 u = m.group('uuidkey')
5209 path = os.path.join(path,"%s.%s" % (t,f),u[:2])
5210 return (filename, open(os.path.join(path, name), 'rb'))
5211
5226
5238
5241
5244
5245 - def __str__(self):
5246 try:
5247 return '%s.%s' % (self.tablename, self.name)
5248 except:
5249 return '<no table>.%s' % self.name
5250
5251
5252 -class Query(object):
5253
5254 """
5255 a query object necessary to define a set.
5256 it can be stored or can be passed to DAL.__call__() to obtain a Set
5257
5258 Example::
5259
5260 query = db.users.name=='Max'
5261 set = db(query)
5262 records = set.select()
5263
5264 """
5265
5266 - def __init__(
5267 self,
5268 db,
5269 op,
5270 first=None,
5271 second=None,
5272 ):
5273 self.db = db
5274 self.op = op
5275 self.first = first
5276 self.second = second
5277
5278 - def __str__(self):
5279 return self.db._adapter.expand(self)
5280
5281 - def __and__(self, other):
5282 return Query(self.db,self.db._adapter.AND,self,other)
5283
5284 - def __or__(self, other):
5285 return Query(self.db,self.db._adapter.OR,self,other)
5286
5287 - def __invert__(self):
5288 if self.op==self.db._adapter.NOT:
5289 return self.first
5290 return Query(self.db,self.db._adapter.NOT,self)
5291
5292
5293 regex_quotes = re.compile("'[^']*'")
5294
5295
5296 - def xorify(orderby):
5297 if not orderby:
5298 return None
5299 orderby2 = orderby[0]
5300 for item in orderby[1:]:
5301 orderby2 = orderby2 | item
5302 return orderby2
5303
5304
5305 -class Set(object):
5306
5307 """
5308 a Set represents a set of records in the database,
5309 the records are identified by the query=Query(...) object.
5310 normally the Set is generated by DAL.__call__(Query(...))
5311
5312 given a set, for example
5313 set = db(db.users.name=='Max')
5314 you can:
5315 set.update(db.users.name='Massimo')
5316 set.delete() # all elements in the set
5317 set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
5318 and take subsets:
5319 subset = set(db.users.id<5)
5320 """
5321
5322 - def __init__(self, db, query):
5323 self.db = db
5324 self._db = db
5325 self.query = query
5326
5327 - def __call__(self, query):
5328 if isinstance(query,Table):
5329 query = query._id>0
5330 elif isinstance(query,Field):
5331 query = query!=None
5332 if self.query:
5333 return Set(self.db, self.query & query)
5334 else:
5335 return Set(self.db, query)
5336
5337 - def _count(self,distinct=None):
5338 return self.db._adapter._count(self.query,distinct)
5339
5340 - def _select(self, *fields, **attributes):
5341 return self.db._adapter._select(self.query,fields,attributes)
5342
5343 - def _delete(self):
5344 tablename=self.db._adapter.get_table(self.query)
5345 return self.db._adapter._delete(tablename,self.query)
5346
5347 - def _update(self, **update_fields):
5348 tablename = self.db._adapter.get_table(self.query)
5349 fields = self.db[tablename]._listify(update_fields,update=True)
5350 return self.db._adapter._update(tablename,self.query,fields)
5351
5352 - def isempty(self):
5353 return not self.select(limitby=(0,1))
5354
5355 - def count(self,distinct=None):
5356 return self.db._adapter.count(self.query,distinct)
5357
5358 - def select(self, *fields, **attributes):
5359 return self.db._adapter.select(self.query,fields,attributes)
5360
5365
5366 - def update(self, **update_fields):
5367 tablename = self.db._adapter.get_table(self.query)
5368 fields = self.db[tablename]._listify(update_fields,update=True)
5369 if not fields:
5370 raise SyntaxError, "No fields to update"
5371 self.delete_uploaded_files(update_fields)
5372 return self.db._adapter.update(tablename,self.query,fields)
5373
5374 - def delete_uploaded_files(self, upload_fields=None):
5375 table = self.db[self.db._adapter.tables(self.query)[0]]
5376
5377 if upload_fields:
5378 fields = upload_fields.keys()
5379 else:
5380 fields = table.fields
5381 fields = [f for f in fields if table[f].type == 'upload'
5382 and table[f].uploadfield == True
5383 and table[f].autodelete]
5384 if not fields:
5385 return
5386 for record in self.select(*[table[f] for f in fields]):
5387 for fieldname in fields:
5388 field = table[fieldname]
5389 oldname = record.get(fieldname, None)
5390 if not oldname:
5391 continue
5392 if upload_fields and oldname == upload_fields[fieldname]:
5393 continue
5394 uploadfolder = field.uploadfolder
5395 if not uploadfolder:
5396 uploadfolder = os.path.join(self.db._adapter.folder, '..', 'uploads')
5397 if field.uploadseparate:
5398 items = oldname.split('.')
5399 uploadfolder = os.path.join(uploadfolder,
5400 "%s.%s" % (items[0], items[1]),
5401 items[2][:2])
5402 oldpath = os.path.join(uploadfolder, oldname)
5403 if os.path.exists(oldpath):
5404 os.unlink(oldpath)
5405
5406
5407 - def update_record(pack, a=None):
5408 (colset, table, id) = pack
5409 b = a or dict(colset)
5410 c = dict([(k,v) for (k,v) in b.items() if k in table.fields and table[k].type!='id'])
5411 table._db(table._id==id).update(**c)
5412 for (k, v) in c.items():
5413 colset[k] = v
5414
5415
5416 -class Rows(object):
5417
5418 """
5419 A wrapper for the return value of a select. It basically represents a table.
5420 It has an iterator and each row is represented as a dictionary.
5421 """
5422
5423
5424
5425 - def __init__(
5426 self,
5427 db=None,
5428 records=[],
5429 colnames=[],
5430 compact=True,
5431 rawrows=None
5432 ):
5433 self.db = db
5434 self.records = records
5435 self.colnames = colnames
5436 self.compact = compact
5437 self.response = rawrows
5438
5439 - def setvirtualfields(self, **keyed_virtualfields):
5440 if not keyed_virtualfields:
5441 return self
5442 for row in self.records:
5443 for (tablename,virtualfields) in keyed_virtualfields.items():
5444 attributes = dir(virtualfields)
5445 virtualfields.__dict__.update(row)
5446 if not tablename in row:
5447 box = row[tablename] = Row()
5448 else:
5449 box = row[tablename]
5450 for attribute in attributes:
5451 if attribute[0] != '_':
5452 method = getattr(virtualfields,attribute)
5453 if hasattr(method,'im_func') and method.im_func.func_code.co_argcount:
5454 box[attribute]=method()
5455 return self
5456
5457 - def __and__(self, other):
5458 if self.colnames!=other.colnames: raise Exception, 'Cannot & incompatible Rows objects'
5459 records = self.records+other.records
5460 return Rows(self.db,records,self.colnames)
5461
5462 - def __or__(self, other):
5463 if self.colnames!=other.colnames: raise Exception, 'Cannot | incompatible Rows objects'
5464 records = self.records
5465 records += [record for record in other.records \
5466 if not record in records]
5467 return Rows(self.db,records,self.colnames)
5468
5469 - def __nonzero__(self):
5470 if len(self.records):
5471 return 1
5472 return 0
5473
5474 - def __len__(self):
5475 return len(self.records)
5476
5477 - def __getslice__(self, a, b):
5478 return Rows(self.db,self.records[a:b],self.colnames)
5479
5480 - def __getitem__(self, i):
5481 row = self.records[i]
5482 keys = row.keys()
5483 if self.compact and len(keys) == 1 and keys[0] != '_extra':
5484 return row[row.keys()[0]]
5485 return row
5486
5487 - def __iter__(self):
5488 """
5489 iterator over records
5490 """
5491
5492 for i in xrange(len(self)):
5493 yield self[i]
5494
5495 - def __str__(self):
5496 """
5497 serializes the table into a csv file
5498 """
5499
5500 s = cStringIO.StringIO()
5501 self.export_to_csv_file(s)
5502 return s.getvalue()
5503
5504 - def first(self):
5505 if not self.records:
5506 return None
5507 return self[0]
5508
5509 - def last(self):
5510 if not self.records:
5511 return None
5512 return self[-1]
5513
5514 - def find(self, f):
5515 """
5516 returns a new Rows object, a subset of the original object,
5517 filtered by the function f
5518 """
5519 if not self.records:
5520 return Rows(self.db, [], self.colnames)
5521 records = []
5522 for i in range(0,len(self)):
5523 row = self[i]
5524 if f(row):
5525 records.append(self.records[i])
5526 return Rows(self.db, records, self.colnames)
5527
5528 - def exclude(self, f):
5529 """
5530 removes elements from the calling Rows object, filtered by the function f,
5531 and returns a new Rows object containing the removed elements
5532 """
5533 if not self.records:
5534 return Rows(self.db, [], self.colnames)
5535 removed = []
5536 i=0
5537 while i<len(self):
5538 row = self[i]
5539 if f(row):
5540 removed.append(self.records[i])
5541 del self.records[i]
5542 else:
5543 i += 1
5544 return Rows(self.db, removed, self.colnames)
5545
5546 - def sort(self, f, reverse=False):
5547 """
5548 returns a list of sorted elements (not sorted in place)
5549 """
5550 return Rows(self.db,sorted(self,key=f,reverse=reverse),self.colnames)
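# A sketch combining find, exclude and sort (field names are hypothetical);
# note that exclude mutates the original Rows object, while find and sort
# return new ones.
#
#     rows = db(db.person).select()
#     adults = rows.find(lambda row: row.age >= 18)
#     minors = rows.exclude(lambda row: row.age < 18)  # removed from rows
#     by_name = rows.sort(lambda row: row.name)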
5551
5552 - def as_list(self,
5553 compact=True,
5554 storage_to_dict=True,
5555 datetime_to_str=True):
5556 """
5557 returns the data as a list or dictionary.
5558 :param storage_to_dict: when True each row is converted to a dict, otherwise kept as a Row (default True)
5559 :param datetime_to_str: convert datetime fields as strings (default True)
5560 """
5561 (oc, self.compact) = (self.compact, compact)
5562 if storage_to_dict:
5563 items = [item.as_dict(datetime_to_str) for item in self]
5564 else:
5565 items = [item for item in self]
5566 self.compact = oc # restore the original compact setting
5567 return items
5568
5569
5570 - def as_dict(self,
5571 key='id',
5572 compact=True,
5573 storage_to_dict=True,
5574 datetime_to_str=True):
5575 """
5576 returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)
5577
5578 :param key: the name of the field to be used as dict key, normally the id
5579 :param compact: if True, rows from a single table are unwrapped (default True)
5580 :param storage_to_dict: when True returns a dict, otherwise a list(default True)
5581 :param datetime_to_str: convert datetime fields as strings (default True)
5582 """
5583 rows = self.as_list(compact, storage_to_dict, datetime_to_str)
5584 if isinstance(key,str) and key.count('.')==1:
5585 (table, field) = key.split('.')
5586 return dict([(r[table][field],r) for r in rows])
5587 elif isinstance(key,str):
5588 return dict([(r[key],r) for r in rows])
5589 else:
5590 return dict([(key(r),r) for r in rows])
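# A keying sketch (field names are hypothetical): as_dict indexes the
# selected rows by a field value instead of by position.
#
#     rows = db(db.person).select()
#     by_id = rows.as_dict()              # {1: {...}, 2: {...}}
#     by_name = rows.as_dict(key='name')  # {'Max': {...}, ...}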
5591
5592 - def export_to_csv_file(self, ofile, null='<NULL>', *args, **kwargs):
5593 """
5594 export data to csv, the first line contains the column names
5595
5596 :param ofile: where the csv must be exported to
5597 :param null: how null values must be represented (default '<NULL>')
5598 :param delimiter: delimiter to separate values (default ',')
5599 :param quotechar: character to use to quote string values (default '"')
5600 :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
5601 :param represent: use the fields .represent value (default False)
5602 :param colnames: list of column names to use (default self.colnames)
5603 This only works when exporting a Rows object;
5604 do not use it with db.export_to_csv_file()
5605 """
5606 delimiter = kwargs.get('delimiter', ',')
5607 quotechar = kwargs.get('quotechar', '"')
5608 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
5609 represent = kwargs.get('represent', False)
5610 writer = csv.writer(ofile, delimiter=delimiter,
5611 quotechar=quotechar, quoting=quoting)
5612 colnames = kwargs.get('colnames', self.colnames)
5613
5614 writer.writerow(colnames)
5615
5616 def none_exception(value):
5617 """
5618 returns a cleaned up value that can be used for csv export:
5619 - unicode text is encoded as such
5620 - None values are replaced with the given representation (default <NULL>)
5621 """
5622 if value is None:
5623 return null
5624 elif isinstance(value, unicode):
5625 return value.encode('utf8')
5626 elif isinstance(value,Reference):
5627 return int(value)
5628 elif hasattr(value, 'isoformat'):
5629 return value.isoformat()[:19].replace('T', ' ')
5630 elif isinstance(value, (list,tuple)):
5631 return bar_encode(value)
5632 return value
5633
5634 for record in self:
5635 row = []
5636 for col in colnames:
5637 if not table_field.match(col):
5638 row.append(record._extra[col])
5639 else:
5640 (t, f) = col.split('.')
5641 field = self.db[t][f]
5642 if isinstance(record.get(t, None), (Row,dict)):
5643 value = record[t][f]
5644 else:
5645 value = record[f]
5646 if represent and field.represent:
5647 value = field.represent(value)
5648 row.append(none_exception(value))
5649 writer.writerow(row)
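# An export sketch (the filename is hypothetical): represent=True applies
# each field's .represent callable to the values before writing.
#
#     rows = db(db.person).select()
#     rows.export_to_csv_file(open('person.csv', 'wb'), represent=True)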
5650
5651 - def xml(self):
5652 """
5653 serializes the table using sqlhtml.SQLTABLE (if present)
5654 """
5655
5656 import sqlhtml
5657 return sqlhtml.SQLTABLE(self).xml()
5658
5659 - def json(self, mode='object', default=None):
5660 """
5661 serializes the table to a JSON list of objects
5662 """
5663 mode = mode.lower()
5664 if not mode in ['object', 'array']:
5665 raise SyntaxError, 'Invalid JSON serialization mode: %s' % mode
5666
5667 def inner_loop(record, col):
5668 (t, f) = col.split('.')
5669 res = None
5670 if not table_field.match(col):
5671 res = record._extra[col]
5672 else:
5673 if isinstance(record.get(t, None), Row):
5674 res = record[t][f]
5675 else:
5676 res = record[f]
5677 if mode == 'object':
5678 return (f, res)
5679 else:
5680 return res
5681
5682 if mode == 'object':
5683 items = [dict([inner_loop(record, col) for col in
5684 self.colnames]) for record in self]
5685 else:
5686 items = [[inner_loop(record, col) for col in self.colnames]
5687 for record in self]
5688 if have_serializers:
5689 return serializers.json(items,default=default or serializers.custom_json)
5690 else:
5691 import simplejson
5692 return simplejson.dumps(items)
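# A serialization sketch: 'object' mode emits a list of dicts keyed by
# field name, 'array' mode a list of value lists in column order.
#
#     rows.json()              # '[{"id": 1, "name": "Max"}]'
#     rows.json(mode='array')  # '[[1, "Max"]]'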
5693
5694 - def Rows_unpickler(data):
5695 return cPickle.loads(data)
5696
5697 - def Rows_pickler(data):
5698 return Rows_unpickler, \
5699 (cPickle.dumps(data.as_list(storage_to_dict=True,
5700 datetime_to_str=False)),)
5701
5702 copy_reg.pickle(Rows, Rows_pickler, Rows_unpickler)
5703
5704
5705
5706
5707
5708
5709 - def test_all():
5710 """
5711
5712 >>> if len(sys.argv)<2: db = DAL(\"sqlite://test.db\")
5713 >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
5714 >>> tmp = db.define_table('users',\
5715 Field('stringf', 'string', length=32, required=True),\
5716 Field('booleanf', 'boolean', default=False),\
5717 Field('passwordf', 'password', notnull=True),\
5718 Field('uploadf', 'upload'),\
5719 Field('blobf', 'blob'),\
5720 Field('integerf', 'integer', unique=True),\
5721 Field('doublef', 'double', unique=True,notnull=True),\
5722 Field('datef', 'date', default=datetime.date.today()),\
5723 Field('timef', 'time'),\
5724 Field('datetimef', 'datetime'),\
5725 migrate='test_user.table')
5726
5727 Insert a record
5728
5729 >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
5730 uploadf=None, integerf=5, doublef=3.14,\
5731 datef=datetime.date(2001, 1, 1),\
5732 timef=datetime.time(12, 30, 15),\
5733 datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
5734 1
5735
5736 Drop the table
5737
5738 >>> db.users.drop()
5739
5740 Examples of insert, select, update, delete
5741
5742 >>> tmp = db.define_table('person',\
5743 Field('name'),\
5744 Field('birth','date'),\
5745 migrate='test_person.table')
5746 >>> person_id = db.person.insert(name=\"Marco\",birth='2005-06-22')
5747 >>> person_id = db.person.insert(name=\"Massimo\",birth='1971-12-21')
5748
5749 commented len(db().select(db.person.ALL))
5750 commented 2
5751
5752 >>> me = db(db.person.id==person_id).select()[0] # test select
5753 >>> me.name
5754 'Massimo'
5755 >>> db(db.person.name=='Massimo').update(name='massimo') # test update
5756 1
5757 >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
5758 1
5759
5760 Update a single record
5761
5762 >>> me.update_record(name=\"Max\")
5763 >>> me.name
5764 'Max'
5765
5766 Examples of complex search conditions
5767
5768 >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
5769 1
5770 >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
5771 1
5772 >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
5773 1
5774 >>> me = db(db.person.id==person_id).select(db.person.name)[0]
5775 >>> me.name
5776 'Max'
5777
5778 Examples of search conditions using extract from date/datetime/time
5779
5780 >>> len(db(db.person.birth.month()==12).select())
5781 1
5782 >>> len(db(db.person.birth.year()>1900).select())
5783 1
5784
5785 Example of usage of NULL
5786
5787 >>> len(db(db.person.birth==None).select()) ### test NULL
5788 0
5789 >>> len(db(db.person.birth!=None).select()) ### test NULL
5790 1
5791
5792 Examples of search conditions using lower, upper, and like
5793
5794 >>> len(db(db.person.name.upper()=='MAX').select())
5795 1
5796 >>> len(db(db.person.name.like('%ax')).select())
5797 1
5798 >>> len(db(db.person.name.upper().like('%AX')).select())
5799 1
5800 >>> len(db(~db.person.name.upper().like('%AX')).select())
5801 0
5802
5803 orderby, groupby and limitby
5804
5805 >>> people = db().select(db.person.name, orderby=db.person.name)
5806 >>> order = db.person.name|~db.person.birth
5807 >>> people = db().select(db.person.name, orderby=order)
5808
5809 >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)
5810
5811 >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))
5812
5813 Example of a one-to-many relation
5814
5815 >>> tmp = db.define_table('dog',\
5816 Field('name'),\
5817 Field('birth','date'),\
5818 Field('owner',db.person),\
5819 migrate='test_dog.table')
5820 >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
5821 1
5822
5823 A simple JOIN
5824
5825 >>> len(db(db.dog.owner==db.person.id).select())
5826 1
5827
5828 >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
5829 1
5830
5831 Drop tables
5832
5833 >>> db.dog.drop()
5834 >>> db.person.drop()
5835
5836 Example of a many-to-many relation and Set
5837
5838 >>> tmp = db.define_table('author', Field('name'),\
5839 migrate='test_author.table')
5840 >>> tmp = db.define_table('paper', Field('title'),\
5841 migrate='test_paper.table')
5842 >>> tmp = db.define_table('authorship',\
5843 Field('author_id', db.author),\
5844 Field('paper_id', db.paper),\
5845 migrate='test_authorship.table')
5846 >>> aid = db.author.insert(name='Massimo')
5847 >>> pid = db.paper.insert(title='QCD')
5848 >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)
5849
5850 Define a Set
5851
5852 >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
5853 >>> rows = authored_papers.select(db.author.name, db.paper.title)
5854 >>> for row in rows: print row.author.name, row.paper.title
5855 Massimo QCD
5856
5857 Example of search condition using belongs
5858
5859 >>> set = (1, 2, 3)
5860 >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
5861 >>> print rows[0].title
5862 QCD
5863
5864 Example of search condition using nested select
5865
5866 >>> nested_select = db()._select(db.authorship.paper_id)
5867 >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
5868 >>> print rows[0].title
5869 QCD
5870
5871 Example of expressions
5872
5873 >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
5874 >>> db(mynumber.id>0).delete()
5875 0
5876 >>> for i in range(10): tmp = mynumber.insert(x=i)
5877 >>> db(mynumber.id>0).select(mynumber.x.sum())[0](mynumber.x.sum())
5878 45
5879
5880 >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
5881 5
5882
5883 Output in csv
5884
5885 >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
5886 author.name,paper.title\r
5887 Massimo,QCD
5888
5889 Delete all leftover tables
5890
5891 >>> DAL.distributed_transaction_commit(db)
5892
5893 >>> db.mynumber.drop()
5894 >>> db.authorship.drop()
5895 >>> db.author.drop()
5896 >>> db.paper.drop()
5897 """
5898
5899
5900
5901
5902 SQLField = Field
5903 SQLTable = Table
5904 SQLXorable = Expression
5905 SQLQuery = Query
5906 SQLSet = Set
5907 SQLRows = Rows
5908 SQLStorage = Row
5909 SQLDB = DAL
5910 GQLDB = DAL
5911 DAL.Field = Field
5912 DAL.Table = Table
5913
5914
5915
5916
5917
5918 if __name__ == '__main__':
5919 import doctest
5920 doctest.testmod()
5921