1
2
3
4 """
5 This file is part of the web2py Web Framework
6 Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
7 License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
8
9 Thanks to
10 * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
11 * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
12 * Denes
13 * Chris Clark
14 * clach05
15 * Denes Lengyel
16 * and many others who have contributed to current and previous versions
17
18 This file contains the DAL support for many relational databases,
19 including:
20 - SQLite
21 - MySQL
22 - Postgres
23 - Oracle
24 - MS SQL
25 - DB2
26 - Interbase
27 - Ingres
28 - SapDB (experimental)
29 - Cubrid (experimental)
30 - CouchDB (experimental)
31 - MongoDB (in progress)
32 - Google:nosql
33 - Google:sql
34
35 Example of usage:
36
37 >>> # from dal import DAL, Field
38
39 ### create DAL connection (and create DB if not exists)
40 >>> db=DAL(('mysql://a:b@localhost/x','sqlite://storage.sqlite'),folder=None)
41
42 ### define a table 'person' (create/alter as necessary)
43 >>> person = db.define_table('person',Field('name','string'))
44
45 ### insert a record
46 >>> id = person.insert(name='James')
47
48 ### retrieve it by id
49 >>> james = person(id)
50
51 ### retrieve it by name
52 >>> james = person(name='James')
53
54 ### retrieve it by arbitrary query
55 >>> query = (person.name=='James')&(person.name.startswith('J'))
56 >>> james = db(query).select(person.ALL)[0]
57
58 ### update one record
59 >>> james.update_record(name='Jim')
60
61 ### update multiple records by query
62 >>> db(person.name.like('J%')).update(name='James')
63 1
64
65 ### delete records by query
66 >>> db(person.name.lower()=='jim').delete()
67 0
68
69 ### retrieve multiple records (rows)
70 >>> people = db(person).select(orderby=person.name,groupby=person.name,limitby=(0,100))
71
72 ### further filter them
73 >>> james = people.find(lambda row: row.name=='James').first()
74 >>> print james.id, james.name
75 1 James
76
77 ### check aggregates
78 >>> counter = person.id.count()
79 >>> print db(person).select(counter).first()(counter)
80 1
81
82 ### delete one record
83 >>> james.delete_record()
84 1
85
86 ### delete (drop) entire database table
87 >>> person.drop()
88
89 Supported field types:
90 id string text boolean integer double decimal password upload blob time date datetime list:string list:integer list:reference
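
### illustrative sketch (not executed as a doctest) mixing several types:
### db.define_table('product',
###     Field('name', 'string'),
###     Field('price', 'decimal(10,2)'),
###     Field('available', 'boolean'))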
91
92 Supported DAL URI strings:
93 'sqlite://test.db'
94 'sqlite:memory'
95 'jdbc:sqlite://test.db'
96 'mysql://root:none@localhost/test'
97 'postgres://mdipierro:none@localhost/test'
98 'jdbc:postgres://mdipierro:none@localhost/test'
99 'mssql://web2py:none@A64X2/web2py_test'
100 'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
101 'oracle://username:password@database'
102 'firebird://user:password@server:3050/database'
103 'db2://DSN=dsn;UID=user;PWD=pass'
104 'firebird://username:password@hostname/database'
105 'firebird_embedded://username:password@c://path'
106 'informix://user:password@server:3050/database'
107 'informixu://user:password@server:3050/database' # unicode informix
108 'google:datastore' # for google app engine datastore
109 'google:sql' # for google app engine with sql (mysql compatible)
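
### most of the above also accept a pool_size argument, e.g. (illustrative):
### db = DAL('postgres://user:password@localhost/test', pool_size=10)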
110
111 For more info:
112 help(DAL)
113 help(Field)
114 """
115
116
117
118
119
120 __all__ = ['DAL', 'Field']
121 MAXCHARLENGTH = 512
122 INFINITY = 32768
123
124 import re
125 import sys
126 import locale
127 import os
128 import types
129 import cPickle
130 import datetime
131 import threading
132 import time
133 import cStringIO
134 import csv
135 import copy
136 import socket
137 import logging
138 import copy_reg
139 import base64
140 import shutil
141 import marshal
142 import decimal
143 import struct
144 import urllib
145 import hashlib
146 import uuid
147 import glob
148
149 CALLABLETYPES = (types.LambdaType, types.FunctionType, types.BuiltinFunctionType,
150 types.MethodType, types.BuiltinMethodType)
151
152
153
154
155
156 try:
157 from utils import web2py_uuid
158 except ImportError:
159 import uuid
160 web2py_uuid = lambda: str(uuid.uuid4())
161
162 try:
163 import portalocker
164 have_portalocker = True
165 except ImportError:
166 have_portalocker = False
167
168 try:
169 import serializers
170 have_serializers = True
171 except ImportError:
172 have_serializers = False
173
174 try:
175 import validators
176 have_validators = True
177 except ImportError:
178 have_validators = False
179
180 logger = logging.getLogger("web2py.dal")
181 DEFAULT = lambda:0
182
183 sql_locker = threading.RLock()
184 thread = threading.local()
185
186
187
188
189 regex_dbname = re.compile(r'^(\w+)(:\w+)*')
190 table_field = re.compile(r'^[\w_]+\.[\w_]+$')
191 regex_content = re.compile(r'(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
192 regex_cleanup_fn = re.compile(r'[\'"\s;]+')
193 string_unpack = re.compile(r'(?<!\|)\|(?!\|)')
194 regex_python_keywords = re.compile(r'^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
195
196
197
198
199
200 drivers = []
201
202 try:
203 from pysqlite2 import dbapi2 as sqlite3
204 drivers.append('pysqlite2')
205 except ImportError:
206 try:
207 from sqlite3 import dbapi2 as sqlite3
208 drivers.append('SQLite3')
209 except ImportError:
210 logger.debug('no sqlite3 or pysqlite2.dbapi2 driver')
211
212 try:
213 import contrib.pymysql as pymysql
214 drivers.append('pymysql')
215 except ImportError:
216 logger.debug('no pymysql driver')
217
218 try:
219 import psycopg2
220 drivers.append('PostgreSQL')
221 except ImportError:
222 logger.debug('no psycopg2 driver')
223
224 try:
225 import cx_Oracle
226 drivers.append('Oracle')
227 except ImportError:
228 logger.debug('no cx_Oracle driver')
229
230 try:
231 import pyodbc
232 drivers.append('MSSQL/DB2')
233 except ImportError:
234 logger.debug('no MSSQL/DB2 driver')
235
236 try:
237 import kinterbasdb
238 drivers.append('Interbase')
239 except ImportError:
240 logger.debug('no kinterbasdb driver')
241
242 try:
243 import firebirdsql
244 drivers.append('Firebird')
245 except ImportError:
246 logger.debug('no Firebird driver')
247
248 try:
249 import informixdb
250 drivers.append('Informix')
251 logger.warning('Informix support is experimental')
252 except ImportError:
253 logger.debug('no informixdb driver')
254
255 try:
256 import sapdb
257 drivers.append('SAPDB')
258 logger.warning('SAPDB support is experimental')
259 except ImportError:
260 logger.debug('no sapdb driver')
261
262 try:
263 import cubriddb
264 drivers.append('Cubrid')
265 logger.warning('Cubrid support is experimental')
266 except ImportError:
267 logger.debug('no cubriddb driver')
268
269 try:
270 from com.ziclix.python.sql import zxJDBC
271 import java.sql
272 from org.sqlite import JDBC
273 drivers.append('zxJDBC')
274 logger.warning('zxJDBC support is experimental')
275 is_jdbc = True
276 except ImportError:
277 logger.debug('no zxJDBC driver')
278 is_jdbc = False
279
280 try:
281 import ingresdbi
282 drivers.append('Ingres')
283 except ImportError:
284 logger.debug('no Ingres driver')
285
286
287 try:
288 from new import classobj
289 from google.appengine.ext import db as gae
290 from google.appengine.api import namespace_manager, rdbms
291 from google.appengine.api.datastore_types import Key
292 from google.appengine.ext.db.polymodel import PolyModel
293
294 drivers.append('google')
295
296 class GAEDecimalProperty(gae.Property):
297 """
298 GAE decimal implementation
299 """
300 data_type = decimal.Decimal
301
302 def __init__(self, precision, scale, **kwargs):
303 super(GAEDecimalProperty, self).__init__(**kwargs)
304 d = '1.'
305 for x in range(scale):
306 d += '0'
307 self.round = decimal.Decimal(d)
308
309 def get_value_for_datastore(self, model_instance):
310 value = super(GAEDecimalProperty, self).get_value_for_datastore(model_instance)
311 if value:
312 return str(value)
313 else:
314 return None
315
316 def make_value_from_datastore(self, value):
317 if value:
318 return decimal.Decimal(value).quantize(self.round)
319 else:
320 return None
321
322 def validate(self, value):
323 value = super(GAEDecimalProperty, self).validate(value)
324 if value is None or isinstance(value, decimal.Decimal):
325 return value
326 elif isinstance(value, basestring):
327 return decimal.Decimal(value)
328 raise gae.BadValueError("Property %s must be a Decimal or string." % self.name)
329
330 except ImportError:
331 pass
332
333
334
335
336
337 class ConnectionPool(object):
338
339 pools = {}
340
341 @staticmethod
342 def set_folder(folder):
343 thread.folder = folder
344
345
346
347 @staticmethod
348 def close_all_instances(action):
349 """ to close cleanly databases in a multithreaded environment """
350 if not hasattr(thread,'instances'):
351 return
352 while thread.instances:
353 instance = thread.instances.pop()
354 getattr(instance,action)()
355
356 really = True
357 if instance.pool_size:
358 sql_locker.acquire()
359 pool = ConnectionPool.pools[instance.uri]
360 if len(pool) < instance.pool_size:
361 pool.append(instance.connection)
362 really = False
363 sql_locker.release()
364 if really:
365 getattr(instance,'close')()
366 return
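
# illustrative usage sketch (not part of the original source): at the end of
# a request cycle a framework can recycle every thread-local connection with
# ConnectionPool.close_all_instances('commit'), or discard the pending work
# with ConnectionPool.close_all_instances('rollback')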
367
368 def find_or_make_work_folder(self):
369 """ this actually does not make the folder. it has to be there """
370 if hasattr(thread,'folder'):
371 self.folder = thread.folder
372 else:
373 self.folder = thread.folder = ''
374
375
376 if False and self.folder and not os.path.exists(self.folder):
377 os.mkdir(self.folder)
378
396
397
398
399
400
401
402 class BaseAdapter(ConnectionPool):
403
404 maxcharlength = INFINITY
405 commit_on_alter_table = False
406 support_distributed_transaction = False
407 uploads_in_blob = False
408 types = {
409 'boolean': 'CHAR(1)',
410 'string': 'CHAR(%(length)s)',
411 'text': 'TEXT',
412 'password': 'CHAR(%(length)s)',
413 'blob': 'BLOB',
414 'upload': 'CHAR(%(length)s)',
415 'integer': 'INTEGER',
416 'double': 'DOUBLE',
417 'decimal': 'DOUBLE',
418 'date': 'DATE',
419 'time': 'TIME',
420 'datetime': 'TIMESTAMP',
421 'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
422 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
423 'list:integer': 'TEXT',
424 'list:string': 'TEXT',
425 'list:reference': 'TEXT',
426 }
427
428 def file_exists(self, filename):
429 """
430 to be used ONLY for files that on GAE may not be on filesystem
431 """
432 return os.path.exists(filename)
433
434 def file_open(self, filename, mode='rb', lock=True):
448
457
460
461 def __init__(self,db,uri,pool_size=0,folder=None,db_codec='UTF-8',
462 credential_decoder=lambda x:x, driver_args={},
463 adapter_args={}):
464 self.db = db
465 self.dbengine = "None"
466 self.uri = uri
467 self.pool_size = pool_size
468 self.folder = folder
469 self.db_codec = db_codec
470 class Dummy(object):
471 lastrowid = 1
472 def __getattr__(self, value):
473 return lambda *a, **b: []
474 self.connection = Dummy()
475 self.cursor = Dummy()
476
477 def sequence_name(self,tablename):
478 return '%s_sequence' % tablename
479
480 def trigger_name(self,tablename):
481 return '%s_trigger' % tablename
482
483
484 def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
485 fields = []
486 sql_fields = {}
487 sql_fields_aux = {}
488 TFK = {}
489 tablename = table._tablename
490 sortable = 0
491 for field in table:
492 sortable += 1
493 k = field.name
494 if isinstance(field.type,SQLCustomType):
495 ftype = field.type.native or field.type.type
496 elif field.type.startswith('reference'):
497 referenced = field.type[10:].strip()
498 constraint_name = self.constraint_name(tablename, field.name)
499 if hasattr(table,'_primarykey'):
500 rtablename,rfieldname = referenced.split('.')
501 rtable = table._db[rtablename]
502 rfield = rtable[rfieldname]
503
504 if rfieldname in rtable._primarykey or rfield.unique:
505 ftype = self.types[rfield.type[:9]] % dict(length=rfield.length)
506
507 if not rfield.unique and len(rtable._primarykey)>1 :
508
509 if rtablename not in TFK:
510 TFK[rtablename] = {}
511 TFK[rtablename][rfieldname] = field.name
512 else:
513 ftype = ftype + \
514 self.types['reference FK'] %dict(\
515 constraint_name=constraint_name,
516 table_name=tablename,
517 field_name=field.name,
518 foreign_key='%s (%s)'%(rtablename, rfieldname),
519 on_delete_action=field.ondelete)
520 else:
521
522 id_fieldname = referenced in table._db and table._db[referenced]._id.name or 'id'
523 ftype = self.types[field.type[:9]]\
524 % dict(table_name=tablename,
525 field_name=field.name,
526 constraint_name=constraint_name,
527 foreign_key=referenced + ('(%s)' % id_fieldname),
528 on_delete_action=field.ondelete)
529 elif field.type.startswith('list:reference'):
530 ftype = self.types[field.type[:14]]
531 elif field.type.startswith('decimal'):
532 precision, scale = [int(x) for x in field.type[8:-1].split(',')]
533 ftype = self.types[field.type[:7]] % \
534 dict(precision=precision,scale=scale)
535 elif not field.type in self.types:
536 raise SyntaxError, 'Field: unknown field type: %s for %s' % \
537 (field.type, field.name)
538 else:
539 ftype = self.types[field.type]\
540 % dict(length=field.length)
541 if not field.type.startswith('id') and not field.type.startswith('reference'):
542 if field.notnull:
543 ftype += ' NOT NULL'
544 else:
545 ftype += self.ALLOW_NULL()
546 if field.unique:
547 ftype += ' UNIQUE'
548
549
550 sql_fields[field.name] = dict(sortable=sortable,
551 type=str(field.type),
552 sql=ftype)
553
554 if isinstance(field.default,(str,int,float)):
555
556
557
558
559
560 not_null = self.NOT_NULL(field.default,field.type)
561 ftype = ftype.replace('NOT NULL',not_null)
562 sql_fields_aux[field.name] = dict(sql=ftype)
563
564 fields.append('%s %s' % (field.name, ftype))
565 other = ';'
566
567
568 if self.dbengine == 'mysql':
569 if not hasattr(table, "_primarykey"):
570 fields.append('PRIMARY KEY(%s)' % table._id.name)
571 other = ' ENGINE=InnoDB CHARACTER SET utf8;'
572
573 fields = ',\n '.join(fields)
574 for rtablename in TFK:
575 rfields = TFK[rtablename]
576 pkeys = table._db[rtablename]._primarykey
577 fkeys = [ rfields[k] for k in pkeys ]
578 fields = fields + ',\n ' + \
579 self.types['reference TFK'] %\
580 dict(table_name=tablename,
581 field_name=', '.join(fkeys),
582 foreign_table=rtablename,
583 foreign_key=', '.join(pkeys),
584 on_delete_action=field.ondelete)
585
586 if hasattr(table,'_primarykey'):
587 query = '''CREATE TABLE %s(\n %s,\n %s) %s''' % \
588 (tablename, fields, self.PRIMARY_KEY(', '.join(table._primarykey)),other)
589 else:
590 query = '''CREATE TABLE %s(\n %s\n)%s''' % \
591 (tablename, fields, other)
592
593 if self.uri.startswith('sqlite:///'):
594 path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
595 dbpath = self.uri[9:self.uri.rfind('/')].decode('utf8').encode(path_encoding)
596 else:
597 dbpath = self.folder
598
599 if not migrate:
600 return query
601 elif self.uri.startswith('sqlite:memory'):
602 table._dbt = None
603 elif isinstance(migrate, str):
604 table._dbt = os.path.join(dbpath, migrate)
605 else:
606 table._dbt = os.path.join(dbpath, '%s_%s.table' \
607 % (table._db._uri_hash, tablename))
608 if table._dbt:
609 table._loggername = os.path.join(dbpath, 'sql.log')
610 logfile = self.file_open(table._loggername, 'a')
611 else:
612 logfile = None
613 if not table._dbt or not self.file_exists(table._dbt):
614 if table._dbt:
615 logfile.write('timestamp: %s\n'
616 % datetime.datetime.today().isoformat())
617 logfile.write(query + '\n')
618 if not fake_migrate:
619 self.create_sequence_and_triggers(query,table)
620 table._db.commit()
621 if table._dbt:
622 tfile = self.file_open(table._dbt, 'w')
623 cPickle.dump(sql_fields, tfile)
624 self.file_close(tfile)
625 if fake_migrate:
626 logfile.write('faked!\n')
627 else:
628 logfile.write('success!\n')
629 else:
630 tfile = self.file_open(table._dbt, 'r')
631 try:
632 sql_fields_old = cPickle.load(tfile)
633 except EOFError:
634 self.file_close(tfile)
635 self.file_close(logfile)
636 raise RuntimeError, 'File %s appears corrupted' % table._dbt
637 self.file_close(tfile)
638 if sql_fields != sql_fields_old:
639 self.migrate_table(table,
640 sql_fields, sql_fields_old,
641 sql_fields_aux, logfile,
642 fake_migrate=fake_migrate)
643 self.file_close(logfile)
644 return query
645
646 def migrate_table(
647 self,
648 table,
649 sql_fields,
650 sql_fields_old,
651 sql_fields_aux,
652 logfile,
653 fake_migrate=False,
654 ):
655 tablename = table._tablename
656 def fix(item):
657 k,v=item
658 if not isinstance(v,dict):
659 v=dict(type='unknown',sql=v)
660 return k.lower(),v
661
662 sql_fields = dict(fix(v) for v in sql_fields.items())
663 sql_fields_old = dict(fix(v) for v in sql_fields_old.items())
664 sql_fields_aux = dict(fix(v) for v in sql_fields_aux.items())
665
666 keys = sql_fields.keys()
667 for key in sql_fields_old:
668 if not key in keys:
669 keys.append(key)
670 if self.dbengine == 'mssql':
671 new_add = '; ALTER TABLE %s ADD ' % tablename
672 else:
673 new_add = ', ADD '
674
675 metadata_change = False
676 sql_fields_current = copy.copy(sql_fields_old)
677 for key in keys:
678 query = None
679 if not key in sql_fields_old:
680 sql_fields_current[key] = sql_fields[key]
681 query = ['ALTER TABLE %s ADD %s %s;' % \
682 (tablename, key,
683 sql_fields_aux[key]['sql'].replace(', ', new_add))]
684 metadata_change = True
685 elif self.dbengine == 'sqlite':
686 if key in sql_fields:
687 sql_fields_current[key] = sql_fields[key]
688 metadata_change = True
689 elif not key in sql_fields:
690 del sql_fields_current[key]
691 if not self.dbengine in ('firebird',):
692 query = ['ALTER TABLE %s DROP COLUMN %s;' % (tablename, key)]
693 else:
694 query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
695 metadata_change = True
696 elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
697 and not isinstance(table[key].type, SQLCustomType) \
698 and not (table[key].type.startswith('reference') and \
699 sql_fields[key]['sql'].startswith('INT,') and \
700 sql_fields_old[key]['sql'].startswith('INT NOT NULL,')):
701 sql_fields_current[key] = sql_fields[key]
702 t = tablename
703 tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
704 if not self.dbengine in ('firebird',):
705 query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
706 'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
707 'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
708 'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
709 'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
710 'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
711 else:
712 query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
713 'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
714 'ALTER TABLE %s DROP %s;' % (t, key),
715 'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
716 'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
717 'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
718 metadata_change = True
719 elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
720 sql_fields_current[key] = sql_fields[key]
721 metadata_change = True
722
723 if query:
724 logfile.write('timestamp: %s\n'
725 % datetime.datetime.today().isoformat())
726 table._db['_lastsql'] = '\n'.join(query)
727 for sub_query in query:
728 logfile.write(sub_query + '\n')
729 if not fake_migrate:
730 self.execute(sub_query)
731
732
733
734 if table._db._adapter.commit_on_alter_table:
735 table._db.commit()
736 tfile = self.file_open(table._dbt, 'w')
737 cPickle.dump(sql_fields_current, tfile)
738 self.file_close(tfile)
739 logfile.write('success!\n')
740 else:
741 logfile.write('faked!\n')
742 elif metadata_change:
743 tfile = self.file_open(table._dbt, 'w')
744 cPickle.dump(sql_fields_current, tfile)
745 self.file_close(tfile)
746
747 if metadata_change and \
748 not (query and self.dbengine in ('mysql','oracle','firebird')):
749 table._db.commit()
750 tfile = self.file_open(table._dbt, 'w')
751 cPickle.dump(sql_fields_current, tfile)
752 self.file_close(tfile)
753
754 def LOWER(self,first):
755 return 'LOWER(%s)' % self.expand(first)
756
757 def UPPER(self,first):
758 return 'UPPER(%s)' % self.expand(first)
759
760 def EXTRACT(self, first, what):
761 return "EXTRACT(%s FROM %s)" % (what, self.expand(first))
762
763 def AGGREGATE(self,first,what):
764 return '%s(%s)' % (what, self.expand(first))
765
766 def JOIN(self):
767 return 'JOIN'
768
769 def LEFT_JOIN(self):
770 return 'LEFT JOIN'
771
772 def RANDOM(self):
773 return 'Random()'
774
775 def NOT_NULL(self,default,field_type):
776 return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)
777
780
783
784 def SUBSTRING(self,field,parameters):
785 return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
786
787 def PRIMARY_KEY(self,key):
788 return 'PRIMARY KEY(%s)' % key
789
790 def _drop(self,table,mode):
791 return ['DROP TABLE %s;' % table]
792
793 def drop(self, table, mode=''):
794 if table._dbt:
795 logfile = self.file_open(table._loggername, 'a')
796 queries = self._drop(table, mode)
797 for query in queries:
798 if table._dbt:
799 logfile.write(query + '\n')
800 self.execute(query)
801 table._db.commit()
802 del table._db[table._tablename]
803 del table._db.tables[table._db.tables.index(table._tablename)]
804 table._db._update_referenced_by(table._tablename)
805 if table._dbt:
806 self.file_delete(table._dbt)
807 logfile.write('success!\n')
808
809 def _insert(self,table,fields):
810 keys = ','.join(f.name for f,v in fields)
811 values = ','.join(self.expand(v,f.type) for f,v in fields)
812 return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)
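
# illustrative example (hypothetical 'person' table): _insert(db.person,
# [(db.person.name, 'James')]) renders
# "INSERT INTO person(name) VALUES ('James');"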
813
814 def insert(self,table,fields):
815 query = self._insert(table,fields)
816 try:
817 self.execute(query)
818 except Exception, e:
819 if isinstance(e,self.integrity_error_class()):
820 return None
821 raise e
822 if hasattr(table,'_primarykey'):
823 return dict([(k[0].name, k[1]) for k in fields \
824 if k[0].name in table._primarykey])
825 id = self.lastrowid(table)
826 if not isinstance(id,int):
827 return id
828 rid = Reference(id)
829 (rid._table, rid._record) = (table, None)
830 return rid
831
834
835 def NOT(self,first):
836 return '(NOT %s)' % self.expand(first)
837
838 def AND(self,first,second):
839 return '(%s AND %s)' % (self.expand(first),self.expand(second))
840
841 def OR(self,first,second):
842 return '(%s OR %s)' % (self.expand(first),self.expand(second))
843
844 def BELONGS(self,first,second):
845 if isinstance(second,str):
846 return '(%s IN (%s))' % (self.expand(first),second[:-1])
847 return '(%s IN (%s))' % (self.expand(first),
848 ','.join(self.expand(item,first.type) for item in second))
849
850 def LIKE(self,first,second):
851 return '(%s LIKE %s)' % (self.expand(first),self.expand(second,'string'))
852
853 def STARTSWITH(self,first,second):
854 return '(%s LIKE %s)' % (self.expand(first),self.expand(second+'%','string'))
855
856 def ENDSWITH(self,first,second):
857 return '(%s LIKE %s)' % (self.expand(first),self.expand('%'+second,'string'))
858
859 def CONTAINS(self,first,second):
860 if first.type in ('string','text'):
861 key = '%'+str(second).replace('%','%%')+'%'
862 elif first.type.startswith('list:'):
863 key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
864 return '(%s LIKE %s)' % (self.expand(first),self.expand(key,'string'))
865
866 def EQ(self,first,second=None):
867 if second is None:
868 return '(%s IS NULL)' % self.expand(first)
869 return '(%s = %s)' % (self.expand(first),self.expand(second,first.type))
870
871 def NE(self,first,second=None):
872 if second is None:
873 return '(%s IS NOT NULL)' % self.expand(first)
874 return '(%s <> %s)' % (self.expand(first),self.expand(second,first.type))
875
876 def LT(self,first,second=None):
877 return '(%s < %s)' % (self.expand(first),self.expand(second,first.type))
878
879 def LE(self,first,second=None):
880 return '(%s <= %s)' % (self.expand(first),self.expand(second,first.type))
881
882 def GT(self,first,second=None):
883 return '(%s > %s)' % (self.expand(first),self.expand(second,first.type))
884
885 def GE(self,first,second=None):
886 return '(%s >= %s)' % (self.expand(first),self.expand(second,first.type))
887
888 def ADD(self,first,second):
889 return '(%s + %s)' % (self.expand(first),self.expand(second,first.type))
890
891 def SUB(self,first,second):
892 return '(%s - %s)' % (self.expand(first),self.expand(second,first.type))
893
894 def MUL(self,first,second):
895 return '(%s * %s)' % (self.expand(first),self.expand(second,first.type))
896
897 def DIV(self,first,second):
898 return '(%s / %s)' % (self.expand(first),self.expand(second,first.type))
899
900 def MOD(self,first,second):
901 return '(%s %% %s)' % (self.expand(first),self.expand(second,first.type))
902
903 def AS(self,first,second):
904 return '%s AS %s' % (self.expand(first),second)
905
906 def ON(self,first,second):
907 return '%s ON %s' % (self.expand(first),self.expand(second))
908
909 def INVERT(self,first):
910 return '%s DESC' % self.expand(first)
911
912 def COMMA(self,first,second):
913 return '%s, %s' % (self.expand(first),self.expand(second))
914
915 def expand(self,expression,field_type=None):
916 if isinstance(expression,Field):
917 return str(expression)
918 elif isinstance(expression, (Expression, Query)):
919 if not expression.second is None:
920 return expression.op(expression.first, expression.second)
921 elif not expression.first is None:
922 return expression.op(expression.first)
923 else:
924 return expression.op()
925 elif field_type:
926 return self.represent(expression,field_type)
927 elif isinstance(expression,(list,tuple)):
928 return ','.join([self.represent(item,field_type) for item in expression])
929 else:
930 return str(expression)
931
932 def alias(self,table,alias):
933 """
934 given a table object, makes a new table object
935 with alias name.
936 """
937 other = copy.copy(table)
938 other['_ot'] = other._tablename
939 other['ALL'] = SQLALL(other)
940 other['_tablename'] = alias
941 for fieldname in other.fields:
942 other[fieldname] = copy.copy(other[fieldname])
943 other[fieldname]._tablename = alias
944 other[fieldname].tablename = alias
945 other[fieldname].table = other
946 table._db[alias] = other
947 return other
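
# illustrative usage (hypothetical 'person' table), assuming the usual
# Table.with_alias wrapper that delegates to this method:
# manager = db.person.with_alias('manager')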
948
949 def _truncate(self,table,mode=''):
950 tablename = table._tablename
951 return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]
952
953 def truncate(self,table,mode=''):
954
955 if table._dbt:
956 logfile = self.file_open(table._loggername, 'a')
957 else:
958 class Logfile(object):
959 def write(self, value):
960 pass
961 def close(self):
962 pass
963 logfile = Logfile()
964
965 try:
966 queries = table._db._adapter._truncate(table, mode)
967 for query in queries:
968 logfile.write(query + '\n')
969 self.execute(query)
970 table._db.commit()
971 logfile.write('success!\n')
972 finally:
973 logfile.close()
974
975 def _update(self,tablename,query,fields):
976 if query:
977 sql_w = ' WHERE ' + self.expand(query)
978 else:
979 sql_w = ''
980 sql_v = ','.join(['%s=%s' % (field.name, self.expand(value,field.type)) for (field,value) in fields])
981 return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)
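
# illustrative example (hypothetical 'person' table):
# _update('person', db.person.id > 3, [(db.person.name, 'Max')]) renders
# "UPDATE person SET name='Max' WHERE (person.id > 3);"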
982
983 def update(self,tablename,query,fields):
984 sql = self._update(tablename,query,fields)
985 self.execute(sql)
986 try:
987 return self.cursor.rowcount
988 except:
989 return None
990
991 def _delete(self,tablename, query):
992 if query:
993 sql_w = ' WHERE ' + self.expand(query)
994 else:
995 sql_w = ''
996 return 'DELETE FROM %s%s;' % (tablename, sql_w)
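
# illustrative example (hypothetical 'person' table):
# _delete('person', db.person.id == 3) renders
# "DELETE FROM person WHERE (person.id = 3);"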
997
998 def delete(self,tablename,query):
999 sql = self._delete(tablename,query)
1000
1001 db = self.db
1002 table = db[tablename]
1003 if self.dbengine=='sqlite' and table._referenced_by:
1004 deleted = [x[table._id.name] for x in db(query).select(table._id)]
1005
1006 self.execute(sql)
1007 try:
1008 counter = self.cursor.rowcount
1009 except:
1010 counter = None
1011
1012 if self.dbengine=='sqlite' and counter:
1013 for tablename,fieldname in table._referenced_by:
1014 f = db[tablename][fieldname]
1015 if f.type=='reference '+table._tablename and f.ondelete=='CASCADE':
1016 db(db[tablename][fieldname].belongs(deleted)).delete()
1017
1018 return counter
1019
1020 def get_table(self, query):
1021 tablenames = self.tables(query)
1022 if len(tablenames)==1:
1023 return tablenames[0]
1024 elif len(tablenames)<1:
1025 raise RuntimeError, "No table selected"
1026 else:
1027 raise RuntimeError, "Too many tables selected"
1028
1029 def _select(self, query, fields, attributes):
1030 for key in set(attributes.keys())-set(('orderby','groupby','limitby',
1031 'required','cache','left',
1032 'distinct','having', 'join')):
1033 raise SyntaxError, 'invalid select attribute: %s' % key
1034
1035 new_fields = []
1036 for item in fields:
1037 if isinstance(item,SQLALL):
1038 new_fields += item.table
1039 else:
1040 new_fields.append(item)
1041 fields = new_fields
1042 tablenames = self.tables(query)
1043 query = self.filter_tenant(query,tablenames)
1044 if not fields:
1045 for table in tablenames:
1046 for field in self.db[table]:
1047 fields.append(field)
1048 else:
1049 for field in fields:
1050 if isinstance(field,basestring) and table_field.match(field):
1051 tn,fn = field.split('.')
1052 field = self.db[tn][fn]
1053 for tablename in self.tables(field):
1054 if not tablename in tablenames:
1055 tablenames.append(tablename)
1056 if len(tablenames) < 1:
1057 raise SyntaxError, 'Set: no tables selected'
1058 sql_f = ', '.join([self.expand(f) for f in fields])
1059 self._colnames = [c.strip() for c in sql_f.split(', ')]
1060 if query:
1061 sql_w = ' WHERE ' + self.expand(query)
1062 else:
1063 sql_w = ''
1064 sql_o = ''
1065 sql_s = ''
1066 left = attributes.get('left', False)
1067 inner_join = attributes.get('join', False)
1068 distinct = attributes.get('distinct', False)
1069 groupby = attributes.get('groupby', False)
1070 orderby = attributes.get('orderby', False)
1071 having = attributes.get('having', False)
1072 limitby = attributes.get('limitby', False)
1073 if distinct is True:
1074 sql_s += 'DISTINCT'
1075 elif distinct:
1076 sql_s += 'DISTINCT ON (%s)' % distinct
1077 if inner_join:
1078 icommand = self.JOIN()
1079 if not isinstance(inner_join, (tuple, list)):
1080 inner_join = [inner_join]
1081 ijoint = [t._tablename for t in inner_join if not isinstance(t,Expression)]
1082 ijoinon = [t for t in inner_join if isinstance(t, Expression)]
1083 ijoinont = [t.first._tablename for t in ijoinon]
1084 iexcluded = [t for t in tablenames if not t in ijoint + ijoinont]
1085 if left:
1086 join = attributes['left']
1087 command = self.LEFT_JOIN()
1088 if not isinstance(join, (tuple, list)):
1089 join = [join]
1090 joint = [t._tablename for t in join if not isinstance(t,Expression)]
1091 joinon = [t for t in join if isinstance(t, Expression)]
1092
1093 tables_to_merge={}
1094 [tables_to_merge.update(dict.fromkeys(self.tables(t))) for t in joinon]
1095 joinont = [t.first._tablename for t in joinon]
1096 [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
1097 important_tablenames = joint + joinont + tables_to_merge.keys()
1098 excluded = [t for t in tablenames if not t in important_tablenames ]
1099 if inner_join and not left:
1100 sql_t = ', '.join(iexcluded)
1101 for t in ijoinon:
1102 sql_t += ' %s %s' % (icommand, str(t))
1103 elif not inner_join and left:
1104 sql_t = ', '.join([ t for t in excluded + tables_to_merge.keys()])
1105 if joint:
1106 sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
1107 for t in joinon:
1108 sql_t += ' %s %s' % (command, str(t))
1109 elif inner_join and left:
1110 sql_t = ','.join([ t for t in excluded + tables_to_merge.keys() if t in iexcluded ])
1111 for t in ijoinon:
1112 sql_t += ' %s %s' % (icommand, str(t))
1113 if joint:
1114 sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
1115 for t in joinon:
1116 sql_t += ' %s %s' % (command, str(t))
1117 else:
1118 sql_t = ', '.join(tablenames)
1119 if groupby:
1120 if isinstance(groupby, (list, tuple)):
1121 groupby = xorify(groupby)
1122 sql_o += ' GROUP BY %s' % self.expand(groupby)
1123 if having:
1124 sql_o += ' HAVING %s' % attributes['having']
1125 if orderby:
1126 if isinstance(orderby, (list, tuple)):
1127 orderby = xorify(orderby)
1128 if str(orderby) == '<random>':
1129 sql_o += ' ORDER BY %s' % self.RANDOM()
1130 else:
1131 sql_o += ' ORDER BY %s' % self.expand(orderby)
1132 if limitby:
1133 if not orderby and tablenames:
1134 sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for t in tablenames for x in ((hasattr(self.db[t],'_primarykey') and self.db[t]._primarykey) or [self.db[t]._id.name])])
1135
1136 return self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
1137
1138 def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
1139 if limitby:
1140 (lmin, lmax) = limitby
1141 sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
1142 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
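
# e.g. limitby=(10,30) appends ' LIMIT 20 OFFSET 10' (20 rows starting at
# the 10th row of the result set)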
1143
1144 def select(self,query,fields,attributes):
1145 """
1146 Always returns a Rows object, even if it may be empty
1147 """
1148 def response(sql):
1149 self.execute(sql)
1150 return self.cursor.fetchall()
1151 sql = self._select(query,fields,attributes)
1152 if attributes.get('cache', None):
1153 (cache_model, time_expire) = attributes['cache']
1154 del attributes['cache']
1155 key = self.uri + '/' + sql
1156 key = (len(key)<=200) and key or hashlib.md5(key).hexdigest()
1157 rows = cache_model(key, lambda: response(sql), time_expire)
1158 else:
1159 rows = response(sql)
1160 if isinstance(rows,tuple):
1161 rows = list(rows)
1162 limitby = attributes.get('limitby',None) or (0,)
1163 rows = self.rowslice(rows,limitby[0],None)
1164 return self.parse(rows,self._colnames)
1165
1166 def _count(self,query,distinct=None):
1167 tablenames = self.tables(query)
1168 if query:
1169 sql_w = ' WHERE ' + self.expand(query)
1170 else:
1171 sql_w = ''
1172 sql_t = ','.join(tablenames)
1173 if distinct:
1174 if isinstance(distinct,(list,tuple)):
1175 distinct = xorify(distinct)
1176 sql_d = self.expand(distinct)
1177 return 'SELECT count(DISTINCT %s) FROM %s%s' % (sql_d, sql_t, sql_w)
1178 return 'SELECT count(*) FROM %s%s' % (sql_t, sql_w)
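
# illustrative example (hypothetical 'person' table):
# _count(db.person.id > 0) renders
# "SELECT count(*) FROM person WHERE (person.id > 0)"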
1179
1180 def count(self,query,distinct=None):
1181 self.execute(self._count(query,distinct))
1182 return self.cursor.fetchone()[0]
1183
1184
1195
1196 def commit(self):
1197 return self.connection.commit()
1198
1199 def rollback(self):
1200 return self.connection.rollback()
1201
1202 def close(self):
1203 return self.connection.close()
1204
1207
1210
1213
1216
1219
1220 def constraint_name(self, table, fieldname):
1221 return '%s_%s__constraint' % (table,fieldname)
1222
1223 def execute(self,*a,**b):
1224 return self.log_execute(*a, **b)
1225
1226 def log_execute(self,*a,**b):
1227 self.db._lastsql = a[0]
1228 t0 = time.time()
1229 ret = self.cursor.execute(*a,**b)
1230 self.db._timings.append((a[0],time.time()-t0))
1231 return ret
1232
1233 def represent_exceptions(self, obj, fieldtype):
1234 return None
1235
1236 def represent(self, obj, fieldtype):
1237 if isinstance(obj,CALLABLETYPES):
1238 obj = obj()
1239 if isinstance(fieldtype, SQLCustomType):
1240 return fieldtype.encoder(obj)
1241 if isinstance(obj, (Expression, Field)):
1242 return str(obj)
1243 if fieldtype.startswith('list:'):
1244 if not obj:
1245 obj = []
1246 if not isinstance(obj, (list, tuple)):
1247 obj = [obj]
1248 if isinstance(obj, (list, tuple)):
1249 obj = bar_encode(obj)
1250 if obj is None:
1251 return 'NULL'
1252 if obj == '' and not fieldtype[:2] in ['st', 'te', 'pa', 'up']:
1253 return 'NULL'
1254 r = self.represent_exceptions(obj,fieldtype)
1255 if r is not None:
1256 return r
1257 if fieldtype == 'boolean':
1258 if obj and not str(obj)[:1].upper() in ['F', '0']:
1259 return "'T'"
1260 else:
1261 return "'F'"
1262 if fieldtype == 'id' or fieldtype == 'integer':
1263 return str(int(obj))
1264 if fieldtype.startswith('decimal'):
1265 return str(obj)
1266 elif fieldtype.startswith('reference'):
1267 if fieldtype.find('.')>0:
1268 return repr(obj)
1269 elif isinstance(obj, (Row, Reference)):
1270 return str(obj['id'])
1271 return str(int(obj))
1272 elif fieldtype == 'double':
1273 return repr(float(obj))
1274 if isinstance(obj, unicode):
1275 obj = obj.encode(self.db_codec)
1276 if fieldtype == 'blob':
1277 obj = base64.b64encode(str(obj))
1278 elif fieldtype == 'date':
1279 if isinstance(obj, (datetime.date, datetime.datetime)):
1280 obj = obj.isoformat()[:10]
1281 else:
1282 obj = str(obj)
1283 elif fieldtype == 'datetime':
1284 if isinstance(obj, datetime.datetime):
1285 obj = obj.isoformat()[:19].replace('T',' ')
1286 elif isinstance(obj, datetime.date):
1287 obj = obj.isoformat()[:10]+' 00:00:00'
1288 else:
1289 obj = str(obj)
1290 elif fieldtype == 'time':
1291 if isinstance(obj, datetime.time):
1292 obj = obj.isoformat()[:8]
1293 else:
1294 obj = str(obj)
1295 if not isinstance(obj,str):
1296 obj = str(obj)
1297 try:
1298 obj.decode(self.db_codec)
1299 except:
1300 obj = obj.decode('latin1').encode(self.db_codec)
1301 return "'%s'" % obj.replace("'", "''")
1302
1305
1308
1311
1312 def rowslice(self,rows,minimum=0,maximum=None):
1313 """ by default this function does nothing, overload when db does not do slicing """
1314 return rows
1315
1316 def parse(self, rows, colnames, blob_decode=True):
1317 db = self.db
1318 virtualtables = []
1319 new_rows = []
1320 for (i,row) in enumerate(rows):
1321 new_row = Row()
1322 for j,colname in enumerate(colnames):
1323 value = row[j]
1324 if not table_field.match(colnames[j]):
1325 if not '_extra' in new_row:
1326 new_row['_extra'] = Row()
1327 new_row['_extra'][colnames[j]] = value
1328 select_as_parser = re.compile(r'\s+AS\s+(\S+)')
1329 new_column_name = select_as_parser.search(colnames[j])
1330 if not new_column_name is None:
1331 column_name = new_column_name.groups(0)
1332 setattr(new_row,column_name[0],value)
1333 continue
1334 (tablename, fieldname) = colname.split('.')
1335 table = db[tablename]
1336 field = table[fieldname]
1337 field_type = field.type
1338 if field.type != 'blob' and isinstance(value, str):
1339 try:
1340 value = value.decode(db._db_codec)
1341 except Exception:
1342 pass
1343 if isinstance(value, unicode):
1344 value = value.encode('utf-8')
1345 if not tablename in new_row:
1346 colset = new_row[tablename] = Row()
1347 virtualtables.append(tablename)
1348 else:
1349 colset = new_row[tablename]
1350
1351 if isinstance(field_type, SQLCustomType):
1352 colset[fieldname] = field_type.decoder(value)
1353
1354 elif not isinstance(field_type, str) or value is None:
1355 colset[fieldname] = value
1356 elif isinstance(field_type, str) and \
1357 field_type.startswith('reference'):
1358 referee = field_type[10:].strip()
1359 if not '.' in referee:
1360 colset[fieldname] = rid = Reference(value)
1361 (rid._table, rid._record) = (db[referee], None)
1362 else:
1363 colset[fieldname] = value
1364 elif field_type == 'boolean':
1365 if value == True or str(value)[:1].lower() == 't':
1366 colset[fieldname] = True
1367 else:
1368 colset[fieldname] = False
1369 elif field_type == 'date' \
1370 and (not isinstance(value, datetime.date)\
1371 or isinstance(value, datetime.datetime)):
1372 (y, m, d) = [int(x) for x in
1373 str(value)[:10].strip().split('-')]
1374 colset[fieldname] = datetime.date(y, m, d)
1375 elif field_type == 'time' \
1376 and not isinstance(value, datetime.time):
1377 time_items = [int(x) for x in
1378 str(value)[:8].strip().split(':')[:3]]
1379 if len(time_items) == 3:
1380 (h, mi, s) = time_items
1381 else:
1382 (h, mi, s) = time_items + [0]
1383 colset[fieldname] = datetime.time(h, mi, s)
1384 elif field_type == 'datetime'\
1385 and not isinstance(value, datetime.datetime):
1386 (y, m, d) = [int(x) for x in
1387 str(value)[:10].strip().split('-')]
1388 time_items = [int(x) for x in
1389 str(value)[11:19].strip().split(':')[:3]]
1390 if len(time_items) == 3:
1391 (h, mi, s) = time_items
1392 else:
1393 (h, mi, s) = time_items + [0]
1394 colset[fieldname] = datetime.datetime(y, m, d, h, mi, s)
1395 elif field_type == 'blob' and blob_decode:
1396 colset[fieldname] = base64.b64decode(str(value))
1397 elif field_type.startswith('decimal'):
1398 decimals = [int(x) for x in field_type[8:-1].split(',')][-1]
1399 if self.dbengine == 'sqlite':
1400 value = ('%.' + str(decimals) + 'f') % value
1401 if not isinstance(value, decimal.Decimal):
1402 value = decimal.Decimal(str(value))
1403 colset[fieldname] = value
1404 elif field_type.startswith('list:integer'):
1405 if not self.dbengine=='google:datastore':
1406 colset[fieldname] = bar_decode_integer(value)
1407 else:
1408 colset[fieldname] = value
1409 elif field_type.startswith('list:reference'):
1410 if not self.dbengine=='google:datastore':
1411 colset[fieldname] = bar_decode_integer(value)
1412 else:
1413 colset[fieldname] = value
1414 elif field_type.startswith('list:string'):
1415 if not self.dbengine=='google:datastore':
1416 colset[fieldname] = bar_decode_string(value)
1417 else:
1418 colset[fieldname] = value
1419 else:
1420 colset[fieldname] = value
1421 if field_type == 'id':
1422 id = colset[field.name]
1423 colset.update_record = lambda _ = (colset, table, id), **a: update_record(_, a)
1424 colset.delete_record = lambda t = table, i = id: t._db(t._id==i).delete()
1425 for (referee_table, referee_name) in \
1426 table._referenced_by:
1427 s = db[referee_table][referee_name]
1428 if not referee_table in colset:
1429
1430 colset[referee_table] = Set(db, s == id)
1431
1432
1433 colset['id'] = id
1434 new_rows.append(new_row)
1435 rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
1436 for tablename in virtualtables:
1437 for item in db[tablename].virtualfields:
1438 try:
1439 rowsobj = rowsobj.setvirtualfields(**{tablename:item})
1440 except KeyError:
1441
1442 pass
1443 return rowsobj
1444
1445 def filter_tenant(self,query,tablenames):
1446 fieldname = self.db._request_tenant
1447 for tablename in tablenames:
1448 table = self.db[tablename]
1449 if fieldname in table:
1450 default = table[fieldname].default
1451 if default is not None:
1452 query = query&(table[fieldname]==default)
1453 return query
1454
1455
1456
1457
1458
1459 class SQLiteAdapter(BaseAdapter):
1460
1461 def EXTRACT(self,field,what):
1462 return "web2py_extract('%s',%s)" % (what,self.expand(field))
1463
1464 @staticmethod
1465 def web2py_extract(lookup, s):
1466 table = {
1467 'year': (0, 4),
1468 'month': (5, 7),
1469 'day': (8, 10),
1470 'hour': (11, 13),
1471 'minute': (14, 16),
1472 'second': (17, 19),
1473 }
1474 try:
1475 (i, j) = table[lookup]
1476 return int(s[i:j])
1477 except:
1478 return None
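
# illustrative example (not part of the original source):
# web2py_extract('month', '2011-12-31 10:20:30') returns 12, while an
# unsupported lookup such as 'week' falls into the except clause and
# returns None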
1479
1480 def __init__(self,db,uri,pool_size=0,folder=None,db_codec='UTF-8',
1481 credential_decoder=lambda x:x, driver_args={},
1482 adapter_args={}):
1483 self.db = db
1484 self.dbengine = "sqlite"
1485 self.uri = uri
1486 self.pool_size = pool_size
1487 self.folder = folder
1488 self.db_codec = db_codec
1489 self.find_or_make_work_folder()
1490 path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1]
1491 if uri.startswith('sqlite:memory'):
1492 dbpath = ':memory:'
1493 else:
1494 dbpath = uri.split('://')[1]
1495 if dbpath[0] != '/':
1496 dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'),dbpath)
1497 if not 'check_same_thread' in driver_args:
1498 driver_args['check_same_thread'] = False
1499 def connect(dbpath=dbpath, driver_args=driver_args):
1500 return sqlite3.Connection(dbpath, **driver_args)
1501 self.pool_connection(connect)
1502 self.cursor = self.connection.cursor()
1503 self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)
1504
1505 def _truncate(self,table,mode=''):
1506 tablename = table._tablename
1507 return ['DELETE FROM %s;' % tablename,
1508 "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]
1509
1510 def lastrowid(self,table):
1511 return self.cursor.lastrowid
1512
1513
1514 class JDBCSQLiteAdapter(SQLiteAdapter):
1515
1516 def __init__(self,db,uri,pool_size=0,folder=None,db_codec='UTF-8',
1517 credential_decoder=lambda x:x, driver_args={},
1518 adapter_args={}):
1519 self.db = db
1520 self.dbengine = "sqlite"
1521 self.uri = uri
1522 self.pool_size = pool_size
1523 self.folder = folder
1524 self.db_codec = db_codec
1525 self.find_or_make_work_folder()
1526 path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1]
1527 if uri.startswith('sqlite:memory'):
1528 dbpath = ':memory:'
1529 else:
1530 dbpath = uri.split('://')[1]
1531 if dbpath[0] != '/':
1532 dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'),dbpath)
1533 def connect(dbpath=dbpath,driver_args=driver_args):
1534 return zxJDBC.connect(java.sql.DriverManager.getConnection('jdbc:sqlite:'+dbpath),**driver_args)
1535 self.pool_connection(connect)
1536 self.cursor = self.connection.cursor()
1537 self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)
1538
1539 def execute(self,a):
1540 return self.log_execute(a[:-1])
1541
1542
1543 class MySQLAdapter(BaseAdapter):
1544
1545 driver = globals().get('pymysql',None)
1546 maxcharlength = 255
1547 commit_on_alter_table = True
1548 support_distributed_transaction = True
1549 types = {
1550 'boolean': 'CHAR(1)',
1551 'string': 'VARCHAR(%(length)s)',
1552 'text': 'LONGTEXT',
1553 'password': 'VARCHAR(%(length)s)',
1554 'blob': 'LONGBLOB',
1555 'upload': 'VARCHAR(%(length)s)',
1556 'integer': 'INT',
1557 'double': 'DOUBLE',
1558 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1559 'date': 'DATE',
1560 'time': 'TIME',
1561 'datetime': 'DATETIME',
1562 'id': 'INT AUTO_INCREMENT NOT NULL',
1563 'reference': 'INT, INDEX %(field_name)s__idx (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1564 'list:integer': 'LONGTEXT',
1565 'list:string': 'LONGTEXT',
1566 'list:reference': 'LONGTEXT',
1567 }
1568
1569 def RANDOM(self):
1570 return 'RAND()'
1571
1572 def SUBSTRING(self,field,parameters):
1573 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
1574
1575 def _drop(self,table,mode):
1576
1577 return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table,'SET FOREIGN_KEY_CHECKS=1;']
1578
1579 def distributed_transaction_begin(self,key):
1580 self.execute('XA START;')
1581
1582 def prepare(self,key):
1583 self.execute("XA END;")
1584 self.execute("XA PREPARE;")
1585
1586 def commit_prepared(self,key):
1587 self.execute("XA COMMIT;")
1588
1589 def rollback_prepared(self,key):
1590 self.execute("XA ROLLBACK;")
1591
1592 def concat_add(self,table):
1593 return '; ALTER TABLE %s ADD ' % table
1594
1595 def __init__(self,db,uri,pool_size=0,folder=None,db_codec='UTF-8',
1596 credential_decoder=lambda x:x, driver_args={},
1597 adapter_args={}):
1598 self.db = db
1599 self.dbengine = "mysql"
1600 self.uri = uri
1601 self.pool_size = pool_size
1602 self.folder = folder
1603 self.db_codec = db_codec
1604 self.find_or_make_work_folder()
1605 uri = uri.split('://')[1]
1606 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
1607 if not m:
1608 raise SyntaxError, \
1609 "Invalid URI string in DAL: %s" % self.uri
1610 user = credential_decoder(m.group('user'))
1611 if not user:
1612 raise SyntaxError, 'User required'
1613 password = credential_decoder(m.group('password'))
1614 if not password:
1615 password = ''
1616 host = m.group('host')
1617 if not host:
1618 raise SyntaxError, 'Host name required'
1619 db = m.group('db')
1620 if not db:
1621 raise SyntaxError, 'Database name required'
1622 port = int(m.group('port') or '3306')
1623 charset = m.group('charset') or 'utf8'
1624 driver_args.update(dict(db=db,
1625 user=credential_decoder(user),
1626 passwd=credential_decoder(password),
1627 host=host,
1628 port=port,
1629 charset=charset))
1630 def connect(driver_args=driver_args):
1631 return self.driver.connect(**driver_args)
1632 self.pool_connection(connect)
1633 self.cursor = self.connection.cursor()
1634 self.execute('SET FOREIGN_KEY_CHECKS=1;')
1635 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
1636
1637 def lastrowid(self,table):
1638 self.execute('select last_insert_id();')
1639 return int(self.cursor.fetchone()[0])
1640
1641
1642 class PostgreSQLAdapter(BaseAdapter):
1643
1644 support_distributed_transaction = True
1645 types = {
1646 'boolean': 'CHAR(1)',
1647 'string': 'VARCHAR(%(length)s)',
1648 'text': 'TEXT',
1649 'password': 'VARCHAR(%(length)s)',
1650 'blob': 'BYTEA',
1651 'upload': 'VARCHAR(%(length)s)',
1652 'integer': 'INTEGER',
1653 'double': 'FLOAT8',
1654 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1655 'date': 'DATE',
1656 'time': 'TIME',
1657 'datetime': 'TIMESTAMP',
1658 'id': 'SERIAL PRIMARY KEY',
1659 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1660 'list:integer': 'TEXT',
1661 'list:string': 'TEXT',
1662 'list:reference': 'TEXT',
1663 }
1664
1665 def sequence_name(self,table):
1666 return '%s_id_Seq' % table
1667
1668 def RANDOM(self):
1669 return 'RANDOM()'
1670
1671 def distributed_transaction_begin(self,key):
1672 return
1673
1674 def prepare(self,key):
1675 self.execute("PREPARE TRANSACTION '%s';" % key)
1676
1677 def commit_prepared(self,key):
1678 self.execute("COMMIT PREPARED '%s';" % key)
1679
1680 def rollback_prepared(self,key):
1681 self.execute("ROLLBACK PREPARED '%s';" % key)
1682
1683 def create_sequence_and_triggers(self, query, table, **args):
1684
1685
1686
1687
1688 self.execute(query)
1689
1690 def __init__(self,db,uri,pool_size=0,folder=None,db_codec='UTF-8',
1691 credential_decoder=lambda x:x, driver_args={},
1692 adapter_args={}):
1693 self.db = db
1694 self.dbengine = "postgres"
1695 self.uri = uri
1696 self.pool_size = pool_size
1697 self.folder = folder
1698 self.db_codec = db_codec
1699 self.find_or_make_work_folder()
1700 uri = uri.split('://')[1]
1701 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
1702 if not m:
1703 raise SyntaxError, "Invalid URI string in DAL"
1704 user = credential_decoder(m.group('user'))
1705 if not user:
1706 raise SyntaxError, 'User required'
1707 password = credential_decoder(m.group('password'))
1708 if not password:
1709 password = ''
1710 host = m.group('host')
1711 if not host:
1712 raise SyntaxError, 'Host name required'
1713 db = m.group('db')
1714 if not db:
1715 raise SyntaxError, 'Database name required'
1716 port = m.group('port') or '5432'
1717 sslmode = m.group('sslmode')
1718 if sslmode:
1719 msg = ("dbname='%s' user='%s' host='%s' "
1720 "port=%s password='%s' sslmode='%s'") \
1721 % (db, user, host, port, password, sslmode)
1722 else:
1723 msg = ("dbname='%s' user='%s' host='%s' "
1724 "port=%s password='%s'") \
1725 % (db, user, host, port, password)
1726 def connect(msg=msg,driver_args=driver_args):
1727 return psycopg2.connect(msg,**driver_args)
1728 self.pool_connection(connect)
1729 self.connection.set_client_encoding('UTF8')
1730 self.cursor = self.connection.cursor()
1731 self.execute('BEGIN;')
1732 self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
1733 self.execute("SET standard_conforming_strings=on;")
1734
1735 def lastrowid(self,table):
1736 self.execute("select currval('%s')" % table._sequence_name)
1737 return int(self.cursor.fetchone()[0])
1738
1739 def LIKE(self,first,second):
1740 return '(%s ILIKE %s)' % (self.expand(first),self.expand(second,'string'))
1741
1742 def STARTSWITH(self,first,second):
1743 return '(%s ILIKE %s)' % (self.expand(first),self.expand(second+'%','string'))
1744
1745 def ENDSWITH(self,first,second):
1746 return '(%s ILIKE %s)' % (self.expand(first),self.expand('%'+second,'string'))
1747
1748 def CONTAINS(self,first,second):
1749 if first.type in ('string','text'):
1750 key = '%'+str(second).replace('%','%%')+'%'
1751 elif first.type.startswith('list:'):
1752 key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
1753 return '(%s ILIKE %s)' % (self.expand(first),self.expand(key,'string'))
1754
1755 class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
1756
1757 def __init__(self,db,uri,pool_size=0,folder=None,db_codec='UTF-8',
1758 credential_decoder=lambda x:x, driver_args={},
1759 adapter_args={}):
1760 self.db = db
1761 self.dbengine = "postgres"
1762 self.uri = uri
1763 self.pool_size = pool_size
1764 self.folder = folder
1765 self.db_codec = db_codec
1766 self.find_or_make_work_folder()
1767 uri = uri.split('://')[1]
1768 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(uri)
1769 if not m:
1770 raise SyntaxError, "Invalid URI string in DAL"
1771 user = credential_decoder(m.group('user'))
1772 if not user:
1773 raise SyntaxError, 'User required'
1774 password = credential_decoder(m.group('password'))
1775 if not password:
1776 password = ''
1777 host = m.group('host')
1778 if not host:
1779 raise SyntaxError, 'Host name required'
1780 db = m.group('db')
1781 if not db:
1782 raise SyntaxError, 'Database name required'
1783 port = m.group('port') or '5432'
1784 msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
1785 def connect(msg=msg,driver_args=driver_args):
1786 return zxJDBC.connect(*msg,**driver_args)
1787 self.pool_connection(connect)
1788 self.connection.set_client_encoding('UTF8')
1789 self.cursor = self.connection.cursor()
1790 self.execute('BEGIN;')
1791 self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
1792
1793
1794 class OracleAdapter(BaseAdapter):
1795 commit_on_alter_table = False
1796 types = {
1797 'boolean': 'CHAR(1)',
1798 'string': 'VARCHAR2(%(length)s)',
1799 'text': 'CLOB',
1800 'password': 'VARCHAR2(%(length)s)',
1801 'blob': 'CLOB',
1802 'upload': 'VARCHAR2(%(length)s)',
1803 'integer': 'INT',
1804 'double': 'FLOAT',
1805 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1806 'date': 'DATE',
1807 'time': 'CHAR(8)',
1808 'datetime': 'DATE',
1809 'id': 'NUMBER PRIMARY KEY',
1810 'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1811 'list:integer': 'CLOB',
1812 'list:string': 'CLOB',
1813 'list:reference': 'CLOB',
1814 }
1815
1816 def sequence_name(self,tablename):
1817 return '%s_sequence' % tablename
1818
1819 def trigger_name(self,tablename):
1820 return '%s_trigger' % tablename
1821
1822 def LEFT_JOIN(self):
1823 return 'LEFT OUTER JOIN'
1824
1825 def RANDOM(self):
1826 return 'dbms_random.value'
1827
1828 def NOT_NULL(self,default,field_type):
1829 return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
1830
1831 def _drop(self,table,mode):
1832 sequence_name = table._sequence_name
1833 return ['DROP TABLE %s %s;' % (table, mode), 'DROP SEQUENCE %s;' % sequence_name]
1834
1835 def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
1836 if limitby:
1837 (lmin, lmax) = limitby
1838 if len(sql_w) > 1:
1839 sql_w_row = sql_w + ' AND w_row > %i' % lmin
1840 else:
1841 sql_w_row = 'WHERE w_row > %i' % lmin
1842 return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
1843 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
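
# e.g. (illustrative) limitby=(10,20) wraps the plain query in a ROWNUM
# subquery: the inner SELECT is numbered as w_row up to ROWNUM<=20 and the
# outer WHERE keeps w_row > 10, emulating LIMIT/OFFSET on Oracle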
1844
1845 def constraint_name(self, tablename, fieldname):
1846 constraint_name = BaseAdapter.constraint_name(self, tablename, fieldname)
1847 if len(constraint_name)>30:
1848 constraint_name = '%s_%s__constraint' % (tablename[:10], fieldname[:7])
1849 return constraint_name
1850
1851 def represent_exceptions(self, obj, fieldtype):
1852 if fieldtype == 'blob':
1853 obj = base64.b64encode(str(obj))
1854 return ":CLOB('%s')" % obj
1855 elif fieldtype == 'date':
1856 if isinstance(obj, (datetime.date, datetime.datetime)):
1857 obj = obj.isoformat()[:10]
1858 else:
1859 obj = str(obj)
1860 return "to_date('%s','yyyy-mm-dd')" % obj
1861 elif fieldtype == 'datetime':
1862 if isinstance(obj, datetime.datetime):
1863 obj = obj.isoformat()[:19].replace('T',' ')
1864 elif isinstance(obj, datetime.date):
1865 obj = obj.isoformat()[:10]+' 00:00:00'
1866 else:
1867 obj = str(obj)
1868 return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
1869 return None
1870
1871 def __init__(self,db,uri,pool_size=0,folder=None,db_codec='UTF-8',
1872 credential_decoder=lambda x:x, driver_args={},
1873 adapter_args={}):
1874 self.db = db
1875 self.dbengine = "oracle"
1876 self.uri = uri
1877 self.pool_size = pool_size
1878 self.folder = folder
1879 self.db_codec = db_codec
1880 self.find_or_make_work_folder()
1881 uri = uri.split('://')[1]
1882 if not 'threaded' in driver_args:
1883 driver_args['threaded']=True
1884 def connect(uri=uri,driver_args=driver_args):
1885 return cx_Oracle.connect(uri,**driver_args)
1886 self.pool_connection(connect)
1887 self.cursor = self.connection.cursor()
1888 self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
1889 self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
1890 oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
1891
1892 def execute(self, command):
1893 args = []
1894 i = 1
1895 while True:
1896 m = self.oracle_fix.match(command)
1897 if not m:
1898 break
1899 command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
1900 args.append(m.group('clob')[6:-2].replace("''", "'"))
1901 i += 1
1902 return self.log_execute(command[:-1], args)
1903
1911
1912 def lastrowid(self,table):
1913 sequence_name = table._sequence_name
1914 self.execute('SELECT %s.currval FROM dual;' % sequence_name)
1915 return int(self.cursor.fetchone()[0])
1916
1917
1918 class MSSQLAdapter(BaseAdapter):
1919 types = {
1920 'boolean': 'BIT',
1921 'string': 'VARCHAR(%(length)s)',
1922 'text': 'TEXT',
1923 'password': 'VARCHAR(%(length)s)',
1924 'blob': 'IMAGE',
1925 'upload': 'VARCHAR(%(length)s)',
1926 'integer': 'INT',
1927 'double': 'FLOAT',
1928 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1929 'date': 'DATETIME',
1930 'time': 'CHAR(8)',
1931 'datetime': 'DATETIME',
1932 'id': 'INT IDENTITY PRIMARY KEY',
1933 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1934 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1935 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
1936 'list:integer': 'TEXT',
1937 'list:string': 'TEXT',
1938 'list:reference': 'TEXT',
1939 }
1940
1941 def EXTRACT(self,field,what):
1942 return "DATEPART(%s,%s)" % (what, self.expand(field))
1943
1944 def LEFT_JOIN(self):
1945 return 'LEFT OUTER JOIN'
1946
1947 def RANDOM(self):
1948 return 'NEWID()'
1949
1950 def ALLOW_NULL(self):
1951 return ' NULL'
1952
1953 def SUBSTRING(self,field,parameters):
1954 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
1955
1956 def PRIMARY_KEY(self,key):
1957 return 'PRIMARY KEY CLUSTERED (%s)' % key
1958
1959 def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
1960 if limitby:
1961 (lmin, lmax) = limitby
1962 sql_s += ' TOP %i' % lmax
1963 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
1964
1965 def represent_exceptions(self, obj, fieldtype):
1966 if fieldtype == 'boolean':
1967 if obj and not str(obj)[0].upper() == 'F':
1968 return '1'
1969 else:
1970 return '0'
1971 return None
1972
1973 def __init__(self,db,uri,pool_size=0,folder=None,db_codec='UTF-8',
1974 credential_decoder=lambda x:x, driver_args={},
1975 adapter_args={}, fake_connect=False):
1976 self.db = db
1977 self.dbengine = "mssql"
1978 self.uri = uri
1979 self.pool_size = pool_size
1980 self.folder = folder
1981 self.db_codec = db_codec
1982 self.find_or_make_work_folder()
1983
1984 uri = uri.split('://')[1]
1985 if '@' not in uri:
1986 try:
1987 m = re.compile('^(?P<dsn>.+)$').match(uri)
1988 if not m:
1989 raise SyntaxError, \
1990 'Parsing uri string(%s) has no result' % self.uri
1991 dsn = m.group('dsn')
1992 if not dsn:
1993 raise SyntaxError, 'DSN required'
1994 except SyntaxError, e:
1995 logger.error('NdGpatch error')
1996 raise e
1997 cnxn = 'DSN=%s' % dsn
1998 else:
1999 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$').match(uri)
2000 if not m:
2001 raise SyntaxError, \
2002 "Invalid URI string in DAL: %s" % uri
2003 user = credential_decoder(m.group('user'))
2004 if not user:
2005 raise SyntaxError, 'User required'
2006 password = credential_decoder(m.group('password'))
2007 if not password:
2008 password = ''
2009 host = m.group('host')
2010 if not host:
2011 raise SyntaxError, 'Host name required'
2012 db = m.group('db')
2013 if not db:
2014 raise SyntaxError, 'Database name required'
2015 port = m.group('port') or '1433'
2016
2017
2018
2019 argsdict = { 'DRIVER':'{SQL Server}' }
2020 urlargs = m.group('urlargs') or ''
2021 argpattern = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
2022 for argmatch in argpattern.finditer(urlargs):
2023 argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
2024 urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.items()])
2025 cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
2026 % (host, port, db, user, password, urlargs)
2027 def connect(cnxn=cnxn,driver_args=driver_args):
2028 return pyodbc.connect(cnxn,**driver_args)
2029 if not fake_connect:
2030 self.pool_connection(connect)
2031 self.cursor = self.connection.cursor()
2032
2033 def lastrowid(self,table):
2034
2035 self.execute('SELECT SCOPE_IDENTITY();')
2036 return int(self.cursor.fetchone()[0])
2037
2038 def integrity_error_class(self):
2039 return pyodbc.IntegrityError
2040
2041 def rowslice(self,rows,minimum=0,maximum=None):
2042 if maximum is None:
2043 return rows[minimum:]
2044 return rows[minimum:maximum]
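
# note: T-SQL (pre-2012) has no LIMIT/OFFSET, so select_limitby above asks
# for the first lmax rows via TOP and this rowslice override then drops the
# first lmin rows client-side, e.g. limitby=(10,30) -> 'SELECT TOP 30 ...'
# followed by rows[10:]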
2045
2046
2047 class MSSQL2Adapter(MSSQLAdapter):
2048 types = {
2049 'boolean': 'CHAR(1)',
2050 'string': 'NVARCHAR(%(length)s)',
2051 'text': 'NTEXT',
2052 'password': 'NVARCHAR(%(length)s)',
2053 'blob': 'IMAGE',
2054 'upload': 'NVARCHAR(%(length)s)',
2055 'integer': 'INT',
2056 'double': 'FLOAT',
2057 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2058 'date': 'DATETIME',
2059 'time': 'CHAR(8)',
2060 'datetime': 'DATETIME',
2061 'id': 'INT IDENTITY PRIMARY KEY',
2062 'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2063 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2064 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2065 'list:integer': 'NTEXT',
2066 'list:string': 'NTEXT',
2067 'list:reference': 'NTEXT',
2068 }
2069
2070 - def represent(self, obj, fieldtype):
2071 value = BaseAdapter.represent(self, obj, fieldtype)
2072 if fieldtype in ('string','text') and value[:1]=="'":
2073 value = 'N'+value
2074 return value
2075
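# Note (added for clarity): MSSQL2Adapter targets NVARCHAR/NTEXT columns,
# so quoted string literals get the N prefix, e.g. (hypothetical table):
#
#     >>> # db.person.insert(name='Jim')
#     # serializes the value as N'Jim' in the generated INSERT statement.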
2078
2079
2080 -class FireBirdAdapter(BaseAdapter):
2082 commit_on_alter_table = False
2083 support_distributed_transaction = True
2084 types = {
2085 'boolean': 'CHAR(1)',
2086 'string': 'VARCHAR(%(length)s)',
2087 'text': 'BLOB SUB_TYPE 1',
2088 'password': 'VARCHAR(%(length)s)',
2089 'blob': 'BLOB SUB_TYPE 0',
2090 'upload': 'VARCHAR(%(length)s)',
2091 'integer': 'INTEGER',
2092 'double': 'DOUBLE PRECISION',
2093 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
2094 'date': 'DATE',
2095 'time': 'TIME',
2096 'datetime': 'TIMESTAMP',
2097 'id': 'INTEGER PRIMARY KEY',
2098 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2099 'list:integer': 'BLOB SUB_TYPE 1',
2100 'list:string': 'BLOB SUB_TYPE 1',
2101 'list:reference': 'BLOB SUB_TYPE 1',
2102 }
2103
2104 - def sequence_name(self,tablename):
2105 return 'genid_%s' % tablename
2106
2107 - def trigger_name(self,tablename):
2108 return 'trg_id_%s' % tablename
2109
2112
2113 - def NOT_NULL(self,default,field_type):
2114 return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
2115
2116 - def SUBSTRING(self,field,parameters):
2117 return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])
2118
2119 - def _drop(self,table,mode):
2120 sequence_name = table._sequence_name
2121 return ['DROP TABLE %s %s;' % (table, mode), 'DROP GENERATOR %s;' % sequence_name]
2122
2123 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
2124 if limitby:
2125 (lmin, lmax) = limitby
2126 sql_s += ' FIRST %i SKIP %i' % (lmax - lmin, lmin)
2127 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
2128
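# Illustrative sketch (added): Firebird expresses both page size and
# offset natively, so limitby=(5, 10) compiles roughly to
#
#     # SELECT FIRST 5 SKIP 5 ... FROM tablename;
#
# i.e. FIRST is lmax - lmin (the page size) and SKIP is lmin (the offset).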
2129 - def truncate(self,table,mode = ''):
2130 return ['DELETE FROM %s;' % table._tablename,
2131 'SET GENERATOR %s TO 0;' % table._sequence_name]
2132
2133 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2134 credential_decoder=lambda x:x, driver_args={},
2135 adapter_args={}):
2136 self.db = db
2137 self.dbengine = "firebird"
2138 self.uri = uri
2139 self.pool_size = pool_size
2140 self.folder = folder
2141 self.db_codec = db_codec
2142 self.find_or_make_work_folder()
2143 uri = uri.split('://')[1]
2144 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
2145 if not m:
2146 raise SyntaxError, "Invalid URI string in DAL: %s" % uri
2147 user = credential_decoder(m.group('user'))
2148 if not user:
2149 raise SyntaxError, 'User required'
2150 password = credential_decoder(m.group('password'))
2151 if not password:
2152 password = ''
2153 host = m.group('host')
2154 if not host:
2155 raise SyntaxError, 'Host name required'
2156 port = int(m.group('port') or 3050)
2157 db = m.group('db')
2158 if not db:
2159 raise SyntaxError, 'Database name required'
2160 charset = m.group('charset') or 'UTF8'
2161 driver_args.update(dict(dsn='%s/%s:%s' % (host,port,db),
2162 user = credential_decoder(user),
2163 password = credential_decoder(password),
2164 charset = charset))
2165 def connect(driver_args=driver_args, adapter_args=adapter_args):
2166 if adapter_args.has_key('driver_name'):
2167 if adapter_args['driver_name'] == 'kinterbasdb':
2168 conn = kinterbasdb.connect(**driver_args)
2169 elif adapter_args['driver_name'] == 'firebirdsql':
2170 conn = firebirdsql.connect(**driver_args)
2171 else:
2172 conn = kinterbasdb.connect(**driver_args)
2173
2174 return conn
2175
2176 self.pool_connection(connect)
2177
2178 self.cursor = self.connection.cursor()
2179
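# Usage sketch (added; URI and credentials are hypothetical): the Firebird
# driver can be selected per connection through adapter_args, with
# kinterbasdb as the fallback:
#
#     >>> # db = DAL('firebird://user:pass@host:3050/mydb?set_encoding=UTF8',
#     >>> #          adapter_args={'driver_name': 'firebirdsql'})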
2188
2193
2194
2195 -class FireBirdEmbeddedAdapter(FireBirdAdapter):
2197 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2198 credential_decoder=lambda x:x, driver_args={},
2199 adapter_args={}):
2200 self.db = db
2201 self.dbengine = "firebird"
2202 self.uri = uri
2203 self.pool_size = pool_size
2204 self.folder = folder
2205 self.db_codec = db_codec
2206 self.find_or_make_work_folder()
2207 uri = uri.split('://')[1]
2208 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
2209 if not m:
2210 raise SyntaxError, \
2211 "Invalid URI string in DAL: %s" % self.uri
2212 user = credential_decoder(m.group('user'))
2213 if not user:
2214 raise SyntaxError, 'User required'
2215 password = credential_decoder(m.group('password'))
2216 if not password:
2217 password = ''
2218 pathdb = m.group('path')
2219 if not pathdb:
2220 raise SyntaxError, 'Path required'
2221 charset = m.group('charset')
2222 if not charset:
2223 charset = 'UTF8'
2224 host = ''
2225 driver_args.update(dict(host=host,
2226 database=pathdb,
2227 user=credential_decoder(user),
2228 password=credential_decoder(password),
2229 charset=charset))
2230
2231
2232 def connect(driver_args=driver_args, adapter_args=adapter_args):
2233 if adapter_args.has_key('driver_name'):
2234 if adapter_args['driver_name'] == 'kinterbasdb':
2235 conn = kinterbasdb.connect(**driver_args)
2236 elif adapter_args['driver_name'] == 'firebirdsql':
2237 conn = firebirdsql.connect(**driver_args)
2238 else:
2239 conn = kinterbasdb.connect(**driver_args)
2240
2241 return conn
2242
2243 self.pool_connection(connect)
2244
2245 self.cursor = self.connection.cursor()
2246
2247
2342
2347
2350
2353
2354
2355 -class DB2Adapter(BaseAdapter):
2356 types = {
2357 'boolean': 'CHAR(1)',
2358 'string': 'VARCHAR(%(length)s)',
2359 'text': 'CLOB',
2360 'password': 'VARCHAR(%(length)s)',
2361 'blob': 'BLOB',
2362 'upload': 'VARCHAR(%(length)s)',
2363 'integer': 'INT',
2364 'double': 'DOUBLE',
2365 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2366 'date': 'DATE',
2367 'time': 'TIME',
2368 'datetime': 'TIMESTAMP',
2369 'id': 'INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
2370 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2371 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2372 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2373 'list:integer': 'CLOB',
2374 'list:string': 'CLOB',
2375 'list:reference': 'CLOB',
2376 }
2377
2378 - def LEFT_JOIN(self):
2379 return 'LEFT OUTER JOIN'
2380
2383
2384 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
2385 if limitby:
2386 (lmin, lmax) = limitby
2387 sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
2388 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
2389
2390 - def represent_exceptions(self, obj, fieldtype):
2391 if fieldtype == 'blob':
2392 obj = base64.b64encode(str(obj))
2393 return "BLOB('%s')" % obj
2394 elif fieldtype == 'datetime':
2395 if isinstance(obj, datetime.datetime):
2396 obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
2397 elif isinstance(obj, datetime.date):
2398 obj = obj.isoformat()[:10]+'-00.00.00'
2399 return "'%s'" % obj
2400 return None
2401
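# Note (added for clarity): DB2 datetime literals use dashes and dots, so
# represent_exceptions above renders, for example,
#
#     >>> # datetime.datetime(2011, 12, 31, 23, 59, 59)
#     # as '2011-12-31-23.59.59', and a bare date as '2011-12-31-00.00.00'.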
2402 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2403 credential_decoder=lambda x:x, driver_args={},
2404 adapter_args={}):
2405 self.db = db
2406 self.dbengine = "db2"
2407 self.uri = uri
2408 self.pool_size = pool_size
2409 self.folder = folder
2410 self.db_codec = db_codec
2411 self.find_or_make_work_folder()
2412 cnxn = uri.split('://', 1)[1]
2413 def connect(cnxn=cnxn,driver_args=driver_args):
2414 return pyodbc.connect(cnxn,**driver_args)
2415 self.pool_connection(connect)
2416 self.cursor = self.connection.cursor()
2417
2418 - def execute(self,command):
2419 if command[-1:]==';':
2420 command = command[:-1]
2421 return self.log_execute(command)
2422
2423 - def lastrowid(self,table):
2424 self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
2425 return int(self.cursor.fetchone()[0])
2426
2427 - def rowslice(self,rows,minimum=0,maximum=None):
2428 if maximum is None:
2429 return rows[minimum:]
2430 return rows[minimum:maximum]
2431
2432
2433 INGRES_SEQNAME='ii***lineitemsequence' # NOTE: not a valid database object name; ANSI SQL requires this form of name to be a delimited identifier
2434
2435
2436
2437 -class IngresAdapter(BaseAdapter):
2439 types = {
2440 'boolean': 'CHAR(1)',
2441 'string': 'VARCHAR(%(length)s)',
2442 'text': 'CLOB',
2443 'password': 'VARCHAR(%(length)s)',
2444 'blob': 'BLOB',
2445 'upload': 'VARCHAR(%(length)s)',
2446 'integer': 'INTEGER4',
2447 'double': 'FLOAT8',
2448 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2449 'date': 'ANSIDATE',
2450 'time': 'TIME WITHOUT TIME ZONE',
2451 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
2452 'id': 'integer4 not null unique with default next value for %s' % INGRES_SEQNAME,
2453 'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2454 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2455 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2456 'list:integer': 'CLOB',
2457 'list:string': 'CLOB',
2458 'list:reference': 'CLOB',
2459 }
2460
2461 - def LEFT_JOIN(self):
2462 return 'LEFT OUTER JOIN'
2463
2466
2467 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
2468 if limitby:
2469 (lmin, lmax) = limitby
2470 fetch_amt = lmax - lmin
2471 if fetch_amt:
2472 sql_s += ' FIRST %d ' % (fetch_amt, )
2473 if lmin:
2474
2475 sql_o += ' OFFSET %d' % (lmin, )
2476 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
2477
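# Illustrative sketch (added): Ingres takes the page size in the select
# list and the offset at the end of the statement, so limitby=(5, 10)
# compiles roughly to
#
#     # SELECT FIRST 5 ... FROM tablename ... OFFSET 5;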
2478 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2479 credential_decoder=lambda x:x, driver_args={},
2480 adapter_args={}):
2481 self.db = db
2482 self.dbengine = "ingres"
2483 self.uri = uri
2484 self.pool_size = pool_size
2485 self.folder = folder
2486 self.db_codec = db_codec
2487 self.find_or_make_work_folder()
2488 connstr = self.uri.split(':', 1)[1]
2489
2490 connstr = connstr.lstrip()
2491 while connstr.startswith('/'):
2492 connstr = connstr[1:]
2493 database_name=connstr
2494 vnode = '(local)'
2495 servertype = 'ingres'
2496 trace = (0, None)
2497 driver_args.update(dict(database=database_name,
2498 vnode=vnode,
2499 servertype=servertype,
2500 trace=trace))
2501 def connect(driver_args=driver_args):
2502 return ingresdbi.connect(**driver_args)
2503 self.pool_connection(connect)
2504 self.cursor = self.connection.cursor()
2505
2506 - def create_sequence_and_triggers(self, query, table, **args):
2510 if hasattr(table,'_primarykey'):
2511 modify_tbl_sql = 'modify %s to btree unique on %s' % \
2512 (table._tablename,
2513 ', '.join(["'%s'" % x for x in table._primarykey]))
2514 self.execute(modify_tbl_sql)
2515 else:
2516 tmp_seqname='%s_iisq' % table._tablename
2517 query=query.replace(INGRES_SEQNAME, tmp_seqname)
2518 self.execute('create sequence %s' % tmp_seqname)
2519 self.execute(query)
2520 self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))
2521
2522
2523 - def lastrowid(self,table):
2524 tmp_seqname='%s_iisq' % table
2525 self.execute('select current value for %s' % tmp_seqname)
2526 return int(self.cursor.fetchone()[0])
2527
2530
2532 -class IngresUnicodeAdapter(IngresAdapter):
2533 types = {
2534 'boolean': 'CHAR(1)',
2535 'string': 'NVARCHAR(%(length)s)',
2536 'text': 'NCLOB',
2537 'password': 'NVARCHAR(%(length)s)',
2538 'blob': 'BLOB',
2539 'upload': 'VARCHAR(%(length)s)',
2540 'integer': 'INTEGER4',
2541 'double': 'FLOAT8',
2542 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2543 'date': 'ANSIDATE',
2544 'time': 'TIME WITHOUT TIME ZONE',
2545 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
2546 'id': 'integer4 not null unique with default next value for %s'% INGRES_SEQNAME,
2547 'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2548 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2549 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2550 'list:integer': 'NCLOB',
2551 'list:string': 'NCLOB',
2552 'list:reference': 'NCLOB',
2553 }
2554
2555 -class SAPDBAdapter(BaseAdapter):
2557 support_distributed_transaction = False
2558 types = {
2559 'boolean': 'CHAR(1)',
2560 'string': 'VARCHAR(%(length)s)',
2561 'text': 'LONG',
2562 'password': 'VARCHAR(%(length)s)',
2563 'blob': 'LONG',
2564 'upload': 'VARCHAR(%(length)s)',
2565 'integer': 'INT',
2566 'double': 'FLOAT',
2567 'decimal': 'FIXED(%(precision)s,%(scale)s)',
2568 'date': 'DATE',
2569 'time': 'TIME',
2570 'datetime': 'TIMESTAMP',
2571 'id': 'INT PRIMARY KEY',
2572 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2573 'list:integer': 'LONG',
2574 'list:string': 'LONG',
2575 'list:reference': 'LONG',
2576 }
2577
2578 - def sequence_name(self,table):
2579 return '%s_id_Seq' % table
2580
2581 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
2582 if limitby:
2583 (lmin, lmax) = limitby
2584 if len(sql_w) > 1:
2585 sql_w_row = sql_w + ' AND w_row > %i' % lmin
2586 else:
2587 sql_w_row = 'WHERE w_row > %i' % lmin
2588 return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
2589 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
2590
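# Note (added for clarity): SAP DB has no LIMIT/OFFSET, so the query above
# is wrapped in a derived table that materializes ROWNO: the inner query
# is bounded at ROWNO = lmax and the outer WHERE keeps w_row > lmin, e.g.
# for limitby=(5, 10):
#
#     # SELECT ... FROM (SELECT w_tmp.*, ROWNO w_row FROM (...) w_tmp
#     #     WHERE ROWNO=10) ... WHERE w_row > 5 ...;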
2591 - def create_sequence_and_triggers(self, query, table, **args):
2593 self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
2594 self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
2595 % (table._tablename, table._id.name, table._sequence_name))
2596 self.execute(query)
2597
2598 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2599 credential_decoder=lambda x:x, driver_args={},
2600 adapter_args={}):
2601 self.db = db
2602 self.dbengine = "sapdb"
2603 self.uri = uri
2604 self.pool_size = pool_size
2605 self.folder = folder
2606 self.db_codec = db_codec
2607 self.find_or_make_work_folder()
2608 uri = uri.split('://')[1]
2609 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
2610 if not m:
2611 raise SyntaxError, "Invalid URI string in DAL"
2612 user = credential_decoder(m.group('user'))
2613 if not user:
2614 raise SyntaxError, 'User required'
2615 password = credential_decoder(m.group('password'))
2616 if not password:
2617 password = ''
2618 host = m.group('host')
2619 if not host:
2620 raise SyntaxError, 'Host name required'
2621 db = m.group('db')
2622 if not db:
2623 raise SyntaxError, 'Database name required'
2624 def connect(user=user,password=password,database=db,host=host,driver_args=driver_args):
2625 return sapdb.Connection(user,password,database,host,**driver_args)
2626 self.pool_connection(connect)
2627
2628 self.cursor = self.connection.cursor()
2629
2630 - def lastrowid(self,table):
2631 self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
2632 return int(self.cursor.fetchone()[0])
2633
2634 -class CubridAdapter(MySQLAdapter):
2636 driver = globals().get('cubriddb',None)
2637
2638 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2639 credential_decoder=lambda x:x, driver_args={},
2640 adapter_args={}):
2641 self.db = db
2642 self.dbengine = "cubrid"
2643 self.uri = uri
2644 self.pool_size = pool_size
2645 self.folder = folder
2646 self.db_codec = db_codec
2647 self.find_or_make_work_folder()
2648 uri = uri.split('://')[1]
2649 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
2650 if not m:
2651 raise SyntaxError, \
2652 "Invalid URI string in DAL: %s" % self.uri
2653 user = credential_decoder(m.group('user'))
2654 if not user:
2655 raise SyntaxError, 'User required'
2656 password = credential_decoder(m.group('password'))
2657 if not password:
2658 password = ''
2659 host = m.group('host')
2660 if not host:
2661 raise SyntaxError, 'Host name required'
2662 db = m.group('db')
2663 if not db:
2664 raise SyntaxError, 'Database name required'
2665 port = int(m.group('port') or '30000')
2666 charset = m.group('charset') or 'utf8'
2667 user = credential_decoder(user)
2668 passwd = credential_decoder(password)
2669 def connect(host=host,port=port,db=db,user=user,passwd=passwd,driver_args=driver_args):
2670 return self.driver.connect(host,port,db,user,passwd,**driver_args)
2671 self.pool_connection(connect)
2672 self.cursor = self.connection.cursor()
2673 self.execute('SET FOREIGN_KEY_CHECKS=1;')
2674 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
2675
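# Usage sketch (added; credentials are hypothetical): Cubrid speaks the
# MySQL dialect here, since CubridAdapter subclasses MySQLAdapter:
#
#     >>> # db = DAL('cubrid://user:password@localhost:30000/mydb')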
2676
2677
2678
2679 -class DatabaseStoredFile:
2681 web2py_filesystem = False
2682
2683 - def __init__(self,db,filename,mode):
2684 if db._adapter.dbengine != 'mysql':
2685 raise RuntimeError, "only MySQL can store metadata .table files in database for now"
2686 self.db = db
2687 self.filename = filename
2688 self.mode = mode
2689 if not self.web2py_filesystem:
2690 self.db.executesql("CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(512), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;")
2691 DatabaseStoredFile.web2py_filesystem = True
2692 self.p=0
2693 self.data = ''
2694 if mode in ('r','rw','a'):
2695 query = "SELECT content FROM web2py_filesystem WHERE path='%s'" % filename
2696 rows = self.db.executesql(query)
2697 if rows:
2698 self.data = rows[0][0]
2699 elif os.path.exists(filename):
2700 datafile = open(filename, 'r')
2701 try:
2702 self.data = datafile.read()
2703 finally:
2704 datafile.close()
2705 elif mode in ('r','rw'):
2706 raise RuntimeError, "File %s does not exist" % filename
2707
2708 - def read(self, bytes):
2709 data = self.data[self.p:self.p+bytes]
2710 self.p += len(data)
2711 return data
2712
2713 - def readline(self):
2714 i = self.data.find('\n',self.p)+1
2715 if i>0:
2716 data, self.p = self.data[self.p:i], i
2717 else:
2718 data, self.p = self.data[self.p:], len(self.data)
2719 return data
2720
2721 - def write(self,data):
2722 self.data += data
2723
2724 - def close(self):
2725 self.db.executesql("DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
2726 query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')" % \
2727 (self.filename, self.data.replace("'","''"))
2728 self.db.executesql(query)
2729 self.db.commit()
2730
2731 @staticmethod
2732 - def exists(db,filename):
2733 if os.path.exists(filename):
2734 return True
2735 query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
2736 if db.executesql(query):
2737 return True
2738 return False
2739
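# Note (added for clarity): DatabaseStoredFile lets adapters without a
# writable filesystem keep the migration metadata normally written to
# <app>/databases/*.table inside a web2py_filesystem table in the database
# itself. A sketch (filename is hypothetical):
#
#     >>> # f = DatabaseStoredFile(db, 'sample.table', 'w')
#     >>> # f.write(cPickle.dumps({})); f.close()
#     # the content now lives in web2py_filesystem, keyed by path.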
2740
2741 -class UseDatabaseStoredFile:
2742
2743 - def file_exists(self, filename):
2744 return DatabaseStoredFile.exists(self.db,filename)
2745
2746 - def file_open(self, filename, mode='rb', lock=True):
2747 return DatabaseStoredFile(self.db,filename,mode)
2748
2749 - def file_close(self, fileobj, unlock=True):
2750 fileobj.close()
2751
2752 - def file_delete(self,filename):
2753 query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
2754 self.db.executesql(query)
2755 self.db.commit()
2756
2757 -class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
2759 - def __init__(self, db, uri='google:sql://realm:domain/database', pool_size=0,
2760 folder=None, db_codec='UTF-8', check_reserved=None,
2761 migrate=True, fake_migrate=False,
2762 credential_decoder = lambda x:x, driver_args={},
2763 adapter_args={}):
2764
2765 self.db = db
2766 self.dbengine = "mysql"
2767 self.uri = uri
2768 self.pool_size = pool_size
2769 self.folder = folder
2770 self.db_codec = db_codec
2771 self.folder = folder or '$HOME/'+thread.folder.split('/applications/',1)[1]
2772
2773 m = re.compile('^(?P<instance>.*)/(?P<db>.*)$').match(self.uri[len('google:sql://'):])
2774 if not m:
2775 raise SyntaxError, "Invalid URI string in SQLDB: %s" % self.uri
2776 instance = credential_decoder(m.group('instance'))
2777 db = credential_decoder(m.group('db'))
2778 driver_args['instance'] = instance
2779 if not migrate:
2780 driver_args['database'] = db
2781 def connect(driver_args=driver_args):
2782 return rdbms.connect(**driver_args)
2783 self.pool_connection(connect)
2784 self.cursor = self.connection.cursor()
2785 if migrate:
2786
2787 self.execute('CREATE DATABASE IF NOT EXISTS %s' % db)
2788 self.execute('USE %s' % db)
2789 self.execute("SET FOREIGN_KEY_CHECKS=1;")
2790 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
2791
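# Usage sketch (added; instance and database names are hypothetical):
#
#     >>> # db = DAL('google:sql://myproject:myinstance/mydb')
#
# with migrate enabled the database is created on first use; Google SQL
# exposes a MySQL-compatible engine, hence dbengine = "mysql" above.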
2792 -class NoSQLAdapter(BaseAdapter):
2793
2794 - def represent(self, obj, fieldtype):
2795 if isinstance(obj,CALLABLETYPES):
2796 obj = obj()
2797 if isinstance(fieldtype, SQLCustomType):
2798 return fieldtype.encoder(obj)
2799 if isinstance(obj, (Expression, Field)):
2800 raise SyntaxError, "non supported on GAE"
2801 if self.dbengine=='google:datastore':
2802 if isinstance(fieldtype, gae.Property):
2803 return obj
2804 if fieldtype.startswith('list:'):
2805 if not obj:
2806 obj = []
2807 if not isinstance(obj, (list, tuple)):
2808 obj = [obj]
2809 if obj == '' and not fieldtype[:2] in ['st','te','pa','up']:
2810 return None
2811 if obj != None:
2812 if isinstance(obj, list) and not fieldtype.startswith('list'):
2813 obj = [self.represent(o, fieldtype) for o in obj]
2814 elif fieldtype in ('integer','id'):
2815 obj = long(obj)
2816 elif fieldtype == 'double':
2817 obj = float(obj)
2818 elif fieldtype.startswith('reference'):
2819 if isinstance(obj, (Row, Reference)):
2820 obj = obj['id']
2821 obj = long(obj)
2822 elif fieldtype == 'boolean':
2823 if obj and not str(obj)[0].upper() == 'F':
2824 obj = True
2825 else:
2826 obj = False
2827 elif fieldtype == 'date':
2828 if not isinstance(obj, datetime.date):
2829 (y, m, d) = [int(x) for x in str(obj).strip().split('-')]
2830 obj = datetime.date(y, m, d)
2831 elif isinstance(obj,datetime.datetime):
2832 (y, m, d) = (obj.year, obj.month, obj.day)
2833 obj = datetime.date(y, m, d)
2834 elif fieldtype == 'time':
2835 if not isinstance(obj, datetime.time):
2836 time_items = [int(x) for x in str(obj).strip().split(':')[:3]]
2837 if len(time_items) == 3:
2838 (h, mi, s) = time_items
2839 else:
2840 (h, mi, s) = time_items + [0]
2841 obj = datetime.time(h, mi, s)
2842 elif fieldtype == 'datetime':
2843 if not isinstance(obj, datetime.datetime):
2844 (y, m, d) = [int(x) for x in str(obj)[:10].strip().split('-')]
2845 time_items = [int(x) for x in str(obj)[11:].strip().split(':')[:3]]
2846 while len(time_items)<3:
2847 time_items.append(0)
2848 (h, mi, s) = time_items
2849 obj = datetime.datetime(y, m, d, h, mi, s)
2850 elif fieldtype == 'blob':
2851 pass
2852 elif fieldtype.startswith('list:string'):
2853 if obj!=None and not isinstance(obj,(list,tuple)):
2854 obj=[obj]
2855 return [str(x) for x in obj]
2856 elif fieldtype.startswith('list:'):
2857 if obj!=None and not isinstance(obj,(list,tuple)):
2858 obj=[obj]
2859 return [int(x) for x in obj]
2860 elif isinstance(obj, str):
2861 obj = obj.decode('utf8')
2862 elif not isinstance(obj, unicode):
2863 obj = unicode(obj)
2864 return obj
2865
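# Illustrative examples (a sketch) of the coercions performed above:
#
#     >>> # adapter.represent('7', 'integer')        # -> 7L
#     >>> # adapter.represent('t', 'boolean')        # -> True
#     >>> # adapter.represent('2011-12-31', 'date')  # -> datetime.date(2011, 12, 31)
#     >>> # adapter.represent('x', 'list:string')    # -> ['x']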
2866 - def _insert(self,table,fields):
2867 return 'insert %s in %s' % (fields, table)
2868
2869 - def _count(self,query,distinct=None):
2870 return 'count %s' % repr(query)
2871
2872 - def _select(self,query,fields,attributes):
2873 return 'select %s where %s' % (repr(fields), repr(query))
2874
2875 - def _delete(self,tablename, query):
2876 return 'delete %s where %s' % (repr(tablename),repr(query))
2877
2878 - def _update(self,tablename,query,fields):
2879 return 'update %s (%s) where %s' % (repr(tablename),
2880 repr(fields),repr(query))
2881
2883 """
2884 remember: no transactions on many NoSQL
2885 """
2886 pass
2887
2889 """
2890 remember: no transactions on many NoSQL
2891 """
2892 pass
2893
2895 """
2896 remember: no transactions on many NoSQL
2897 """
2898 pass
2899
2900
2901
2902 - def OR(self,first,second): raise SyntaxError, "Not supported"
2903 - def AND(self,first,second): raise SyntaxError, "Not supported"
2904 - def AS(self,first,second): raise SyntaxError, "Not supported"
2905 - def ON(self,first,second): raise SyntaxError, "Not supported"
2906 - def STARTSWITH(self,first,second=None): raise SyntaxError, "Not supported"
2907 - def ENDSWITH(self,first,second=None): raise SyntaxError, "Not supported"
2908 - def ADD(self,first,second): raise SyntaxError, "Not supported"
2909 - def SUB(self,first,second): raise SyntaxError, "Not supported"
2910 - def MUL(self,first,second): raise SyntaxError, "Not supported"
2911 - def DIV(self,first,second): raise SyntaxError, "Not supported"
2912 - def LOWER(self,first): raise SyntaxError, "Not supported"
2913 - def UPPER(self,first): raise SyntaxError, "Not supported"
2915 - def AGGREGATE(self,first,what): raise SyntaxError, "Not supported"
2916 - def LEFT_JOIN(self): raise SyntaxError, "Not supported"
2917 - def RANDOM(self): raise SyntaxError, "Not supported"
2918 - def SUBSTRING(self,field,parameters): raise SyntaxError, "Not supported"
2919 - def PRIMARY_KEY(self,key): raise SyntaxError, "Not supported"
2920 - def LIKE(self,first,second): raise SyntaxError, "Not supported"
2921 - def drop(self,table,mode): raise SyntaxError, "Not supported"
2922 - def alias(self,table,alias): raise SyntaxError, "Not supported"
2923 - def migrate_table(self,*a,**b): raise SyntaxError, "Not supported"
2925 - def prepare(self,key): raise SyntaxError, "Not supported"
2928 - def concat_add(self,table): raise SyntaxError, "Not supported"
2929 - def constraint_name(self, table, fieldname): raise SyntaxError, "Not supported"
2931 - def log_execute(self,*a,**b): raise SyntaxError, "Not supported"
2932 - def execute(self,*a,**b): raise SyntaxError, "Not supported"
2934 - def lastrowid(self,table): raise SyntaxError, "Not supported"
2936 - def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError, "Not supported"
2937
2938
2939 -class GAEF(object):
2940 - def __init__(self,name,op,value,apply):
2941 self.name=name=='id' and '__key__' or name
2942 self.op=op
2943 self.value=value
2944 self.apply=apply
2945 - def __repr__(self):
2946 return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
2947
2948 -class GoogleDatastoreAdapter(NoSQLAdapter):
2949 uploads_in_blob = True
2950 types = {}
2951
2953 - def file_open(self, filename, mode='rb', lock=True): pass
2954 - def file_close(self, fileobj, unlock=True): pass
2955
2956 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2957 credential_decoder=lambda x:x, driver_args={},
2958 adapter_args={}):
2959 self.types.update({
2960 'boolean': gae.BooleanProperty,
2961 'string': (lambda: gae.StringProperty(multiline=True)),
2962 'text': gae.TextProperty,
2963 'password': gae.StringProperty,
2964 'blob': gae.BlobProperty,
2965 'upload': gae.StringProperty,
2966 'integer': gae.IntegerProperty,
2967 'double': gae.FloatProperty,
2968 'decimal': GAEDecimalProperty,
2969 'date': gae.DateProperty,
2970 'time': gae.TimeProperty,
2971 'datetime': gae.DateTimeProperty,
2972 'id': None,
2973 'reference': gae.IntegerProperty,
2974 'list:string': (lambda: gae.StringListProperty(default=None)),
2975 'list:integer': (lambda: gae.ListProperty(int,default=None)),
2976 'list:reference': (lambda: gae.ListProperty(int,default=None)),
2977 })
2978 self.db = db
2979 self.uri = uri
2980 self.dbengine = 'google:datastore'
2981 self.folder = folder
2982 db['_lastsql'] = ''
2983 self.db_codec = 'UTF-8'
2984 self.pool_size = 0
2985 match = re.compile('.*://(?P<namespace>.+)').match(uri)
2986 if match:
2987 namespace_manager.set_namespace(match.group('namespace'))
2988
2989 - def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
2990 myfields = {}
2991 for k in table.fields:
2992 if isinstance(polymodel,Table) and k in polymodel.fields():
2993 continue
2994 field = table[k]
2995 attr = {}
2996 if isinstance(field.type, SQLCustomType):
2997 ftype = self.types[field.type.native or field.type.type](**attr)
2998 elif isinstance(field.type, gae.Property):
2999 ftype = field.type
3000 elif field.type.startswith('id'):
3001 continue
3002 elif field.type.startswith('decimal'):
3003 precision, scale = field.type[7:].strip('()').split(',')
3004 precision = int(precision)
3005 scale = int(scale)
3006 ftype = GAEDecimalProperty(precision, scale, **attr)
3007 elif field.type.startswith('reference'):
3008 if field.notnull:
3009 attr = dict(required=True)
3010 referenced = field.type[10:].strip()
3011 ftype = self.types[field.type[:9]](table._db[referenced])
3012 elif field.type.startswith('list:reference'):
3013 if field.notnull:
3014 attr = dict(required=True)
3015 referenced = field.type[15:].strip()
3016 ftype = self.types[field.type[:14]](**attr)
3017 elif field.type.startswith('list:'):
3018 ftype = self.types[field.type](**attr)
3019 elif not field.type in self.types\
3020 or not self.types[field.type]:
3021 raise SyntaxError, 'Field: unknown field type: %s' % field.type
3022 else:
3023 ftype = self.types[field.type](**attr)
3024 myfields[field.name] = ftype
3025 if not polymodel:
3026 table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
3027 elif polymodel==True:
3028 table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
3029 elif isinstance(polymodel,Table):
3030 table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
3031 else:
3032 raise SyntaxError, "polymodel must be None, True, a table or a tablename"
3033 return None
3034
3035 - def expand(self,expression,field_type=None):
3036 if isinstance(expression,Field):
3037 if expression.type in ('text','blob'):
3038 raise SyntaxError, 'AppEngine does not index by: %s' % expression.type
3039 return expression.name
3040 elif isinstance(expression, (Expression, Query)):
3041 if not expression.second is None:
3042 return expression.op(expression.first, expression.second)
3043 elif not expression.first is None:
3044 return expression.op(expression.first)
3045 else:
3046 return expression.op()
3047 elif field_type:
3048 return self.represent(expression,field_type)
3049 elif isinstance(expression,(list,tuple)):
3050 return ','.join([self.represent(item,field_type) for item in expression])
3051 else:
3052 return str(expression)
3053
3054
3055 - def AND(self,first,second):
3056 a = self.expand(first)
3057 b = self.expand(second)
3058 if b[0].name=='__key__' and a[0].name!='__key__':
3059 return b+a
3060 return a+b
3061
3062 - def EQ(self,first,second=None):
3063 if isinstance(second, Key):
3064 return [GAEF(first.name,'=',second,lambda a,b:a==b)]
3065 return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]
3066
3067 - def NE(self,first,second=None):
3068 if first.type != 'id':
3069 return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
3070 else:
3071 second = Key.from_path(first._tablename, long(second))
3072 return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]
3073
3074 - def LT(self,first,second=None):
3075 if first.type != 'id':
3076 return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
3077 else:
3078 second = Key.from_path(first._tablename, long(second))
3079 return [GAEF(first.name,'<',second,lambda a,b:a<b)]
3080
3081 - def LE(self,first,second=None):
3082 if first.type != 'id':
3083 return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
3084 else:
3085 second = Key.from_path(first._tablename, long(second))
3086 return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]
3087
3088 - def GT(self,first,second=None):
3089 if first.type != 'id' or second==0 or second == '0':
3090 return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
3091 else:
3092 second = Key.from_path(first._tablename, long(second))
3093 return [GAEF(first.name,'>',second,lambda a,b:a>b)]
3094
3095 - def GE(self,first,second=None):
3096 if first.type != 'id':
3097 return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
3098 else:
3099 second = Key.from_path(first._tablename, long(second))
3100 return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
3101
3104
3105 - def COMMA(self,first,second):
3106 return first+second
3107
3108 - def BELONGS(self,first,second=None):
3109 if not isinstance(second,(list, tuple)):
3110 raise SyntaxError, "Not supported"
3111 if first.type != 'id':
3112 return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)]
3113 else:
3114 second = [Key.from_path(first._tablename, i) for i in second]
3115 return [GAEF(first.name,'in',second,lambda a,b:a in b)]
3116
3121
3122 - def NOT(self,first):
3123 nops = { self.EQ: self.NE,
3124 self.NE: self.EQ,
3125 self.LT: self.GE,
3126 self.GT: self.LE,
3127 self.LE: self.GT,
3128 self.GE: self.LT}
3129 if not isinstance(first,Query):
3130 raise SyntaxError, "Not supported"
3131 nop = nops.get(first.op,None)
3132 if not nop:
3133 raise SyntaxError, "Not supported %s" % first.op.__name__
3134 first.op = nop
3135 return self.expand(first)
3136
3137 - def truncate(self,table,mode):
3138 self.db(table._id > 0).delete()
3139
3140 - def select_raw(self,query,fields=[],attributes={}):
3141 new_fields = []
3142 for item in fields:
3143 if isinstance(item,SQLALL):
3144 new_fields += item.table
3145 else:
3146 new_fields.append(item)
3147 fields = new_fields
3148 if query:
3149 tablename = self.get_table(query)
3150 elif fields:
3151 tablename = fields[0].tablename
3152 query = fields[0].table._id>0
3153 else:
3154 raise SyntaxError, "Unable to determine a tablename"
3155 query = self.filter_tenant(query,[tablename])
3156 tableobj = self.db[tablename]._tableobj
3157 items = tableobj.all()
3158 filters = self.expand(query)
3159 for filter in filters:
3160 if filter.name=='__key__' and filter.op=='>' and filter.value==0:
3161 continue
3162 elif filter.name=='__key__' and filter.op=='=':
3163 if filter.value==0:
3164 items = []
3165 elif isinstance(filter.value, Key):
3166 item = tableobj.get(filter.value)
3167 items = (item and [item]) or []
3168 else:
3169 item = tableobj.get_by_id(filter.value)
3170 items = (item and [item]) or []
3171 elif isinstance(items,list):
3172 items = [i for i in items if filter.apply(getattr(i,filter.name),
3173 filter.value)]
3174 else:
3175 if filter.name=='__key__': items.order('__key__')
3176 items = items.filter('%s %s' % (filter.name,filter.op),filter.value)
3177 if not isinstance(items,list):
3178 if attributes.get('left', None):
3179 raise SyntaxError, 'Set: no left join in appengine'
3180 if attributes.get('groupby', None):
3181 raise SyntaxError, 'Set: no groupby in appengine'
3182 orderby = attributes.get('orderby', False)
3183 if orderby:
3184
3185 if isinstance(orderby, (list, tuple)):
3186 orderby = xorify(orderby)
3187 if isinstance(orderby,Expression):
3188 orderby = self.expand(orderby)
3189 orders = orderby.split(', ')
3190 for order in orders:
3191 order={'-id':'-__key__','id':'__key__'}.get(order,order)
3192 items = items.order(order)
3193 if attributes.get('limitby', None):
3194 (lmin, lmax) = attributes['limitby']
3195 (limit, offset) = (lmax - lmin, lmin)
3196 items = items.fetch(limit, offset=offset)
3197 fields = self.db[tablename].fields
3198 return (items, tablename, fields)
3199
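# Note (added for clarity): on the datastore a web2py Query is expanded
# into a list of GAEF filters rather than into SQL; for example, with a
# hypothetical 'person' table,
#
#     >>> # self.expand(db.person.name == 'James')
#     # -> [GAEF('name', '=', u'James', <lambda>)]
#
# each filter is then applied natively via items.filter(), except for the
# __key__ special cases handled above by direct key lookup.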
3200 - def select(self,query,fields,attributes):
3201 (items, tablename, fields) = self.select_raw(query,fields,attributes)
3202
3203 rows = [
3204 [t=='id' and int(item.key().id()) or getattr(item, t) for t in fields]
3205 for item in items]
3206 colnames = ['%s.%s' % (tablename, t) for t in fields]
3207 return self.parse(rows, colnames, False)
3208
3209
3210 - def count(self,query,distinct=None):
3211 if distinct:
3212 raise RuntimeError, "COUNT DISTINCT not supported"
3213 (items, tablename, fields) = self.select_raw(query)
3214
3215 try:
3216 return len(items)
3217 except TypeError:
3218 return items.count(limit=None)
3219
3220 - def delete(self,tablename, query):
3221 """
3222 This function was changed on 2010-05-04 because according to
3223 http://code.google.com/p/googleappengine/issues/detail?id=3119
3224 GAE no longer support deleting more than 1000 records.
3225 """
3226
3227 (items, tablename, fields) = self.select_raw(query)
3228
3229 if not isinstance(items,list):
3230 counter = items.count(limit=None)
3231 leftitems = items.fetch(1000)
3232 while len(leftitems):
3233 gae.delete(leftitems)
3234 leftitems = items.fetch(1000)
3235 else:
3236 counter = len(items)
3237 gae.delete(items)
3238 return counter
3239
3240 - def update(self,tablename,query,update_fields):
3241
3242 (items, tablename, fields) = self.select_raw(query)
3243 counter = 0
3244 for item in items:
3245 for field, value in update_fields:
3246 setattr(item, field.name, self.represent(value,field.type))
3247 item.put()
3248 counter += 1
3249 logger.info(str(counter))
3250 return counter
3251
3252 - def insert(self,table,fields):
3253 dfields=dict((f.name,self.represent(v,f.type)) for f,v in fields)
3254
3255 tmp = table._tableobj(**dfields)
3256 tmp.put()
3257 rid = Reference(tmp.key().id())
3258 (rid._table, rid._record) = (table, None)
3259 return rid
3260
3261 - def bulk_insert(self,table,items):
3262 parsed_items = []
3263 for item in items:
3264 dfields=dict((f.name,self.represent(v,f.type)) for f,v in item)
3265 parsed_items.append(table._tableobj(**dfields))
3266 gae.put(parsed_items)
3267 return True
3268
3269 try:
3270 import couchdb
3271 drivers.append('CouchDB')
3272 except ImportError:
3273 logger.debug('no couchdb driver')
3274
3275 - def uuid2int(uuidv):
3276 return uuid.UUID(uuidv).int
3277
3278 - def uuid2uuid(n):
3279 return str(uuid.UUID(int=n))
3280
3281 -class CouchDBAdapter(NoSQLAdapter):
3282 uploads_in_blob = True
3283 types = {
3284 'boolean': bool,
3285 'string': str,
3286 'text': str,
3287 'password': str,
3288 'blob': str,
3289 'upload': str,
3290 'integer': long,
3291 'double': float,
3292 'date': datetime.date,
3293 'time': datetime.time,
3294 'datetime': datetime.datetime,
3295 'id': long,
3296 'reference': long,
3297 'list:string': list,
3298 'list:integer': list,
3299 'list:reference': list,
3300 }
3301
3303 - def file_open(self, filename, mode='rb', lock=True): pass
3304 - def file_close(self, fileobj, unlock=True): pass
3305
3306 - def expand(self,expression,field_type=None):
3307 if isinstance(expression,Field):
3308 if expression.type=='id':
3309 return "%s._id" % expression.tablename
3310 return BaseAdapter.expand(self,expression,field_type)
3311
3312 - def AND(self,first,second):
3313 return '(%s && %s)' % (self.expand(first),self.expand(second))
3314
3315 - def OR(self,first,second):
3316 return '(%s || %s)' % (self.expand(first),self.expand(second))
3317
3318 - def EQ(self,first,second):
3319 if second is None:
3320 return '(%s==null)' % self.expand(first)
3321 return '(%s==%s)' % (self.expand(first),self.expand(second,first.type))
3322
3323 - def NE(self,first,second):
3324 if second is None:
3325 return '(%s!=null)' % self.expand(first)
3326 return '(%s!=%s)' % (self.expand(first),self.expand(second,first.type))
3327
3328 - def COMMA(self,first,second):
3329 return '%s + %s' % (self.expand(first),self.expand(second))
3330
3331 - def represent(self, obj, fieldtype):
3332 value = NoSQLAdapter.represent(self, obj, fieldtype)
3333 if fieldtype=='id':
3334 return repr(str(int(value)))
3335 return repr(not isinstance(value,unicode) and value or value.encode('utf8'))
3336
3337 - def __init__(self,db,uri='couchdb://127.0.0.1:5984',
3338 pool_size=0,folder=None,db_codec ='UTF-8',
3339 credential_decoder=lambda x:x, driver_args={},
3340 adapter_args={}):
3341 self.db = db
3342 self.uri = uri
3343 self.dbengine = 'couchdb'
3344 self.folder = folder
3345 db['_lastsql'] = ''
3346 self.db_codec = 'UTF-8'
3347 self.pool_size = pool_size
3348
3349 url='http://'+uri[10:]
3350 def connect(url=url,driver_args=driver_args):
3351 return couchdb.Server(url,**driver_args)
3352 self.pool_connection(connect)
3353
3354 - def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
3355 if migrate:
3356 try:
3357 self.connection.create(table._tablename)
3358 except:
3359 pass
3360
3361 - def insert(self,table,fields):
3362 id = uuid2int(web2py_uuid())
3363 ctable = self.connection[table._tablename]
3364 values = dict((k.name,self.represent(v,k.type)) for k,v in fields)
3365 values['_id'] = str(id)
3366 ctable.save(values)
3367 return id
3368
3369 - def _select(self,query,fields,attributes):
3370 if not isinstance(query,Query):
3371 raise SyntaxError, "Not Supported"
3372 for key in set(attributes.keys())-set(('orderby','groupby','limitby',
3373 'required','cache','left',
3374 'distinct','having')):
3375 raise SyntaxError, 'invalid select attribute: %s' % key
3376 new_fields=[]
3377 for item in fields:
3378 if isinstance(item,SQLALL):
3379 new_fields += item.table
3380 else:
3381 new_fields.append(item)
3382 def uid(fd):
3383 return fd=='id' and '_id' or fd
3384 def get(row,fd):
3385 return fd=='id' and int(row['_id']) or row.get(fd,None)
3386 fields = new_fields
3387 tablename = self.get_table(query)
3388 fieldnames = [f.name for f in (fields or self.db[tablename])]
3389 colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
3390 fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
3391 fn="function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);}" %\
3392 dict(t=tablename,
3393 query=self.expand(query),
3394 order='%s._id' % tablename,
3395 fields=fields)
3396 return fn, colnames
3397
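# Illustrative sketch (added): _select compiles the query into a CouchDB
# map function instead of SQL; selecting person.name where name=='James'
# (a hypothetical table) yields roughly
#
#     # function(person){if((person.name=='James'))
#     #     emit(person._id,[person.name]);}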
3398 - def select(self,query,fields,attributes):
3399 if not isinstance(query,Query):
3400 raise SyntaxError, "Not Supported"
3401 fn, colnames = self._select(query,fields,attributes)
3402 tablename = colnames[0].split('.')[0]
3403 ctable = self.connection[tablename]
3404 rows = [cols['value'] for cols in ctable.query(fn)]
3405 return self.parse(rows, colnames, False)
3406
3407 - def delete(self,tablename,query):
3408 if not isinstance(query,Query):
3409 raise SyntaxError, "Not Supported"
3410 if query.first.type=='id' and query.op==self.EQ:
3411 id = query.second
3412 tablename = query.first.tablename
3413 assert(tablename == query.first.tablename)
3414 ctable = self.connection[tablename]
3415 try:
3416 del ctable[str(id)]
3417 return 1
3418 except couchdb.http.ResourceNotFound:
3419 return 0
3420 else:
3421 tablename = self.get_table(query)
3422 rows = self.select(query,[self.db[tablename]._id],{})
3423 ctable = self.connection[tablename]
3424 for row in rows:
3425 del ctable[str(row.id)]
3426 return len(rows)
3427
3428 - def update(self,tablename,query,fields):
3429 if not isinstance(query,Query):
3430 raise SyntaxError, "Not Supported"
3431 if query.first.type=='id' and query.op==self.EQ:
3432 id = query.second
3433 tablename = query.first.tablename
3434 ctable = self.connection[tablename]
3435 try:
3436 doc = ctable[str(id)]
3437 for key,value in fields:
3438 doc[key.name] = NoSQLAdapter.represent(self,value,self.db[tablename][key.name].type)
3439 ctable.save(doc)
3440 return 1
3441 except couchdb.http.ResourceNotFound:
3442 return 0
3443 else:
3444 tablename = self.get_table(query)
3445 rows = self.select(query,[self.db[tablename]._id],{})
3446 ctable = self.connection[tablename]
3447 table = self.db[tablename]
3448 for row in rows:
3449 doc = ctable[str(row.id)]
3450 for key,value in fields:
3451 doc[key.name] = NoSQLAdapter.represent(self,value,table[key.name].type)
3452 ctable.save(doc)
3453 return len(rows)
3454
3455 - def count(self,query,distinct=None):
3456 if distinct:
3457 raise RuntimeError, "COUNT DISTINCT not supported"
3458 if not isinstance(query,Query):
3459 raise SyntaxError, "Not Supported"
3460 tablename = self.get_table(query)
3461 rows = self.select(query,[self.db[tablename]._id],{})
3462 return len(rows)
3463
3465 """
3466 validates that the given text is clean: only contains [0-9a-zA-Z_]
3467 """
3468
3469 if re.compile('[^0-9a-zA-Z_]').findall(text):
3470 raise SyntaxError, \
3471 'only [0-9a-zA-Z_] allowed in table and field names, received %s' \
3472 % text
3473 return text
3474
3475
3476 try:
3477 import pymongo
3478 drivers.append('mongoDB')
3479 except ImportError:
3480 logger.debug('no mongoDB driver')
3481
3482 -class MongoDBAdapter(NoSQLAdapter):
3483 uploads_in_blob = True
3484 types = {
3485 'boolean': bool,
3486 'string': str,
3487 'text': str,
3488 'password': str,
3489 'blob': str,
3490 'upload': str,
3491 'integer': long,
3492 'double': float,
3493 'date': datetime.date,
3494 'time': datetime.time,
3495 'datetime': datetime.datetime,
3496 'id': long,
3497 'reference': long,
3498 'list:string': list,
3499 'list:integer': list,
3500 'list:reference': list,
3501 }
3502
3503 - def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
3504 pool_size=0,folder=None,db_codec ='UTF-8',
3505 credential_decoder=lambda x:x, driver_args={},
3506 adapter_args={}):
3507 self.db = db
3508 self.uri = uri
3509 self.dbengine = 'mongodb'
3510 self.folder = folder
3511 db['_lastsql'] = ''
3512 self.db_codec = 'UTF-8'
3513 self.pool_size = pool_size
3514
3515 m = re.compile('^(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(self.uri[10:])
3516 if not m:
3517 raise SyntaxError, "Invalid URI string in DAL: %s" % self.uri
3518 host = m.group('host')
3519 if not host:
3520 raise SyntaxError, 'mongodb: host name required'
3521 dbname = m.group('db')
3522 if not dbname:
3523 raise SyntaxError, 'mongodb: db name required'
3524 port = int(m.group('port') or 27017)
3525 driver_args.update(dict(host=host,port=port))
3526 def connect(dbname=dbname,driver_args=driver_args):
3527 return pymongo.Connection(**driver_args)[dbname]
3528 self.pool_connection(connect)
3529
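# Usage sketch (added; host and database are hypothetical). Note the
# adapter is still in progress: most operations below raise
# "Not implemented".
#
#     >>> # db = DAL('mongodb://127.0.0.1:27017/mydb')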
3530 - def insert(self,table,fields):
3535
3536
3537 - def count(self,query):
3538 raise RuntimeError, "Not implemented"
3539
3540 - def select(self,query,fields,attributes):
3541 raise RuntimeError, "Not implemented"
3542
3543 - def delete(self,tablename, query):
3544 raise RuntimeError, "Not implemented"
3545
3546 - def update(self,tablename,query,fields):
3547 raise RuntimeError, "Not implemented"
3548
3549
3550
3551
3552
3553
3554 ADAPTERS = {
3555 'sqlite': SQLiteAdapter,
3556 'sqlite:memory': SQLiteAdapter,
3557 'mysql': MySQLAdapter,
3558 'postgres': PostgreSQLAdapter,
3559 'oracle': OracleAdapter,
3560 'mssql': MSSQLAdapter,
3561 'mssql2': MSSQL2Adapter,
3562 'db2': DB2Adapter,
3563 'informix': InformixAdapter,
3564 'firebird': FireBirdAdapter,
3565 'firebird_embedded': FireBirdEmbeddedAdapter,
3566 'ingres': IngresAdapter,
3567 'ingresu': IngresUnicodeAdapter,
3568 'sapdb': SAPDBAdapter,
3569 'cubrid': CubridAdapter,
3570 'jdbc:sqlite': JDBCSQLiteAdapter,
3571 'jdbc:sqlite:memory': JDBCSQLiteAdapter,
3572 'jdbc:postgres': JDBCPostgreSQLAdapter,
3573 'gae': GoogleDatastoreAdapter,
3574 'google:datastore': GoogleDatastoreAdapter,
3575 'google:sql': GoogleSQLAdapter,
3576 'couchdb': CouchDBAdapter,
3577 'mongodb': MongoDBAdapter,
3578 }
3579
3580
3582 """
3583 Field type validation, using web2py's validators mechanism.
3584
3585 makes sure the content of a field is in line with the declared
3586 fieldtype
3587 """
3588 if not have_validators:
3589 return []
3590 field_type, field_length = field.type, field.length
3591 if isinstance(field_type, SQLCustomType):
3592 if hasattr(field_type, 'validator'):
3593 return field_type.validator
3594 else:
3595 field_type = field_type.type
3596 elif not isinstance(field_type,str):
3597 return []
3598 requires=[]
3599 def ff(r,id):
3600 row=r(id)
3601 if not row:
3602 return id
3603 elif hasattr(r, '_format') and isinstance(r._format,str):
3604 return r._format % row
3605 elif hasattr(r, '_format') and callable(r._format):
3606 return r._format(row)
3607 else:
3608 return id
3609 if field_type == 'string':
3610 requires.append(validators.IS_LENGTH(field_length))
3611 elif field_type == 'text':
3612 requires.append(validators.IS_LENGTH(2 ** 16))
3613 elif field_type == 'password':
3614 requires.append(validators.IS_LENGTH(field_length))
3615 elif field_type == 'double':
3616 requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
3617 elif field_type == 'integer':
3618 requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
3619 elif field_type.startswith('decimal'):
3620 requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
3621 elif field_type == 'date':
3622 requires.append(validators.IS_DATE())
3623 elif field_type == 'time':
3624 requires.append(validators.IS_TIME())
3625 elif field_type == 'datetime':
3626 requires.append(validators.IS_DATETIME())
3627 elif field.db and field_type.startswith('reference') and \
3628 field_type.find('.') < 0 and \
3629 field_type[10:] in field.db.tables:
3630 referenced = field.db[field_type[10:]]
3631 def repr_ref(id, r=referenced, f=ff): return f(r, id)
3632 field.represent = field.represent or repr_ref
3633 if hasattr(referenced, '_format') and referenced._format:
3634 requires = validators.IS_IN_DB(field.db,referenced._id,
3635 referenced._format)
3636 if field.unique:
3637 requires._and = validators.IS_NOT_IN_DB(field.db,field)
3638 if field.tablename == field_type[10:]:
3639 return validators.IS_EMPTY_OR(requires)
3640 return requires
3641 elif field.db and field_type.startswith('list:reference') and \
3642 field_type.find('.') < 0 and \
3643 field_type[15:] in field.db.tables:
3644 referenced = field.db[field_type[15:]]
3645 def list_ref_repr(ids, r=referenced, f=ff):
3646 if not ids:
3647 return None
3648 refs = r._db(r._id.belongs(ids)).select(r._id)
3649 return (refs and ', '.join(str(f(r,ref.id)) for ref in refs) or '')
3650 field.represent = field.represent or list_ref_repr
3651 if hasattr(referenced, '_format') and referenced._format:
3652 requires = validators.IS_IN_DB(field.db,referenced._id,
3653 referenced._format,multiple=True)
3654 else:
3655 requires = validators.IS_IN_DB(field.db,referenced._id,
3656 multiple=True)
3657 if field.unique:
3658 requires._and = validators.IS_NOT_IN_DB(field.db,field)
3659 return requires
3660 elif field_type.startswith('list:'):
3661 def repr_list(values): return ', '.join(str(v) for v in (values or []))
3662 field.represent = field.represent or repr_list
3663 if field.unique:
3664 requires.insert(0,validators.IS_NOT_IN_DB(field.db,field))
3665 sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
3666 if field.notnull and not field_type[:2] in sff:
3667 requires.insert(0, validators.IS_NOT_EMPTY())
3668 elif not field.notnull and field_type[:2] in sff and requires:
3669 requires[-1] = validators.IS_EMPTY_OR(requires[-1])
3670 return requires
3671
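# Illustrative examples (a sketch) of the defaults chosen above:
#
#     >>> # Field('name', 'string', length=32)  # -> [IS_LENGTH(32)]
#     >>> # Field('born', 'date')               # -> [IS_DATE()]
#     >>> # Field('owner', db.person)           # -> IS_IN_DB(...) if person
#     >>> #                                     #    defines a _format
#
# notnull prepends IS_NOT_EMPTY() for text-like types, while a nullable
# date/time/number gets its last validator wrapped in IS_EMPTY_OR(...).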
3672
3673 - def bar_escape(item):
3674 return str(item).replace('|', '||')
3675
3676 - def bar_encode(items):
3677 return '|%s|' % '|'.join(bar_escape(item) for item in items)
3678
3679 - def bar_decode_integer(value):
3680 return [int(x) for x in value.split('|') if x.strip()]
3681
3682 - def bar_decode_string(value):
3683 return [x.replace('||', '|') for x in string_unpack.split(value[1:-1]) if x.strip()]
3684
3685
3686 -class Row(dict):
3688 """
3689 a dictionary that lets you do d['a'] as well as d.a;
3690 this is only used to store a Row
3691 """
3692
3693 - def __getitem__(self, key):
3694 key=str(key)
3695 if key in self.get('_extra',{}):
3696 return self._extra[key]
3697 return dict.__getitem__(self, key)
3698
3699 - def __call__(self,key):
3700 return self.__getitem__(key)
3701
3702 - def __setitem__(self, key, value):
3703 dict.__setitem__(self, str(key), value)
3704
3705 - def __getattr__(self, key):
3706 return self[key]
3707
3708 - def __setattr__(self, key, value):
3709 self[key] = value
3710
3711 - def __repr__(self):
3712 return '<Row ' + dict.__repr__(self) + '>'
3713
3714 - def __int__(self):
3715 return dict.__getitem__(self,'id')
3716
3717 - def __eq__(self,other):
3718 try:
3719 return self.as_dict() == other.as_dict()
3720 except AttributeError:
3721 return False
3722
3723 - def __ne__(self,other):
3724 return not (self == other)
3725
3726 - def __copy__(self):
3727 return Row(dict(self))
3728
3729 - def as_dict(self,datetime_to_str=False):
3730 SERIALIZABLE_TYPES = (str,unicode,int,long,float,bool,list)
3731 d = dict(self)
3732 for k in copy.copy(d.keys()):
3733 v=d[k]
3734 if d[k] is None:
3735 continue
3736 elif isinstance(v,Row):
3737 d[k]=v.as_dict()
3738 elif isinstance(v,Reference):
3739 d[k]=int(v)
3740 elif isinstance(v,decimal.Decimal):
3741 d[k]=float(v)
3742 elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
3743 if datetime_to_str:
3744 d[k] = v.isoformat().replace('T',' ')[:19]
3745 elif not isinstance(v,SERIALIZABLE_TYPES):
3746 del d[k]
3747 return d
3748
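# Example (a sketch): as_dict() makes a Row JSON-friendly; nested Rows
# recurse, References collapse to ints and Decimals to floats:
#
#     >>> # row = Row(id=1, name='Jim', price=decimal.Decimal('1.50'))
#     >>> # row.as_dict()
#     # {'id': 1, 'name': 'Jim', 'price': 1.5}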
3749
3750 - def Row_unpickler(data):
3751 return Row(cPickle.loads(data))
3752
3753 - def Row_pickler(data):
3754 return Row_unpickler, (cPickle.dumps(data.as_dict(datetime_to_str=False)),)
3755
3756 copy_reg.pickle(Row, Row_pickler, Row_unpickler)
3757
3758
3759
3760
3761
3762
3763
3764 -class SQLCallableList(list):
3765 - def __call__(self):
3766 return copy.copy(self)
3767
3768
3769 -class DAL(dict):
3770
3771 """
3772 an instance of this class represents a database connection
3773
3774 Example::
3775
3776 db = DAL('sqlite://test.db')
3777 db.define_table('tablename', Field('fieldname1'),
3778 Field('fieldname2'))
3779 """
3780
3781 @staticmethod
3783 """
3784 # ## this allows gluon to set a folder for this thread
3785 # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
3786 """
3787 BaseAdapter.set_folder(folder)
3788
3789 @staticmethod
3802
3803 @staticmethod
3825
3826
3827 - def __init__(self, uri='sqlite://dummy.db', pool_size=0, folder=None,
3828 db_codec='UTF-8', check_reserved=None,
3829 migrate=True, fake_migrate=False,
3830 migrate_enabled=True, fake_migrate_all=False,
3831 decode_credentials=False, driver_args=None,
3832 adapter_args={}, attempts=5, auto_import=False):
3833 """
3834 Creates a new Database Abstraction Layer instance.
3835
3836 Keyword arguments:
3837
3838 :uri: string that contains information for connecting to a database.
3839 (default: 'sqlite://dummy.db')
3840 :pool_size: How many open connections to make to the database object.
3841 :folder: where .table migration files (and sqlite databases) are stored
3842 :db_codec: string encoding of the database (default: 'UTF-8')
3843 :check_reserved: list of adapters to check tablenames and column names
3844 against sql reserved keywords. (Default None)
3845
3846 * 'common' List of sql keywords that are common to all database types
3847 such as "SELECT, INSERT". (recommended)
3848 * 'all' Checks against all known SQL keywords. (not recommended)
3849 * '<adaptername>' Checks against the specific adapter's list of keywords
3850 (recommended)
3851 * '<adaptername>_nonreserved' Checks against the specific adapter's
3852 list of nonreserved keywords. (if available)
3853 :migrate: (defaults to True) sets default migrate behavior for all tables
3854 :fake_migrate: (defaults to False) sets default fake_migrate behavior for all tables
3855 :migrate_enabled: (defaults to True) if set to False disables ALL migrations
3856 :fake_migrate_all: (defaults to False) if set to True fake migrates ALL tables
3857 :attempts: (defaults to 5) number of times to attempt connecting
3858 """
3859 if not decode_credentials:
3860 credential_decoder = lambda cred: cred
3861 else:
3862 credential_decoder = lambda cred: urllib.unquote(cred)
3863 if folder:
3864 self.set_folder(folder)
3865 self._uri = uri
3866 self._pool_size = pool_size
3867 self._db_codec = db_codec
3868 self._lastsql = ''
3869 self._timings = []
3870 self._pending_references = {}
3871 self._request_tenant = 'request_tenant'
3872 self._common_fields = []
3873 if not str(attempts).isdigit() or attempts < 0:
3874 attempts = 5
3875 if uri:
3876 uris = isinstance(uri,(list,tuple)) and uri or [uri]
3877 error = ''
3878 connected = False
3879 for k in range(attempts):
3880 for uri in uris:
3881 try:
3882 if is_jdbc and not uri.startswith('jdbc:'):
3883 uri = 'jdbc:'+uri
3884 self._dbname = regex_dbname.match(uri).group()
3885 if not self._dbname in ADAPTERS:
3886 raise SyntaxError, "Error in URI '%s' or database not supported" % self._dbname
3887
3888 args = (self,uri,pool_size,folder,db_codec,credential_decoder,driver_args or {}, adapter_args)
3889 self._adapter = ADAPTERS[self._dbname](*args)
3890 connected = True
3891 break
3892 except SyntaxError:
3893 raise
3894 except Exception, error:
3895 pass
3896 if connected:
3897 break
3898 else:
3899 time.sleep(1)
3900 if not connected:
3901 raise RuntimeError, "Failure to connect, tried %d times:\n%s" % (attempts, error)
3902 else:
3903 args = (self,'None',0,folder,db_codec)
3904 self._adapter = BaseAdapter(*args)
3905 migrate = fake_migrate = False
3906 adapter = self._adapter
3907 self._uri_hash = hashlib.md5(adapter.uri).hexdigest()
3908 self.tables = SQLCallableList()
3909 self.check_reserved = check_reserved
3910 if self.check_reserved:
3911 from reserved_sql_keywords import ADAPTERS as RSK
3912 self.RSK = RSK
3913 self._migrate = migrate
3914 self._fake_migrate = fake_migrate
3915 self._migrate_enabled = migrate_enabled
3916 self._fake_migrate_all = fake_migrate_all
3917 if auto_import:
3918 self.import_table_definitions(adapter.folder)
3919
3920 - def import_table_definitions(self, path, migrate=False, fake_migrate=False):
3921 pattern = os.path.join(path,self._uri_hash+'_*.table')
3922 for filename in glob.glob(pattern):
3923 tfile = self._adapter.file_open(filename, 'r')
3924 try:
3925 sql_fields = cPickle.load(tfile)
3926 name = filename[len(pattern)-7:-6]
3927 mf = [(value['sortable'],Field(key,type=value['type'])) \
3928 for key, value in sql_fields.items()]
3929 mf.sort(lambda a,b: cmp(a[0],b[0]))
3930 self.define_table(name,*[item[1] for item in mf],
3931 **dict(migrate=migrate,fake_migrate=fake_migrate))
3932 finally:
3933 self._adapter.file_close(tfile)
3934
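# Note (added for clarity): with auto_import=True the constructor above
# calls import_table_definitions(), which rebuilds table definitions from
# the pickled <uri_hash>_<tablename>.table files in the working folder, so
# a model can be reconstructed without explicit define_table() calls:
#
#     >>> # db = DAL('sqlite://storage.sqlite', auto_import=True)
#     >>> # db.tables   # populated from the .table files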
3936 """
3937 Validates ``name`` against SQL keywords
3938 Uses self.check_reserve which is a list of
3939 operators to use.
3940 self.check_reserved
3941 ['common', 'postgres', 'mysql']
3942 self.check_reserved
3943 ['all']
3944 """
3945 for backend in self.check_reserved:
3946 if name.upper() in self.RSK[backend]:
3947 raise SyntaxError, 'invalid table/column name "%s" is a "%s" reserved SQL keyword' % (name, backend.upper())
3948
3949 - def __contains__(self, tablename):
3950 return self.has_key(tablename)
3954
    def parse_as_rest(self, patterns, args, vars, query=None, nested_select=True):
        """
        EXAMPLE:

        db.define_table('person',Field('name'),Field('info'))
        db.define_table('pet',Field('person',db.person),Field('name'),Field('info'))

        @request.restful()
        def index():
            def GET(*args,**vars):
                patterns = [
                    "/persons[person]",
                    "/{person.name.startswith}",
                    "/{person.name}/:field",
                    "/{person.name}/pets[pet.person]",
                    "/{person.name}/pet[pet.person]/{pet.name}",
                    "/{person.name}/pet[pet.person]/{pet.name}/:field"
                    ]
                parser = db.parse_as_rest(patterns,args,vars)
                if parser.status == 200:
                    return dict(content=parser.response)
                else:
                    raise HTTP(parser.status,parser.error)
            def POST(table_name,**vars):
                if table_name == 'person':
                    return db.person.validate_and_insert(**vars)
                elif table_name == 'pet':
                    return db.pet.validate_and_insert(**vars)
                else:
                    raise HTTP(400)
            return locals()
        """

        db = self
        re1 = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
        re2 = re.compile('^.+\[.+\]$')

        def auto_table(table, base='', depth=0):
            patterns = []
            for field in db[table].fields:
                if base:
                    tag = '%s/%s' % (base, field.replace('_','-'))
                else:
                    tag = '/%s/%s' % (table.replace('_','-'), field.replace('_','-'))
                f = db[table][field]
                if not f.readable: continue
                if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
                    tag += '/{%s.%s}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type.startswith('boolean'):
                    tag += '/{%s.%s}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type.startswith('double') or f.type.startswith('integer'):
                    tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type.startswith('list:'):
                    tag += '/{%s.%s.contains}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                elif f.type in ('date','datetime'):
                    tag += '/{%s.%s.year}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag += '/{%s.%s.month}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag += '/{%s.%s.day}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                if f.type in ('datetime','time'):
                    tag += '/{%s.%s.hour}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag += '/{%s.%s.minute}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                    tag += '/{%s.%s.second}' % (table,field)
                    patterns.append(tag)
                    patterns.append(tag+'/:field')
                if depth>0:
                    for rtable,rfield in db[table]._referenced_by:
                        tag += '/%s[%s.%s]' % (rtable,rtable,rfield)
                        patterns.append(tag)
                        patterns += auto_table(rtable, base=tag, depth=depth-1)
            return patterns

        if patterns=='auto':
            patterns=[]
            for table in db.tables:
                if not table.startswith('auth_'):
                    patterns += auto_table(table, base='', depth=1)
        else:
            i = 0
            while i<len(patterns):
                pattern = patterns[i]
                tokens = pattern.split('/')
                if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
                    new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
                                              '/'.join(tokens[:-1]))
                    patterns = patterns[:i]+new_patterns+patterns[i+1:]
                    i += len(new_patterns)
                else:
                    i += 1
        if '/'.join(args) == 'patterns':
            return Row({'status':200,'pattern':'list',
                        'error':None,'response':patterns})
        for pattern in patterns:
            otable=table=None
            dbset=db(query)
            i=0
            tags = pattern[1:].split('/')
            if len(tags)!=len(args):
                continue
            for tag in tags:
                if re1.match(tag):
                    # token syntax: {table.field[.operator][.not]}
                    tokens = tag[1:-1].split('.')
                    table, field = tokens[0], tokens[1]
                    if not otable or table == otable:
                        if len(tokens)==2 or tokens[2]=='eq':
                            query = db[table][field]==args[i]
                        elif tokens[2]=='ne':
                            query = db[table][field]!=args[i]
                        elif tokens[2]=='lt':
                            query = db[table][field]<args[i]
                        elif tokens[2]=='gt':
                            query = db[table][field]>args[i]
                        elif tokens[2]=='ge':
                            query = db[table][field]>=args[i]
                        elif tokens[2]=='le':
                            query = db[table][field]<=args[i]
                        elif tokens[2]=='year':
                            query = db[table][field].year()==args[i]
                        elif tokens[2]=='month':
                            query = db[table][field].month()==args[i]
                        elif tokens[2]=='day':
                            query = db[table][field].day()==args[i]
                        elif tokens[2]=='hour':
                            query = db[table][field].hour()==args[i]
                        elif tokens[2]=='minute':
                            query = db[table][field].minutes()==args[i]
                        elif tokens[2]=='second':
                            query = db[table][field].seconds()==args[i]
                        elif tokens[2]=='startswith':
                            query = db[table][field].startswith(args[i])
                        elif tokens[2]=='contains':
                            query = db[table][field].contains(args[i])
                        else:
                            raise RuntimeError, "invalid pattern: %s" % pattern
                        if len(tokens)==4 and tokens[3]=='not':
                            query = ~query
                        elif len(tokens)>=4:
                            raise RuntimeError, "invalid pattern: %s" % pattern
                        dbset=dbset(query)
                    else:
                        raise RuntimeError, "missing relation in pattern: %s" % pattern
                elif otable and re2.match(tag) and args[i]==tag[:tag.find('[')]:
                    # tag syntax: name[table.field] joins into the referencing table
                    ref = tag[tag.find('[')+1:-1]
                    if '.' in ref:
                        table,field = ref.split('.')
                        if nested_select:
                            try:
                                dbset=db(db[table][field].belongs(dbset._select(db[otable]._id)))
                            except ValueError:
                                return Row({'status':400,'pattern':pattern,
                                            'error':'invalid path','response':None})
                        else:
                            items = [item.id for item in dbset.select(db[otable]._id)]
                            dbset=db(db[table][field].belongs(items))
                    else:
                        dbset=dbset(db[ref])
                elif tag==':field' and table:
                    field = args[i]
                    if not field in db[table]: break
                    try:
                        item = dbset.select(db[table][field],limitby=(0,1)).first()
                    except ValueError:
                        return Row({'status':400,'pattern':pattern,
                                    'error':'invalid path','response':None})
                    if not item:
                        return Row({'status':404,'pattern':pattern,
                                    'error':'record not found','response':None})
                    else:
                        return Row({'status':200,'response':item[field],
                                    'pattern':pattern})
                elif tag != args[i]:
                    break
                otable = table
                i += 1
                if i==len(tags) and table:
                    otable,ofield = vars.get('order','%s.%s' % (table,field)).split('.',1)
                    try:
                        if otable[:1]=='~': orderby = ~db[otable[1:]][ofield]
                        else: orderby = db[otable][ofield]
                    except KeyError:
                        return Row({'status':400,'error':'invalid orderby','response':None})
                    fields = [field for field in db[table] if field.readable]
                    count = dbset.count()
                    try:
                        limits = (int(vars.get('min',0)),int(vars.get('max',1000)))
                        if limits[0]<0 or limits[1]<limits[0]: raise ValueError
                    except ValueError:
                        return Row({'status':400,'error':'invalid limits','response':None})
                    if count > limits[1]-limits[0]:
                        return Row({'status':400,'error':'too many records','response':None})
                    try:
                        response = dbset.select(limitby=limits,orderby=orderby,*fields)
                    except ValueError:
                        return Row({'status':400,'pattern':pattern,
                                    'error':'invalid path','response':None})
                    return Row({'status':200,'response':response,'pattern':pattern})
        return Row({'status':400,'error':'no matching pattern','response':None})

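    # Introspection note (derived from the code above, not the original docs):
    # requesting args == ['patterns'] short-circuits the parser and returns a
    # Row with status 200 whose .response is the expanded pattern list, e.g.
    #
    #     db.parse_as_rest('auto', ['patterns'], {}).response
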

    def define_table(
        self,
        tablename,
        *fields,
        **args
        ):

        for key in args:
            if key not in [
                'migrate',
                'primarykey',
                'fake_migrate',
                'format',
                'trigger_name',
                'sequence_name',
                'polymodel']:
                raise SyntaxError, 'invalid table "%s" attribute: %s' % (tablename, key)
        migrate = self._migrate_enabled and args.get('migrate', self._migrate)
        fake_migrate = self._fake_migrate_all or args.get('fake_migrate', self._fake_migrate)
        format = args.get('format', None)
        trigger_name = args.get('trigger_name', None)
        sequence_name = args.get('sequence_name', None)
        primarykey = args.get('primarykey', None)
        polymodel = args.get('polymodel', None)
        if not isinstance(tablename, str):
            raise SyntaxError, "missing table name"
        tablename = cleanup(tablename)
        lowertablename = tablename.lower()

        if tablename.startswith('_') or hasattr(self, lowertablename) or \
                regex_python_keywords.match(tablename):
            raise SyntaxError, 'invalid table name: %s' % tablename
        elif lowertablename in self.tables:
            raise SyntaxError, 'table already defined: %s' % tablename
        elif self.check_reserved:
            self.check_reserved_keyword(tablename)

        if self._common_fields:
            fields = [f for f in fields] + [f for f in self._common_fields]

        t = self[tablename] = Table(self, tablename, *fields,
                                    **dict(primarykey=primarykey,
                                           trigger_name=trigger_name,
                                           sequence_name=sequence_name))

        if self._uri in (None, 'None'):
            return t

        t._create_references()

        if migrate or self._adapter.dbengine=='google:datastore':
            try:
                sql_locker.acquire()
                self._adapter.create_table(t, migrate=migrate,
                                           fake_migrate=fake_migrate,
                                           polymodel=polymodel)
            finally:
                sql_locker.release()
        else:
            t._dbt = None
        self.tables.append(tablename)
        t._format = format
        return t

    def __iter__(self):
        for tablename in self.tables:
            yield self[tablename]

    def __getitem__(self, key):
        return dict.__getitem__(self, str(key))

    def __setitem__(self, key, value):
        dict.__setitem__(self, str(key), value)

    def __getattr__(self, key):
        return self[key]

    def __setattr__(self, key, value):
        if key[:1]!='_' and key in self:
            raise SyntaxError, \
                'Object %s exists and cannot be redefined' % key
        self[key] = value

    def __repr__(self):
        return '<DAL ' + dict.__repr__(self) + '>'

    def __call__(self, query=None):
        if isinstance(query, Table):
            query = query._id>0
        elif isinstance(query, Field):
            query = query!=None
        return Set(self, query)

    def commit(self):
        self._adapter.commit()

    def rollback(self):
        self._adapter.rollback()

    def executesql(self, query, placeholders=None, as_dict=False):
        """
        placeholders is optional and will always be None when using DAL.
        If using raw SQL with placeholders, placeholders may be
        a sequence of values to be substituted in
        or, *if supported by the DB driver*, a dictionary with keys
        matching named placeholders in your SQL.

        Added 2009-12-05 "as_dict" optional argument. Will always be
        False when using DAL. If using raw SQL it can be set to True
        and the results cursor returned by the DB driver will be
        converted to a sequence of dictionaries keyed with the db
        field names. Tested with SQLite but should work with any database
        since the cursor.description used to get field names is part of the
        Python dbi 2.0 specs. Results returned with as_dict = True are
        the same as those returned when applying .to_list() to a DAL query.

        [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]

        --bmeredyk
        """
        if placeholders:
            self._adapter.execute(query, placeholders)
        else:
            self._adapter.execute(query)
        if as_dict:
            if not hasattr(self._adapter.cursor, 'description'):
                raise RuntimeError, "database does not support executesql(...,as_dict=True)"
            # cursor.description is a sequence of 7-item sequences;
            # the first item of each is the column name or alias
            columns = self._adapter.cursor.description
            # reduce the column info down to just the field names
            fields = [f[0] for f in columns]
            # the raw result set
            data = self._adapter.cursor.fetchall()
            # convert the results into a list of dictionaries
            return [dict(zip(fields, row)) for row in data]
        # without as_dict, return the raw cursor results (or None when the
        # driver has nothing to fetch, e.g. after a DDL statement)
        try:
            return self._adapter.cursor.fetchall()
        except:
            return None

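    # Usage sketch (not from the original docs; assumes a connected db and a
    # 'person' table; the placeholder style depends on the driver, e.g. qmark
    # for sqlite3):
    #
    #     rows = db.executesql('SELECT * FROM person WHERE name = ?;',
    #                          placeholders=('James',), as_dict=True)
    #     # -> [{'id': 1, 'name': 'James'}, ...]
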
    def _update_referenced_by(self, other):
        for tablename in self.tables:
            by = self[tablename]._referenced_by
            by[:] = [item for item in by if not item[0] == other]

    def export_to_csv_file(self, ofile, *args, **kwargs):
        for table in self.tables:
            ofile.write('TABLE %s\r\n' % table)
            self[table].export_to_csv_file(ofile, *args, **kwargs)
            ofile.write('\r\n\r\n')
        ofile.write('END')

    def import_from_csv_file(self, ifile, id_map={}, null='<NULL>',
                             unique='uuid', *args, **kwargs):
        for line in ifile:
            line = line.strip()
            if not line:
                continue
            elif line == 'END':
                return
            elif not line.startswith('TABLE ') or not line[6:] in self.tables:
                raise SyntaxError, 'invalid file format'
            else:
                tablename = line[6:]
                self[tablename].import_from_csv_file(ifile, id_map, null,
                                                     unique, *args, **kwargs)

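    # Round-trip sketch (illustrative names, not from the original docs;
    # assumes db1 and db2 define the same tables):
    #
    #     db1.export_to_csv_file(open('backup.csv', 'wb'))
    #     db2.import_from_csv_file(open('backup.csv', 'rb'))
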

class SQLALL(object):
    """
    Helper class providing a comma-separated string having all the field names
    (prefixed by table name and '.')

    normally only called from within gluon.sql
    """

    def __init__(self, table):
        self.table = table

    def __repr__(self):
        return ', '.join([str(field) for field in self.table])


class Reference(int):

    def __allocate(self):
        if not self._record:
            self._record = self._table[int(self)]
        if not self._record:
            raise RuntimeError, "Using a recursive select but encountered a broken reference: %s %d" % (self._table, int(self))

    def __getattr__(self, key):
        if key == 'id':
            return int(self)
        self.__allocate()
        return self._record.get(key, None)

    def __setattr__(self, key, value):
        if key.startswith('_'):
            int.__setattr__(self, key, value)
            return
        self.__allocate()
        self._record[key] = value

    def __getitem__(self, key):
        if key == 'id':
            return int(self)
        self.__allocate()
        return self._record.get(key, None)

    def __setitem__(self, key, value):
        self.__allocate()
        self._record[key] = value


def Reference_unpickler(data):
    return marshal.loads(data)

def Reference_pickler(data):
    try:
        marshal_dump = marshal.dumps(int(data))
    except AttributeError:
        marshal_dump = 'i%s' % struct.pack('<i', int(data))
    return (Reference_unpickler, (marshal_dump,))

copy_reg.pickle(Reference, Reference_pickler, Reference_unpickler)
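
# Note (not in the original source): a Reference pickles down to its integer
# id via marshal, so cPickle.dumps(row.person) stores just the id and not the
# lazily-fetched record behind it.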


class Table(dict):

    """
    an instance of this class represents a database table

    Example::

        db = DAL(...)
        db.define_table('users', Field('name'))
        db.users.insert(name='me') # print db.users._insert(...) to see SQL
        db.users.drop()
    """

    def __init__(
        self,
        db,
        tablename,
        *fields,
        **args
        ):
        """
        Initializes the table and performs checking on the provided fields.

        Each table will have automatically an 'id'.

        If a field is of type Table, the fields (excluding 'id') from that table
        will be used instead.

        :raises SyntaxError: when a supplied field is of incorrect type.
        """
        self._tablename = tablename
        self._sequence_name = args.get('sequence_name', None) or \
            db and db._adapter.sequence_name(tablename)
        self._trigger_name = args.get('trigger_name', None) or \
            db and db._adapter.trigger_name(tablename)

        primarykey = args.get('primarykey', None)
        fieldnames, newfields = set(), []
        if primarykey and not isinstance(primarykey, list):
            raise SyntaxError, "primarykey must be a list of fields from table '%s'" \
                % tablename
        elif primarykey:
            self._primarykey = primarykey
        elif not [f for f in fields if hasattr(f, 'type') and f.type=='id']:
            field = Field('id', 'id')
            newfields.append(field)
            fieldnames.add('id')
            self._id = field
        for field in fields:
            if not isinstance(field, (Field, Table)):
                raise SyntaxError, \
                    'define_table argument is not a Field or Table: %s' % field
            elif isinstance(field, Field) and not field.name in fieldnames:
                if hasattr(field, '_db'):
                    field = copy.copy(field)
                newfields.append(field)
                fieldnames.add(field.name)
                if field.type=='id':
                    self._id = field
            elif isinstance(field, Table):
                table = field
                for field in table:
                    if not field.name in fieldnames and not field.type=='id':
                        newfields.append(copy.copy(field))
                        fieldnames.add(field.name)
            else:
                # a Field with a duplicate name is silently ignored
                pass
        fields = newfields
        self._db = db
        self.fields = SQLCallableList()
        self.virtualfields = []
        fields = list(fields)

        if db and self._db._adapter.uploads_in_blob==True:
            for field in fields:
                if isinstance(field, Field) and field.type == 'upload'\
                        and field.uploadfield is True:
                    tmp = field.uploadfield = '%s_blob' % field.name
                    fields.append(self._db.Field(tmp, 'blob', default=''))

        lower_fieldnames = set()
        for field in fields:
            if db and db.check_reserved:
                db.check_reserved_keyword(field.name)

            if field.name.lower() in lower_fieldnames:
                raise SyntaxError, "duplicate field %s in table %s" % (field.name, tablename)
            else:
                lower_fieldnames.add(field.name.lower())

            self.fields.append(field.name)
            self[field.name] = field
            if field.type == 'id':
                self['id'] = field
            field.tablename = field._tablename = tablename
            field.table = field._table = self
            field.db = field._db = self._db
            field.length = min(field.length,
                               self._db and self._db._adapter.maxcharlength or INFINITY)
            if field.requires == DEFAULT:
                field.requires = sqlhtml_validators(field)
        self.ALL = SQLALL(self)

        if hasattr(self, '_primarykey'):
            for k in self._primarykey:
                if k not in self.fields:
                    raise SyntaxError, \
                        "primarykey must be a list of fields from table '%s'" % tablename
                else:
                    self[k].notnull = True

    def _validate(self, **vars):
        errors = Row()
        for key, value in vars.items():
            value, error = self[key].validate(value)
            if error:
                errors[key] = error
        return errors

    def _create_references(self):
        pr = self._db._pending_references
        self._referenced_by = []
        for fieldname in self.fields:
            field = self[fieldname]
            if isinstance(field.type, str) and field.type[:10] == 'reference ':
                ref = field.type[10:].strip()
                if not ref.split():
                    raise SyntaxError, 'Table: reference to nothing: %s' % ref
                refs = ref.split('.')
                rtablename = refs[0]
                if not rtablename in self._db:
                    pr[rtablename] = pr.get(rtablename, []) + [field]
                    continue
                rtable = self._db[rtablename]
                if len(refs)==2:
                    rfieldname = refs[1]
                    if not hasattr(rtable, '_primarykey'):
                        raise SyntaxError,\
                            'keyed tables can only reference other keyed tables (for now)'
                    if rfieldname not in rtable.fields:
                        raise SyntaxError,\
                            "invalid field '%s' for referenced table '%s' in table '%s'" \
                            % (rfieldname, rtablename, self._tablename)
                rtable._referenced_by.append((self._tablename, field.name))
        for referee in pr.get(self._tablename, []):
            self._referenced_by.append((referee._tablename, referee.name))

    def _filter_fields(self, record, id=False):
        return dict([(k, v) for (k, v) in record.items() if k
                     in self.fields and (self[k].type!='id' or id)])

    def _build_query(self, key):
        """ for keyed table only """
        query = None
        for k, v in key.iteritems():
            if k in self._primarykey:
                if query:
                    query = query & (self[k] == v)
                else:
                    query = (self[k] == v)
            else:
                raise SyntaxError, \
                    'Field %s is not part of the primary key of %s' % \
                    (k, self._tablename)
        return query

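    # Sketch for keyed tables (illustrative names; assumes a table defined
    # with primarykey=['pk1','pk2']):
    #
    #     query = db.t._build_query(dict(pk1='a', pk2=3))
    #     # equivalent to (db.t.pk1=='a') & (db.t.pk2==3)
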
    def __getitem__(self, key):
        if not key:
            return None
        elif isinstance(key, dict):
            # keyed table: retrieve the record by its primary key dict
            query = self._build_query(key)
            rows = self._db(query).select()
            if rows:
                return rows[0]
            return None
        elif str(key).isdigit():
            return self._db(self._id == key).select(limitby=(0,1)).first()
        elif key:
            return dict.__getitem__(self, str(key))

    def __call__(self, key=DEFAULT, **kwargs):
        if key!=DEFAULT:
            if isinstance(key, Query):
                record = self._db(key).select(limitby=(0,1)).first()
            elif not str(key).isdigit():
                record = None
            else:
                record = self._db(self._id == key).select(limitby=(0,1)).first()
            if record:
                for k,v in kwargs.items():
                    if record[k]!=v: return None
            return record
        elif kwargs:
            query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.items()])
            return self._db(query).select(limitby=(0,1)).first()
        else:
            return None

    def __setitem__(self, key, value):
        if isinstance(key, dict) and isinstance(value, dict):
            # keyed table: update (or insert) the record addressed by the key dict
            if set(key.keys()) == set(self._primarykey):
                value = self._filter_fields(value)
                kv = {}
                kv.update(value)
                kv.update(key)
                if not self.insert(**kv):
                    query = self._build_query(key)
                    self._db(query).update(**self._filter_fields(value))
            else:
                raise SyntaxError,\
                    'key must have all fields from primary key: %s'%\
                    (self._primarykey)
        elif str(key).isdigit():
            if key == 0:
                self.insert(**self._filter_fields(value))
            elif not self._db(self._id == key)\
                    .update(**self._filter_fields(value)):
                raise SyntaxError, 'No such record: %s' % key
        else:
            if isinstance(key, dict):
                raise SyntaxError,\
                    'value must be a dictionary: %s' % value
            dict.__setitem__(self, str(key), value)

    def __delitem__(self, key):
        if isinstance(key, dict):
            query = self._build_query(key)
            if not self._db(query).delete():
                raise SyntaxError, 'No such record: %s' % key
        elif not str(key).isdigit() or not self._db(self._id == key).delete():
            raise SyntaxError, 'No such record: %s' % key

    def __getattr__(self, key):
        return self[key]

    def __setattr__(self, key, value):
        if key in self:
            raise SyntaxError, 'Object exists and cannot be redefined: %s' % key
        self[key] = value

    def __iter__(self):
        for fieldname in self.fields:
            yield self[fieldname]

    def __repr__(self):
        return '<Table ' + dict.__repr__(self) + '>'

    def __str__(self):
        if self.get('_ot', None):
            return '%s AS %s' % (self._ot, self._tablename)
        return self._tablename

    def _drop(self, mode=''):
        return self._db._adapter._drop(self, mode)

    def drop(self, mode=''):
        return self._db._adapter.drop(self, mode)

    def _listify(self, fields, update=False):
        new_fields = []
        new_fields_names = []
        for name in fields:
            if not name in self.fields:
                if name != 'id':
                    raise SyntaxError, 'Field %s does not belong to the table' % name
            else:
                new_fields.append((self[name], fields[name]))
                new_fields_names.append(name)
        for ofield in self:
            if not ofield.name in new_fields_names:
                if not update and ofield.default!=None:
                    new_fields.append((ofield, ofield.default))
                elif update and ofield.update!=None:
                    new_fields.append((ofield, ofield.update))
        for ofield in self:
            if not ofield.name in new_fields_names and ofield.compute:
                try:
                    new_fields.append((ofield, ofield.compute(Row(fields))))
                except KeyError:
                    pass
            if not update and ofield.required and not ofield.name in new_fields_names:
                raise SyntaxError, 'Table: missing required field: %s' % ofield.name
        return new_fields

    def _insert(self, **fields):
        return self._db._adapter._insert(self, self._listify(fields))

    def insert(self, **fields):
        return self._db._adapter.insert(self, self._listify(fields))

    def validate_and_insert(self, **fields):
        response = Row()
        response.errors = self._validate(**fields)
        if not response.errors:
            response.id = self.insert(**fields)
        else:
            response.id = None
        return response

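    # Usage sketch (assumes the 'person' table from the module docstring):
    #
    #     r = db.person.validate_and_insert(name='James')
    #     # r.id is the new record id, or None when r.errors is non-empty
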
    def bulk_insert(self, items):
        """
        here items is a list of dictionaries
        """
        items = [self._listify(item) for item in items]
        return self._db._adapter.bulk_insert(self, items)

    def _truncate(self, mode=''):
        return self._db._adapter._truncate(self, mode)

    def truncate(self, mode=''):
        return self._db._adapter.truncate(self, mode)

    def import_from_csv_file(
        self,
        csvfile,
        id_map=None,
        null='<NULL>',
        unique='uuid',
        *args, **kwargs
        ):
        """
        import records from csv file. Column headers must have same names as
        table fields. field 'id' is ignored. If column names read 'table.field'
        the 'table.' prefix is ignored.
        'unique' argument is a field which must be unique
        (typically a uuid field)
        """

        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)

        reader = csv.reader(csvfile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)
        colnames = None
        if isinstance(id_map, dict):
            if not self._tablename in id_map:
                id_map[self._tablename] = {}
            id_map_self = id_map[self._tablename]

        def fix(field, value, id_map):
            if value == null:
                value = None
            elif field.type in ('double', 'integer') and not value.strip():
                value = None
            elif field.type.startswith('list:string'):
                value = bar_decode_string(value)
            elif field.type.startswith('list:reference'):
                ref_table = field.type[len('list:reference'):].strip()
                value = [id_map[ref_table][int(v)] \
                         for v in bar_decode_string(value)]
            elif field.type.startswith('list:'):
                value = bar_decode_integer(value)
            elif id_map and field.type.startswith('reference'):
                try:
                    value = id_map[field.type[9:].strip()][value]
                except KeyError:
                    pass
            return (field.name, value)

        def is_id(colname):
            if colname in self:
                return self[colname].type == 'id'
            else:
                return False

        for line in reader:
            if not line:
                break
            if not colnames:
                # the first line contains the column names
                colnames = [x.split('.',1)[-1] for x in line][:len(line)]
                cols, cid = [], []
                for i, colname in enumerate(colnames):
                    if is_id(colname):
                        cid = i
                    else:
                        cols.append(i)
                    if colname == unique:
                        unique_idx = i
            else:
                # every other line contains data
                items = [fix(self[colnames[i]], line[i], id_map) \
                         for i in cols if colnames[i] in self.fields]
                # if a 'unique' column is present, update any existing record
                # with the same unique value instead of inserting a duplicate
                if not unique or unique not in colnames:
                    new_id = self.insert(**dict(items))
                else:
                    unique_value = line[unique_idx]
                    query = self._db[self][unique] == unique_value
                    record = self._db(query).select().first()
                    if record:
                        record.update_record(**dict(items))
                        new_id = record[self._id.name]
                    else:
                        new_id = self.insert(**dict(items))
                if id_map and cid != []:
                    id_map_self[line[cid]] = new_id

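    # id_map sketch (illustrative names): when importing csv files whose
    # tables reference each other, pass one shared dict so old ids are
    # remapped to the newly inserted ones:
    #
    #     id_map = {}
    #     db.person.import_from_csv_file(open('person.csv','rb'), id_map=id_map)
    #     db.pet.import_from_csv_file(open('pet.csv','rb'), id_map=id_map)
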
    def with_alias(self, alias):
        return self._db._adapter.alias(self, alias)

    def on(self, query):
        return Expression(self._db, self._db._adapter.ON, self, query)


class Expression(object):

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        type=None,
        ):

        self.db = db
        self.op = op
        self.first = first
        self.second = second

        if not type and first and hasattr(first, 'type'):
            self.type = first.type
        else:
            self.type = type

    def sum(self):
        return Expression(self.db, self.db._adapter.AGGREGATE, self, 'SUM', self.type)

    def max(self):
        return Expression(self.db, self.db._adapter.AGGREGATE, self, 'MAX', self.type)

    def min(self):
        return Expression(self.db, self.db._adapter.AGGREGATE, self, 'MIN', self.type)

    def len(self):
        return Expression(self.db, self.db._adapter.AGGREGATE, self, 'LENGTH', 'integer')

    def lower(self):
        return Expression(self.db, self.db._adapter.LOWER, self, None, self.type)

    def upper(self):
        return Expression(self.db, self.db._adapter.UPPER, self, None, self.type)

    def year(self):
        return Expression(self.db, self.db._adapter.EXTRACT, self, 'year', 'integer')

    def month(self):
        return Expression(self.db, self.db._adapter.EXTRACT, self, 'month', 'integer')

    def day(self):
        return Expression(self.db, self.db._adapter.EXTRACT, self, 'day', 'integer')

    def hour(self):
        return Expression(self.db, self.db._adapter.EXTRACT, self, 'hour', 'integer')

    def minutes(self):
        return Expression(self.db, self.db._adapter.EXTRACT, self, 'minute', 'integer')

    def seconds(self):
        return Expression(self.db, self.db._adapter.EXTRACT, self, 'second', 'integer')

    def __getslice__(self, start, stop):
        if start < 0:
            pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
        else:
            pos0 = start + 1

        if stop < 0:
            length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
        elif stop == sys.maxint:
            length = self.len()
        else:
            length = '(%s - %s)' % (stop + 1, pos0)
        return Expression(self.db, self.db._adapter.SUBSTRING,
                          self, (pos0, length), self.type)

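    # Slicing sketch: db.person.name[:1] builds a SUBSTRING expression, so
    # db(db.person.name[:1]=='J').select() matches names starting with 'J'.
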
    def __getitem__(self, i):
        return self[i:i + 1]

    def __str__(self):
        return self.db._adapter.expand(self, self.type)

    def __or__(self, other):  # for use in orderby
        return Expression(self.db, self.db._adapter.COMMA, self, other, self.type)

    def __invert__(self):
        if self.op == self.db._adapter.INVERT:
            return self.first
        return Expression(self.db, self.db._adapter.INVERT, self, type=self.type)

    def __add__(self, other):
        return Expression(self.db, self.db._adapter.ADD, self, other, self.type)

    def __sub__(self, other):
        if self.type == 'integer':
            result_type = 'integer'
        elif self.type in ['date','time','datetime','double']:
            result_type = 'double'
        else:
            raise SyntaxError, "subtraction operation not supported for type"
        return Expression(self.db, self.db._adapter.SUB, self, other,
                          result_type)

    def __mul__(self, other):
        return Expression(self.db, self.db._adapter.MUL, self, other, self.type)

    def __div__(self, other):
        return Expression(self.db, self.db._adapter.DIV, self, other, self.type)

    def __mod__(self, other):
        return Expression(self.db, self.db._adapter.MOD, self, other, self.type)

    def __eq__(self, value):
        return Query(self.db, self.db._adapter.EQ, self, value)

    def __ne__(self, value):
        return Query(self.db, self.db._adapter.NE, self, value)

    def __lt__(self, value):
        return Query(self.db, self.db._adapter.LT, self, value)

    def __le__(self, value):
        return Query(self.db, self.db._adapter.LE, self, value)

    def __gt__(self, value):
        return Query(self.db, self.db._adapter.GT, self, value)

    def __ge__(self, value):
        return Query(self.db, self.db._adapter.GE, self, value)

    def like(self, value):
        return Query(self.db, self.db._adapter.LIKE, self, value)

    def belongs(self, value):
        return Query(self.db, self.db._adapter.BELONGS, self, value)

    def startswith(self, value):
        if not self.type in ('string', 'text'):
            raise SyntaxError, "startswith used with incompatible field type"
        return Query(self.db, self.db._adapter.STARTSWITH, self, value)

    def endswith(self, value):
        if not self.type in ('string', 'text'):
            raise SyntaxError, "endswith used with incompatible field type"
        return Query(self.db, self.db._adapter.ENDSWITH, self, value)

    def contains(self, value):
        if not self.type in ('string', 'text') and not self.type.startswith('list:'):
            raise SyntaxError, "contains used with incompatible field type"
        return Query(self.db, self.db._adapter.CONTAINS, self, value)

    def with_alias(self, alias):
        return Expression(self.db, self.db._adapter.AS, self, alias, self.type)


class SQLCustomType(object):
    """
    allows defining of custom SQL types

    Example::

        decimal = SQLCustomType(
            type ='double',
            native ='integer',
            encoder =(lambda x: int(float(x) * 100)),
            decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
            )

        db.define_table(
            'example',
            Field('value', type=decimal)
            )

    :param type: the web2py type (default = 'string')
    :param native: the backend type
    :param encoder: how to encode the value to store it in the backend
    :param decoder: how to decode the value retrieved from the backend
    :param validator: what validators to use ( default = None, will use the
        default validator for type)
    """

    def __init__(
        self,
        type='string',
        native=None,
        encoder=None,
        decoder=None,
        validator=None,
        _class=None,
        ):

        self.type = type
        self.native = native
        self.encoder = encoder or (lambda x: x)
        self.decoder = decoder or (lambda x: x)
        self.validator = validator
        self._class = _class or type

    def startswith(self, dummy=None):
        raise SyntaxError, "Not supported"

    def endswith(self, dummy=None):
        raise SyntaxError, "Not supported"

    def contains(self, dummy=None):
        raise SyntaxError, "Not supported"

    def like(self, dummy=None):
        raise SyntaxError, "Not supported"

class Field(Expression):

    """
    an instance of this class represents a database field

    example::

        a = Field(name, 'string', length=32, default=None, required=False,
            requires=IS_NOT_EMPTY(), ondelete='CASCADE',
            notnull=False, unique=False,
            widget=None, label=None, comment=None,
            uploadfield=True,     # True means store on disk,
                                  # 'a_field_name' means store in this field in db
                                  # False means file content will be discarded.
            writable=True, readable=True, update=None, authorize=None,
            autodelete=False, represent=None, uploadfolder=None,
            uploadseparate=False  # upload to separate directories by uuid_keys
                                  # first 2 character and tablename.fieldname
                                  # False - old behavior
                                  # True - put uploaded file in
                                  #   <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
                                  # directory)

    to be used as argument of DAL.define_table

    allowed field types:
    string, boolean, integer, double, text, blob,
    date, time, datetime, upload, password

    strings have a default length of Adapter.maxcharlength (512; 255 for MySQL).
    fields should have a default or they will be required in SQLFORMs.
    the requires argument is used to validate the field input in SQLFORMs

    """
    def __init__(
        self,
        fieldname,
        type='string',
        length=None,
        default=DEFAULT,
        required=False,
        requires=DEFAULT,
        ondelete='CASCADE',
        notnull=False,
        unique=False,
        uploadfield=True,
        widget=None,
        label=None,
        comment=None,
        writable=True,
        readable=True,
        update=None,
        authorize=None,
        autodelete=False,
        represent=None,
        uploadfolder=None,
        uploadseparate=False,
        compute=None,
        custom_store=None,
        custom_retrieve=None,
        ):
        self.db = None
        self.op = None
        self.first = None
        self.second = None
        if not isinstance(fieldname, str):
            raise SyntaxError, "missing field name"
        if fieldname.startswith(':'):
            fieldname, readable, writable = fieldname[1:], False, False
        elif fieldname.startswith('.'):
            fieldname, readable, writable = fieldname[1:], False, False
        if '=' in fieldname:
            fieldname, default = fieldname.split('=', 1)
        self.name = fieldname = cleanup(fieldname)
        if hasattr(Table, fieldname) or fieldname[0] == '_' or \
                regex_python_keywords.match(fieldname):
            raise SyntaxError, 'Field: invalid field name: %s' % fieldname
        if isinstance(type, Table):
            type = 'reference ' + type._tablename
        self.type = type
        self.length = (length is None) and MAXCHARLENGTH or length
        if default==DEFAULT:
            self.default = update or None
        else:
            self.default = default
        self.required = required
        self.ondelete = ondelete.upper()
        self.notnull = notnull
        self.unique = unique
        self.uploadfield = uploadfield
        self.uploadfolder = uploadfolder
        self.uploadseparate = uploadseparate
        self.widget = widget
        self.label = label or ' '.join(item.capitalize() for item in fieldname.split('_'))
        self.comment = comment
        self.writable = writable
        self.readable = readable
        self.update = update
        self.authorize = authorize
        self.autodelete = autodelete
        if not represent and type in ('list:integer', 'list:string'):
            represent = lambda x: ', '.join(str(y) for y in x or [])
        self.represent = represent
        self.compute = compute
        self.isattachment = True
        self.custom_store = custom_store
        self.custom_retrieve = custom_retrieve
        if requires is None:
            self.requires = []
        else:
            self.requires = requires

    def store(self, file, filename=None, path=None):
        if self.custom_store:
            return self.custom_store(file, filename, path)
        if not filename:
            filename = file.name
        filename = os.path.basename(filename.replace('/', os.sep)\
                                        .replace('\\', os.sep))
        m = re.compile('\.(?P<e>\w{1,5})$').search(filename)
        extension = m and m.group('e') or 'txt'
        uuid_key = web2py_uuid().replace('-', '')[-16:]
        encoded_filename = base64.b16encode(filename).lower()
        newfilename = '%s.%s.%s.%s' % \
            (self._tablename, self.name, uuid_key, encoded_filename)
        newfilename = newfilename[:200] + '.' + extension
        if isinstance(self.uploadfield, Field):
            blob_uploadfield_name = self.uploadfield.uploadfield
            keys = {self.uploadfield.name: newfilename,
                    blob_uploadfield_name: file.read()}
            self.uploadfield.table.insert(**keys)
        elif self.uploadfield == True:
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            elif self.db._adapter.folder:
                path = os.path.join(self.db._adapter.folder, '..', 'uploads')
            else:
                raise RuntimeError, "you must specify a Field(...,uploadfolder=...)"
            if self.uploadseparate:
                path = os.path.join(path, "%s.%s" % (self._tablename, self.name),
                                    uuid_key[:2])
            if not os.path.exists(path):
                os.makedirs(path)
            pathfilename = os.path.join(path, newfilename)
            dest_file = open(pathfilename, 'wb')
            try:
                shutil.copyfileobj(file, dest_file)
            finally:
                dest_file.close()
        return newfilename

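    # Usage sketch (illustrative; 'photo' is a hypothetical upload field,
    # and store() is normally called for you by SQLFORM):
    #
    #     newname = db.person.photo.store(open('me.png', 'rb'), 'me.png')
    #     db.person.insert(name='James', photo=newname)
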
    def retrieve(self, name, path=None):
        if self.custom_retrieve:
            return self.custom_retrieve(name, path)
        import http
        if self.authorize or isinstance(self.uploadfield, str):
            row = self.db(self == name).select().first()
            if not row:
                raise http.HTTP(404)
            if self.authorize and not self.authorize(row):
                raise http.HTTP(403)
        try:
            m = regex_content.match(name)
            if not m or not self.isattachment:
                raise TypeError, 'Can\'t retrieve %s' % name
            filename = base64.b16decode(m.group('name'), True)
            filename = regex_cleanup_fn.sub('_', filename)
        except (TypeError, AttributeError):
            filename = name
        if isinstance(self.uploadfield, str):
            # the file is stored in this db field as a blob
            return (filename, cStringIO.StringIO(row[self.uploadfield] or ''))
        elif isinstance(self.uploadfield, Field):
            blob_uploadfield_name = self.uploadfield.uploadfield
            query = self.uploadfield == name
            data = self.uploadfield.table(query)[blob_uploadfield_name]
            return (filename, cStringIO.StringIO(data))
        else:
            # the file is on disk
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            else:
                path = os.path.join(self.db._adapter.folder, '..', 'uploads')
            if self.uploadseparate:
                t = m.group('table')
                f = m.group('field')
                u = m.group('uuidkey')
                path = os.path.join(path, "%s.%s" % (t, f), u[:2])
            return (filename, open(os.path.join(path, name), 'rb'))

    def formatter(self, value):
        if value is None or not self.requires:
            return value
        if not isinstance(self.requires, (list, tuple)):
            requires = [self.requires]
        elif isinstance(self.requires, tuple):
            requires = list(self.requires)
        else:
            requires = copy.copy(self.requires)
        requires.reverse()
        for item in requires:
            if hasattr(item, 'formatter'):
                value = item.formatter(value)
        return value

    def validate(self, value):
        if not self.requires:
            return (value, None)
        requires = self.requires
        if not isinstance(requires, (list, tuple)):
            requires = [requires]
        for validator in requires:
            (value, error) = validator(value)
            if error:
                return (value, error)
        return (value, None)

    def count(self):
        return Expression(self.db, self.db._adapter.COUNT, self, None, 'integer')

    def __nonzero__(self):
        return True

    def __str__(self):
        try:
            return '%s.%s' % (self.tablename, self.name)
        except:
            return '<no table>.%s' % self.name


class Query(object):

    """
    a query object necessary to define a set.
    it can be stored or can be passed to DAL.__call__() to obtain a Set

    Example::

        query = db.users.name=='Max'
        set = db(query)
        records = set.select()

    """

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        ):
        self.db = db
        self.op = op
        self.first = first
        self.second = second

    def __str__(self):
        return self.db._adapter.expand(self)

    def __and__(self, other):
        return Query(self.db, self.db._adapter.AND, self, other)

    def __or__(self, other):
        return Query(self.db, self.db._adapter.OR, self, other)

    def __invert__(self):
        if self.op==self.db._adapter.NOT:
            return self.first
        return Query(self.db, self.db._adapter.NOT, self)


regex_quotes = re.compile("'[^']*'")


def xorify(orderby):
    if not orderby:
        return None
    orderby2 = orderby[0]
    for item in orderby[1:]:
        orderby2 = orderby2 | item
    return orderby2

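# Sketch: xorify([db.t.a, db.t.b]) folds a list of fields into db.t.a|db.t.b,
# the combined form accepted by select(orderby=...).
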

class Set(object):

    """
    a Set represents a set of records in the database,
    the records are identified by the query=Query(...) object.
    normally the Set is generated by DAL.__call__(Query(...))

    given a set, for example
        set = db(db.users.name=='Max')
    you can:
        set.update(name='Massimo')
        set.delete() # all elements in the set
        set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
    and take subsets:
        subset = set(db.users.id<5)
    """

    def __init__(self, db, query):
        self.db = db
        self._db = db
        self.query = query

    def __call__(self, query):
        if isinstance(query, Table):
            query = query._id>0
        elif isinstance(query, Field):
            query = query!=None
        if self.query:
            return Set(self.db, self.query & query)
        else:
            return Set(self.db, query)

    def _count(self, distinct=None):
        return self.db._adapter._count(self.query, distinct)

    def _select(self, *fields, **attributes):
        return self.db._adapter._select(self.query, fields, attributes)

    def _delete(self):
        tablename = self.db._adapter.get_table(self.query)
        return self.db._adapter._delete(tablename, self.query)

    def _update(self, **update_fields):
        tablename = self.db._adapter.get_table(self.query)
        fields = self.db[tablename]._listify(update_fields, update=True)
        return self.db._adapter._update(tablename, self.query, fields)

    def isempty(self):
        return not self.select(limitby=(0,1))

    def count(self, distinct=None):
        return self.db._adapter.count(self.query, distinct)

    def select(self, *fields, **attributes):
        return self.db._adapter.select(self.query, fields, attributes)

    def delete(self):
        tablename = self.db._adapter.get_table(self.query)
        self.delete_uploaded_files()
        return self.db._adapter.delete(tablename, self.query)

    def update(self, **update_fields):
        tablename = self.db._adapter.get_table(self.query)
        fields = self.db[tablename]._listify(update_fields, update=True)
        if not fields:
            raise SyntaxError, "No fields to update"
        self.delete_uploaded_files(update_fields)
        return self.db._adapter.update(tablename, self.query, fields)

    def delete_uploaded_files(self, upload_fields=None):
        table = self.db[self.db._adapter.tables(self.query)[0]]
        # only consider fields of type 'upload' with autodelete=True
        # that store their content on disk (uploadfield == True)
        if upload_fields:
            fields = upload_fields.keys()
        else:
            fields = table.fields
        fields = [f for f in fields if table[f].type == 'upload'
                  and table[f].uploadfield == True
                  and table[f].autodelete]
        if not fields:
            return
        for record in self.select(*[table[f] for f in fields]):
            for fieldname in fields:
                field = table[fieldname]
                oldname = record.get(fieldname, None)
                if not oldname:
                    continue
                if upload_fields and oldname == upload_fields[fieldname]:
                    continue
                uploadfolder = field.uploadfolder
                if not uploadfolder:
                    uploadfolder = os.path.join(self.db._adapter.folder, '..', 'uploads')
                if field.uploadseparate:
                    items = oldname.split('.')
                    uploadfolder = os.path.join(uploadfolder,
                                                "%s.%s" % (items[0], items[1]),
                                                items[2][:2])
                oldpath = os.path.join(uploadfolder, oldname)
                if os.path.exists(oldpath):
                    os.unlink(oldpath)


def update_record(pack, a=None):
    (colset, table, id) = pack
    b = a or dict(colset)
    c = dict([(k,v) for (k,v) in b.items() if k in table.fields and table[k].type!='id'])
    table._db(table._id==id).update(**c)
    for (k, v) in c.items():
        colset[k] = v


class Rows(object):

    """
    A wrapper for the return value of a select. It basically represents a table.
    It has an iterator and each row is represented as a dictionary.
    """

    def __init__(
        self,
        db=None,
        records=[],
        colnames=[],
        compact=True,
        rawrows=None
        ):
        self.db = db
        self.records = records
        self.colnames = colnames
        self.compact = compact
        self.response = rawrows

    def setvirtualfields(self, **keyed_virtualfields):
        if not keyed_virtualfields:
            return self
        for row in self.records:
            for (tablename, virtualfields) in keyed_virtualfields.items():
                attributes = dir(virtualfields)
                virtualfields.__dict__.update(row)
                if not tablename in row:
                    box = row[tablename] = Row()
                else:
                    box = row[tablename]
                for attribute in attributes:
                    if attribute[0] != '_':
                        method = getattr(virtualfields, attribute)
                        if hasattr(method, 'im_func') and method.im_func.func_code.co_argcount:
                            box[attribute] = method()
        return self

    def __and__(self, other):
        if self.colnames!=other.colnames:
            raise Exception, 'Cannot & incompatible Rows objects'
        records = self.records+other.records
        return Rows(self.db, records, self.colnames)

    def __or__(self, other):
        if self.colnames!=other.colnames:
            raise Exception, 'Cannot | incompatible Rows objects'
        records = self.records
        records += [record for record in other.records \
                    if not record in records]
        return Rows(self.db, records, self.colnames)

    def __nonzero__(self):
        if len(self.records):
            return 1
        return 0

    def __len__(self):
        return len(self.records)

    def __getslice__(self, a, b):
        return Rows(self.db, self.records[a:b], self.colnames)

    def __getitem__(self, i):
        row = self.records[i]
        keys = row.keys()
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[row.keys()[0]]
        return row

    def __iter__(self):
        """
        iterator over records
        """

        for i in xrange(len(self)):
            yield self[i]

    def __str__(self):
        """
        serializes the table into a csv file
        """

        s = cStringIO.StringIO()
        self.export_to_csv_file(s)
        return s.getvalue()

    def first(self):
        if not self.records:
            return None
        return self[0]

    def last(self):
        if not self.records:
            return None
        return self[-1]

    def find(self, f):
        """
        returns a new Rows object, a subset of the original object,
        filtered by the function f
        """
        if not self.records:
            return Rows(self.db, [], self.colnames)
        records = []
        for i in range(0, len(self)):
            row = self[i]
            if f(row):
                records.append(self.records[i])
        return Rows(self.db, records, self.colnames)

    def exclude(self, f):
        """
        removes elements from the calling Rows object, filtered by the function f,
        and returns a new Rows object containing the removed elements
        """
        if not self.records:
            return Rows(self.db, [], self.colnames)
        removed = []
        i = 0
        while i<len(self):
            row = self[i]
            if f(row):
                removed.append(self.records[i])
                del self.records[i]
            else:
                i += 1
        return Rows(self.db, removed, self.colnames)

    def sort(self, f, reverse=False):
        """
        returns a list of sorted elements (not sorted in place)
        """
        return Rows(self.db, sorted(self, key=f, reverse=reverse), self.colnames)

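    # Usage sketch: these operate in Python on an already-fetched Rows,
    # without issuing a second query:
    #
    #     rows = db(db.person).select()
    #     js = rows.find(lambda r: r.name.startswith('J'))
    #     others = rows.exclude(lambda r: r.name.startswith('J'))
    #     by_name = rows.sort(lambda r: r.name)
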
    def as_list(self,
                compact=True,
                storage_to_dict=True,
                datetime_to_str=True):
        """
        returns the data as a list or dictionary.
        :param storage_to_dict: when True returns a dict, otherwise a list (default True)
        :param datetime_to_str: convert datetime fields to strings (default True)
        """
        (oc, self.compact) = (self.compact, compact)
        if storage_to_dict:
            items = [item.as_dict(datetime_to_str) for item in self]
        else:
            items = [item for item in self]
        self.compact = oc
        return items

    def as_dict(self,
                key='id',
                compact=True,
                storage_to_dict=True,
                datetime_to_str=True):
        """
        returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)

        :param key: the name of the field to be used as dict key, normally the id
        :param compact: ? (default True)
        :param storage_to_dict: when True returns a dict, otherwise a list (default True)
        :param datetime_to_str: convert datetime fields to strings (default True)
        """
        rows = self.as_list(compact, storage_to_dict, datetime_to_str)
        if isinstance(key, str) and key.count('.')==1:
            (table, field) = key.split('.')
            return dict([(r[table][field], r) for r in rows])
        elif isinstance(key, str):
            return dict([(r[key], r) for r in rows])
        else:
            return dict([(key(r), r) for r in rows])

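    # Usage sketch:
    #
    #     people = db(db.person).select()
    #     people.as_dict()        # -> {1: {'id': 1, 'name': 'James'}, ...}
    #     people.as_dict('name')  # -> {'James': {...}, ...}
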
5616 """
5617 export data to csv, the first line contains the column names
5618
5619 :param ofile: where the csv must be exported to
5620 :param null: how null values must be represented (default '<NULL>')
5621 :param delimiter: delimiter to separate values (default ',')
5622 :param quotechar: character to use to quote string values (default '"')
5623 :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
5624 :param represent: use the fields .represent value (default False)
5625 :param colnames: list of column names to use (default self.colnames)
5626 This will only work when exporting rows objects!!!!
5627 DO NOT use this with db.export_to_csv()
5628 """
5629 delimiter = kwargs.get('delimiter', ',')
5630 quotechar = kwargs.get('quotechar', '"')
5631 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
5632 represent = kwargs.get('represent', False)
5633 writer = csv.writer(ofile, delimiter=delimiter,
5634 quotechar=quotechar, quoting=quoting)
5635 colnames = kwargs.get('colnames', self.colnames)
5636
5637 writer.writerow(colnames)
5638
5639 def none_exception(value):
5640 """
5641 returns a cleaned up value that can be used for csv export:
5642 - unicode text is encoded as such
5643 - None values are replaced with the given representation (default <NULL>)
5644 """
5645 if value is None:
5646 return null
5647 elif isinstance(value, unicode):
5648 return value.encode('utf8')
5649 elif isinstance(value,Reference):
5650 return int(value)
5651 elif hasattr(value, 'isoformat'):
5652 return value.isoformat()[:19].replace('T', ' ')
5653 elif isinstance(value, (list,tuple)):
5654 return bar_encode(value)
5655 return value
5656
5657 for record in self:
5658 row = []
5659 for col in colnames:
5660 if not table_field.match(col):
5661 row.append(record._extra[col])
5662 else:
5663 (t, f) = col.split('.')
5664 field = self.db[t][f]
5665 if isinstance(record.get(t, None), (Row,dict)):
5666 value = record[t][f]
5667 else:
5668 value = record[f]
5669 if represent and field.represent:
5670 value = field.represent(value)
5671 row.append(none_exception(value))
5672 writer.writerow(row)
5673
5675 """
5676 serializes the table using sqlhtml.SQLTABLE (if present)
5677 """
5678
5679 import sqlhtml
5680 return sqlhtml.SQLTABLE(self).xml()
5681
5682 - def json(self, mode='object', default=None):
5683 """
5684 serializes the table to a JSON list of objects
5685 """
5686 mode = mode.lower()
5687 if not mode in ['object', 'array']:
5688 raise SyntaxError, 'Invalid JSON serialization mode: %s' % mode
5689
5690 def inner_loop(record, col):
5691 (t, f) = col.split('.')
5692 res = None
5693 if not table_field.match(col):
5694 res = record._extra[col]
5695 else:
5696 if isinstance(record.get(t, None), Row):
5697 res = record[t][f]
5698 else:
5699 res = record[f]
5700 if mode == 'object':
5701 return (f, res)
5702 else:
5703 return res
5704
5705 if mode == 'object':
5706 items = [dict([inner_loop(record, col) for col in
5707 self.colnames]) for record in self]
5708 else:
5709 items = [[inner_loop(record, col) for col in self.colnames]
5710 for record in self]
5711 if have_serializers:
5712 return serializers.json(items,default=default or serializers.custom_json)
5713 else:
5714 import simplejson
5715 return simplejson.dumps(items)
5716
5718 return cPickle.loads(data)
5719
5721 return Rows_unpickler, \
5722 (cPickle.dumps(data.as_list(storage_to_dict=True,
5723 datetime_to_str=False)),)
5724
5725 copy_reg.pickle(Rows, Rows_pickler, Rows_unpickler)
5726


def test_all():
    """

    >>> if len(sys.argv)<2: db = DAL(\"sqlite://test.db\")
    >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
    >>> tmp = db.define_table('users',\
              Field('stringf', 'string', length=32, required=True),\
              Field('booleanf', 'boolean', default=False),\
              Field('passwordf', 'password', notnull=True),\
              Field('uploadf', 'upload'),\
              Field('blobf', 'blob'),\
              Field('integerf', 'integer', unique=True),\
              Field('doublef', 'double', unique=True,notnull=True),\
              Field('datef', 'date', default=datetime.date.today()),\
              Field('timef', 'time'),\
              Field('datetimef', 'datetime'),\
              migrate='test_user.table')

    Insert a record

    >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
                        uploadf=None, integerf=5, doublef=3.14,\
                        datef=datetime.date(2001, 1, 1),\
                        timef=datetime.time(12, 30, 15),\
                        datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
    1

    Drop the table

    >>> db.users.drop()

    Examples of insert, select, update, delete

    >>> tmp = db.define_table('person',\
              Field('name'),\
              Field('birth','date'),\
              migrate='test_person.table')
    >>> person_id = db.person.insert(name=\"Marco\",birth='2005-06-22')
    >>> person_id = db.person.insert(name=\"Massimo\",birth='1971-12-21')

    commented len(db().select(db.person.ALL))
    commented 2

    >>> me = db(db.person.id==person_id).select()[0] # test select
    >>> me.name
    'Massimo'
    >>> db(db.person.name=='Massimo').update(name='massimo') # test update
    1
    >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
    1

    Update a single record

    >>> me.update_record(name=\"Max\")
    >>> me.name
    'Max'

    Examples of complex search conditions

    >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
    1
    >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
    1
    >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
    1
    >>> me = db(db.person.id==person_id).select(db.person.name)[0]
    >>> me.name
    'Max'

    Examples of search conditions using extract from date/datetime/time

    >>> len(db(db.person.birth.month()==12).select())
    1
    >>> len(db(db.person.birth.year()>1900).select())
    1

    Example of usage of NULL

    >>> len(db(db.person.birth==None).select()) ### test NULL
    0
    >>> len(db(db.person.birth!=None).select()) ### test NULL
    1

    Examples of search conditions using lower, upper, and like

    >>> len(db(db.person.name.upper()=='MAX').select())
    1
    >>> len(db(db.person.name.like('%ax')).select())
    1
    >>> len(db(db.person.name.upper().like('%AX')).select())
    1
    >>> len(db(~db.person.name.upper().like('%AX')).select())
    0

    orderby, groupby and limitby

    >>> people = db().select(db.person.name, orderby=db.person.name)
    >>> order = db.person.name|~db.person.birth
    >>> people = db().select(db.person.name, orderby=order)

    >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)

    >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))

    Example of one 2 many relation

    >>> tmp = db.define_table('dog',\
              Field('name'),\
              Field('birth','date'),\
              Field('owner',db.person),\
              migrate='test_dog.table')
    >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
    1

    A simple JOIN

    >>> len(db(db.dog.owner==db.person.id).select())
    1

    >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
    1

    Drop tables

    >>> db.dog.drop()
    >>> db.person.drop()

    Example of many 2 many relation and Set

    >>> tmp = db.define_table('author', Field('name'),\
              migrate='test_author.table')
    >>> tmp = db.define_table('paper', Field('title'),\
              migrate='test_paper.table')
    >>> tmp = db.define_table('authorship',\
              Field('author_id', db.author),\
              Field('paper_id', db.paper),\
              migrate='test_authorship.table')
    >>> aid = db.author.insert(name='Massimo')
    >>> pid = db.paper.insert(title='QCD')
    >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)

    Define a Set

    >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
    >>> rows = authored_papers.select(db.author.name, db.paper.title)
    >>> for row in rows: print row.author.name, row.paper.title
    Massimo QCD

    Example of search condition using belongs

    >>> set = (1, 2, 3)
    >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of search condition using nested select

    >>> nested_select = db()._select(db.authorship.paper_id)
    >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of expressions

    >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
    >>> db(mynumber.id>0).delete()
    0
    >>> for i in range(10): tmp = mynumber.insert(x=i)
    >>> db(mynumber.id>0).select(mynumber.x.sum())[0](mynumber.x.sum())
    45

    >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
    5

    Output in csv

    >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
    author.name,paper.title\r
    Massimo,QCD

    Delete all leftover tables

    >>> DAL.distributed_transaction_commit(db)

    >>> db.mynumber.drop()
    >>> db.authorship.drop()
    >>> db.author.drop()
    >>> db.paper.drop()
    """


# backward compatibility: aliases for the old gluon.sql class names

SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
DAL.Field = Field
DAL.Table = Table


if __name__ == '__main__':
    import doctest
    doctest.testmod()