1
2
3
4 """
5 This file is part of the web2py Web Framework
6 Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
7 License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
8
9 Thanks to
10 * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
11 * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
12 * Denes
13 * Chris Clark
14 * clach05
15 * Denes Lengyel
16 * and many others who have contributed to current and previous versions
17
18 This file contains the DAL support for many relational databases,
19 including SQLite, MySQL, Postgres, Oracle, MS SQL, DB2, Interbase, Ingres
20
21 Completely refactored by MDP on Dec, 2010
22
23 TODO:
24 - create more functions in adapters to abstract more
25 - fix insert, create, migrate
26 - move startswith, endswith, contains into adapters
27 - handle _lastsql (where?)
28
29 Example of usage:
30
31 >>> # from dal import DAL, Field
32
33 ### create DAL connection (and create DB if not exists)
34 >>> db=DAL(('mysql://a:b@locahost/x','sqlite://storage.sqlite'),folder=None)
35
36 ### define a table 'person' (create/alter as necessary)
37 >>> person = db.define_table('person',Field('name','string'))
38
39 ### insert a record
40 >>> id = person.insert(name='James')
41
42 ### retrieve it by id
43 >>> james = person(id)
44
45 ### retrieve it by name
46 >>> james = person(name='James')
47
48 ### retrieve it by arbitrary query
49 >>> query = (person.name=='James')&(person.name.startswith('J'))
50 >>> james = db(query).select(person.ALL)[0]
51
52 ### update one record
53 >>> james.update_record(name='Jim')
54
55 ### update multiple records by query
56 >>> db(person.name.like('J%')).update(name='James')
57 1
58
59 ### delete records by query
60 >>> db(person.name.lower()=='jim').delete()
61 0
62
63 ### retrieve multiple records (rows)
64 >>> people = db(person).select(orderby=person.name,groupby=person.name,limitby=(0,100))
65
66 ### further filter them
67 >>> james = people.find(lambda row: row.name=='James').first()
68 >>> print james.id, james.name
69 1 James
70
71 ### check aggregates
72 >>> counter = person.id.count()
73 >>> print db(person).select(counter).first()(counter)
74 1
75
76 ### delete one record
77 >>> james.delete_record()
78 1
79
80 ### delete (drop) entire database table
81 >>> person.drop()
82
83 Supported field types:
84 id string text boolean integer double decimal password upload blob time date datetime,
85
86 Supported DAL URI strings:
87 'sqlite://test.db'
88 'sqlite:memory'
89 'jdbc:sqlite://test.db'
90 'mysql://root:none@localhost/test'
91 'postgres://mdipierro:none@localhost/test'
92 'jdbc:postgres://mdipierro:none@localhost/test'
93 'mssql://web2py:none@A64X2/web2py_test'
94 'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
95 'oracle://username:password@database'
96 'firebird://user:password@server:3050/database'
97 'db2://DSN=dsn;UID=user;PWD=pass'
98 'firebird://username:password@hostname/database'
99 'firebird_embedded://username:password@c://path'
100 'informix://user:password@server:3050/database'
101 'informixu://user:password@server:3050/database' # unicode informix
102 'gae' # for google app engine (work in progress)
103
104 For more info:
105 help(DAL)
106 help(Field)
107 """
108
109
110
111
112
113 __all__ = ['DAL', 'Field']
114 MAXCHARLENGTH = 512
115 INFINITY = 32768
116
117 import re
118 import sys
119 import locale
120 import os
121 import types
122 import cPickle
123 import datetime
124 import threading
125 import time
126 import cStringIO
127 import csv
128 import copy
129 import socket
130 import logging
131 import copy_reg
132 import base64
133 import shutil
134 import marshal
135 import decimal
136 import struct
137 import urllib
138 import hashlib
139
# every callable object type; used to decide whether a supplied value
# (e.g. a field default) must be called to obtain the actual value
CALLABLETYPES = (types.LambdaType, types.FunctionType, types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)
142
143
144
145
146
147 try:
148 from utils import web2py_uuid
149 except ImportError:
150 import uuid
152
153 try:
154 import portalocker
155 have_portalocker = True
156 except ImportError:
157 have_portalocker = False
158
159 try:
160 import serializers
161 have_serializers = True
162 except ImportError:
163 have_serializers = False
164
165 try:
166 import validators
167 have_validators = True
168 except ImportError:
169 have_validators = False
170
# module-level logger for all DAL diagnostics
logger = logging.getLogger("web2py.dal")
# unique default sentinel: a fresh lambda can never compare equal to any
# value supplied by user code
DEFAULT = lambda:0

# guards the shared connection pools across threads
sql_locker = threading.RLock()
# per-thread storage (open adapter instances, application folder)
thread = threading.local()



# engine name at the start of a DAL URI, e.g. 'mysql' in 'mysql://...'
regex_dbname = re.compile('^(\w+)(\:\w+)*')
# a fully qualified 'table.field' identifier
table_field = re.compile('^[\w_]+\.[\w_]+$')
# decomposes an autogenerated upload filename: table.field.uuidkey.name.ext
regex_content = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
# characters stripped from candidate filenames (quotes, whitespace, ';')
regex_cleanup_fn = re.compile('[\'"\s;]+')
# splits serialized list: values on a single '|' (a doubled '|' is an escape)
string_unpack=re.compile('(?<!\|)\|(?!\|)')
# Python reserved words may not be used as table or field names
regex_python_keywords = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
186
187
188
189
190
191 drivers = []
192
193 try:
194 from pysqlite2 import dbapi2 as sqlite3
195 drivers.append('pysqlite2')
196 except ImportError:
197 try:
198 from sqlite3 import dbapi2 as sqlite3
199 drivers.append('SQLite3')
200 except ImportError:
201 logger.debug('no sqlite3 or pysqlite2.dbapi2 driver')
202
203 try:
204 import contrib.pymysql as pymysql
205 drivers.append('pymysql')
206 except ImportError:
207 logger.debug('no pymysql driver')
208
209 try:
210 import psycopg2
211 drivers.append('PostgreSQL')
212 except ImportError:
213 logger.debug('no psycopg2 driver')
214
215 try:
216 import cx_Oracle
217 drivers.append('Oracle')
218 except ImportError:
219 logger.debug('no cx_Oracle driver')
220
221 try:
222 import pyodbc
223 drivers.append('MSSQL/DB2')
224 except ImportError:
225 logger.debug('no MSSQL/DB2 driver')
226
227 try:
228 import kinterbasdb
229 drivers.append('Interbase')
230 except ImportError:
231 logger.debug('no kinterbasdb driver')
232
233 try:
234 import informixdb
235 drivers.append('Informix')
236 logger.warning('Informix support is experimental')
237 except ImportError:
238 logger.debug('no informixdb driver')
239
240 try:
241 import sapdb
242 drivers.append('SAPDB')
243 logger.warning('SAPDB support is experimental')
244 except ImportError:
245 logger.debug('no sapdb driver')
246
247 try:
248 from com.ziclix.python.sql import zxJDBC
249 import java.sql
250 from org.sqlite import JDBC
251 drivers.append('zxJDBC')
252 logger.warning('zxJDBC support is experimental')
253 is_jdbc = True
254 except ImportError:
255 logger.debug('no zxJDBC driver')
256 is_jdbc = False
257
258 try:
259 import ingresdbi
260 drivers.append('Ingres')
261 except ImportError:
262 logger.debug('no Ingres driver')
263
264
265 try:
266 from new import classobj
267 from google.appengine.ext import db as gae
268 from google.appengine.api import namespace_manager
269 from google.appengine.api.datastore_types import Key
270 from google.appengine.ext.db.polymodel import PolyModel
271 drivers.append('gae')
272
274 """
275 GAE decimal implementation
276 """
277 data_type = decimal.Decimal
278
279 - def __init__(self, precision, scale, **kwargs):
280 super(GAEDecimalProperty, self).__init__(self, **kwargs)
281 d = '1.'
282 for x in range(scale):
283 d += '0'
284 self.round = decimal.Decimal(d)
285
292
294 if value:
295 return decimal.Decimal(value).quantize(self.round)
296 else:
297 return None
298
300 value = super(GAEDecimalProperty, self).validate(value)
301 if value is None or isinstance(value, decimal.Decimal):
302 return value
303 elif isinstance(value, basestring):
304 return decimal.Decimal(value)
305 raise gae.BadValueError("Property %s must be a Decimal or string." % self.name)
306
307 except ImportError:
308 pass
309
310
311
312
313
315
316 pools = {}
317
318 @staticmethod
321
322
323
324 @staticmethod
326 """ to close cleanly databases in a multithreaded environment """
327 if not hasattr(thread,'instances'):
328 return
329 while thread.instances:
330 instance = thread.instances.pop()
331 getattr(instance,action)()
332
333 really = True
334 if instance.pool_size:
335 sql_locker.acquire()
336 pool = ConnectionPool.pools[instance.uri]
337 if len(pool) < instance.pool_size:
338 pool.append(instance.connection)
339 really = False
340 sql_locker.release()
341 if really:
342 getattr(instance,'close')()
343 return
344
346 """ this actually does not make the folder. it has to be there """
347 if hasattr(thread,'folder'):
348 self.folder = thread.folder
349 else:
350 self.folder = thread.folder = ''
351
352
353 if False and self.folder and not os.path.exists(self.folder):
354 os.mkdir(self._folder)
355
373
374
375
376
377
378
380
# conservative capability defaults; concrete adapters override as needed
maxcharlength = INFINITY
commit_on_alter_table = False
support_distributed_transaction = False
uploads_in_blob = False
# mapping of web2py field types to this engine's SQL column types;
# %(length)s, %(foreign_key)s, %(on_delete_action)s etc. are filled in
# by create_table
types = {
    'boolean': 'CHAR(1)',
    'string': 'CHAR(%(length)s)',
    'text': 'TEXT',
    'password': 'CHAR(%(length)s)',
    'blob': 'BLOB',
    'upload': 'CHAR(%(length)s)',
    'integer': 'INTEGER',
    'double': 'DOUBLE',
    'decimal': 'DOUBLE',
    'date': 'DATE',
    'time': 'TIME',
    'datetime': 'TIMESTAMP',
    'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
    'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
    'list:integer': 'TEXT',
    'list:string': 'TEXT',
    'list:reference': 'TEXT',
    }
404
406 """
407 to be used ONLY for files that on GAE may not be on filesystem
408 """
409 return os.path.exists(filename)
410
411 - def file_open(self, filename, mode='rb', lock=True):
424
433
436
def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
             credential_decoder=lambda x: x, driver_args={}):
    """
    Base adapter constructor: records the connection parameters and
    installs dummy connection/cursor objects so that an adapter with no
    real backend still answers the adapter API (every call returns []).
    """
    self.db = db
    self.dbengine = "None"  # no real database engine behind this adapter
    self.uri = uri
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    class Dummy(object):
        # stand-in connection/cursor: any attribute access yields a
        # no-op callable returning an empty list
        lastrowid = 1
        def __getattr__(self, value):
            return lambda *a, **b: []
    self.connection = Dummy()
    self.cursor = Dummy()
451
453 return '%s_sequence' % tablename
454
456 return '%s_sequence' % tablename
457
458
def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
    """
    Build the CREATE TABLE statement for *table* and, unless migrate is
    False, execute it (or fake it) and record the resulting schema in a
    per-table .table pickle file used for later migrations.

    Returns the CREATE TABLE SQL string in every case.
    """
    fields = []
    sql_fields = {}      # fieldname -> column DDL, as written to the .table file
    sql_fields_aux = {}  # like sql_fields but with DEFAULT folded in (used for ALTERs)
    TFK = {}             # table-level foreign keys for multi-column primary keys
    tablename = table._tablename
    for field in table:
        k = field.name
        if isinstance(field.type, SQLCustomType):
            # custom type: prefer its native SQL type if declared
            ftype = field.type.native or field.type.type
        elif field.type.startswith('reference'):
            referenced = field.type[10:].strip()
            constraint_name = self.constraint_name(tablename, field.name)
            if hasattr(table, '_primarykey'):
                rtablename, rfieldname = referenced.split('.')
                rtable = table._db[rtablename]
                rfield = rtable[rfieldname]
                # must be a primary-key column or declared unique to be referenced
                if rfieldname in rtable._primarykey or rfield.unique:
                    ftype = self.types[rfield.type[:9]] % dict(length=rfield.length)
                    # multi-column primary key: defer to a table-level FK
                    if not rfield.unique and len(rtable._primarykey) > 1:
                        if rtablename not in TFK:
                            TFK[rtablename] = {}
                        TFK[rtablename][rfieldname] = field.name
                    else:
                        ftype = ftype + \
                            self.types['reference FK'] % dict(
                            constraint_name=constraint_name,
                            table_name=tablename,
                            field_name=field.name,
                            foreign_key='%s (%s)' % (rtablename, rfieldname),
                            on_delete_action=field.ondelete)
            else:
                # no explicit primary key: reference the table's first field
                ftype = self.types[field.type[:9]]\
                    % dict(table_name=tablename,
                           field_name=field.name,
                           constraint_name=constraint_name,
                           foreign_key=referenced + ('(%s)' % table._db[referenced].fields[0]),
                           on_delete_action=field.ondelete)
        elif field.type.startswith('list:reference'):
            ftype = self.types[field.type[:14]]
        elif field.type.startswith('decimal'):
            # field.type looks like 'decimal(precision,scale)'
            precision, scale = [int(x) for x in field.type[8:-1].split(',')]
            ftype = self.types[field.type[:7]] % \
                dict(precision=precision, scale=scale)
        elif not field.type in self.types:
            raise SyntaxError, 'Field: unknown field type: %s for %s' % \
                (field.type, field.name)
        else:
            ftype = self.types[field.type]\
                 % dict(length=field.length)
        if not field.type.startswith('id') and not field.type.startswith('reference'):
            if field.notnull:
                ftype += ' NOT NULL'
            else:
                ftype += self.ALLOW_NULL()
            if field.unique:
                ftype += ' UNIQUE'

        # remember the column DDL for the migration metadata
        sql_fields[field.name] = ftype

        if field.default != None:
            # NOTE: sql_fields_aux differs from sql_fields only in that it
            # carries the DEFAULT clause, needed when ALTERing later
            not_null = self.NOT_NULL(field.default, field.type)
            ftype = ftype.replace('NOT NULL', not_null)
        sql_fields_aux[field.name] = ftype

        fields.append('%s %s' % (field.name, ftype))
    other = ';'

    # backend-specific table options
    if self.dbengine == 'mysql':
        if not hasattr(table, "_primarykey"):
            fields.append('PRIMARY KEY(%s)' % table.fields[0])
        other = ' ENGINE=InnoDB CHARACTER SET utf8;'

    fields = ',\n '.join(fields)
    # append table-level (composite) foreign keys collected above
    for rtablename in TFK:
        rfields = TFK[rtablename]
        pkeys = table._db[rtablename]._primarykey
        fkeys = [rfields[k] for k in pkeys]
        fields = fields + ',\n ' + \
            self.types['reference TFK'] %\
            dict(table_name=tablename,
                 field_name=', '.join(fkeys),
                 foreign_table=rtablename,
                 foreign_key=', '.join(pkeys),
                 on_delete_action=field.ondelete)

    if hasattr(table, '_primarykey'):
        query = '''CREATE TABLE %s(\n %s,\n %s) %s''' % \
            (tablename, fields, self.PRIMARY_KEY(', '.join(table._primarykey)), other)
    else:
        query = '''CREATE TABLE %s(\n %s\n)%s''' % \
            (tablename, fields, other)

    # decide where the migration metadata (.table file) lives
    if self.uri.startswith('sqlite:///'):
        path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
        dbpath = self.uri[9:self.uri.rfind('/')]\
            .decode('utf8').encode(path_encoding)
    else:
        dbpath = self.folder
    if not migrate:
        return query
    elif self.uri.startswith('sqlite:memory'):
        table._dbt = None  # in-memory db: no migration metadata on disk
    elif isinstance(migrate, str):
        table._dbt = os.path.join(dbpath, migrate)
    else:
        # default metadata filename keyed on the connection URI hash
        table._dbt = os.path.join(dbpath, '%s_%s.table' \
            % (hashlib.md5(self.uri).hexdigest(), tablename))
    if table._dbt:
        table._loggername = os.path.join(dbpath, 'sql.log')
        logfile = self.file_open(table._loggername, 'a')
    else:
        logfile = None
    if not table._dbt or not self.file_exists(table._dbt):
        # first creation: run the DDL (unless faked) and write metadata
        if table._dbt:
            logfile.write('timestamp: %s\n'
                          % datetime.datetime.today().isoformat())
            logfile.write(query + '\n')
        if not fake_migrate:
            self.create_sequence_and_triggers(query, table)
            table._db.commit()
        if table._dbt:
            tfile = self.file_open(table._dbt, 'w')
            cPickle.dump(sql_fields, tfile)
            self.file_close(tfile)
            if fake_migrate:
                logfile.write('faked!\n')
            else:
                logfile.write('success!\n')
    else:
        # table metadata exists: compare and migrate if the schema changed
        tfile = self.file_open(table._dbt, 'r')
        try:
            sql_fields_old = cPickle.load(tfile)
        except EOFError:
            self.file_close(tfile)
            self.file_close(logfile)
            raise RuntimeError, 'File %s appears corrupted' % table._dbt
        self.file_close(tfile)
        if sql_fields != sql_fields_old:
            self.migrate_table(table,
                               sql_fields, sql_fields_old,
                               sql_fields_aux, logfile,
                               fake_migrate=fake_migrate)
        self.file_close(logfile)
    return query
614
def migrate_table(
    self,
    table,
    sql_fields,
    sql_fields_old,
    sql_fields_aux,
    logfile,
    fake_migrate=False,
    ):
    """
    Diff the current model (*sql_fields*) against the previously stored
    schema (*sql_fields_old*) and emit/execute the ALTER TABLE statements
    needed to add, drop or retype columns; updates the .table metadata
    file as it goes. With fake_migrate=True, only logs what would be done.
    """
    tablename = table._tablename
    # compare field names case-insensitively
    sql_fields = dict((k.lower(), v) for k, v in sql_fields.items())
    sql_fields_old = dict((k.lower(), v) for k, v in sql_fields_old.items())
    sql_fields_aux = dict((k.lower(), v) for k, v in sql_fields_aux.items())

    # union of old and new field names, new fields first
    keys = sql_fields.keys()
    for key in sql_fields_old:
        if not key in keys:
            keys.append(key)
    # MSSQL requires a separate ALTER TABLE per added column
    if self.dbengine == 'mssql':
        new_add = '; ALTER TABLE %s ADD ' % tablename
    else:
        new_add = ', ADD '

    fields_changed = False
    sql_fields_current = copy.copy(sql_fields_old)
    for key in keys:
        if not key in sql_fields_old:
            # new column
            sql_fields_current[key] = sql_fields[key]
            query = ['ALTER TABLE %s ADD %s %s;' % \
                     (tablename, key, sql_fields_aux[key].replace(', ', new_add))]
        elif self.dbengine == 'sqlite':
            # sqlite cannot DROP or retype columns: leave it alone
            query = None
        elif not key in sql_fields:
            # removed column
            del sql_fields_current[key]
            if not self.dbengine in ('firebird',):
                query = ['ALTER TABLE %s DROP COLUMN %s;' % (tablename, key)]
            else:
                query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
        elif sql_fields[key] != sql_fields_old[key] \
              and not isinstance(table[key].type, SQLCustomType) \
              and not (table[key].type.startswith('reference') and \
                  sql_fields[key].startswith('INT,') and \
                  sql_fields_old[key].startswith('INT NOT NULL,')):
            # changed column type: copy data through a __tmp column
            sql_fields_current[key] = sql_fields[key]
            t = tablename
            tt = sql_fields_aux[key].replace(', ', new_add)
            if not self.dbengine in ('firebird',):
                query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                         'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                         'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                         'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                         'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                         'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
            else:
                query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                         'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                         'ALTER TABLE %s DROP %s;' % (t, key),
                         'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                         'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                         'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
        else:
            query = None

        if query:
            fields_changed = True
            logfile.write('timestamp: %s\n'
                          % datetime.datetime.today().isoformat())
            table._db['_lastsql'] = '\n'.join(query)
            for sub_query in query:
                logfile.write(sub_query + '\n')
                if not fake_migrate:
                    self.execute(sub_query)
                    # caveat: mysql, oracle and firebird do not allow multiple
                    # ALTER TABLE statements in one transaction, so commit
                    # partial work and refresh the .table file immediately
                    if table._db._adapter.commit_on_alter_table:
                        table._db.commit()
                        tfile = self.file_open(table._dbt, 'w')
                        cPickle.dump(sql_fields_current, tfile)
                        self.file_close(tfile)
                        logfile.write('success!\n')
                else:
                    logfile.write('faked!\n')

    # engines without commit_on_alter_table persist metadata once at the end
    if fields_changed and not self.dbengine in ['mysql', 'oracle', 'firebird']:
        table._db.commit()
        tfile = self.file_open(table._dbt, 'w')
        cPickle.dump(sql_fields_current, tfile)
        self.file_close(tfile)
705
708
711
713 return "EXTRACT(%s FROM %s)" % (what, self.expand(first))
714
717
720
723
725 return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)
726
729
731 return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
732
734 return 'PRIMARY KEY(%s)' % key
735
736 - def _drop(self,table,mode):
737 return ['DROP TABLE %s;' % table]
738
def drop(self, table, mode=''):
    """
    Execute the DROP TABLE statement(s) for *table*, commit, unregister
    the table from the DAL instance and delete its migration metadata
    file (logging each statement to sql.log when metadata exists).
    """
    if table._dbt:
        logfile = self.file_open(table._loggername, 'a')
    queries = self._drop(table, mode)
    for query in queries:
        if table._dbt:
            logfile.write(query + '\n')
        self.execute(query)
    table._db.commit()
    # unregister the table from the DAL and from the reverse-reference map
    del table._db[table._tablename]
    del table._db.tables[table._db.tables.index(table._tablename)]
    table._db._update_referenced_by(table._tablename)
    if table._dbt:
        self.file_delete(table._dbt)
        logfile.write('success!\n')
754
756 keys = ','.join(f.name for f,v in fields)
757 values = ','.join(self.expand(v,f.type) for f,v in fields)
758 return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)
759
760 - def insert(self,table,fields):
761 query = self._insert(table,fields)
762 try:
763 self.execute(query)
764 except Exception, e:
765 if isinstance(e,self.integrity_error_class()):
766 return None
767 raise e
768 if hasattr(table,'_primarykey'):
769 return dict( [ (k,fields[k]) for k in table._primarykey ])
770 id = self.lastrowid(table)
771 if not isinstance(id,int):
772 return id
773 rid = Reference(id)
774 (rid._table, rid._record) = (table, None)
775 return rid
776
779
780 - def NOT(self,first):
782
783 - def AND(self,first,second):
785
786 - def OR(self,first,second):
788
790 if isinstance(second,str):
791 return '(%s IN (%s))' % (self.expand(first),second[:-1])
792 return '(%s IN (%s))' % (self.expand(first),
793 ','.join(self.expand(item,first.type) for item in second))
794
795 - def LIKE(self,first,second):
797
800
803
805 if first.type in ('string','text'):
806 key = '%'+str(second).replace('%','%%')+'%'
807 elif first.type.startswith('list:'):
808 key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
809 return '(%s LIKE %s)' % (self.expand(first),self.expand(key,'string'))
810
811 - def EQ(self,first,second=None):
815
816 - def NE(self,first,second=None):
820
821 - def LT(self,first,second=None):
823
824 - def LE(self,first,second=None):
826
827 - def GT(self,first,second=None):
829
830 - def GE(self,first,second=None):
832
833 - def ADD(self,first,second):
835
836 - def SUB(self,first,second):
838
839 - def MUL(self,first,second):
841
842 - def DIV(self,first,second):
844
845 - def MOD(self,first,second):
847
848 - def AS(self,first,second):
850
851 - def ON(self,first,second):
853
856
857 - def COMMA(self,first,second):
859
def expand(self, expression, field_type=None):
    """
    Recursively convert a DAL expression tree into its SQL string form.

    Fields stringify directly; Expression/Query nodes dispatch to their
    operator with one or two operands; plain values are represented
    (quoted/escaped) when a field_type is known; lists become a
    comma-separated representation; anything else is str()'ed.
    """
    if isinstance(expression, Field):
        return str(expression)
    elif isinstance(expression, (Expression, Query)):
        if not expression.second is None:
            return expression.op(expression.first, expression.second)  # binary op
        elif not expression.first is None:
            return expression.op(expression.first)  # unary op
        else:
            return expression.op()  # nullary op (e.g. RANDOM)
    elif field_type:
        return self.represent(expression, field_type)
    elif isinstance(expression, (list, tuple)):
        return ','.join([self.represent(item, field_type) for item in expression])
    else:
        return str(expression)
876
def alias(self, table, alias):
    """
    given a table object, makes a new table object
    with alias name.

    The alias shares the original's structure; each field is shallow
    copied and re-pointed at the alias so generated SQL uses the alias
    name. The alias is registered on the DAL under *alias*.
    """
    other = copy.copy(table)
    other['_ot'] = other._tablename  # remember the original table name
    other['ALL'] = SQLALL(other)
    other['_tablename'] = alias
    for fieldname in other.fields:
        other[fieldname] = copy.copy(other[fieldname])
        other[fieldname]._tablename = alias
        other[fieldname].tablename = alias
        other[fieldname].table = other
    # bug fix: register the ALIASED table under the alias name
    # (was `table._db[alias] = table`, which made db[alias] resolve to
    # the original, un-aliased table)
    table._db[alias] = other
    return other
893
895 tablename = table._tablename
896 return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]
897
899 if table._dbt:
900 logfile = self.file_open(table._loggername, 'a')
901 queries = table._db._adapter._truncate(table, mode)
902 for query in queries:
903 if table._dbt:
904 logfile.write(query + '\n')
905 self.execute(query)
906 table._db.commit()
907 if table._dbt:
908 logfile.write('success!\n')
909
910 - def _update(self,tablename,query,fields):
911 if query:
912 sql_w = ' WHERE ' + self.expand(query)
913 else:
914 sql_w = ''
915 sql_v = ','.join(['%s=%s' % (field.name, self.expand(value,field.type)) for (field,value) in fields])
916 return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)
917
def update(self, tablename, query, fields):
    """
    Build and execute an UPDATE; return the number of affected rows,
    or None when the driver does not expose cursor.rowcount.
    """
    sql = self._update(tablename, query, fields)
    self.execute(sql)
    try:
        return self.cursor.rowcount
    except Exception:  # was a bare except; some drivers lack rowcount
        return None
925
926 - def _delete(self,tablename, query):
927 if query:
928 sql_w = ' WHERE ' + self.expand(query)
929 else:
930 sql_w = ''
931 return 'DELETE FROM %s%s;' % (tablename, sql_w)
932
def delete(self, tablename, query):
    """
    Execute a DELETE for *query*; returns the number of deleted rows
    (None when the driver lacks rowcount). On sqlite, ON DELETE CASCADE
    is emulated in Python by recursively deleting referencing rows.
    """
    sql = self._delete(tablename, query)
    ### special code to emulate CASCADE on sqlite: snapshot the ids that
    ### are about to disappear BEFORE executing the delete
    db = self.db
    table = db[tablename]
    if self.dbengine == 'sqlite' and table._referenced_by:
        deleted = [x[table._id.name] for x in db(query).select(table._id)]
    ### end special code
    self.execute(sql)
    try:
        counter = self.cursor.rowcount
    except:
        counter = None
    ### special code: cascade into each referencing table declared CASCADE
    if self.dbengine == 'sqlite' and counter:
        for tablename, fieldname in table._referenced_by:
            f = db[tablename][fieldname]
            if f.type == 'reference ' + table._tablename and f.ondelete == 'CASCADE':
                db(db[tablename][fieldname].belongs(deleted)).delete()
    ### end special code
    return counter
954
956 tablenames = self.tables(query)
957 if len(tablenames)==1:
958 return tablenames[0]
959 elif len(tablenames)<1:
960 raise RuntimeError, "No table selected"
961 else:
962 raise RuntimeError, "Too many tables selected"
963
def _select(self, query, fields, attributes):
    """
    Build the SELECT statement for *query*/*fields*, honoring the
    supported select() attributes (orderby, groupby, limitby, left,
    distinct, having, cache, required); also records the resulting
    column names in self._colnames for later row parsing.
    """
    # reject unknown select() keyword arguments early
    for key in set(attributes.keys())-set(('orderby','groupby','limitby',
                                           'required','cache','left',
                                           'distinct','having')):
        raise SyntaxError, 'invalid select attribute: %s' % key
    # expand table.ALL markers into the table's individual fields
    new_fields = []
    for item in fields:
        if isinstance(item, SQLALL):
            new_fields += item.table
        else:
            new_fields.append(item)
    fields = new_fields
    tablenames = self.tables(query)
    if not fields:
        # no explicit fields: select everything from the queried tables
        for table in tablenames:
            for field in self.db[table]:
                fields.append(field)
    else:
        # resolve 'table.field' strings and collect any extra tables
        for field in fields:
            if isinstance(field, basestring) and table_field.match(field):
                tn, fn = field.split('.')
                field = self.db[tn][fn]
            for tablename in self.tables(field):
                if not tablename in tablenames:
                    tablenames.append(tablename)
    if len(tablenames) < 1:
        raise SyntaxError, 'Set: no tables selected'
    sql_f = ', '.join([self.expand(f) for f in fields])
    self._colnames = [c.strip() for c in sql_f.split(', ')]
    if query:
        sql_w = ' WHERE ' + self.expand(query)
    else:
        sql_w = ''
    sql_o = ''
    sql_s = ''
    left = attributes.get('left', False)
    distinct = attributes.get('distinct', False)
    groupby = attributes.get('groupby', False)
    orderby = attributes.get('orderby', False)
    having = attributes.get('having', False)
    limitby = attributes.get('limitby', False)
    if distinct is True:
        sql_s += 'DISTINCT'
    elif distinct:
        sql_s += 'DISTINCT ON (%s)' % distinct
    if left:
        join = attributes['left']
        command = self.LEFT_JOIN()
        if not isinstance(join, (tuple, list)):
            join = [join]
        # bare tables to join vs. explicit table.on(...) expressions
        joint = [t._tablename for t in join if not isinstance(t, Expression)]
        joinon = [t for t in join if isinstance(t, Expression)]
        # tables referenced by the ON expressions, minus those that are
        # the left-joined tables themselves
        tables_to_merge = {}
        [tables_to_merge.update(dict.fromkeys(self.tables(t))) for t in joinon]
        joinont = [t.first._tablename for t in joinon]
        [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
        important_tablenames = joint + joinont + tables_to_merge.keys()
        excluded = [t for t in tablenames if not t in important_tablenames]
        sql_t = ', '.join([t for t in excluded + tables_to_merge.keys()])
        if joint:
            sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
        # each explicit ON expression becomes its own LEFT JOIN clause
        for t in joinon:
            sql_t += ' %s %s' % (command, str(t))
    else:
        sql_t = ', '.join(tablenames)
    if groupby:
        if isinstance(groupby, (list, tuple)):
            groupby = xorify(groupby)
        sql_o += ' GROUP BY %s' % self.expand(groupby)
        if having:
            sql_o += ' HAVING %s' % attributes['having']
    if orderby:
        if isinstance(orderby, (list, tuple)):
            orderby = xorify(orderby)
        if str(orderby) == '<random>':
            sql_o += ' ORDER BY %s' % self.RANDOM()
        else:
            sql_o += ' ORDER BY %s' % self.expand(orderby)
    if limitby:
        # LIMIT without ORDER BY is nondeterministic: order by primary key(s)
        if not orderby and tablenames:
            sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for t in tablenames for x in ((hasattr(self.db[t],'_primarykey') and self.db[t]._primarykey) or [self.db[t]._id.name])])

    return self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
1050
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Compose the final SELECT, translating *limitby* (min, max) into a
    LIMIT/OFFSET suffix; adapters with different paging syntax override this."""
    if limitby:
        lmin, lmax = limitby
        sql_o = '%s LIMIT %i OFFSET %i' % (sql_o, lmax - lmin, lmin)
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
1056
def select(self, query, fields, attributes):
    """
    Always returns a Rows object, even if it may be empty

    Honors the 'cache' attribute: (cache_model, time_expire). The cache
    key is the URI + SQL, hashed when too long for the cache backend.
    """
    def response(sql):
        self.execute(sql)
        return self.cursor.fetchall()
    sql = self._select(query, fields, attributes)
    if attributes.get('cache', None):
        (cache_model, time_expire) = attributes['cache']
        del attributes['cache']
        key = self.uri + '/' + sql
        # bug fix: compare the key LENGTH to 200, not the string itself;
        # the original `(key<=200) and key or ...` compared str with int,
        # which is always False in Python 2 (and a TypeError in Python 3)
        if len(key) > 200:
            key = hashlib.md5(key).hexdigest()
        rows = cache_model(key, lambda: response(sql), time_expire)
    else:
        rows = response(sql)
    if isinstance(rows, tuple):
        rows = list(rows)
    # when the engine cannot LIMIT natively, slice rows client-side
    limitby = attributes.get('limitby', None) or (0,)
    rows = self.rowslice(rows, limitby[0], None)
    return self.parse(rows, self._colnames)
1078
def _count(self, query, distinct=None):
    """
    Build the SQL COUNT statement for *query*; with *distinct* given,
    counts distinct combinations of those expressions instead of rows.
    """
    tablenames = self.tables(query)
    if query:
        sql_w = ' WHERE ' + self.expand(query)
    else:
        sql_w = ''
    sql_t = ','.join(tablenames)
    if distinct:
        if isinstance(distinct, (list, tuple)):
            distinct = xorify(distinct)  # fold the list into one expression
        sql_d = self.expand(distinct)
        return 'SELECT count(DISTINCT %s) FROM %s%s' % (sql_d, sql_t, sql_w)
    return 'SELECT count(*) FROM %s%s' % (sql_t, sql_w)
1092
def count(self, query, distinct=None):
    """Run the COUNT query built by _count and return the scalar result."""
    sql = self._count(query, distinct)
    self.execute(sql)
    return self.cursor.fetchone()[0]
1096
1097
1108
1110 return self.connection.commit()
1111
1114
1116 return self.connection.close()
1117
1120
1123
1126
1129
1132
1134 return '%s_%s__constraint' % (table,fieldname)
1135
1138
1140 self.db._lastsql = a[0]
1141 return self.cursor.execute(*a,**b)
1142
1145
1147 if isinstance(obj,CALLABLETYPES):
1148 obj = obj()
1149 if isinstance(fieldtype, SQLCustomType):
1150 return fieldtype.encoder(obj)
1151 if isinstance(obj, (Expression, Field)):
1152 return str(obj)
1153 if fieldtype.startswith('list:'):
1154 if not obj:
1155 obj = []
1156 if not isinstance(obj, (list, tuple)):
1157 obj = [obj]
1158 if isinstance(obj, (list, tuple)):
1159 obj = bar_encode(obj)
1160 if obj is None:
1161 return 'NULL'
1162 if obj == '' and not fieldtype[:2] in ['st', 'te', 'pa', 'up']:
1163 return 'NULL'
1164 r = self.represent_exceptions(obj,fieldtype)
1165 if r != None:
1166 return r
1167 if fieldtype == 'boolean':
1168 if obj and not str(obj)[:1].upper() in ['F', '0']:
1169 return "'T'"
1170 else:
1171 return "'F'"
1172 if fieldtype == 'id' or fieldtype == 'integer':
1173 return str(int(obj))
1174 if fieldtype.startswith('decimal'):
1175 return str(obj)
1176 elif fieldtype.startswith('reference'):
1177 if fieldtype.find('.')>0:
1178 return repr(obj)
1179 elif isinstance(obj, (Row, Reference)):
1180 return str(obj['id'])
1181 return str(int(obj))
1182 elif fieldtype == 'double':
1183 return repr(float(obj))
1184 if isinstance(obj, unicode):
1185 obj = obj.encode(self.db_codec)
1186 if fieldtype == 'blob':
1187 obj = base64.b64encode(str(obj))
1188 elif fieldtype == 'date':
1189 if isinstance(obj, (datetime.date, datetime.datetime)):
1190 obj = obj.isoformat()[:10]
1191 else:
1192 obj = str(obj)
1193 elif fieldtype == 'datetime':
1194 if isinstance(obj, datetime.datetime):
1195 obj = obj.isoformat()[:19].replace('T',' ')
1196 elif isinstance(obj, datetime.date):
1197 obj = obj.isoformat()[:10]+' 00:00:00'
1198 else:
1199 obj = str(obj)
1200 elif fieldtype == 'time':
1201 if isinstance(obj, datetime.time):
1202 obj = obj.isoformat()[:10]
1203 else:
1204 obj = str(obj)
1205 if not isinstance(obj,str):
1206 obj = str(obj)
1207 try:
1208 obj.decode(self.db_codec)
1209 except:
1210 obj = obj.decode('latin1').encode(self.db_codec)
1211 return "'%s'" % obj.replace("'", "''")
1212
1215
1218
1221
def rowslice(self, rows, minimum=0, maximum=None):
    """Default no-op row slicing hook; adapters for engines without
    native LIMIT/OFFSET support override this to slice client-side."""
    return rows
1225
def parse(self, rows, colnames, blob_decode=True):
    """
    Convert raw driver rows into a Rows object of Row records.

    Each colname of the form 'table.field' is decoded to a typed value
    under new_row[table][field]; any other column (expression, aggregate)
    is stored under new_row['_extra'], plus a direct attribute when the
    column was aliased with 'AS'. For 'id' columns, per-record
    update_record/delete_record callables and back-reference Sets are
    attached. Finally virtualfields of every referenced table are applied.
    """
    db = self.db
    virtualtables = []
    new_rows = []
    # hoisted out of both loops: was recompiled for every extra column
    select_as_parser = re.compile("\s+AS\s+(\S+)")
    for (i, row) in enumerate(rows):
        new_row = Row()
        for (j, colname) in enumerate(colnames):
            value = row[j]
            if not table_field.match(colname):
                # not a plain table.field column: keep it under _extra
                if not '_extra' in new_row:
                    new_row['_extra'] = Row()
                new_row['_extra'][colname] = value
                new_column_name = select_as_parser.search(colname)
                if not new_column_name is None:
                    column_name = new_column_name.groups(0)
                    setattr(new_row, column_name[0], value)
                continue
            (tablename, fieldname) = colname.split('.')
            table = db[tablename]
            field = table[fieldname]
            field_type = field.type
            if field.type != 'blob' and isinstance(value, str):
                # best-effort decode; leave raw bytes on failure
                try:
                    value = value.decode(db._db_codec)
                except Exception:
                    pass
            if isinstance(value, unicode):
                value = value.encode('utf-8')
            if not tablename in new_row:
                colset = new_row[tablename] = Row()
                virtualtables.append(tablename)
            else:
                colset = new_row[tablename]

            if isinstance(field_type, SQLCustomType):
                colset[fieldname] = field_type.decoder(value)
            elif not isinstance(field_type, str) or value is None:
                colset[fieldname] = value
            elif isinstance(field_type, str) and \
                    field_type.startswith('reference'):
                referee = field_type[10:].strip()
                if not '.' in referee:
                    colset[fieldname] = rid = Reference(value)
                    (rid._table, rid._record) = (db[referee], None)
                else:
                    # reference to table.field: keep the raw value
                    colset[fieldname] = value
            elif field_type == 'boolean':
                if value == True or str(value)[:1].lower() == 't':
                    colset[fieldname] = True
                else:
                    colset[fieldname] = False
            elif field_type == 'date' \
                    and (not isinstance(value, datetime.date) \
                    or isinstance(value, datetime.datetime)):
                (y, m, d) = [int(x) for x in
                             str(value)[:10].strip().split('-')]
                colset[fieldname] = datetime.date(y, m, d)
            elif field_type == 'time' \
                    and not isinstance(value, datetime.time):
                time_items = [int(x) for x in
                              str(value)[:8].strip().split(':')[:3]]
                if len(time_items) == 3:
                    (h, mi, s) = time_items
                else:
                    (h, mi, s) = time_items + [0]
                colset[fieldname] = datetime.time(h, mi, s)
            elif field_type == 'datetime' \
                    and not isinstance(value, datetime.datetime):
                (y, m, d) = [int(x) for x in
                             str(value)[:10].strip().split('-')]
                time_items = [int(x) for x in
                              str(value)[11:19].strip().split(':')[:3]]
                if len(time_items) == 3:
                    (h, mi, s) = time_items
                else:
                    (h, mi, s) = time_items + [0]
                colset[fieldname] = datetime.datetime(y, m, d, h, mi, s)
            elif field_type == 'blob' and blob_decode:
                colset[fieldname] = base64.b64decode(str(value))
            elif field_type.startswith('decimal'):
                decimals = [int(x) for x in field_type[8:-1].split(',')][-1]
                if self.dbengine == 'sqlite':
                    # sqlite stores floats; format to the declared scale first
                    value = ('%.' + str(decimals) + 'f') % value
                if not isinstance(value, decimal.Decimal):
                    value = decimal.Decimal(str(value))
                colset[fieldname] = value
            elif field_type.startswith('list:integer'):
                if not self.uri.startswith('gae'):
                    colset[fieldname] = bar_decode_integer(value)
                else:
                    colset[fieldname] = value
            elif field_type.startswith('list:reference'):
                if not self.uri.startswith('gae'):
                    colset[fieldname] = bar_decode_integer(value)
                else:
                    colset[fieldname] = value
            elif field_type.startswith('list:string'):
                if not self.uri.startswith('gae'):
                    colset[fieldname] = bar_decode_string(value)
                else:
                    colset[fieldname] = value
            else:
                colset[fieldname] = value
            if field_type == 'id':
                id = colset[field.name]
                colset.update_record = lambda _=(colset, table, id), **a: update_record(_, a)
                colset.delete_record = lambda t=table, i=id: t._db(t._id == i).delete()
                for (referee_table, referee_name) in \
                        table._referenced_by:
                    s = db[referee_table][referee_name]
                    colset[referee_table] = Set(db, s == id)
                colset['id'] = id
        new_rows.append(new_row)
    rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
    for table in virtualtables:
        # BUGFIX: was db[tablename].virtualfields -- `tablename` is the
        # stale value from the parsing loop, so every iteration read the
        # virtualfields of the *last* parsed table instead of each one.
        for item in db[table].virtualfields:
            try:
                rowsobj = rowsobj.setvirtualfields(**{table: item})
            except KeyError:
                # table may be absent from the selected rows
                pass
    return rowsobj
1350
1351
1352
1353
1354
1355
1357
1359 return "web2py_extract('%s',%s)" % (what,self.expand(field))
1360
1361 @staticmethod
1363 table = {
1364 'year': (0, 4),
1365 'month': (5, 7),
1366 'day': (8, 10),
1367 'hour': (11, 13),
1368 'minute': (14, 16),
1369 'second': (17, 19),
1370 }
1371 try:
1372 (i, j) = table[lookup]
1373 return int(s[i:j])
1374 except:
1375 return None
1376
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=lambda x:x, driver_args={}):
    """
    Open (or create) an SQLite database.

    URI forms: 'sqlite:memory' for an in-memory DB, otherwise
    'sqlite://path' where a relative path is resolved inside the
    working folder. Registers web2py_extract() so EXTRACT-style
    queries work.
    """
    self.db = db
    self.dbengine = "sqlite"
    self.uri = uri
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self.find_or_make_work_folder()
    path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1]
    if uri.startswith('sqlite:memory'):
        dbpath = ':memory:'
    else:
        dbpath = uri.split('://')[1]
        if dbpath[0] != '/':
            dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'),dbpath)
    # BUGFIX: copy first -- the original wrote into the shared mutable
    # default argument (and the caller's dict), leaking state across
    # adapter instances.
    driver_args = dict(driver_args)
    if not 'check_same_thread' in driver_args:
        # connections are pooled and may be reused by other threads
        driver_args['check_same_thread'] = False
    def connect(dbpath=dbpath, driver_args=driver_args):
        return sqlite3.Connection(dbpath, **driver_args)
    self.pool_connection(connect)
    self.cursor = self.connection.cursor()
    self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)
1400
1402 tablename = table._tablename
1403 return ['DELETE FROM %s;' % tablename,
1404 "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]
1405
1408
1409
1411
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=lambda x:x, driver_args={}):
    """Open an SQLite database through zxJDBC (Jython runtime)."""
    self.db = db
    self.dbengine = "sqlite"
    self.uri = uri
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self.find_or_make_work_folder()
    path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1]
    if uri.startswith('sqlite:memory'):
        dbpath = ':memory:'
    else:
        dbpath = uri.split('://')[1]
        if dbpath[0] != '/':
            # relative paths live inside the working folder
            folder_utf8 = self.folder.decode(path_encoding).encode('utf8')
            dbpath = os.path.join(folder_utf8, dbpath)
    def connect(dbpath=dbpath, driver_args=driver_args):
        jdbc_conn = java.sql.DriverManager.getConnection('jdbc:sqlite:' + dbpath)
        return zxJDBC.connect(jdbc_conn, **driver_args)
    self.pool_connection(connect)
    self.cursor = self.connection.cursor()
    self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)
1433
1436
1437
1439
1440 driver = globals().get('pymysql',None)
1441 maxcharlength = 255
1442 commit_on_alter_table = True
1443 support_distributed_transaction = True
1444 types = {
1445 'boolean': 'CHAR(1)',
1446 'string': 'VARCHAR(%(length)s)',
1447 'text': 'LONGTEXT',
1448 'password': 'VARCHAR(%(length)s)',
1449 'blob': 'LONGBLOB',
1450 'upload': 'VARCHAR(%(length)s)',
1451 'integer': 'INT',
1452 'double': 'DOUBLE',
1453 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1454 'date': 'DATE',
1455 'time': 'TIME',
1456 'datetime': 'DATETIME',
1457 'id': 'INT AUTO_INCREMENT NOT NULL',
1458 'reference': 'INT, INDEX %(field_name)s__idx (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1459 'list:integer': 'LONGTEXT',
1460 'list:string': 'LONGTEXT',
1461 'list:reference': 'LONGTEXT',
1462 }
1463
1466
1468 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
1469
1470 - def _drop(self,table,mode):
1471
1472 return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table,'SET FOREIGN_KEY_CHECKS=1;']
1473
1476
1480
1483
1486
1488 return '; ALTER TABLE %s ADD ' % table
1489
1490 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
1491 credential_decoder=lambda x:x, driver_args={}):
1492 self.db = db
1493 self.dbengine = "mysql"
1494 self.uri = uri
1495 self.pool_size = pool_size
1496 self.folder = folder
1497 self.db_codec = db_codec
1498 self.find_or_make_work_folder()
1499 uri = uri.split('://')[1]
1500 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
1501 if not m:
1502 raise SyntaxError, \
1503 "Invalid URI string in DAL: %s" % self.uri
1504 user = m.group('user')
1505 if not user:
1506 raise SyntaxError, 'User required'
1507 password = m.group('password')
1508 if not password:
1509 password = ''
1510 host = m.group('host')
1511 if not host:
1512 raise SyntaxError, 'Host name required'
1513 db = m.group('db')
1514 if not db:
1515 raise SyntaxError, 'Database name required'
1516 port = int(m.group('port') or '3306')
1517 charset = m.group('charset') or 'utf8'
1518 driver_args.update(dict(db=db,
1519 user=credential_decoder(user),
1520 passwd=credential_decoder(password),
1521 host=host,
1522 port=port,
1523 charset=charset))
1524 def connect(driver_args=driver_args):
1525 return self.driver.connect(**driver_args)
1526 self.pool_connection(connect)
1527 self.cursor = self.connection.cursor()
1528 self.execute('SET FOREIGN_KEY_CHECKS=1;')
1529 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
1530
1532 self.execute('select last_insert_id();')
1533 return int(self.cursor.fetchone()[0])
1534
1535
1536 -class PostgreSQLAdapter(BaseAdapter):
1537
1538 support_distributed_transaction = True
1539 types = {
1540 'boolean': 'CHAR(1)',
1541 'string': 'VARCHAR(%(length)s)',
1542 'text': 'TEXT',
1543 'password': 'VARCHAR(%(length)s)',
1544 'blob': 'BYTEA',
1545 'upload': 'VARCHAR(%(length)s)',
1546 'integer': 'INTEGER',
1547 'double': 'FLOAT8',
1548 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1549 'date': 'DATE',
1550 'time': 'TIME',
1551 'datetime': 'TIMESTAMP',
1552 'id': 'SERIAL PRIMARY KEY',
1553 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1554 'list:integer': 'TEXT',
1555 'list:string': 'TEXT',
1556 'list:reference': 'TEXT',
1557 }
1558
def sequence_name(self,table):
    """Name of the implicit SERIAL sequence for `table` (capital 'S'
    kept for backward compatibility with existing databases)."""
    return '%s_id_Seq' % table
1561
1564
1567
def prepare(self,key):
    """Phase one of two-phase commit: park the transaction under `key`."""
    self.execute("PREPARE TRANSACTION '%s';" % key)
1570
def commit_prepared(self,key):
    """Phase two of two-phase commit: finalize the parked transaction."""
    self.execute("COMMIT PREPARED '%s';" % key)
1573
def rollback_prepared(self,key):
    """Abort a transaction previously parked with prepare()."""
    self.execute("ROLLBACK PREPARED '%s';" % key)
1576
def create_sequence_and_triggers(self, query, table, **args):
    """PostgreSQL SERIAL columns create their sequence implicitly,
    so only the CREATE TABLE statement itself needs to run."""
    self.execute(query)
1583
1584 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
1585 credential_decoder=lambda x:x, driver_args={}):
1586 self.db = db
1587 self.dbengine = "postgres"
1588 self.uri = uri
1589 self.pool_size = pool_size
1590 self.folder = folder
1591 self.db_codec = db_codec
1592 self.find_or_make_work_folder()
1593 uri = uri.split('://')[1]
1594 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
1595 if not m:
1596 raise SyntaxError, "Invalid URI string in DAL"
1597 user = m.group('user')
1598 if not user:
1599 raise SyntaxError, 'User required'
1600 password = m.group('password')
1601 if not password:
1602 password = ''
1603 host = m.group('host')
1604 if not host:
1605 raise SyntaxError, 'Host name required'
1606 db = m.group('db')
1607 if not db:
1608 raise SyntaxError, 'Database name required'
1609 port = m.group('port') or '5432'
1610 sslmode = m.group('sslmode')
1611 if sslmode:
1612 msg = ("dbname='%s' user='%s' host='%s'"
1613 "port=%s password='%s' sslmode='%s'") \
1614 % (db, user, host, port, password, sslmode)
1615 else:
1616 msg = ("dbname='%s' user='%s' host='%s'"
1617 "port=%s password='%s'") \
1618 % (db, user, host, port, password)
1619 def connect(msg=msg,driver_args=driver_args):
1620 return psycopg2.connect(msg,**driver_args)
1621 self.pool_connection(connect)
1622 self.connection.set_client_encoding('UTF8')
1623 self.cursor = self.connection.cursor()
1624 self.execute('BEGIN;')
1625 self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
1626 self.execute("SET standard_conforming_strings=on;")
1627
def lastrowid(self,table):
    """Return the id just assigned by reading the table's sequence."""
    self.execute("select currval('%s')" % table._sequence_name)
    row = self.cursor.fetchone()
    return int(row[0])
1631
def LIKE(self,first,second):
    """Render LIKE with PostgreSQL's case-insensitive ILIKE operator."""
    lhs = self.expand(first)
    rhs = self.expand(second, 'string')
    return '(%s ILIKE %s)' % (lhs, rhs)
1634
def STARTSWITH(self,first,second):
    """Prefix match as a case-insensitive ILIKE 'prefix%'."""
    pattern = second + '%'
    return '(%s ILIKE %s)' % (self.expand(first), self.expand(pattern, 'string'))
1637
def ENDSWITH(self,first,second):
    """Suffix match as a case-insensitive ILIKE '%suffix'."""
    pattern = '%' + second
    return '(%s ILIKE %s)' % (self.expand(first), self.expand(pattern, 'string'))
1640
def CONTAINS(self,first,second):
    """Substring match via ILIKE.

    string/text fields: plain '%value%' (literal % escaped as %%).
    list:* fields: items are stored bar-delimited, so match '%|value|%'
    with '|' escaped as '||'.
    """
    if first.type in ('string', 'text'):
        escaped = str(second).replace('%', '%%')
        key = '%' + escaped + '%'
    elif first.type.startswith('list:'):
        escaped = str(second).replace('|', '||').replace('%', '%%')
        key = '%|' + escaped + '|%'
    return '(%s ILIKE %s)' % (self.expand(first), self.expand(key, 'string'))
1647
1648 -class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
1649
1650 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
1651 credential_decoder=lambda x:x, driver_args={}):
1652 self.db = db
1653 self.dbengine = "postgres"
1654 self.uri = uri
1655 self.pool_size = pool_size
1656 self.folder = folder
1657 self.db_codec = db_codec
1658 self.find_or_make_work_folder()
1659 uri = uri.split('://')[1]
1660 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(uri)
1661 if not m:
1662 raise SyntaxError, "Invalid URI string in DAL"
1663 user = m.group('user')
1664 if not user:
1665 raise SyntaxError, 'User required'
1666 password = m.group('password')
1667 if not password:
1668 password = ''
1669 host = m.group('host')
1670 if not host:
1671 raise SyntaxError, 'Host name required'
1672 db = m.group('db')
1673 if not db:
1674 raise SyntaxError, 'Database name required'
1675 port = m.group('port') or '5432'
1676 msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
1677 def connect(msg=msg,driver_args=driver_args):
1678 return zxJDBC.connect(*msg,**driver_args)
1679 self.pool_connection(connect)
1680 self.connection.set_client_encoding('UTF8')
1681 self.cursor = self.connection.cursor()
1682 self.execute('BEGIN;')
1683 self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
1684
1685
1687 commit_on_alter_table = False
1688 types = {
1689 'boolean': 'CHAR(1)',
1690 'string': 'VARCHAR2(%(length)s)',
1691 'text': 'CLOB',
1692 'password': 'VARCHAR2(%(length)s)',
1693 'blob': 'CLOB',
1694 'upload': 'VARCHAR2(%(length)s)',
1695 'integer': 'INT',
1696 'double': 'FLOAT',
1697 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1698 'date': 'DATE',
1699 'time': 'CHAR(8)',
1700 'datetime': 'DATE',
1701 'id': 'NUMBER PRIMARY KEY',
1702 'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1703 'list:integer': 'CLOB',
1704 'list:string': 'CLOB',
1705 'list:reference': 'CLOB',
1706 }
1707
1709 return '%s_sequence' % tablename
1710
1712 return '%s_trigger' % tablename
1713
1715 return 'LEFT OUTER JOIN'
1716
1718 return 'dbms_random.value'
1719
def NOT_NULL(self,default,field_type):
    """Oracle DDL requires DEFAULT to precede NOT NULL in a column spec."""
    rendered = self.represent(default, field_type)
    return 'DEFAULT %s NOT NULL' % rendered
1722
1723 - def _drop(self,table,mode):
1726
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Emulate LIMIT/OFFSET with Oracle's ROWNUM.

    The inner query caps ROWNUM at lmax; the outer query drops the
    first lmin rows via the synthetic w_row column.
    """
    if not limitby:
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
    (lmin, lmax) = limitby
    if len(sql_w) > 1:
        sql_w_row = sql_w + ' AND w_row > %i' % lmin
    else:
        sql_w_row = 'WHERE w_row > %i' % lmin
    return '%s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
1736
1742
1744 if fieldtype == 'blob':
1745 obj = base64.b64encode(str(obj))
1746 return ":CLOB('%s')" % obj
1747 elif fieldtype == 'date':
1748 if isinstance(obj, (datetime.date, datetime.datetime)):
1749 obj = obj.isoformat()[:10]
1750 else:
1751 obj = str(obj)
1752 return "to_date('%s','yyyy-mm-dd')" % obj
1753 elif fieldtype == 'datetime':
1754 if isinstance(obj, datetime.datetime):
1755 obj = obj.isoformat()[:19].replace('T',' ')
1756 elif isinstance(obj, datetime.date):
1757 obj = obj.isoformat()[:10]+' 00:00:00'
1758 else:
1759 obj = str(obj)
1760 return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
1761 return None
1762
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=lambda x:x, driver_args={}):
    """
    Connect to Oracle via cx_Oracle.

    Everything after 'oracle://' is passed verbatim as the cx_Oracle
    connect string. Session NLS formats are pinned so date/datetime
    round-tripping matches represent().
    """
    self.db = db
    self.dbengine = "oracle"
    self.uri = uri
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self.find_or_make_work_folder()
    uri = uri.split('://')[1]
    # BUGFIX: copy first -- the original wrote 'threaded' into the
    # shared mutable default argument (and the caller's dict).
    driver_args = dict(driver_args)
    if not 'threaded' in driver_args:
        # pooled connections may be used from multiple threads
        driver_args['threaded'] = True
    def connect(uri=uri, driver_args=driver_args):
        return cx_Oracle.connect(uri, **driver_args)
    self.pool_connection(connect)
    self.cursor = self.connection.cursor()
    self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
    self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
1781 oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
1782
1784 args = []
1785 i = 1
1786 while True:
1787 m = self.oracle_fix.match(command)
1788 if not m:
1789 break
1790 command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
1791 args.append(m.group('clob')[6:-2].replace("''", "'"))
1792 i += 1
1793 return self.log_execute(command[:-1], args)
1794
1802
1807
1808
1810 types = {
1811 'boolean': 'BIT',
1812 'string': 'VARCHAR(%(length)s)',
1813 'text': 'TEXT',
1814 'password': 'VARCHAR(%(length)s)',
1815 'blob': 'IMAGE',
1816 'upload': 'VARCHAR(%(length)s)',
1817 'integer': 'INT',
1818 'double': 'FLOAT',
1819 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1820 'date': 'DATETIME',
1821 'time': 'CHAR(8)',
1822 'datetime': 'DATETIME',
1823 'id': 'INT IDENTITY PRIMARY KEY',
1824 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1825 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1826 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
1827 'list:integer': 'TEXT',
1828 'list:string': 'TEXT',
1829 'list:reference': 'TEXT',
1830 }
1831
1833 return "DATEPART('%s' FROM %s)" % (what, self.expand(field))
1834
1836 return 'LEFT OUTER JOIN'
1837
1840
1843
1845 return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
1846
1848 return 'PRIMARY KEY CLUSTERED (%s)' % key
1849
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """MSSQL has no OFFSET here: emit TOP lmax and let rowslice()
    trim the first lmin rows client-side."""
    if limitby:
        (lmin, lmax) = limitby
        sql_s = '%s TOP %i' % (sql_s, lmax)
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
1855
1857 if fieldtype == 'boolean':
1858 if obj and not str(obj)[0].upper() == 'F':
1859 return '1'
1860 else:
1861 return '0'
1862 return None
1863
1864 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
1865 credential_decoder=lambda x:x, driver_args={}, fake_connect=False):
1866 self.db = db
1867 self.dbengine = "mssql"
1868 self.uri = uri
1869 self.pool_size = pool_size
1870 self.folder = folder
1871 self.db_codec = db_codec
1872 self.find_or_make_work_folder()
1873
1874 uri = uri.split('://')[1]
1875 if '@' not in uri:
1876 try:
1877 m = re.compile('^(?P<dsn>.+)$').match(uri)
1878 if not m:
1879 raise SyntaxError, \
1880 'Parsing uri string(%s) has no result' % self.uri
1881 dsn = m.group('dsn')
1882 if not dsn:
1883 raise SyntaxError, 'DSN required'
1884 except SyntaxError, e:
1885 logger.error('NdGpatch error')
1886 raise e
1887 cnxn = 'DSN=%s' % dsn
1888 else:
1889 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$').match(uri)
1890 if not m:
1891 raise SyntaxError, \
1892 "Invalid URI string in DAL: %s" % uri
1893 user = m.group('user')
1894 if not user:
1895 raise SyntaxError, 'User required'
1896 password = m.group('password')
1897 if not password:
1898 password = ''
1899 host = m.group('host')
1900 if not host:
1901 raise SyntaxError, 'Host name required'
1902 db = m.group('db')
1903 if not db:
1904 raise SyntaxError, 'Database name required'
1905 port = m.group('port') or '1433'
1906
1907
1908
1909 argsdict = { 'DRIVER':'{SQL Server}' }
1910 urlargs = m.group('urlargs') or ''
1911 argpattern = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
1912 for argmatch in argpattern.finditer(urlargs):
1913 argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
1914 urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.items()])
1915 cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
1916 % (host, port, db, user, password, urlargs)
1917 def connect(cnxn=cnxn,driver_args=driver_args):
1918 return pyodbc.connect(cnxn,**driver_args)
1919 if not fake_connect:
1920 self.pool_connection(connect)
1921 self.cursor = self.connection.cursor()
1922
1924
1925 self.execute('SELECT SCOPE_IDENTITY();')
1926 return int(self.cursor.fetchone()[0])
1927
1930
def rowslice(self,rows,minimum=0,maximum=None):
    """Slice client-side: TOP cannot express an offset, so the lower
    bound is applied here after the rows arrive."""
    if maximum is None:
        return rows[minimum:]
    return rows[minimum:maximum]
1935
1936
1938 types = {
1939 'boolean': 'CHAR(1)',
1940 'string': 'NVARCHAR(%(length)s)',
1941 'text': 'NTEXT',
1942 'password': 'NVARCHAR(%(length)s)',
1943 'blob': 'IMAGE',
1944 'upload': 'NVARCHAR(%(length)s)',
1945 'integer': 'INT',
1946 'double': 'FLOAT',
1947 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
1948 'date': 'DATETIME',
1949 'time': 'CHAR(8)',
1950 'datetime': 'DATETIME',
1951 'id': 'INT IDENTITY PRIMARY KEY',
1952 'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1953 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1954 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
1955 'list:integer': 'NTEXT',
1956 'list:string': 'NTEXT',
1957 'list:reference': 'NTEXT',
1958 }
1959
1961 value = BaseAdapter.represent(self, obj, fieldtype)
1962 if fieldtype == 'string' or fieldtype == 'text' and value[:1]=="'":
1963 value = 'N'+value
1964 return value
1965
1968
1969
1971
1972 commit_on_alter_table = False
1973 support_distributed_transaction = True
1974 types = {
1975 'boolean': 'CHAR(1)',
1976 'string': 'VARCHAR(%(length)s)',
1977 'text': 'BLOB SUB_TYPE 1',
1978 'password': 'VARCHAR(%(length)s)',
1979 'blob': 'BLOB SUB_TYPE 0',
1980 'upload': 'VARCHAR(%(length)s)',
1981 'integer': 'INTEGER',
1982 'double': 'DOUBLE PRECISION',
1983 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
1984 'date': 'DATE',
1985 'time': 'TIME',
1986 'datetime': 'TIMESTAMP',
1987 'id': 'INTEGER PRIMARY KEY',
1988 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
1989 'list:integer': 'BLOB SUB_TYPE 1',
1990 'list:string': 'BLOB SUB_TYPE 1',
1991 'list:reference': 'BLOB SUB_TYPE 1',
1992 }
1993
1995 return 'genid_%s' % tablename
1996
1998 return 'trg_id_%s' % tablename
1999
2002
def NOT_NULL(self,default,field_type):
    """Firebird column DDL puts DEFAULT ahead of NOT NULL."""
    value = self.represent(default, field_type)
    return 'DEFAULT %s NOT NULL' % value
2005
2007 return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])
2008
2009 - def _drop(self,table,mode):
2012
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Firebird paging: FIRST <count> SKIP <offset> right after SELECT."""
    if limitby:
        (lmin, lmax) = limitby
        sql_s = sql_s + ' FIRST %i SKIP %i' % (lmax - lmin, lmin)
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
2018
2020 return ['DELETE FROM %s;' % table._tablename,
2021 'SET GENERATOR %s TO 0;' % table._sequence_name]
2022
2023 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2024 credential_decoder=lambda x:x, driver_args={}):
2025 self.db = db
2026 self.dbengine = "firebird"
2027 self.uri = uri
2028 self.pool_size = pool_size
2029 self.folder = folder
2030 self.db_codec = db_codec
2031 self.find_or_make_work_folder()
2032 uri = uri.split('://')[1]
2033 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
2034 if not m:
2035 raise SyntaxError, "Invalid URI string in DAL: %s" % uri
2036 user = m.group('user')
2037 if not user:
2038 raise SyntaxError, 'User required'
2039 password = m.group('password')
2040 if not password:
2041 password = ''
2042 host = m.group('host')
2043 if not host:
2044 raise SyntaxError, 'Host name required'
2045 port = int(m.group('port') or 3050)
2046 db = m.group('db')
2047 if not db:
2048 raise SyntaxError, 'Database name required'
2049 charset = m.group('charset') or 'UTF8'
2050 driver_args.update(dict(dsn='%s/%s:%s' % (host,port,db),
2051 user = credential_decoder(user),
2052 password = credential_decoder(password),
2053 charset = charset))
2054 def connect(driver_args=driver_args):
2055 return kinterbasdb.connect(**driver_args)
2056 self.pool_connection(connect)
2057
2058 self.cursor = self.connection.cursor()
2059
2068
2073
2074
2076
2077 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2078 credential_decoder=lambda x:x, driver_args={}):
2079 self.db = db
2080 self.dbengine = "firebird"
2081 self.uri = uri
2082 self.pool_size = pool_size
2083 self.folder = folder
2084 self.db_codec = db_codec
2085 self.find_or_make_work_folder()
2086 uri = uri.split('://')[1]
2087 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
2088 if not m:
2089 raise SyntaxError, \
2090 "Invalid URI string in DAL: %s" % self.uri
2091 user = m.group('user')
2092 if not user:
2093 raise SyntaxError, 'User required'
2094 password = m.group('password')
2095 if not password:
2096 password = ''
2097 pathdb = m.group('path')
2098 if not pathdb:
2099 raise SyntaxError, 'Path required'
2100 charset = m.group('charset')
2101 if not charset:
2102 charset = 'UTF8'
2103 host = ''
2104 driver_args.update(dict(host=host,
2105 database=pathdb,
2106 user=credential_decoder(user),
2107 password=credential_decoder(password),
2108 charset=charset))
2109 def connect(driver_args=driver_args):
2110 return kinterbasdb.connect(**driver_args)
2111 self.pool_connection(connect)
2112 self.cursor = self.connection.cursor()
2113
2114
2208
2213
2216
2219
2220
2222 types = {
2223 'boolean': 'CHAR(1)',
2224 'string': 'VARCHAR(%(length)s)',
2225 'text': 'CLOB',
2226 'password': 'VARCHAR(%(length)s)',
2227 'blob': 'BLOB',
2228 'upload': 'VARCHAR(%(length)s)',
2229 'integer': 'INT',
2230 'double': 'DOUBLE',
2231 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2232 'date': 'DATE',
2233 'time': 'TIME',
2234 'datetime': 'TIMESTAMP',
2235 'id': 'INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
2236 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2237 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2238 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2239 'list:integer': 'CLOB',
2240 'list:string': 'CLOB',
2241 'list:reference': 'CLOB',
2242 }
2243
2245 return 'LEFT OUTER JOIN'
2246
2249
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """DB2 caps the row count with FETCH FIRST; the lower bound of the
    slice is applied later, client-side, in rowslice()."""
    if limitby:
        sql_o += ' FETCH FIRST %i ROWS ONLY' % limitby[1]
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
2255
2257 if fieldtype == 'blob':
2258 obj = base64.b64encode(str(obj))
2259 return "BLOB('%s')" % obj
2260 elif fieldtype == 'datetime':
2261 if isinstance(obj, datetime.datetime):
2262 obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
2263 elif isinstance(obj, datetime.date):
2264 obj = obj.isoformat()[:10]+'-00.00.00'
2265 return "'%s'" % obj
2266 return None
2267
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=lambda x:x, driver_args={}):
    """Connect to DB2 via pyodbc; everything after the first ':' in the
    URI is passed verbatim as the ODBC connection string."""
    self.db = db
    self.dbengine = "db2"
    self.uri = uri
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self.find_or_make_work_folder()
    _, cnxn = uri.split(':', 1)
    def connect(cnxn=cnxn, driver_args=driver_args):
        return pyodbc.connect(cnxn, **driver_args)
    self.pool_connection(connect)
    self.cursor = self.connection.cursor()
2282
2284 if command[-1:]==';':
2285 command = command[:-1]
2286 return self.log_execute(command)
2287
2289 self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
2290 return int(self.cursor.fetchone()[0])
2291
def rowslice(self,rows,minimum=0,maximum=None):
    """Apply the slice lower bound client-side (FETCH FIRST has no offset)."""
    return rows[minimum:] if maximum is None else rows[minimum:maximum]
2296
2297
2298 INGRES_SEQNAME='ii***lineitemsequence'
2299
2300
2301
2303
2304 types = {
2305 'boolean': 'CHAR(1)',
2306 'string': 'VARCHAR(%(length)s)',
2307 'text': 'CLOB',
2308 'password': 'VARCHAR(%(length)s)',
2309 'blob': 'BLOB',
2310 'upload': 'VARCHAR(%(length)s)',
2311 'integer': 'INTEGER4',
2312 'double': 'FLOAT8',
2313 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2314 'date': 'ANSIDATE',
2315 'time': 'TIME WITHOUT TIME ZONE',
2316 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
2317 'id': 'integer4 not null unique with default next value for %s' % INGRES_SEQNAME,
2318 'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2319 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2320 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2321 'list:integer': 'CLOB',
2322 'list:string': 'CLOB',
2323 'list:reference': 'CLOB',
2324 }
2325
2327 return 'LEFT OUTER JOIN'
2328
2331
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Ingres paging: FIRST <count> after SELECT, OFFSET <n> at the end
    (OFFSET requires a reasonably recent Ingres)."""
    if limitby:
        (lmin, lmax) = limitby
        span = lmax - lmin
        if span:
            sql_s += ' FIRST %d ' % (span, )
        if lmin:
            sql_o += ' OFFSET %d' % (lmin, )
    return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
2342
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=lambda x:x, driver_args={}):
    """
    Connect to Ingres via ingresdbi.

    URI form: ingres://database_name (leading slashes stripped);
    connects to the local vnode.
    """
    self.db = db
    self.dbengine = "ingres"
    self.uri = uri
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self.find_or_make_work_folder()
    # BUGFIX: was self._uri, which is never assigned on the adapter
    # (only self.uri is) and raised AttributeError; use the local uri,
    # consistent with every other adapter.
    connstr = uri.split(':', 1)[1]
    # strip 'ingres://...' decoration down to the bare database name
    connstr = connstr.lstrip()
    while connstr.startswith('/'):
        connstr = connstr[1:]
    database_name = connstr
    vnode = '(local)'
    servertype = 'ingres'
    trace = (0, None)
    # copy: never write into the shared mutable default argument
    driver_args = dict(driver_args,
                       database=database_name,
                       vnode=vnode,
                       servertype=servertype,
                       trace=trace)
    def connect(driver_args=driver_args):
        return ingresdbi.connect(**driver_args)
    self.pool_connection(connect)
    self.cursor = self.connection.cursor()
2369
2371
2372
2373
2374 if hasattr(table,'_primarykey'):
2375 modify_tbl_sql = 'modify %s to btree unique on %s' % \
2376 (table._tablename,
2377 ', '.join(["'%s'" % x for x in table.primarykey]))
2378 self.execute(modify_tbl_sql)
2379 else:
2380 tmp_seqname='%s_iisq' % table._tablename
2381 query=query.replace(INGRES_SEQNAME, tmp_seqname)
2382 self.execute('create sequence %s' % tmp_seqname)
2383 self.execute(query)
2384 self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))
2385
2386
2388 tmp_seqname='%s_iisq' % table
2389 self.execute('select current value for %s' % tmp_seqname)
2390 return int(self.cursor.fetchone()[0])
2391
2394
2395
2397 types = {
2398 'boolean': 'CHAR(1)',
2399 'string': 'NVARCHAR(%(length)s)',
2400 'text': 'NCLOB',
2401 'password': 'NVARCHAR(%(length)s)',
2402 'blob': 'BLOB',
2403 'upload': 'VARCHAR(%(length)s)',
2404 'integer': 'INTEGER4',
2405 'double': 'FLOAT8',
2406 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2407 'date': 'ANSIDATE',
2408 'time': 'TIME WITHOUT TIME ZONE',
2409 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
2410 'id': 'integer4 not null unique with default next value for %s'% INGRES_SEQNAME,
2411 'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2412 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2413 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2414 'list:integer': 'NCLOB',
2415 'list:string': 'NCLOB',
2416 'list:reference': 'NCLOB',
2417 }
2418
2420
2421 support_distributed_transaction = False
2422 types = {
2423 'boolean': 'CHAR(1)',
2424 'string': 'VARCHAR(%(length)s)',
2425 'text': 'LONG',
2426 'password': 'VARCHAR(%(length)s)',
2427 'blob': 'LONG',
2428 'upload': 'VARCHAR(%(length)s)',
2429 'integer': 'INT',
2430 'double': 'FLOAT',
2431 'decimal': 'FIXED(%(precision)s,%(scale)s)',
2432 'date': 'DATE',
2433 'time': 'TIME',
2434 'datetime': 'TIMESTAMP',
2435 'id': 'INT PRIMARY KEY',
2436 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2437 'list:integer': 'LONG',
2438 'list:string': 'LONG',
2439 'list:reference': 'LONG',
2440 }
2441
2443 return '%s_id_Seq' % table
2444
def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
    """Build a SELECT; SAP DB has no LIMIT/OFFSET, so limitby is
    emulated with a nested ROWNO subquery."""
    if not limitby:
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
    (lower, upper) = limitby
    # extend an existing WHERE, or start one, with the row-window bound
    if len(sql_w) > 1:
        where_row = '%s AND w_row > %i' % (sql_w, lower)
    else:
        where_row = 'WHERE w_row > %i' % lower
    inner = 'SELECT %s FROM %s%s%s' % (sql_f, sql_t, sql_w, sql_o)
    return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % \
        (sql_s, sql_f, inner, upper, sql_t, where_row, sql_o)
2454
2456
2457 self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
2458 self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
2459 % (table._tablename, table._id.name, table._sequence_name))
2460 self.execute(query)
2461
def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
             credential_decoder=lambda x:x, driver_args={}):
    """Connect to SAP DB/MaxDB.

    URI form: ``sapdb://user:password@host/dbname``; user, host and
    database name are mandatory, password defaults to empty.
    """
    self.db = db
    self.dbengine = "sapdb"
    self.uri = uri
    self.pool_size = pool_size
    self.folder = folder
    self.db_codec = db_codec
    self.find_or_make_work_folder()
    uri = uri.split('://')[1]
    m = re.compile(r'^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
    if not m:
        raise SyntaxError("Invalid URI string in DAL")
    user = m.group('user')
    if not user:
        raise SyntaxError('User required')
    password = m.group('password') or ''
    host = m.group('host')
    if not host:
        raise SyntaxError('Host name required')
    db = m.group('db')
    if not db:
        raise SyntaxError('Database name required')
    def connect(user=user, password=password, database=db,
                host=host, driver_args=driver_args):
        return sapdb.Connection(user, password, database, host, **driver_args)
    self.pool_connection(connect)
    self.cursor = self.connection.cursor()
2492
2494 self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
2495 return int(self.cursor.fetchone()[0])
2496
2497
2498
2499
2500
2502
2504 if db.engine != 'mysql':
2505 raise RuntimeError, "only MySQL can store metadata .table files in database for now"
2506 self.db = db
2507 self.filename = filename
2508 self.mode = mode
2509 self.db.executesql("CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(512), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;")
2510 self.p=0
2511 self.data = ''
2512 if mode in ('r','rw','a'):
2513 query = "SELECT content FROM web2py_filesystem WHERE path='%s'" % filename
2514 rows = self.db.executesql(query)
2515 if rows:
2516 self.data = rows[0][0]
2517 elif os.path.exists(filename):
2518 self.data = open(filename,'r').read()
2519 elif mode in ('r','rw'):
2520 raise RuntimeError, "File %s does not exist" % filename
2521
def read(self, bytes):
    """Return up to *bytes* characters from the current position and
    advance the pointer.  (The parameter name shadows the ``bytes``
    builtin; kept for backward compatibility with keyword callers.)"""
    chunk = self.data[self.p:self.p + bytes]
    self.p += len(chunk)
    return chunk
2526
2528 i = self.data.find('\n',self.p)+1
2529 if i>0:
2530 data, self.p = self.data[self.p:i], i
2531 else:
2532 data, self.p = self.data[self.p:], len(self.data)
2533 return data
2534
2537
2539 self.db.executesql("DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
2540 query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')" % \
2541 (self.filename, self.data.replace("'","''"))
2542 self.db.executesql(query)
2543
2544 @staticmethod
2546 if os.path.exists(filename):
2547 return True
2548 query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
2549 if db.executesql(query):
2550 return True
2551 return False
2552
2553
2555
2558
2559 - def file_open(self, filename, mode='rb', lock=True):
2561
2564
2566 query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
2567 self.db.executesql(query)
2568
2569
2571
2573 if isinstance(obj,CALLABLETYPES):
2574 obj = obj()
2575 if isinstance(fieldtype, SQLCustomType):
2576 return fieldtype.encoder(obj)
2577 if isinstance(obj, (Expression, Field)):
2578 raise SyntaxError, "non supported on GAE"
2579 if 'gae' in globals():
2580 if isinstance(fieldtype, gae.Property):
2581 return obj
2582 if fieldtype.startswith('list:'):
2583 if not obj:
2584 obj = []
2585 if not isinstance(obj, (list, tuple)):
2586 obj = [obj]
2587 if obj == '' and not fieldtype[:2] in ['st','te','pa','up']:
2588 return None
2589 if obj != None:
2590 if isinstance(obj, list) and not fieldtype.startswith('list'):
2591 obj = [self.represent(o, fieldtype) for o in obj]
2592 elif fieldtype in ('integer','id'):
2593 obj = long(obj)
2594 elif fieldtype == 'double':
2595 obj = float(obj)
2596 elif fieldtype.startswith('reference'):
2597 if isinstance(obj, (Row, Reference)):
2598 obj = obj['id']
2599 obj = long(obj)
2600 elif fieldtype == 'boolean':
2601 if obj and not str(obj)[0].upper() == 'F':
2602 obj = True
2603 else:
2604 obj = False
2605 elif fieldtype == 'date':
2606 if not isinstance(obj, datetime.date):
2607 (y, m, d) = [int(x) for x in str(obj).strip().split('-')]
2608 obj = datetime.date(y, m, d)
2609 elif isinstance(obj,datetime.datetime):
2610 (y, m, d) = (obj.year, obj.month, obj.day)
2611 obj = datetime.date(y, m, d)
2612 elif fieldtype == 'time':
2613 if not isinstance(obj, datetime.time):
2614 time_items = [int(x) for x in str(obj).strip().split(':')[:3]]
2615 if len(time_items) == 3:
2616 (h, mi, s) = time_items
2617 else:
2618 (h, mi, s) = time_items + [0]
2619 obj = datetime.time(h, mi, s)
2620 elif fieldtype == 'datetime':
2621 if not isinstance(obj, datetime.datetime):
2622 (y, m, d) = [int(x) for x in str(obj)[:10].strip().split('-')]
2623 time_items = [int(x) for x in str(obj)[11:].strip().split(':')[:3]]
2624 while len(time_items)<3:
2625 time_items.append(0)
2626 (h, mi, s) = time_items
2627 obj = datetime.datetime(y, m, d, h, mi, s)
2628 elif fieldtype == 'blob':
2629 pass
2630 elif fieldtype.startswith('list:string'):
2631 if obj!=None and not isinstance(obj,(list,tuple)):
2632 obj=[obj]
2633 return [str(x) for x in obj]
2634 elif fieldtype.startswith('list:'):
2635 if obj!=None and not isinstance(obj,(list,tuple)):
2636 obj=[obj]
2637 return [int(x) for x in obj]
2638 elif isinstance(obj, str):
2639 obj = obj.decode('utf8')
2640 elif not isinstance(obj, unicode):
2641 obj = unicode(obj)
2642 return obj
2643
2645 return 'insert %s in %s' % (fields, table)
2646
def _count(self, query, distinct=None):
    """Descriptive pseudo-SQL for a count on a NoSQL backend
    (*distinct* is accepted for interface parity but ignored)."""
    return 'count %s' % repr(query)

def _select(self, query, fields, attributes):
    """Descriptive pseudo-SQL for a select."""
    return 'select %s where %s' % (repr(fields), repr(query))

def _delete(self, tablename, query):
    """Descriptive pseudo-SQL for a delete."""
    return 'delete %s where %s' % (repr(tablename), repr(query))

def _update(self, tablename, query, fields):
    """Descriptive pseudo-SQL for an update."""
    return 'update %s (%s) where %s' % (repr(tablename),
                                        repr(fields), repr(query))
2659
2661 """
2662 remember: no transactions on many NoSQL
2663 """
2664 pass
2665
2667 """
2668 remember: no transactions on many NoSQL
2669 """
2670 pass
2671
2673 """
2674 remember: no transactions on many NoSQL
2675 """
2676 pass
2677
2678
2679
# SQL-syntax builders and DB-cursor helpers have no meaning on a NoSQL
# backend: each one fails loudly instead of silently emitting SQL text.

def OR(self, first, second):
    raise SyntaxError("Not supported")

def AND(self, first, second):
    raise SyntaxError("Not supported")

def AS(self, first, second):
    raise SyntaxError("Not supported")

def ON(self, first, second):
    raise SyntaxError("Not supported")

def STARTSWITH(self, first, second=None):
    raise SyntaxError("Not supported")

def ENDSWITH(self, first, second=None):
    raise SyntaxError("Not supported")

def ADD(self, first, second):
    raise SyntaxError("Not supported")

def SUB(self, first, second):
    raise SyntaxError("Not supported")

def MUL(self, first, second):
    raise SyntaxError("Not supported")

def DIV(self, first, second):
    raise SyntaxError("Not supported")

def LOWER(self, first):
    raise SyntaxError("Not supported")

def UPPER(self, first):
    raise SyntaxError("Not supported")

def AGGREGATE(self, first, what):
    raise SyntaxError("Not supported")

def LEFT_JOIN(self):
    raise SyntaxError("Not supported")

def RANDOM(self):
    raise SyntaxError("Not supported")

def SUBSTRING(self, field, parameters):
    raise SyntaxError("Not supported")

def PRIMARY_KEY(self, key):
    raise SyntaxError("Not supported")

def LIKE(self, first, second):
    raise SyntaxError("Not supported")

def drop(self, table, mode):
    raise SyntaxError("Not supported")

def alias(self, table, alias):
    raise SyntaxError("Not supported")

def migrate_table(self, *a, **b):
    raise SyntaxError("Not supported")

def prepare(self, key):
    raise SyntaxError("Not supported")

def concat_add(self, table):
    raise SyntaxError("Not supported")

def constraint_name(self, table, fieldname):
    raise SyntaxError("Not supported")

def log_execute(self, *a, **b):
    raise SyntaxError("Not supported")

def execute(self, *a, **b):
    raise SyntaxError("Not supported")

def lastrowid(self, table):
    raise SyntaxError("Not supported")

def rowslice(self, rows, minimum=0, maximum=None):
    raise SyntaxError("Not supported")
2715
2716
2717 -class GAEF(object):
def __init__(self, name, op, value, apply):
    """One GAE filter: field name, comparison operator, value, and a
    python callable used to apply the same test client-side."""
    # the DAL 'id' field maps to the datastore's special __key__ property
    if name == 'id':
        self.name = '__key__'
    else:
        self.name = name
    self.op = op
    self.value = value
    self.apply = apply
2724 return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
2725
2727 uploads_in_blob = True
2728 types = {}
2729
2731 - def file_open(self, filename, mode='rb', lock=True): pass
2732 - def file_close(self, fileobj, unlock=True): pass
2733
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
             credential_decoder=lambda x:x, driver_args={}):
    """Adapter for the Google App Engine datastore.

    uri is 'gae' or 'gae://<namespace>'; pooling is disabled and the
    codec forced to UTF-8 since GAE manages connections itself.

    NOTE(review): self.types.update(...) mutates the class-level
    ``types`` dict, so the mapping is shared by all instances --
    presumably intentional, since the entries are stateless property
    classes/factories rather than SQL type strings.
    """
    self.types.update({
        'boolean': gae.BooleanProperty,
        'string': (lambda: gae.StringProperty(multiline=True)),
        'text': gae.TextProperty,
        'password': gae.StringProperty,
        'blob': gae.BlobProperty,
        'upload': gae.StringProperty,
        'integer': gae.IntegerProperty,
        'double': gae.FloatProperty,
        'decimal': GAEDecimalProperty,
        'date': gae.DateProperty,
        'time': gae.TimeProperty,
        'datetime': gae.DateTimeProperty,
        'id': None,   # the id is the datastore key, not a stored property
        'reference': gae.IntegerProperty,
        'list:string': (lambda: gae.StringListProperty(default=None)),
        'list:integer': (lambda: gae.ListProperty(int,default=None)),
        'list:reference': (lambda: gae.ListProperty(int,default=None)),
        })
    self.db = db
    self.uri = uri
    self.dbengine = 'gql'
    self.folder = folder
    db['_lastsql'] = ''
    self.db_codec = 'UTF-8'
    self.pool_size = 0
    # anything after 'gae://' is used as the datastore namespace; for a
    # plain 'gae' uri the regex fails to match and no namespace is set
    match = re.compile('(?P<namespace>.+)').match(uri[6:])
    if match:
        namespace_manager.set_namespace(match.group('namespace'))
2765
def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
    """Build the gae.Model subclass backing *table* and store it on
    table._tableobj.

    polymodel: None -> plain gae.Model subclass; True -> PolyModel
    subclass; a Table -> subclass of that table's model (fields already
    defined on the parent are skipped).  Always returns None.
    """
    myfields = {}
    for k in table.fields:
        # inherited fields live on the parent polymodel class
        if isinstance(polymodel,Table) and k in polymodel.fields():
            continue
        field = table[k]
        attr = {}
        if isinstance(field.type, SQLCustomType):
            ftype = self.types[field.type.native or field.type.type](**attr)
        elif isinstance(field.type, gae.Property):
            # a ready-made gae property was supplied directly
            ftype = field.type
        elif field.type.startswith('id'):
            # the id is the datastore key itself; no property is created
            continue
        elif field.type.startswith('decimal'):
            # field.type looks like 'decimal(precision,scale)'
            precision, scale = field.type[7:].strip('()').split(',')
            precision = int(precision)
            scale = int(scale)
            ftype = GAEDecimalProperty(precision, scale, **attr)
        elif field.type.startswith('reference'):
            if field.notnull:
                attr = dict(required=True)
            referenced = field.type[10:].strip()
            ftype = self.types[field.type[:9]](table._db[referenced])
        elif field.type.startswith('list:reference'):
            # must be tested before the generic 'list:' branch below
            if field.notnull:
                attr = dict(required=True)
            # NOTE(review): 'referenced' is computed but unused here --
            # confirm whether the list property should reference it
            referenced = field.type[15:].strip()
            ftype = self.types[field.type[:14]](**attr)
        elif field.type.startswith('list:'):
            ftype = self.types[field.type](**attr)
        elif not field.type in self.types\
             or not self.types[field.type]:
            raise SyntaxError, 'Field: unknown field type: %s' % field.type
        else:
            ftype = self.types[field.type](**attr)
        myfields[field.name] = ftype
    if not polymodel:
        table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
    elif polymodel==True:
        table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
    elif isinstance(polymodel,Table):
        table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
    else:
        raise SyntaxError, "polymodel must be None, True, a table or a tablename"
    return None
2811
def expand(self, expression, field_type=None):
    """Recursively translate a DAL expression into GAE filter data.

    Fields expand to their names, Query/Expression nodes dispatch to
    their operator method, raw values are represented for storage.
    """
    if isinstance(expression, Field):
        # text/blob properties are not indexed by the datastore
        if expression.type in ('text', 'blob'):
            raise SyntaxError('AppEngine does not index by: %s' % expression.type)
        return expression.name
    if isinstance(expression, (Expression, Query)):
        # call the operator with whichever operands are present
        if expression.second is not None:
            return expression.op(expression.first, expression.second)
        if expression.first is not None:
            return expression.op(expression.first)
        return expression.op()
    if field_type:
        return self.represent(expression, field_type)
    if isinstance(expression, (list, tuple)):
        return ','.join(self.represent(item, field_type) for item in expression)
    return str(expression)
2830
2831
2832 - def AND(self,first,second):
2838
def EQ(self,first,second=None):
    """Equality: one GAEF filter; a raw datastore Key passes through
    unconverted, anything else is represented for storage."""
    if isinstance(second, Key):
        return [GAEF(first.name,'=',second,lambda a,b:a==b)]
    return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]

def NE(self,first,second=None):
    """Inequality; for id fields the value becomes a datastore Key."""
    if first.type != 'id':
        return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
    else:
        second = Key.from_path(first._tablename, long(second))
        return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]

def LT(self,first,second=None):
    """Less-than; id values compare as Keys."""
    if first.type != 'id':
        return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
    else:
        second = Key.from_path(first._tablename, long(second))
        return [GAEF(first.name,'<',second,lambda a,b:a<b)]

def LE(self,first,second=None):
    """Less-or-equal; id values compare as Keys."""
    if first.type != 'id':
        return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
    else:
        second = Key.from_path(first._tablename, long(second))
        return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]

def GT(self,first,second=None):
    """Greater-than.  'id > 0' is deliberately NOT converted to a Key:
    select_raw recognizes (__key__ > 0) as "all records" and drops it."""
    if first.type != 'id' or second==0 or second == '0':
        return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
    else:
        second = Key.from_path(first._tablename, long(second))
        return [GAEF(first.name,'>',second,lambda a,b:a>b)]

def GE(self,first,second=None):
    """Greater-or-equal; id values compare as Keys."""
    if first.type != 'id':
        return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
    else:
        second = Key.from_path(first._tablename, long(second))
        return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
2878
2881
2882 - def COMMA(self,first,second):
2884
def BELONGS(self, first, second=None):
    """IN filter; for id fields every value is converted to a Key.
    Only concrete list/tuple value sets are supported on GAE."""
    if not isinstance(second, (list, tuple)):
        raise SyntaxError("Not supported")
    if first.type != 'id':
        represented = self.represent(second, first.type)
        return [GAEF(first.name, 'in', represented, lambda a, b: a in b)]
    keys = [Key.from_path(first._tablename, i) for i in second]
    return [GAEF(first.name, 'in', keys, lambda a, b: a in b)]

def NOT(self, first):
    """Negate a comparison Query by swapping its operator
    (EQ<->NE, LT<->GE, GT<->LE); anything else cannot be negated."""
    nops = {self.EQ: self.NE,
            self.NE: self.EQ,
            self.LT: self.GE,
            self.GT: self.LE,
            self.LE: self.GT,
            self.GE: self.LT}
    if not isinstance(first, Query):
        raise SyntaxError("Not suported")
    nop = nops.get(first.op, None)
    if not nop:
        raise SyntaxError("Not suported %s" % first.op.__name__)
    first.op = nop
    return self.expand(first)
2913
2915 self.db(table.id > 0).delete()
2916
def select_raw(self, query, fields=[], attributes={}):
    """Run *query* against the datastore and return
    (items, tablename, fieldnames).

    items is either a plain list of entities (when the query resolved to
    direct key fetches) or a datastore Query object with filters,
    ordering and limitby applied.  Raises SyntaxError for unsupported
    attributes (left join, groupby) and when no tablename can be found.
    """
    new_fields = []
    for item in fields:
        if isinstance(item, SQLALL):
            new_fields += item.table
        else:
            new_fields.append(item)
    fields = new_fields
    if query:
        tablename = self.get_table(query)
    elif fields:
        tablename = fields[0].tablename
        query = fields[0].table.id > 0
    else:
        raise SyntaxError("Unable to determine a tablename")
    tableobj = self.db[tablename]._tableobj
    items = tableobj.all()
    filters = self.expand(query)
    for filter in filters:
        if filter.name == '__key__' and filter.op == '>' and filter.value == 0:
            # (id > 0) is the DAL idiom for "all records": no filter needed
            continue
        elif filter.name == '__key__' and filter.op == '=':
            if filter.value == 0:
                items = []
            elif isinstance(filter.value, Key):
                # FIX: fetched entity was assigned to 'items', then the
                # stale 'item' variable was tested; bind 'item' first
                item = tableobj.get(filter.value)
                items = (item and [item]) or []
            else:
                item = tableobj.get_by_id(filter.value)
                items = (item and [item]) or []
        elif isinstance(items, list):
            # FIX: was getattr(item, ...) -- 'item' leaked from an earlier
            # loop; each candidate entity 'i' must be tested itself
            items = [i for i in items
                     if filter.apply(getattr(i, filter.name), filter.value)]
        else:
            if filter.name == '__key__':
                items.order('__key__')
            items = items.filter('%s %s' % (filter.name, filter.op), filter.value)
    if not isinstance(items, list):
        if attributes.get('left', None):
            raise SyntaxError('Set: no left join in appengine')
        if attributes.get('groupby', None):
            raise SyntaxError('Set: no groupby in appengine')
        orderby = attributes.get('orderby', False)
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if isinstance(orderby, Expression):
                orderby = self.expand(orderby)
            orders = orderby.split(', ')
            for order in orders:
                # orderings on 'id' become key orderings
                order = {'-id': '-__key__', 'id': '__key__'}.get(order, order)
                items = items.order(order)
        if attributes.get('limitby', None):
            (lmin, lmax) = attributes['limitby']
            (limit, offset) = (lmax - lmin, lmin)
            items = items.fetch(limit, offset=offset)
    fields = self.db[tablename].fields
    return (items, tablename, fields)
2975
def select(self, query, fields, attributes):
    """Execute select_raw and convert the datastore entities to Rows."""
    (items, tablename, fields) = self.select_raw(query, fields, attributes)
    rows = []
    for item in items:
        # 'id' comes from the entity key; other columns are attributes
        row = [t == 'id' and int(item.key().id()) or getattr(item, t)
               for t in fields]
        rows.append(row)
    colnames = ['%s.%s' % (tablename, t) for t in fields]
    return self.parse(rows, colnames, False)


def count(self, query, distinct=None):
    """Count matching records; DISTINCT is not available on GAE."""
    if distinct:
        raise RuntimeError("COUNT DISTINCT not supported")
    (items, tablename, fields) = self.select_raw(query)
    try:
        return len(items)
    except TypeError:
        # items is a datastore Query object, not a list
        return items.count(limit=None)


def delete(self, tablename, query):
    """Delete matching records in batches of 1000.

    Batched because GAE refuses to delete more than 1000 entities per
    call (see googleappengine issue 3119; change of 2010-05-04).
    """
    (items, tablename, fields) = self.select_raw(query)
    if isinstance(items, list):
        counter = len(items)
        gae.delete(items)
    else:
        counter = items.count(limit=None)
        leftitems = items.fetch(1000)
        while len(leftitems):
            gae.delete(leftitems)
            leftitems = items.fetch(1000)
    return counter


def update(self, tablename, query, update_fields):
    """Apply (field, value) pairs to every matching record; returns the
    number of records updated."""
    (items, tablename, fields) = self.select_raw(query)
    counter = 0
    for item in items:
        for field, value in update_fields:
            setattr(item, field.name, self.represent(value, field.type))
        item.put()
        counter += 1
    logger.info(str(counter))
    return counter


def insert(self, table, fields):
    """Store one new entity; return its integer id wrapped in a
    Reference bound to *table*."""
    dfields = dict((f.name, self.represent(v, f.type)) for f, v in fields)
    tmp = table._tableobj(**dfields)
    tmp.put()
    rid = Reference(tmp.key().id())
    (rid._table, rid._record) = (table, None)
    return rid
3036
3038 parsed_items = []
3039 for item in items:
3040 dfields=dict((f.name,self.represent(v,f.type)) for f,v in item)
3041 parsed_items.append(table._tableobj(**dfields))
3042 gae.put(parsed_items)
3043 return True
3044
3045 try:
3046 import couchdb
3047 drivers.append('CouchDB')
3048 except ImportError:
3049 logger.debug('no couchdb driver')
3050
3052 n=0
3053 for c in uuid: n=n*16+'0123456789abcdef'.find(c)
3054 return n
3055
3057 uuid=''
3058 while(n):
3059 n,i = divmod(n,16)
3060 uuid = '0123456789abcdef'[i]+uuid
3061 return uuid
3062
3064 uploads_in_blob = True
3065 types = {
3066 'boolean': bool,
3067 'string': str,
3068 'text': str,
3069 'password': str,
3070 'blob': str,
3071 'upload': str,
3072 'integer': long,
3073 'double': float,
3074 'date': datetime.date,
3075 'time': datetime.time,
3076 'datetime': datetime.datetime,
3077 'id': long,
3078 'reference': long,
3079 'list:string': list,
3080 'list:integer': list,
3081 'list:reference': list,
3082 }
3083
3085 - def file_open(self, filename, mode='rb', lock=True): pass
3086 - def file_close(self, fileobj, unlock=True): pass
3087
def expand(self, expression, field_type=None):
    """As BaseAdapter.expand, except the DAL 'id' field maps onto
    CouchDB's built-in _id document attribute."""
    if isinstance(expression, Field) and expression.type == 'id':
        return "%s._id" % expression.tablename
    return BaseAdapter.expand(self, expression, field_type)
3093
3094 - def AND(self,first,second):
3096
3097 - def OR(self,first,second):
3099
3100 - def EQ(self,first,second):
3104
3105 - def NE(self,first,second):
3109
3110 - def COMMA(self,first,second):
3112
3114 value = NoSQLAdapter.represent(self, obj, fieldtype)
3115 if fieldtype=='id':
3116 return repr(str(int(value)))
3117 return repr(not isinstance(value,unicode) and value or value.encode('utf8'))
3118
def __init__(self, db, uri='couchdb://127.0.0.1:5984',
             pool_size=0, folder=None, db_codec='UTF-8',
             credential_decoder=lambda x:x, driver_args={}):
    """Connect to a CouchDB server; everything after 'couchdb://' is
    used as host:port of the HTTP endpoint."""
    self.db = db
    self.uri = uri
    self.dbengine = 'couchdb'
    self.folder = folder
    db['_lastsql'] = ''
    self.db_codec = 'UTF-8'
    self.pool_size = pool_size
    url = 'http://' + uri[10:]
    def connect(url=url, driver_args=driver_args):
        return couchdb.Server(url, **driver_args)
    self.pool_connection(connect)
3134
def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
    """Create the CouchDB database backing *table* (one database per
    table).  Best-effort: creation errors (typically 'database already
    exists') are ignored."""
    if migrate:
        try:
            self.connection.create(table._tablename)
        except Exception:
            # was a bare 'except:' -- keep the deliberate best-effort
            # semantics but stop swallowing SystemExit/KeyboardInterrupt
            pass
3141
3142 - def insert(self,table,fields):
3149
def _select(self, query, fields, attributes):
    """Build the CouchDB javascript map function for a select.

    Returns (fn, colnames) where fn is the map-function source emitting
    [field values] keyed by document _id, and colnames is the matching
    'tablename.field' list.  Raises SyntaxError for non-Query input or
    unknown select attributes.
    """
    if not isinstance(query, Query):
        raise SyntaxError("Not Supported")
    for key in set(attributes.keys()) - set(('orderby', 'groupby', 'limitby',
                                             'required', 'cache', 'left',
                                             'distinct', 'having')):
        raise SyntaxError('invalid select attribute: %s' % key)
    new_fields = []
    for item in fields:
        if isinstance(item, SQLALL):
            new_fields += item.table
        else:
            new_fields.append(item)
    def uid(fd):
        # the DAL 'id' field is stored as CouchDB's _id
        return fd == 'id' and '_id' or fd
    # (removed an unused nested helper 'get' that was never referenced)
    fields = new_fields
    tablename = self.get_table(query)
    fieldnames = [f.name for f in (fields or self.db[tablename])]
    colnames = ['%s.%s' % (tablename, k) for k in fieldnames]
    fields = ','.join(['%s.%s' % (tablename, uid(f)) for f in fieldnames])
    fn = "function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);}" % \
        dict(t=tablename,
             query=self.expand(query),
             order='%s._id' % tablename,
             fields=fields)
    return fn, colnames
3178
def select(self, query, fields, attributes):
    """Run the generated map function and parse results into Rows."""
    if not isinstance(query, Query):
        raise SyntaxError("Not Supported")
    map_fn, colnames = self._select(query, fields, attributes)
    tablename = colnames[0].split('.')[0]
    ctable = self.connection[tablename]
    rows = [result['value'] for result in ctable.query(map_fn)]
    return self.parse(rows, colnames, False)


def delete(self, tablename, query):
    """Delete matching documents.  A plain 'id == value' query deletes
    one document directly; anything else selects the ids first."""
    if not isinstance(query, Query):
        raise SyntaxError("Not Supported")
    if query.first.type == 'id' and query.op == self.EQ:
        # fast path: delete by document id
        id = query.second
        tablename = query.first.tablename
        assert(tablename == query.first.tablename)
        ctable = self.connection[tablename]
        try:
            del ctable[str(id)]
            return 1
        except couchdb.http.ResourceNotFound:
            return 0
    tablename = self.get_table(query)
    rows = self.select(query, [self.db[tablename].id], {})
    ctable = self.connection[tablename]
    for row in rows:
        del ctable[str(row.id)]
    return len(rows)


def update(self, tablename, query, fields):
    """Update matching documents with (field, value) pairs; a plain
    'id == value' query updates one document directly."""
    if not isinstance(query, Query):
        raise SyntaxError("Not Supported")
    if query.first.type == 'id' and query.op == self.EQ:
        id = query.second
        tablename = query.first.tablename
        ctable = self.connection[tablename]
        try:
            doc = ctable[str(id)]
            for key, value in fields:
                doc[key.name] = NoSQLAdapter.represent(self, value, self.db[tablename][key.name].type)
            ctable.save(doc)
            return 1
        except couchdb.http.ResourceNotFound:
            return 0
    tablename = self.get_table(query)
    rows = self.select(query, [self.db[tablename].id], {})
    ctable = self.connection[tablename]
    table = self.db[tablename]
    for row in rows:
        doc = ctable[str(row.id)]
        for key, value in fields:
            doc[key.name] = NoSQLAdapter.represent(self, value, table[key.name].type)
        ctable.save(doc)
    return len(rows)


def count(self, query, distinct=None):
    """Count matching documents by selecting their ids."""
    if distinct:
        raise RuntimeError("COUNT DISTINCT not supported")
    if not isinstance(query, Query):
        raise SyntaxError("Not Supported")
    tablename = self.get_table(query)
    rows = self.select(query, [self.db[tablename].id], {})
    return len(rows)
3244
3246 """
3247 validates that the given text is clean: only contains [0-9a-zA-Z_]
3248 """
3249
3250 if re.compile('[^0-9a-zA-Z_]').findall(text):
3251 raise SyntaxError, \
3252 'only [0-9a-zA-Z_] allowed in table and field names, received %s' \
3253 % text
3254 return text
3255
3256
try:
    import pymongo
    drivers.append('mongoDB')
except ImportError:
    # narrowed from a bare 'except:' to match the couchdb import guard;
    # only a missing driver is expected here
    logger.debug('no mongoDB driver')
3262
3264 uploads_in_blob = True
3265 types = {
3266 'boolean': bool,
3267 'string': str,
3268 'text': str,
3269 'password': str,
3270 'blob': str,
3271 'upload': str,
3272 'integer': long,
3273 'double': float,
3274 'date': datetime.date,
3275 'time': datetime.time,
3276 'datetime': datetime.datetime,
3277 'id': long,
3278 'reference': long,
3279 'list:string': list,
3280 'list:integer': list,
3281 'list:reference': list,
3282 }
3283
def __init__(self, db, uri='mongodb://127.0.0.1:5984/db',
             pool_size=0, folder=None, db_codec='UTF-8',
             credential_decoder=lambda x:x, driver_args={}):
    """Connect to MongoDB.  URI form: mongodb://host[:port]/dbname
    (port defaults to 27017)."""
    self.db = db
    self.uri = uri
    self.dbengine = 'mongodb'
    self.folder = folder
    db['_lastsql'] = ''
    self.db_codec = 'UTF-8'
    self.pool_size = pool_size
    # FIX: parse self.uri (assigned above); self._uri was never set
    m = re.compile(r'^(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(self.uri[10:])
    if not m:
        raise SyntaxError("Invalid URI string in DAL: %s" % self.uri)
    host = m.group('host')
    if not host:
        raise SyntaxError('mongodb: host name required')
    dbname = m.group('db')
    if not dbname:
        raise SyntaxError('mongodb: db name required')
    # FIX: regex groups are strings; pymongo expects an integer port
    port = int(m.group('port') or 27017)
    # FIX: copy instead of driver_args.update(...) so the shared mutable
    # default dict (and any caller dict) is not mutated
    driver_args = dict(driver_args, host=host, port=port)
    def connect(dbname=dbname, driver_args=driver_args):
        return pymongo.Connection(**driver_args)[dbname]
    self.pool_connection(connect)
3309
3310 - def insert(self,table,fields):
3315
3316
3318 raise RuntimeError, "Not implemented"
3319
# CRUD operations not yet implemented for MongoDB: fail loudly.

def select(self, query, fields, attributes):
    raise RuntimeError("Not implemented")

def delete(self, tablename, query):
    raise RuntimeError("Not implemented")

def update(self, tablename, query, fields):
    raise RuntimeError("Not implemented")
3328
3329
3330
3331
3332
3333
# Map of URI scheme -> adapter class, used to pick the backend when a
# DAL connection string is parsed.
ADAPTERS = {
    'sqlite': SQLiteAdapter,
    'sqlite:memory': SQLiteAdapter,
    'mysql': MySQLAdapter,
    'postgres': PostgreSQLAdapter,
    'oracle': OracleAdapter,
    'mssql': MSSQLAdapter,
    'mssql2': MSSQL2Adapter,
    'db2': DB2Adapter,
    'informix': InformixAdapter,
    'firebird': FireBirdAdapter,
    'firebird_embedded': FireBirdAdapter,
    'ingres': IngresAdapter,
    'ingresu': IngresUnicodeAdapter,
    'sapdb': SAPDBAdapter,
    'jdbc:sqlite': JDBCSQLiteAdapter,
    'jdbc:sqlite:memory': JDBCSQLiteAdapter,
    'jdbc:postgres': JDBCPostgreSQLAdapter,
    'gae': GAENoSQLAdapter,
    'couchdb': CouchDBAdapter,
    # NOTE(review): 'mongodb' is mapped to CouchDBAdapter -- this looks
    # like a placeholder/copy-paste; confirm it should point at the
    # MongoDB adapter class once that adapter is complete
    'mongodb': CouchDBAdapter,
    }
3356
3357
def sqlhtml_validators(field):
    """
    Field type validation, using web2py's validators mechanism.

    Makes sure the content of a field is in line with the declared
    fieldtype.  Returns a validator, or a (possibly empty) list of them.
    """
    if not have_validators:
        return []
    field_type, field_length = field.type, field.length
    if isinstance(field_type, SQLCustomType):
        # a custom type may carry its own validator; otherwise fall back
        # to validating against its underlying native type
        if hasattr(field_type, 'validator'):
            return field_type.validator
        else:
            field_type = field_type.type
    elif not isinstance(field_type, str):
        return []
    requires = []

    def ff(r, id):
        # render a referenced record using the referenced table's _format
        # (string template or callable); fall back to the raw id
        row = r(id)
        if not row:
            return id
        elif hasattr(r, '_format') and isinstance(r._format, str):
            return r._format % row
        elif hasattr(r, '_format') and callable(r._format):
            return r._format(row)
        else:
            return id

    if field_type == 'string':
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'text':
        requires.append(validators.IS_LENGTH(2 ** 16))
    elif field_type == 'password':
        requires.append(validators.IS_LENGTH(field_length))
    elif field_type == 'double':
        requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
    elif field_type == 'integer':
        requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
    elif field_type.startswith('decimal'):
        requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
    elif field_type == 'date':
        requires.append(validators.IS_DATE())
    elif field_type == 'time':
        requires.append(validators.IS_TIME())
    elif field_type == 'datetime':
        requires.append(validators.IS_DATETIME())
    elif field.db and field_type.startswith('reference') and \
            field_type.find('.') < 0 and \
            field_type[10:] in field.db.tables:
        referenced = field.db[field_type[10:]]
        def repr_ref(id, r=referenced, f=ff): return f(r, id)
        field.represent = field.represent or repr_ref
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(field.db, referenced.id,
                                           referenced._format)
        if field.unique:
            requires._and = validators.IS_NOT_IN_DB(field.db, field)
        # self-reference: allow empty so the first record can be inserted
        if field.tablename == field_type[10:]:
            return validators.IS_EMPTY_OR(requires)
        return requires
    elif field.db and field_type.startswith('list:reference') and \
            field_type.find('.') < 0 and \
            field_type[15:] in field.db.tables:
        referenced = field.db[field_type[15:]]
        def list_ref_repr(ids, r=referenced, f=ff):
            if not ids:
                return None
            refs = r._db(r.id.belongs(ids)).select(r.id)
            return (refs and ', '.join(str(f(r,ref.id)) for ref in refs) or '')
        field.represent = field.represent or list_ref_repr
        if hasattr(referenced, '_format') and referenced._format:
            requires = validators.IS_IN_DB(field.db, referenced.id,
                                           referenced._format, multiple=True)
        else:
            requires = validators.IS_IN_DB(field.db, referenced.id,
                                           multiple=True)
        if field.unique:
            requires._and = validators.IS_NOT_IN_DB(field.db, field)
        return requires
    elif field_type.startswith('list:'):
        def repr_list(values): return ', '.join(str(v) for v in (values or []))
        field.represent = field.represent or repr_list
    if field.unique:
        requires.insert(0, validators.IS_NOT_IN_DB(field.db, field))
    # two-letter prefixes of types that may legitimately be empty:
    # in(teger), do(uble), da(te/tetime), ti(me), de(cimal), bo(olean)
    sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
    if field.notnull and not field_type[:2] in sff:
        requires.insert(0, validators.IS_NOT_EMPTY())
    elif not field.notnull and field_type[:2] in sff and requires:
        requires[-1] = validators.IS_EMPTY_OR(requires[-1])
    return requires
3448
3449
def bar_escape(item):
    """Escape '|' characters so a value can live in a |-separated list."""
    return str(item).replace('|', '||')
3452
def bar_decode_integer(value):
    """Decode a |-separated string of integers into a list of ints."""
    return [int(chunk) for chunk in value.split('|') if chunk.strip()]
3458
3461
3462
3464
3465 """
3466 a dictionary that lets you do d['a'] as well as d.a
3467 this is only used to store a Row
3468 """
3469
3471 key=str(key)
3472 if key in self.get('_extra',{}):
3473 return self._extra[key]
3474 return dict.__getitem__(self, key)
3475
3477 return self.__getitem(self,key)
3478
3481
3484
3487
3489 return '<Row ' + dict.__repr__(self) + '>'
3490
3493
3495 try:
3496 return self.as_dict() == other.as_dict()
3497 except AttributeError:
3498 return False
3499
3501 return not (self == other)
3502
3504 return Row(dict(self))
3505
3506 - def as_dict(self,datetime_to_str=False):
3507 SERIALIZABLE_TYPES = (str,unicode,int,long,float,bool,list)
3508 d = dict(self)
3509 for k in copy.copy(d.keys()):
3510 v=d[k]
3511 if d[k] is None:
3512 continue
3513 elif isinstance(v,Row):
3514 d[k]=v.as_dict()
3515 elif isinstance(v,Reference):
3516 d[k]=int(v)
3517 elif isinstance(v,decimal.Decimal):
3518 d[k]=float(v)
3519 elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
3520 if datetime_to_str:
3521 d[k] = v.isoformat().replace('T',' ')[:19]
3522 elif not isinstance(v,SERIALIZABLE_TYPES):
3523 del d[k]
3524 return d
3525
3526
def Row_unpickler(data):
    """Rebuild a Row from its cPickle'd dict payload (see Row_pickler)."""
    return Row(cPickle.loads(data))
3529
3532
# register pickle support so Row instances serialize via Row_pickler
3533 copy_reg.pickle(Row, Row_pickler, Row_unpickler)
3534
3535
3536
3537
3538
3539
3540
3544
3545
3547
3548 """
3549 an instance of this class represents a database connection
3550
3551 Example::
3552
3553 db = DAL('sqlite://test.db')
3554 db.define_table('tablename', Field('fieldname1'),
3555 Field('fieldname2'))
3556 """
3557
3558 @staticmethod
3560 """
3561 # ## this allows gluon to set a folder for this thread
3562 # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
3563 """
3564 BaseAdapter.set_folder(folder)
3565
3566 @staticmethod
3579
3580 @staticmethod
3602
3603 - def __init__(self, uri='sqlite://dummy.db', pool_size=0, folder=None,
3604 db_codec='UTF-8', check_reserved=None,
3605 migrate=True, fake_migrate=False,
3606 decode_credentials=False, driver_args=None):
3607 """
3608 Creates a new Database Abstraction Layer instance.
3609
3610 Keyword arguments:
3611
3612 :uri: string that contains information for connecting to a database.
3613 (default: 'sqlite://dummy.db')
3614 :pool_size: How many open connections to make to the database object.
3615 :folder: <please update me>
3616 :db_codec: string encoding of the database (default: 'UTF-8')
3617 :check_reserved: list of adapters to check tablenames and column names
3618 against sql reserved keywords. (Default None)
3619
3620 * 'common' List of sql keywords that are common to all database types
3621 such as "SELECT, INSERT". (recommended)
3622 * 'all' Checks against all known SQL keywords. (not recommended)
3623 <adaptername> Checks against the specific adapters list of keywords
3624 (recommended)
3625 * '<adaptername>_nonreserved' Checks against the specific adapters
3626 list of nonreserved keywords. (if available)
3627 """
3628
3629 if not decode_credentials:
3630 credential_decoder = lambda cred: cred
3631 else:
3632 credential_decoder = lambda cred: urllib.unquote(cred)
3633 if folder:
3634 self.set_folder(folder)
3635 self._uri = uri
3636 self._pool_size = pool_size
3637 self._db_codec = db_codec
3638 self._lastsql = ''
3639 if uri:
3640 uris = isinstance(uri,(list,tuple)) and uri or [uri]
3641 error = ''
3642 connected = False
3643 for k in range(5):
3644 for uri in uris:
3645 try:
3646 if is_jdbc and not uri.startswith('jdbc:'):
3647 uri = 'jdbc:'+uri
3648 self._dbname = regex_dbname.match(uri).group()
3649 if not self._dbname in ADAPTERS:
3650 raise SyntaxError, "Error in URI '%s' or database not supported" % self._dbname
3651
3652 args = (self,uri,pool_size,folder,db_codec,credential_decoder,driver_args or {})
3653 self._adapter = ADAPTERS[self._dbname](*args)
3654 connected = True
3655 break
3656 except SyntaxError:
3657 raise
3658 except Exception, error:
3659 pass
3660 if connected:
3661 break
3662 else:
3663 time.sleep(1)
3664 if not connected:
3665 raise RuntimeError, "Failure to connect, tried 5 times:\n%s" % error
3666 else:
3667 args = (self,'None',0,folder,db_codec)
3668 self._adapter = BaseAdapter(*args)
3669 migrate = fake_migrate = False
3670 self.tables = SQLCallableList()
3671 self.check_reserved = check_reserved
3672 if self.check_reserved:
3673 from reserved_sql_keywords import ADAPTERS as RSK
3674 self.RSK = RSK
3675 self._migrate = migrate
3676 self._fake_migrate = fake_migrate
3677
3679 """
3680 Validates ``name`` against SQL keywords
3681 Uses self.check_reserve which is a list of
3682 operators to use.
3683 self.check_reserved
3684 ['common', 'postgres', 'mysql']
3685 self.check_reserved
3686 ['all']
3687 """
3688 for backend in self.check_reserved:
3689 if name.upper() in self.RSK[backend]:
3690 raise SyntaxError, 'invalid table/column name "%s" is a "%s" reserved SQL keyword' % (name, backend.upper())
3691
3693 if self.has_key(tablename):
3694 return True
3695 else:
3696 return False
3697
3698 - def parse_as_rest(self,patterns,args,vars,query=None,nested_select=True):
3699 """
3700 EXAMPLE:
3701
3702 db.define_table('person',Field('name'),Field('info'))
3703 db.define_table('pet',Field('person',db.person),Field('name'),Field('info'))
3704
3705 @request.restful()
3706 def index():
3707 def GET(*args,**vars):
3708 patterns = [
3709 "/persons[person]",
3710 "/{person.name.startswith}",
3711 "/{person.name}/:field",
3712 "/{person.name}/pets[pet.person]",
3713 "/{person.name}/pet[pet.person]/{pet.name}",
3714 "/{person.name}/pet[pet.person]/{pet.name}/:field"
3715 ]
3716 parser = db.parse_as_rest(patterns,args,vars)
3717 if parser.status == 200:
3718 return dict(content=parser.response)
3719 else:
3720 raise HTTP(parser.status,parser.error)
3721 def POST(table_name,**vars):
3722 if table_name == 'person':
3723 return db.person.validate_and_insert(**vars)
3724 elif table_name == 'pet':
3725 return db.pet.validate_and_insert(**vars)
3726 else:
3727 raise HTTP(400)
3728 return locals()
3729 """
3730
3731 db = self
3732 re1 = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
3733 re2 = re.compile('^.+\[.+\]$')
3734
3735 def auto_table(table,base='',depth=0):
3736 patterns = []
3737 for field in db[table].fields:
3738 if base:
3739 tag = '%s/%s' % (base,field.replace('_','-'))
3740 else:
3741 tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
3742 f = db[table][field]
3743 if not f.readable: continue
3744 if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
3745 tag += '/{%s.%s}' % (table,field)
3746 patterns.append(tag)
3747 patterns.append(tag+'/:field')
3748 elif f.type.startswith('boolean'):
3749 tag += '/{%s.%s}' % (table,field)
3750 patterns.append(tag)
3751 patterns.append(tag+'/:field')
3752 elif f.type.startswith('double') or f.type.startswith('integer'):
3753 tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
3754 patterns.append(tag)
3755 patterns.append(tag+'/:field')
3756 elif f.type.startswith('list:'):
3757 tag += '/{%s.%s.contains}' % (table,field)
3758 patterns.append(tag)
3759 patterns.append(tag+'/:field')
3760 elif f.type in ('date','datetime'):
3761 tag+= '/{%s.%s.year}' % (table,field)
3762 patterns.append(tag)
3763 patterns.append(tag+'/:field')
3764 tag+='/{%s.%s.month}' % (table,field)
3765 patterns.append(tag)
3766 patterns.append(tag+'/:field')
3767 tag+='/{%s.%s.day}' % (table,field)
3768 patterns.append(tag)
3769 patterns.append(tag+'/:field')
3770 if f.type in ('datetime','time'):
3771 tag+= '/{%s.%s.hour}' % (table,field)
3772 patterns.append(tag)
3773 patterns.append(tag+'/:field')
3774 tag+='/{%s.%s.minute}' % (table,field)
3775 patterns.append(tag)
3776 patterns.append(tag+'/:field')
3777 tag+='/{%s.%s.second}' % (table,field)
3778 patterns.append(tag)
3779 patterns.append(tag+'/:field')
3780 if depth>0:
3781 for rtable,rfield in db[table]._referenced_by:
3782 tag+='/%s[%s.%s]' % (rtable,rtable,rfield)
3783 patterns.append(tag)
3784 patterns += auto_table(rtable,base=tag,depth=depth-1)
3785 return patterns
3786
3787 if patterns=='auto':
3788 patterns=[]
3789 for table in db.tables:
3790 if not table.startswith('auth_'):
3791 patterns += auto_table(table,base='',depth=1)
3792 else:
3793 i = 0
3794 while i<len(patterns):
3795 pattern = patterns[i]
3796 tokens = pattern.split('/')
3797 if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
3798 new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],'/'.join(tokens[:-1]))
3799 patterns = patterns[:i]+new_patterns+patterns[i+1:]
3800 i += len(new_patterns)
3801 else:
3802 i += 1
3803 if '/'.join(args) == 'patterns':
3804 return Row({'status':200,'pattern':'list',
3805 'error':None,'response':patterns})
3806 for pattern in patterns:
3807 otable=table=None
3808 dbset=db(query)
3809 i=0
3810 tags = pattern[1:].split('/')
3811
3812 if len(tags)!=len(args):
3813 continue
3814 for tag in tags:
3815
3816 if re1.match(tag):
3817
3818 tokens = tag[1:-1].split('.')
3819 table, field = tokens[0], tokens[1]
3820 if not otable or table == otable:
3821 if len(tokens)==2 or tokens[2]=='eq':
3822 query = db[table][field]==args[i]
3823 elif tokens[2]=='ne':
3824 query = db[table][field]!=args[i]
3825 elif tokens[2]=='lt':
3826 query = db[table][field]<args[i]
3827 elif tokens[2]=='gt':
3828 query = db[table][field]>args[i]
3829 elif tokens[2]=='ge':
3830 query = db[table][field]>=args[i]
3831 elif tokens[2]=='le':
3832 query = db[table][field]<=args[i]
3833 elif tokens[2]=='year':
3834 query = db[table][field].year()==args[i]
3835 elif tokens[2]=='month':
3836 query = db[table][field].month()==args[i]
3837 elif tokens[2]=='day':
3838 query = db[table][field].day()==args[i]
3839 elif tokens[2]=='hour':
3840 query = db[table][field].hour()==args[i]
3841 elif tokens[2]=='minute':
3842 query = db[table][field].minutes()==args[i]
3843 elif tokens[2]=='second':
3844 query = db[table][field].seconds()==args[i]
3845 elif tokens[2]=='startswith':
3846 query = db[table][field].startswith(args[i])
3847 elif tokens[2]=='contains':
3848 query = db[table][field].contains(args[i])
3849 else:
3850 raise RuntimeError, "invalid pattern: %s" % pattern
3851 if len(tokens)==4 and tokens[3]=='not':
3852 query = ~query
3853 elif len(tokens)>=4:
3854 raise RuntimeError, "invalid pattern: %s" % pattern
3855 dbset=dbset(query)
3856 else:
3857 raise RuntimeError, "missing relation in pattern: %s" % pattern
3858 elif otable and re2.match(tag) and args[i]==tag[:tag.find('[')]:
3859
3860 ref = tag[tag.find('[')+1:-1]
3861 if '.' in ref:
3862 table,field = ref.split('.')
3863
3864 if nested_select:
3865 try:
3866 dbset=db(db[table][field].belongs(dbset._select(db[otable]._id)))
3867 except ValueError:
3868 return Row({'status':400,'pattern':pattern,
3869 'error':'invalid path','response':None})
3870 else:
3871 items = [item.id for item in dbset.select(db[otable]._id)]
3872 dbset=db(db[table][field].belongs(items))
3873 else:
3874 dbset=dbset(db[ref])
3875 elif tag==':field' and table:
3876
3877 field = args[i]
3878 if not field in db[table]: break
3879 try:
3880 item = dbset.select(db[table][field],limitby=(0,1)).first()
3881 except ValueError:
3882 return Row({'status':400,'pattern':pattern,
3883 'error':'invalid path','response':None})
3884 if not item:
3885 return Row({'status':404,'pattern':pattern,
3886 'error':'record not found','response':None})
3887 else:
3888 return Row({'status':200,'response':item[field],
3889 'pattern':pattern})
3890 elif tag != args[i]:
3891 break
3892 otable = table
3893 i += 1
3894 if i==len(tags) and table:
3895 otable,ofield = vars.get('order','%s.%s' % (table,field)).split('.',1)
3896 try:
3897 if otable[:1]=='~': orderby = ~db[otable[1:]][ofield]
3898 else: orderby = db[otable][ofield]
3899 except KeyError:
3900 return Row({'status':400,'error':'invalid orderby','response':None})
3901 fields = [field for field in db[table] if field.readable]
3902 count = dbset.count()
3903 try:
3904 limits = (int(vars.get('min',0)),int(vars.get('max',1000)))
3905 if limits[0]<0 or limits[1]<limits[0]: raise ValueError
3906 except ValueError:
3907 Row({'status':400,'error':'invalid limits','response':None})
3908 if count > limits[1]-limits[0]:
3909 Row({'status':400,'error':'too many records','response':None})
3910 try:
3911 response = dbset.select(limitby=limits,orderby=orderby,*fields)
3912 except ValueError:
3913 return Row({'status':400,'pattern':pattern,
3914 'error':'invalid path','response':None})
3915 return Row({'status':200,'response':response,'pattern':pattern})
3916 return Row({'status':400,'error':'no mathcing pattern','response':None})
3917
3918
3919 - def define_table(
3920 self,
3921 tablename,
3922 *fields,
3923 **args
3924 ):
3925
3926 for key in args:
3927 if key not in [
3928 'migrate',
3929 'primarykey',
3930 'fake_migrate',
3931 'format',
3932 'trigger_name',
3933 'sequence_name']:
3934 raise SyntaxError, 'invalid table "%s" attribute: %s' % (tablename, key)
3935 migrate = args.get('migrate',self._migrate)
3936 fake_migrate = args.get('fake_migrate', self._fake_migrate)
3937 format = args.get('format',None)
3938 trigger_name = args.get('trigger_name', None)
3939 sequence_name = args.get('sequence_name', None)
3940 primarykey=args.get('primarykey',None)
3941 polymodel=args.get('polymodel',None)
3942 if not isinstance(tablename,str):
3943 raise SyntaxError, "missing table name"
3944 tablename = cleanup(tablename)
3945 lowertablename = tablename.lower()
3946
3947 if tablename.startswith('_') or hasattr(self,lowertablename) or \
3948 regex_python_keywords.match(tablename):
3949 raise SyntaxError, 'invalid table name: %s' % tablename
3950 elif lowertablename in self.tables:
3951 raise SyntaxError, 'table already defined: %s' % tablename
3952 elif self.check_reserved:
3953 self.check_reserved_keyword(tablename)
3954
3955 t = self[tablename] = Table(self, tablename, *fields,
3956 **dict(primarykey=primarykey,
3957 trigger_name=trigger_name,
3958 sequence_name=sequence_name))
3959
3960 if self._uri in (None,'None'):
3961 return t
3962
3963 t._create_references()
3964
3965 if migrate or self._uri.startswith('gae'):
3966 try:
3967 sql_locker.acquire()
3968 self._adapter.create_table(t,migrate=migrate,
3969 fake_migrate=fake_migrate,
3970 polymodel=polymodel)
3971 finally:
3972 sql_locker.release()
3973 else:
3974 t._dbt = None
3975 self.tables.append(tablename)
3976 t._format = format
3977 return t
3978
3980 for tablename in self.tables:
3981 yield self[tablename]
3982
3985
3988
3991
3993 if key[:1]!='_' and key in self:
3994 raise SyntaxError, \
3995 'Object %s exists and cannot be redefined' % key
3996 self[key] = value
3997
3999 return '<DAL ' + dict.__repr__(self) + '>'
4000
4002 if isinstance(query,Table):
4003 query = query._id>0
4004 elif isinstance(query,Field):
4005 query = query!=None
4006 return Set(self, query)
4007
4010
4013
4014 - def executesql(self, query, placeholders=None, as_dict=False):
4015 """
4016 placeholders is optional and will always be None when using DAL
4017 if using raw SQL with placeholders, placeholders may be
4018 a sequence of values to be substituted in
4019 or, *if supported by the DB driver*, a dictionary with keys
4020 matching named placeholders in your SQL.
4021
4022 Added 2009-12-05 "as_dict" optional argument. Will always be
4023 None when using DAL. If using raw SQL can be set to True
4024 and the results cursor returned by the DB driver will be
4025 converted to a sequence of dictionaries keyed with the db
4026 field names. Tested with SQLite but should work with any database
4027 since the cursor.description used to get field names is part of the
4028 Python dbi 2.0 specs. Results returned with as_dict = True are
4029 the same as those returned when applying .to_list() to a DAL query.
4030
4031 [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
4032
4033 --bmeredyk
4034 """
4035 if placeholders:
4036 self._adapter.execute(query, placeholders)
4037 else:
4038 self._adapter.execute(query)
4039 if as_dict:
4040 if not hasattr(self._adapter.cursor,'description'):
4041 raise RuntimeError, "database does not support executesql(...,as_dict=True)"
4042
4043
4044
4045 columns = self._adapter.cursor.description
4046
4047 fields = [f[0] for f in columns]
4048
4049 data = self._adapter.cursor.fetchall()
4050
4051
4052 return [dict(zip(fields,row)) for row in data]
4053
4054 try:
4055 return self._adapter.cursor.fetchall()
4056 except:
4057 return None
4058
4060 for tablename in self.tables:
4061 by = self[tablename]._referenced_by
4062 by[:] = [item for item in by if not item[0] == other]
4063
4070
4071 - def import_from_csv_file(self, ifile, id_map={}, null='<NULL>',
4072 unique='uuid', *args, **kwargs):
4073 for line in ifile:
4074 line = line.strip()
4075 if not line:
4076 continue
4077 elif line == 'END':
4078 return
4079 elif not line.startswith('TABLE ') or not line[6:] in self.tables:
4080 raise SyntaxError, 'invalid file format'
4081 else:
4082 tablename = line[6:]
4083 self[tablename].import_from_csv_file(ifile, id_map, null,
4084 unique, *args, **kwargs)
4085
4086
4088 """
4089 Helper class providing a comma-separated string having all the field names
4090 (prefixed by table name and '.')
4091
4092 normally only called from within gluon.sql
4093 """
4094
4097
4099 return ', '.join([str(field) for field in self.table])
4100
4101
4103
4105 if not self._record:
4106 self._record = self._table[int(self)]
4107 if not self._record:
4108 raise RuntimeError, "Using a recursive select but encountered a broken reference"
4109
4111 if key == 'id':
4112 return int(self)
4113 self.__allocate()
4114 return self._record.get(key, None)
4115
4122
4124 if key == 'id':
4125 return int(self)
4126 self.__allocate()
4127 return self._record.get(key, None)
4128
4130 self.__allocate()
4131 self._record[key] = value
4132
4133
def Reference_unpickler(data):
    """Inverse of Reference_pickler: unmarshal the stored integer id."""
    return marshal.loads(data)
4136
def Reference_pickler(data):
    """Pickle a Reference as its marshalled int id (see Reference_unpickler)."""
    try:
        marshal_dump = marshal.dumps(int(data))
    except AttributeError:
        # fall back to a hand-built marshal 'int' record
        marshal_dump = 'i%s' % struct.pack('<i', int(data))
    return (Reference_unpickler, (marshal_dump,))
4143
# register pickle support for Reference (stored as a marshalled int)
4144 copy_reg.pickle(Reference, Reference_pickler, Reference_unpickler)
4145
4146
4148
4149 """
4150 an instance of this class represents a database table
4151
4152 Example::
4153
4154 db = DAL(...)
4155 db.define_table('users', Field('name'))
4156 db.users.insert(name='me') # print db.users._insert(...) to see SQL
4157 db.users.drop()
4158 """
4159
4160 - def __init__(
4161 self,
4162 db,
4163 tablename,
4164 *fields,
4165 **args
4166 ):
4167 """
4168 Initializes the table and performs checking on the provided fields.
4169
4170 Each table will have automatically an 'id'.
4171
4172 If a field is of type Table, the fields (excluding 'id') from that table
4173 will be used instead.
4174
4175 :raises SyntaxError: when a supplied field is of incorrect type.
4176 """
4177 self._tablename = tablename
4178 self._sequence_name = args.get('sequence_name',None) or \
4179 db and db._adapter.sequence_name(tablename)
4180 self._trigger_name = args.get('trigger_name',None) or \
4181 db and db._adapter.trigger_name(tablename)
4182
4183 primarykey = args.get('primarykey', None)
4184 if primarykey and not isinstance(primarykey,list):
4185 raise SyntaxError, "primarykey must be a list of fields from table '%s'" \
4186 % tablename
4187 elif primarykey:
4188 self._primarykey = primarykey
4189 new_fields = []
4190 else:
4191 new_fields = [ Field('id', 'id') ]
4192 for field in fields:
4193 if isinstance(field, Field):
4194 if hasattr(field, '_db'):
4195 field = copy.copy(field)
4196 if field.type == 'id':
4197
4198 new_fields[0] = field
4199 else:
4200 new_fields.append(field)
4201 elif isinstance(field, Table):
4202 new_fields += [copy.copy(field[f]) for f in
4203 field.fields if field[f].type!='id']
4204 else:
4205 raise SyntaxError, \
4206 'define_table argument is not a Field or Table: %s' % field
4207 fields = new_fields
4208 self._db = db
4209 self._id = fields[0]
4210 tablename = tablename
4211 self.fields = SQLCallableList()
4212 self.virtualfields = []
4213 fields = list(fields)
4214
4215 if db and self._db._adapter.uploads_in_blob==True:
4216 for field in fields:
4217 if isinstance(field, Field) and field.type == 'upload'\
4218 and field.uploadfield is True:
4219 tmp = field.uploadfield = '%s_blob' % field.name
4220 fields.append(self._db.Field(tmp, 'blob', default=''))
4221
4222 lower_fieldnames = set()
4223 for field in fields:
4224 if db and db.check_reserved:
4225 db.check_reserved_keyword(field.name)
4226
4227 if field.name.lower() in lower_fieldnames:
4228 raise SyntaxError, "duplicate field %s in table %s" % (field.name, tablename)
4229 else:
4230 lower_fieldnames.add(field.name.lower())
4231
4232 self.fields.append(field.name)
4233 self[field.name] = field
4234 if field.type == 'id':
4235 self['id'] = field
4236 field.tablename = field._tablename = tablename
4237 field.table = field._table = self
4238 field.db = field._db = self._db
4239 field.length = min(field.length,self._db and self._db._adapter.maxcharlength or INFINITY)
4240 if field.requires == DEFAULT:
4241 field.requires = sqlhtml_validators(field)
4242 self.ALL = SQLALL(self)
4243
4244 if hasattr(self,'_primarykey'):
4245 for k in self._primarykey:
4246 if k not in self.fields:
4247 raise SyntaxError, \
4248 "primarykey must be a list of fields from table '%s " % tablename
4249 else:
4250 self[k].notnull = True
4251
4253 errors = Row()
4254 for key,value in vars.items():
4255 value,error = self[key].validate(value)
4256 if error:
4257 errors[key] = error
4258 return errors
4259
# Table._create_references (the def header was lost in extraction):
# for every 'reference <table>[.<field>]' field of this table, validates the
# target and registers (this_table, field_name) in the referenced table's
# _referenced_by list, so recursive selects can find back-references.
4261 self._referenced_by = []
4262 for fieldname in self.fields:
4263 field=self[fieldname]
4264 if isinstance(field.type,str) and field.type[:10] == 'reference ':
4265 ref = field.type[10:].strip()
# empty target ('reference ') is a definition error
4266 if not ref.split():
4267 raise SyntaxError, 'Table: reference to nothing: %s' %ref
# target may be 'tablename' or 'tablename.fieldname' (keyed tables)
4268 refs = ref.split('.')
4269 rtablename = refs[0]
4270 if not rtablename in self._db:
4271 raise SyntaxError, "Table: table '%s' does not exist" % rtablename
4272 rtable = self._db[rtablename]
# a field in the referenced table shadowing this table's name would
# make the back-reference attribute ambiguous
4273 if self._tablename in rtable.fields:
4274 raise SyntaxError, \
4275 'Field: table %s has same name as a field in referenced table %s' \
4276 % (self._tablename, rtablename)
# explicit field target: only supported between keyed tables
4277 elif len(refs)==2:
4278 rfieldname = refs[1]
4279 if not hasattr(rtable,'_primarykey'):
4280 raise SyntaxError,\
4281 'keyed tables can only reference other keyed tables (for now)'
4282 if rfieldname not in rtable.fields:
4283 raise SyntaxError,\
4284 "invalid field '%s' for referenced table '%s' in table '%s'" \
4285 % (rfieldname, rtablename, self._tablename)
# record the back-reference on the referenced table
4286 rtable._referenced_by.append((self._tablename, field.name))
4287
4289 return dict([(k, v) for (k, v) in record.items() if k
4290 in self.fields and (self[k].type!='id' or id)])
4291
4293 """ for keyed table only """
4294 query = None
4295 for k,v in key.iteritems():
4296 if k in self._primarykey:
4297 if query:
4298 query = query & (self[k] == v)
4299 else:
4300 query = (self[k] == v)
4301 else:
4302 raise SyntaxError, \
4303 'Field %s is not part of the primary key of %s' % \
4304 (k,self._tablename)
4305 return query
4306
4308 if not key:
4309 return None
4310 elif isinstance(key, dict):
4311 """ for keyed table """
4312 query = self._build_query(key)
4313 rows = self._db(query).select()
4314 if rows:
4315 return rows[0]
4316 return None
4317 elif str(key).isdigit():
4318 return self._db(self.id == key).select(limitby=(0,1)).first()
4319 elif key:
4320 return dict.__getitem__(self, str(key))
4321
4323 if key!=DEFAULT:
4324 if isinstance(key, Query):
4325 record = self._db(key).select(limitby=(0,1)).first()
4326 elif not str(key).isdigit():
4327 record = None
4328 else:
4329 record = self._db(self.id == key).select(limitby=(0,1)).first()
4330 if record:
4331 for k,v in kwargs.items():
4332 if record[k]!=v: return None
4333 return record
4334 elif kwargs:
4335 query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.items()])
4336 return self._db(query).select(limitby=(0,1)).first()
4337 else:
4338 return None
4339
4341 if isinstance(key, dict) and isinstance(value, dict):
4342 """ option for keyed table """
4343 if set(key.keys()) == set(self._primarykey):
4344 value = self._filter_fields(value)
4345 kv = {}
4346 kv.update(value)
4347 kv.update(key)
4348 if not self.insert(**kv):
4349 query = self._build_query(key)
4350 self._db(query).update(**self._filter_fields(value))
4351 else:
4352 raise SyntaxError,\
4353 'key must have all fields from primary key: %s'%\
4354 (self._primarykey)
4355 elif str(key).isdigit():
4356 if key == 0:
4357 self.insert(**self._filter_fields(value))
4358 elif not self._db(self.id == key)\
4359 .update(**self._filter_fields(value)):
4360 raise SyntaxError, 'No such record: %s' % key
4361 else:
4362 if isinstance(key, dict):
4363 raise SyntaxError,\
4364 'value must be a dictionary: %s' % value
4365 dict.__setitem__(self, str(key), value)
4366
4368 if isinstance(key, dict):
4369 query = self._build_query(key)
4370 if not self._db(query).delete():
4371 raise SyntaxError, 'No such record: %s' % key
4372 elif not str(key).isdigit() or not self._db(self.id == key).delete():
4373 raise SyntaxError, 'No such record: %s' % key
4374
4377
4379 if key in self:
4380 raise SyntaxError, 'Object exists and cannot be redefined: %s' % key
4381 self[key] = value
4382
4384 for fieldname in self.fields:
4385 yield self[fieldname]
4386
4388 return '<Table ' + dict.__repr__(self) + '>'
4389
4391 if self.get('_ot', None):
4392 return '%s AS %s' % (self._ot, self._tablename)
4393 return self._tablename
4394
4395 - def _drop(self, mode = ''):
4396 return self._db._adapter._drop(self, mode)
4397
4398 - def drop(self, mode = ''):
4399 return self._db._adapter.drop(self,mode)
4400
4401 - def _listify(self,fields,update=False):
4402 new_fields = []
4403 new_fields_names = []
4404 for name in fields:
4405 if not name in self.fields:
4406 raise SyntaxError, 'Field %s does not belong to the table' % name
4407 new_fields.append((self[name],fields[name]))
4408 new_fields_names.append(name)
4409 for ofield in self:
4410 if not ofield.name in new_fields_names:
4411 if not update and ofield.default!=None:
4412 new_fields.append((ofield,ofield.default))
4413 elif update and ofield.update!=None:
4414 new_fields.append((ofield,ofield.update))
4415 for ofield in self:
4416 if not ofield.name in new_fields_names and ofield.compute:
4417 try:
4418 new_fields.append((ofield,ofield.compute(Row(fields))))
4419 except KeyError:
4420 pass
4421 if not update and ofield.required and not ofield.name in new_fields_names:
4422 raise SyntaxError,'Table: missing required field: %s' % ofield.name
4423 return new_fields
4424
4427
4430
4432 response = Row()
4433 response.errors = self._validate(**fields)
4434 if not response.errors:
4435 response.id = self.insert(**fields)
4436 else:
4437 response.id = None
4438 return response
4439
4451
4458
4460 return self._db._adapter._truncate(self, mode)
4461
4463 return self._db._adapter.truncate(self, mode)
4464
4465 - def import_from_csv_file(
4466 self,
4467 csvfile,
4468 id_map=None,
4469 null='<NULL>',
4470 unique='uuid',
4471 *args, **kwargs
4472 ):
4473 """
4474 import records from csv file. Column headers must have same names as
4475 table fields. field 'id' is ignored. If column names read 'table.file'
4476 the 'table.' prefix is ignored.
4477 'unique' argument is a field which must be unique
4478 (typically a uuid field)
4479 """
4480
4481 delimiter = kwargs.get('delimiter', ',')
4482 quotechar = kwargs.get('quotechar', '"')
4483 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
4484
4485 reader = csv.reader(csvfile, delimiter=delimiter, quotechar=quotechar, quoting=quoting)
4486 colnames = None
4487 if isinstance(id_map, dict):
4488 if not self._tablename in id_map:
4489 id_map[self._tablename] = {}
4490 id_map_self = id_map[self._tablename]
4491
4492 def fix(field, value, id_map):
4493 if value == null:
4494 value = None
4495 elif field.type.startswith('list:string'):
4496 value = bar_decode_string(value)
4497 elif field.type.startswith('list:'):
4498 value = bar_decode_integer(value)
4499 elif id_map and field.type.startswith('reference'):
4500 try:
4501 value = id_map[field.type[9:].strip()][value]
4502 except KeyError:
4503 pass
4504 return (field.name, value)
4505
4506 def is_id(colname):
4507 if colname in self:
4508 return self[colname].type == 'id'
4509 else:
4510 return False
4511
4512 for line in reader:
4513 if not line:
4514 break
4515 if not colnames:
4516 colnames = [x.split('.',1)[-1] for x in line][:len(line)]
4517 cols, cid = [], []
4518 for i,colname in enumerate(colnames):
4519 if is_id(colname):
4520 cid = i
4521 else:
4522 cols.append(i)
4523 if colname == unique:
4524 unique_idx = i
4525 else:
4526 items = [fix(self[colnames[i]], line[i], id_map) for i in cols]
4527
4528
4529 if not unique or unique not in colnames:
4530 new_id = self.insert(**dict(items))
4531 else:
4532 unique_value = line[unique_idx]
4533 query = self._db[self][unique] == unique_value
4534 record = self._db(query).select().first()
4535 if record:
4536 record.update_record(**dict(items))
4537 new_id = record[self._id.name]
4538 else:
4539 new_id = self.insert(**dict(items))
4540 if id_map and cid != []:
4541 id_map_self[line[cid]] = new_id
4542
4545
4546 - def on(self, query):
4547 return Expression(self._db,self._db._adapter.ON,self,query)
4548
4549
4550
4552
4553 - def __init__(
4554 self,
4555 db,
4556 op,
4557 first=None,
4558 second=None,
4559 type=None,
4560 ):
4561
4562 self.db = db
4563 self.op = op
4564 self.first = first
4565 self.second = second
4566
4567 if not type and first and hasattr(first,'type'):
4568 self.type = first.type
4569 else:
4570 self.type = type
4571
4574
4577
4580
4583
4585 return Expression(self.db, self.db._adapter.LOWER, self, None, self.type)
4586
4588 return Expression(self.db, self.db._adapter.UPPER, self, None, self.type)
4589
4592
4595
4598
4601
4604
4607
4609 if start < 0:
4610 pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
4611 else:
4612 pos0 = start + 1
4613
4614 if stop < 0:
4615 length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
4616 elif stop == sys.maxint:
4617 length = self.len()
4618 else:
4619 length = '(%s - %s)' % (stop + 1, pos0)
4620 return Expression(self.db,self.db._adapter.SUBSTRING,
4621 self, (pos0, length), self.type)
4622
4624 return self[i:i + 1]
4625
4627 return self.db._adapter.expand(self,self.type)
4628
4630 return Expression(self.db,self.db._adapter.COMMA,self,other,self.type)
4631
4633 if hasattr(self,'_op') and self.op == self.db._adapter.INVERT:
4634 return self.first
4635 return Expression(self.db,self.db._adapter.INVERT,self,type=self.type)
4636
4638 return Expression(self.db,self.db._adapter.ADD,self,other,self.type)
4639
4641 if self.type == 'integer':
4642 result_type = 'integer'
4643 elif self.type in ['date','time','datetime','double']:
4644 result_type = 'double'
4645 else:
4646 raise SyntaxError, "subtraction operation not supported for type"
4647 return Expression(self.db,self.db._adapter.SUB,self,other,
4648 result_type)
4650 return Expression(self.db,self.db._adapter.MUL,self,other,self.type)
4651
4653 return Expression(self.db,self.db._adapter.DIV,self,other,self.type)
4654
4656 return Expression(self.db,self.db._adapter.MOD,self,other,self.type)
4657
4659 return Query(self.db, self.db._adapter.EQ, self, value)
4660
4662 return Query(self.db, self.db._adapter.NE, self, value)
4663
4665 return Query(self.db, self.db._adapter.LT, self, value)
4666
4668 return Query(self.db, self.db._adapter.LE, self, value)
4669
4671 return Query(self.db, self.db._adapter.GT, self, value)
4672
4674 return Query(self.db, self.db._adapter.GE, self, value)
4675
4676 - def like(self, value):
4677 return Query(self.db, self.db._adapter.LIKE, self, value)
4678
4680 return Query(self.db, self.db._adapter.BELONGS, self, value)
4681
4683 if not self.type in ('string', 'text'):
4684 raise SyntaxError, "startswith used with incompatible field type"
4685 return Query(self.db, self.db._adapter.STARTSWITH, self, value)
4686
4688 if not self.type in ('string', 'text'):
4689 raise SyntaxError, "endswith used with incompatible field type"
4690 return Query(self.db, self.db._adapter.ENDSWITH, self, value)
4691
4693 if not self.type in ('string', 'text') and not self.type.startswith('list:'):
4694 raise SyntaxError, "contains used with incompatible field type"
4695 return Query(self.db, self.db._adapter.CONTAINS, self, value)
4696
4699
4700
4701
4702
4704 """
4705 allows defining of custom SQL types
4706
4707 Example::
4708
4709 decimal = SQLCustomType(
4710 type ='double',
4711 native ='integer',
4712 encoder =(lambda x: int(float(x) * 100)),
4713 decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
4714 )
4715
4716 db.define_table(
4717 'example',
4718 Field('value', type=decimal)
4719 )
4720
4721 :param type: the web2py type (default = 'string')
4722 :param native: the backend type
4723 :param encoder: how to encode the value to store it in the backend
4724 :param decoder: how to decode the value retrieved from the backend
4725 :param validator: what validators to use ( default = None, will use the
4726 default validator for type)
4727 """
4728
4729 - def __init__(
4730 self,
4731 type='string',
4732 native=None,
4733 encoder=None,
4734 decoder=None,
4735 validator=None,
4736 _class=None,
4737 ):
4738
4739 self.type = type
4740 self.native = native
4741 self.encoder = encoder or (lambda x: x)
4742 self.decoder = decoder or (lambda x: x)
4743 self.validator = validator
4744 self._class = _class or type
4745
4748
4751
4754
4757
4758
4759 -class Field(Expression):
4760
4761 """
4762 an instance of this class represents a database field
4763
4764 example::
4765
4766 a = Field(name, 'string', length=32, default=None, required=False,
4767 requires=IS_NOT_EMPTY(), ondelete='CASCADE',
4768 notnull=False, unique=False,
4769 uploadfield=True, widget=None, label=None, comment=None,
4770 uploadfield=True, # True means store on disk,
4771 # 'a_field_name' means store in this field in db
4772 # False means file content will be discarded.
4773 writable=True, readable=True, update=None, authorize=None,
4774 autodelete=False, represent=None, uploadfolder=None,
4775 uploadseparate=False # upload to separate directories by uuid_keys
4776 # first 2 character and tablename.fieldname
4777 # False - old behavior
4778 # True - put uploaded file in
4779 # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
4780 # directory)
4781
4782 to be used as argument of DAL.define_table
4783
4784 allowed field types:
4785 string, boolean, integer, double, text, blob,
4786 date, time, datetime, upload, password
4787
4788 strings must have a length of Adapter.maxcharlength by default (512 or 255 for mysql)
4789 fields should have a default or they will be required in SQLFORMs
4790 the requires argument is used to validate the field input in SQLFORMs
4791
4792 """
4793
4794 - def __init__(
4795 self,
4796 fieldname,
4797 type='string',
4798 length=None,
4799 default=DEFAULT,
4800 required=False,
4801 requires=DEFAULT,
4802 ondelete='CASCADE',
4803 notnull=False,
4804 unique=False,
4805 uploadfield=True,
4806 widget=None,
4807 label=None,
4808 comment=None,
4809 writable=True,
4810 readable=True,
4811 update=None,
4812 authorize=None,
4813 autodelete=False,
4814 represent=None,
4815 uploadfolder=None,
4816 uploadseparate=False,
4817 compute=None,
4818 custom_store=None,
4819 custom_retrieve=None,
4820 ):
4821 self.db = None
4822 self.op = None
4823 self.first = None
4824 self.second = None
4825 if not isinstance(fieldname,str):
4826 raise SyntaxError, "missing field name"
4827 if fieldname.startswith(':'):
4828 fieldname,readable,writable=fieldname[1:],False,False
4829 elif fieldname.startswith('.'):
4830 fieldname,readable,writable=fieldname[1:],False,False
4831 if '=' in fieldname:
4832 fieldname,default = fieldname.split('=',1)
4833 self.name = fieldname = cleanup(fieldname)
4834 if hasattr(Table,fieldname) or fieldname[0] == '_' or \
4835 regex_python_keywords.match(fieldname):
4836 raise SyntaxError, 'Field: invalid field name: %s' % fieldname
4837 if isinstance(type, Table):
4838 type = 'reference ' + type._tablename
4839 self.type = type
4840 self.length = (length is None) and MAXCHARLENGTH or length
4841 if default==DEFAULT:
4842 self.default = update or None
4843 else:
4844 self.default = default
4845 self.required = required
4846 self.ondelete = ondelete.upper()
4847 self.notnull = notnull
4848 self.unique = unique
4849 self.uploadfield = uploadfield
4850 self.uploadfolder = uploadfolder
4851 self.uploadseparate = uploadseparate
4852 self.widget = widget
4853 self.label = label or ' '.join(item.capitalize() for item in fieldname.split('_'))
4854 self.comment = comment
4855 self.writable = writable
4856 self.readable = readable
4857 self.update = update
4858 self.authorize = authorize
4859 self.autodelete = autodelete
4860 if not represent and type in ('list:integer','list:string'):
4861 represent=lambda x: ', '.join(str(y) for y in x or [])
4862 self.represent = represent
4863 self.compute = compute
4864 self.isattachment = True
4865 self.custom_store = custom_store
4866 self.custom_retrieve = custom_retrieve
4867 if self.label is None:
4868 self.label = ' '.join([x.capitalize() for x in
4869 fieldname.split('_')])
4870 if requires is None:
4871 self.requires = []
4872 else:
4873 self.requires = requires
4874
    def store(self, file, filename=None, path=None):
        """
        Save an uploaded file and return the generated filename that is
        stored in the database.

        :param file: open file-like object (must support .read / .name)
        :param filename: original filename; defaults to file.name
        :param path: destination folder; falls back to self.uploadfolder,
            then to <db folder>/../uploads
        :raises RuntimeError: when no upload folder can be determined
        """
        # delegate entirely to a user-supplied storage callback if present
        if self.custom_store:
            return self.custom_store(file,filename,path)
        if not filename:
            filename = file.name
        # normalize path separators and strip any directory components
        filename = os.path.basename(filename.replace('/', os.sep)\
                                        .replace('\\', os.sep))
        m = re.compile('\.(?P<e>\w{1,5})$').search(filename)
        extension = m and m.group('e') or 'txt'
        # random key makes the stored name unguessable and collision-free
        uuid_key = web2py_uuid().replace('-', '')[-16:]
        # base16-encode the original name so it can be recovered on download
        encoded_filename = base64.b16encode(filename).lower()
        newfilename = '%s.%s.%s.%s' % \
            (self._tablename, self.name, uuid_key, encoded_filename)
        # cap the length, then append the real extension
        newfilename = newfilename[:200] + '.' + extension
        if isinstance(self.uploadfield,Field):
            # content goes into a blob field of another table
            blob_uploadfield_name = self.uploadfield.uploadfield
            keys={self.uploadfield.name: newfilename,
                  blob_uploadfield_name: file.read()}
            self.uploadfield.table.insert(**keys)
        elif self.uploadfield == True:
            # content goes on the filesystem
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            elif self.db._adapter.folder:
                path = os.path.join(self.db._adapter.folder, '..', 'uploads')
            else:
                raise RuntimeError, "you must specify a Field(...,uploadfolder=...)"
            if self.uploadseparate:
                # shard uploads into <table>.<field>/<uuid_key[:2]> subfolders
                path = os.path.join(path,"%s.%s" % (self._tablename, self.name),uuid_key[:2])
            if not os.path.exists(path):
                os.makedirs(path)
            pathfilename = os.path.join(path, newfilename)
            dest_file = open(pathfilename, 'wb')
            shutil.copyfileobj(file, dest_file)
            dest_file.close()
        return newfilename
4912
4914 if self.custom_retrieve:
4915 return self.custom_retrieve(name, path)
4916 import http
4917 if self.authorize or isinstance(self.uploadfield, str):
4918 row = self.db(self == name).select().first()
4919 if not row:
4920 raise http.HTTP(404)
4921 if self.authorize and not self.authorize(row):
4922 raise http.HTTP(403)
4923 try:
4924 m = regex_content.match(name)
4925 if not m or not self.isattachment:
4926 raise TypeError, 'Can\'t retrieve %s' % name
4927 filename = base64.b16decode(m.group('name'), True)
4928 filename = regex_cleanup_fn.sub('_', filename)
4929 except (TypeError, AttributeError):
4930 filename = name
4931 if isinstance(self.uploadfield, str):
4932 return (filename, cStringIO.StringIO(row[self.uploadfield] or ''))
4933 elif isinstance(self.uploadfield,Field):
4934 blob_uploadfield_name = self.uploadfield.uploadfield
4935 query = self.uploadfield == name
4936 data = self.uploadfield.table(query)[blob_uploadfield_name]
4937 return (filename, cStringIO.StringIO(data))
4938 else:
4939
4940 if path:
4941 pass
4942 elif self.uploadfolder:
4943 path = self.uploadfolder
4944 else:
4945 path = os.path.join(self.db._adapter.folder, '..', 'uploads')
4946 if self.uploadseparate:
4947 t = m.group('table')
4948 f = m.group('field')
4949 u = m.group('uuidkey')
4950 path = os.path.join(path,"%s.%s" % (t,f),u[:2])
4951 return (filename, open(os.path.join(path, name), 'rb'))
4952
4967
4979
4982
4985
4987 try:
4988 return '%s.%s' % (self.tablename, self.name)
4989 except:
4990 return '<no table>.%s' % self.name
4991
4992
4994
4995 """
4996 a query object necessary to define a set.
4997 it can be stored or can be passed to DAL.__call__() to obtain a Set
4998
4999 Example::
5000
5001 query = db.users.name=='Max'
5002 set = db(query)
5003 records = set.select()
5004
5005 """
5006
5007 - def __init__(
5008 self,
5009 db,
5010 op,
5011 first=None,
5012 second=None,
5013 ):
5014 self.db = db
5015 self.op = op
5016 self.first = first
5017 self.second = second
5018
5020 return self.db._adapter.expand(self)
5021
5023 return Query(self.db,self.db._adapter.AND,self,other)
5024
5026 return Query(self.db,self.db._adapter.OR,self,other)
5027
5029 if self.op==self.db._adapter.NOT:
5030 return self.first
5031 return Query(self.db,self.db._adapter.NOT,self)
5032
5033
5034 regex_quotes = re.compile("'[^']*'")
5035
5036
5038 if not orderby:
5039 return None
5040 orderby2 = orderby[0]
5041 for item in orderby[1:]:
5042 orderby2 = orderby2 | item
5043 return orderby2
5044
5045
5047
5048 """
5049 a Set represents a set of records in the database,
5050 the records are identified by the query=Query(...) object.
5051 normally the Set is generated by DAL.__call__(Query(...))
5052
5053 given a set, for example
5054 set = db(db.users.name=='Max')
5055 you can:
5056 set.update(db.users.name='Massimo')
5057 set.delete() # all elements in the set
5058 set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
5059 and take subsets:
5060 subset = set(db.users.id<5)
5061 """
5062
5064 self.db = db
5065 self._db = db
5066 self.query = query
5067
5069 if isinstance(query,Table):
5070 query = query._id>0
5071 elif isinstance(query,Field):
5072 query = query!=None
5073 if self.query:
5074 return Set(self.db, self.query & query)
5075 else:
5076 return Set(self.db, query)
5077
    def _count(self,distinct=None):
        """Return the SQL (not executed) that would count this set."""
        return self.db._adapter._count(self.query,distinct)
5080
    def _select(self, *fields, **attributes):
        """Return the SQL (not executed) that would select fields of this set."""
        return self.db._adapter._select(self.query,fields,attributes)
5083
5085 tablename=self.db._adapter.get_table(self.query)
5086 return self.db._adapter._delete(tablename,self.query)
5087
    def _update(self, **update_fields):
        """Return the SQL (not executed) that would update this set."""
        tablename = self.db._adapter.get_table(self.query)
        # normalize keyword arguments into (field, value) pairs
        fields = self.db[tablename]._listify(update_fields,update=True)
        return self.db._adapter._update(tablename,self.query,fields)
5092
    def count(self,distinct=None):
        """Count the records in this set."""
        return self.db._adapter.count(self.query,distinct)
5095
    def select(self, *fields, **attributes):
        """Execute the select for this set and return a Rows object."""
        return self.db._adapter.select(self.query,fields,attributes)
5098
5103
    def update(self, **update_fields):
        """
        Update all records in the set with update_fields and return the
        number of records updated.

        :raises SyntaxError: when no updatable fields are given
        """
        tablename = self.db._adapter.get_table(self.query)
        fields = self.db[tablename]._listify(update_fields,update=True)
        if not fields:
            raise SyntaxError, "No fields to update"
        # remove files being replaced, for autodelete upload fields
        self.delete_uploaded_files(update_fields)
        return self.db._adapter.update(tablename,self.query,fields)
5111
5113 table = self.db[self.db._adapter.tables(self.query)[0]]
5114
5115 if upload_fields:
5116 fields = upload_fields.keys()
5117 else:
5118 fields = table.fields
5119 fields = [f for f in fields if table[f].type == 'upload'
5120 and table[f].uploadfield == True
5121 and table[f].autodelete]
5122 if not fields:
5123 return
5124 for record in self.select(*[table[f] for f in fields]):
5125 for fieldname in fields:
5126 field = table[fieldname]
5127 oldname = record.get(fieldname, None)
5128 if not oldname:
5129 continue
5130 if upload_fields and oldname == upload_fields[fieldname]:
5131 continue
5132 uploadfolder = field.uploadfolder
5133 if not uploadfolder:
5134 uploadfolder = os.path.join(self.db._adapter.folder, '..', 'uploads')
5135 if field.uploadseparate:
5136 items = oldname.split('.')
5137 uploadfolder = os.path.join(uploadfolder,
5138 "%s.%s" % (items[0], items[1]),
5139 items[2][:2])
5140 oldpath = os.path.join(uploadfolder, oldname)
5141 if os.path.exists(oldpath):
5142 os.unlink(oldpath)
5143
5144
5146 (colset, table, id) = pack
5147 b = a or dict(colset)
5148 c = dict([(k,v) for (k,v) in b.items() if k in table.fields and table[k].type!='id'])
5149 table._db(table._id==id).update(**c)
5150 for (k, v) in c.items():
5151 colset[k] = v
5152
5153
5154 -class Rows(object):
5155
5156 """
5157 A wrapper for the return value of a select. It basically represents a table.
5158 It has an iterator and each row is represented as a dictionary.
5159 """
5160
5161
5162
5163 - def __init__(
5164 self,
5165 db=None,
5166 records=[],
5167 colnames=[],
5168 compact=True,
5169 rawrows=None
5170 ):
5171 self.db = db
5172 self.records = records
5173 self.colnames = colnames
5174 self.compact = compact
5175 self.response = rawrows
5176
5178 if not keyed_virtualfields:
5179 return self
5180 for row in self.records:
5181 for (tablename,virtualfields) in keyed_virtualfields.items():
5182 attributes = dir(virtualfields)
5183 virtualfields.__dict__.update(row)
5184 if not tablename in row:
5185 box = row[tablename] = Row()
5186 else:
5187 box = row[tablename]
5188 for attribute in attributes:
5189 if attribute[0] != '_':
5190 method = getattr(virtualfields,attribute)
5191 if hasattr(method,'im_func') and method.im_func.func_code.co_argcount:
5192 box[attribute]=method()
5193 return self
5194
5196 if self.colnames!=other.colnames: raise Exception, 'Cannot & incompatible Rows objects'
5197 records = self.records+other.records
5198 return Rows(self.db,records,self.colnames)
5199
5201 if self.colnames!=other.colnames: raise Exception, 'Cannot | incompatible Rows objects'
5202 records = self.records
5203 records += [record for record in other.records \
5204 if not record in records]
5205 return Rows(self.db,records,self.colnames)
5206
5208 if len(self.records):
5209 return 1
5210 return 0
5211
5213 return len(self.records)
5214
5216 return Rows(self.db,self.records[a:b],self.colnames)
5217
5219 row = self.records[i]
5220 keys = row.keys()
5221 if self.compact and len(keys) == 1 and keys[0] != '_extra':
5222 return row[row.keys()[0]]
5223 return row
5224
5226 """
5227 iterator over records
5228 """
5229
5230 for i in xrange(len(self)):
5231 yield self[i]
5232
5234 """
5235 serializes the table into a csv file
5236 """
5237
5238 s = cStringIO.StringIO()
5239 self.export_to_csv_file(s)
5240 return s.getvalue()
5241
5243 if not self.records:
5244 return None
5245 return self[0]
5246
5248 if not self.records:
5249 return None
5250 return self[-1]
5251
5253 """
5254 returns a new Rows object, a subset of the original object,
5255 filtered by the function f
5256 """
5257 if not self.records:
5258 return Rows(self.db, [], self.colnames)
5259 records = []
5260 for i in range(0,len(self)):
5261 row = self[i]
5262 if f(row):
5263 records.append(self.records[i])
5264 return Rows(self.db, records, self.colnames)
5265
5267 """
5268 removes elements from the calling Rows object, filtered by the function f,
5269 and returns a new Rows object containing the removed elements
5270 """
5271 if not self.records:
5272 return Rows(self.db, [], self.colnames)
5273 removed = []
5274 i=0
5275 while i<len(self):
5276 row = self[i]
5277 if f(row):
5278 removed.append(self.records[i])
5279 del self.records[i]
5280 else:
5281 i += 1
5282 return Rows(self.db, removed, self.colnames)
5283
5284 - def sort(self, f, reverse=False):
5285 """
5286 returns a list of sorted elements (not sorted in place)
5287 """
5288 return Rows(self.db,sorted(self,key=f,reverse=reverse),self.colnames)
5289
5290 - def as_list(self,
5291 compact=True,
5292 storage_to_dict=True,
5293 datetime_to_str=True):
5294 """
5295 returns the data as a list or dictionary.
5296 :param storage_to_dict: when True returns a dict, otherwise a list(default True)
5297 :param datetime_to_str: convert datetime fields as strings (default True)
5298 """
5299 (oc, self.compact) = (self.compact, compact)
5300 if storage_to_dict:
5301 items = [item.as_dict(datetime_to_str) for item in self]
5302 else:
5303 items = [item for item in self]
5304 self.compact = compact
5305 return items
5306
5307
5308 - def as_dict(self,
5309 key='id',
5310 compact=True,
5311 storage_to_dict=True,
5312 datetime_to_str=True):
5313 """
5314 returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)
5315
5316 :param key: the name of the field to be used as dict key, normally the id
5317 :param compact: ? (default True)
5318 :param storage_to_dict: when True returns a dict, otherwise a list(default True)
5319 :param datetime_to_str: convert datetime fields as strings (default True)
5320 """
5321 rows = self.as_list(compact, storage_to_dict, datetime_to_str)
5322 if isinstance(key,str) and key.count('.')==1:
5323 (table, field) = key.split('.')
5324 return dict([(r[table][field],r) for r in rows])
5325 elif isinstance(key,str):
5326 return dict([(r[key],r) for r in rows])
5327 else:
5328 return dict([(key(r),r) for r in rows])
5329
5331 """
5332 export data to csv, the first line contains the column names
5333
5334 :param ofile: where the csv must be exported to
5335 :param null: how null values must be represented (default '<NULL>')
5336 :param delimiter: delimiter to separate values (default ',')
5337 :param quotechar: character to use to quote string values (default '"')
5338 :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
5339 :param represent: use the fields .represent value (default False)
5340 :param colnames: list of column names to use (default self.colnames)
5341 This will only work when exporting rows objects!!!!
5342 DO NOT use this with db.export_to_csv()
5343 """
5344 delimiter = kwargs.get('delimiter', ',')
5345 quotechar = kwargs.get('quotechar', '"')
5346 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
5347 represent = kwargs.get('represent', False)
5348 writer = csv.writer(ofile, delimiter=delimiter,
5349 quotechar=quotechar, quoting=quoting)
5350 colnames = kwargs.get('colnames', self.colnames)
5351
5352 writer.writerow(colnames)
5353
5354 def none_exception(value):
5355 """
5356 returns a cleaned up value that can be used for csv export:
5357 - unicode text is encoded as such
5358 - None values are replaced with the given representation (default <NULL>)
5359 """
5360 if value is None:
5361 return null
5362 elif isinstance(value, unicode):
5363 return value.encode('utf8')
5364 elif isinstance(value,Reference):
5365 return int(value)
5366 elif hasattr(value, 'isoformat'):
5367 return value.isoformat()[:19].replace('T', ' ')
5368 elif isinstance(value, (list,tuple)):
5369 return bar_encode(value)
5370 return value
5371
5372 for record in self:
5373 row = []
5374 for col in colnames:
5375 if not table_field.match(col):
5376 row.append(record._extra[col])
5377 else:
5378 (t, f) = col.split('.')
5379 field = self.db[t][f]
5380 if isinstance(record.get(t, None), (Row,dict)):
5381 value = record[t][f]
5382 else:
5383 value = record[f]
5384 if represent and field.represent:
5385 value = field.represent(value)
5386 row.append(none_exception(value))
5387 writer.writerow(row)
5388
5390 """
5391 serializes the table using sqlhtml.SQLTABLE (if present)
5392 """
5393
5394 import sqlhtml
5395 return sqlhtml.SQLTABLE(self).xml()
5396
5397 - def json(self, mode='object', default=None):
5398 """
5399 serializes the table to a JSON list of objects
5400 """
5401 mode = mode.lower()
5402 if not mode in ['object', 'array']:
5403 raise SyntaxError, 'Invalid JSON serialization mode: %s' % mode
5404
5405 def inner_loop(record, col):
5406 (t, f) = col.split('.')
5407 res = None
5408 if not table_field.match(col):
5409 res = record._extra[col]
5410 else:
5411 if isinstance(record.get(t, None), Row):
5412 res = record[t][f]
5413 else:
5414 res = record[f]
5415 if mode == 'object':
5416 return (f, res)
5417 else:
5418 return res
5419
5420 if mode == 'object':
5421 items = [dict([inner_loop(record, col) for col in
5422 self.colnames]) for record in self]
5423 else:
5424 items = [[inner_loop(record, col) for col in self.colnames]
5425 for record in self]
5426 if have_serializers:
5427 return serializers.json(items,default=default or serializers.custom_json)
5428 else:
5429 import simplejson
5430 return simplejson.dumps(items)
5431
5433 return cPickle.loads(data)
5434
5436 return Rows_unpickler, \
5437 (cPickle.dumps(data.as_list(storage_to_dict=True,
5438 datetime_to_str=False)),)
5439
5440 copy_reg.pickle(Rows, Rows_pickler, Rows_unpickler)
5441
5442
5443
5444
5445
5446
5448 """
5449
5450 >>> if len(sys.argv)<2: db = DAL(\"sqlite://test.db\")
5451 >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
5452 >>> tmp = db.define_table('users',\
5453 Field('stringf', 'string', length=32, required=True),\
5454 Field('booleanf', 'boolean', default=False),\
5455 Field('passwordf', 'password', notnull=True),\
5456 Field('uploadf', 'upload'),\
5457 Field('blobf', 'blob'),\
5458 Field('integerf', 'integer', unique=True),\
5459 Field('doublef', 'double', unique=True,notnull=True),\
5460 Field('datef', 'date', default=datetime.date.today()),\
5461 Field('timef', 'time'),\
5462 Field('datetimef', 'datetime'),\
5463 migrate='test_user.table')
5464
5465 Insert a field
5466
5467 >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
5468 uploadf=None, integerf=5, doublef=3.14,\
5469 datef=datetime.date(2001, 1, 1),\
5470 timef=datetime.time(12, 30, 15),\
5471 datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
5472 1
5473
5474 Drop the table
5475
5476 >>> db.users.drop()
5477
5478 Examples of insert, select, update, delete
5479
5480 >>> tmp = db.define_table('person',\
5481 Field('name'),\
5482 Field('birth','date'),\
5483 migrate='test_person.table')
5484 >>> person_id = db.person.insert(name=\"Marco\",birth='2005-06-22')
5485 >>> person_id = db.person.insert(name=\"Massimo\",birth='1971-12-21')
5486
5487 commented len(db().select(db.person.ALL))
5488 commented 2
5489
5490 >>> me = db(db.person.id==person_id).select()[0] # test select
5491 >>> me.name
5492 'Massimo'
5493 >>> db(db.person.name=='Massimo').update(name='massimo') # test update
5494 1
5495 >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
5496 1
5497
5498 Update a single record
5499
5500 >>> me.update_record(name=\"Max\")
5501 >>> me.name
5502 'Max'
5503
5504 Examples of complex search conditions
5505
5506 >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
5507 1
5508 >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
5509 1
5510 >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
5511 1
5512 >>> me = db(db.person.id==person_id).select(db.person.name)[0]
5513 >>> me.name
5514 'Max'
5515
5516 Examples of search conditions using extract from date/datetime/time
5517
5518 >>> len(db(db.person.birth.month()==12).select())
5519 1
5520 >>> len(db(db.person.birth.year()>1900).select())
5521 1
5522
5523 Example of usage of NULL
5524
5525 >>> len(db(db.person.birth==None).select()) ### test NULL
5526 0
5527 >>> len(db(db.person.birth!=None).select()) ### test NULL
5528 1
5529
5530 Examples of search conditions using lower, upper, and like
5531
5532 >>> len(db(db.person.name.upper()=='MAX').select())
5533 1
5534 >>> len(db(db.person.name.like('%ax')).select())
5535 1
5536 >>> len(db(db.person.name.upper().like('%AX')).select())
5537 1
5538 >>> len(db(~db.person.name.upper().like('%AX')).select())
5539 0
5540
5541 orderby, groupby and limitby
5542
5543 >>> people = db().select(db.person.name, orderby=db.person.name)
5544 >>> order = db.person.name|~db.person.birth
5545 >>> people = db().select(db.person.name, orderby=order)
5546
5547 >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)
5548
5549 >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))
5550
5551 Example of one 2 many relation
5552
5553 >>> tmp = db.define_table('dog',\
5554 Field('name'),\
5555 Field('birth','date'),\
5556 Field('owner',db.person),\
5557 migrate='test_dog.table')
5558 >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
5559 1
5560
5561 A simple JOIN
5562
5563 >>> len(db(db.dog.owner==db.person.id).select())
5564 1
5565
5566 >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
5567 1
5568
5569 Drop tables
5570
5571 >>> db.dog.drop()
5572 >>> db.person.drop()
5573
5574 Example of many 2 many relation and Set
5575
5576 >>> tmp = db.define_table('author', Field('name'),\
5577 migrate='test_author.table')
5578 >>> tmp = db.define_table('paper', Field('title'),\
5579 migrate='test_paper.table')
5580 >>> tmp = db.define_table('authorship',\
5581 Field('author_id', db.author),\
5582 Field('paper_id', db.paper),\
5583 migrate='test_authorship.table')
5584 >>> aid = db.author.insert(name='Massimo')
5585 >>> pid = db.paper.insert(title='QCD')
5586 >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)
5587
5588 Define a Set
5589
5590 >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
5591 >>> rows = authored_papers.select(db.author.name, db.paper.title)
5592 >>> for row in rows: print row.author.name, row.paper.title
5593 Massimo QCD
5594
5595 Example of search condition using belongs
5596
5597 >>> set = (1, 2, 3)
5598 >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
5599 >>> print rows[0].title
5600 QCD
5601
5602 Example of search condition using nested select
5603
5604 >>> nested_select = db()._select(db.authorship.paper_id)
5605 >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
5606 >>> print rows[0].title
5607 QCD
5608
5609 Example of expressions
5610
5611 >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
5612 >>> db(mynumber.id>0).delete()
5613 0
5614 >>> for i in range(10): tmp = mynumber.insert(x=i)
5615 >>> db(mynumber.id>0).select(mynumber.x.sum())[0](mynumber.x.sum())
5616 45
5617
5618 >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
5619 5
5620
5621 Output in csv
5622
5623 >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
5624 author.name,paper.title\r
5625 Massimo,QCD
5626
5627 Delete all leftover tables
5628
5629 >>> DAL.distributed_transaction_commit(db)
5630
5631 >>> db.mynumber.drop()
5632 >>> db.authorship.drop()
5633 >>> db.author.drop()
5634 >>> db.paper.drop()
5635 """
5636
5637
5638
5639
# legacy class aliases kept for backward compatibility with pre-refactor
# web2py applications
SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
# convenience attributes so importing DAL alone is sufficient
DAL.Field = Field
DAL.Table = Table
5651
5652
5653
5654
5655
# run this module's doctests when executed as a script
if __name__ == '__main__':
    import doctest
    doctest.testmod()
5659