grepros 1.2.2
grep for ROS bag files and live topics
sqlbase.py
1# -*- coding: utf-8 -*-
2"""
3Base class for producing SQL for topics and messages.
4
5------------------------------------------------------------------------------
6This file is part of grepros - grep for ROS bag files and live topics.
7Released under the BSD License.
8
9@author Erki Suurjaak
10@created 03.01.2022
11@modified 21.04.2024
12------------------------------------------------------------------------------
13"""
14## @namespace grepros.plugins.auto.sqlbase
15import json
16import re
17
18import yaml
19
20from ... import api
21from ... common import ConsolePrinter, ellipsize, import_item, merge_dicts
22
23
24
25class SqlMixin(object):
26 """
27 Base class for producing SQL for topics and messages.
28
29 Can load additional SQL dialects or additional options for existing dialects
30 from a YAML/JSON file.
31 """
32
33
34 DEFAULT_DIALECT = "sqlite"
35
36
37 DEFAULT_ARGS = dict(WRITE_OPTIONS={})
38
39
40 def __init__(self, args=None, **kwargs):
41 """
42 @param args arguments as namespace or dictionary, case-insensitive
43 @param args.write_options ```
44 {"dialect": SQL dialect if not default,
45 "nesting": true|false to created nested type tables}
46 ```
47 @param kwargs any and all arguments as keyword overrides, case-insensitive
48 """
49 self._topics = {} # {(topic, typename, typehash): {name, table_name, view_name, sql, ..}}
50 self._types = {} # {(typename, typehash): {type, table_name, sql, ..}}
51 self._schema = {} # {(typename, typehash): {cols}}
52 self._sql_cache = {} # {table: "INSERT INTO table VALUES (%s, ..)"}
53 self._dialect = None
54 self._nesting = None
55
56
57 def validate(self):
58 """
59 Returns whether arguments are valid.
60
61 Verifies that "dialect-file" is valid and "dialect" contains a supported value, if any.
62 """
63 write_options = getattr(self.args, "WRITE_OPTIONS", {})
64 self._dialect = write_options.get("dialect", (self._dialect or self.DEFAULT_DIALECT))
65 self._nesting = write_options.get("nesting")
66 return all([self._validate_dialect_file(), self._validate_dialect()])
67
68
69 def _validate_dialect_file(self):
70 """Returns whether "dialect-file" is valid in args.WRITE_OPTIONS."""
71 ok = True
72 write_options = getattr(self.args, "WRITE_OPTIONS", {})
73 if write_options.get("dialect-file"):
74 filename = write_options["dialect-file"]
75 try:
76 with open(filename, encoding="utf-8") as f:
77 dialects = yaml.safe_load(f.read())
78 if any(not isinstance(v, dict) for v in dialects.values()):
79 raise Exception("Each dialect must be a dictionary.")
80 for opts in dialects.values():
81 for k, v in list(opts.get("adapters", {}).items()):
82 try: opts["adapters"][k] = import_item(v)
83 except ImportError:
84 ok = False
85 ConsolePrinter.error("Error loading adapter %r for %r "
86 "in SQL dialect file %r.", v, k, filename)
87 merge_dicts(self.DIALECTS, dialects)
88 except Exception as e:
89 ok = False
90 ConsolePrinter.error("Error reading SQL dialect file %r: %s", filename, e)
91
92 # Populate ROS type aliases like "byte" and "char"
93 for opts in self.DIALECTS.values() if ok else ():
94 for rostype in list(opts.get("types", {})):
95 alias = api.get_type_alias(rostype)
96 if alias:
97 opts["types"][alias] = opts["types"][rostype]
98 if alias and rostype + "[]" in opts["types"]:
99 opts["types"][alias + "[]"] = opts["types"][rostype + "[]"]
100
101 return ok
102
103
104 def _validate_dialect(self):
105 """Returns whether "dialect" is valid in args.WRITE_OPTIONS."""
106 ok = True
107 write_options = getattr(self.args, "WRITE_OPTIONS", {})
108 if "dialect" in write_options \
109 and write_options["dialect"] not in tuple(filter(bool, self.DIALECTS)):
110 ok = False
111 ConsolePrinter.error("Unknown dialect for SQL: %r. Choose one of {%s}.",
112 write_options["dialect"],
113 "|".join(sorted(filter(bool, self.DIALECTSDIALECTS))))
114 return ok
115
116
117 def close(self):
118 """Clears data structures."""
119 self._topics.clear()
120 self._types.clear()
121 self._schema.clear()
122 self._sql_cache.clear()
123
124
125 def _make_topic_data(self, topic, msg, exclude_cols=()):
126 """
127 Returns full data dictionary for topic, including view name and SQL.
128
129 @param exclude_cols list of column names to exclude from view SELECT, if any
130 @return {"name": topic name, "type": message type name as "pkg/Cls",
131 "table_name": message type table name, "view_name": topic view name,
132 "md5": message type definition MD5 hash, "sql": "CREATE VIEW .."}
133 """
134 with api.TypeMeta.make(msg, topic) as m:
135 typename, typehash, typekey = (m.typename, m.typehash, m.typekey)
136
137 table_name = self._types[typekey]["table_name"]
138 pkgname, clsname = typename.split("/", 1)
139 nameargs = {"topic": topic, "type": typename, "hash": typehash,
140 "package": pkgname, "class": clsname}
141 view_name = self._make_entity_name("view", nameargs)
142
143 sqlargs = dict(nameargs, view=quote(view_name), table=quote(table_name, force=True),
144 topic=repr(topic), cols="*")
145 if exclude_cols:
146 exclude_cols = [x[0] if isinstance(x, (list, tuple)) else x for x in exclude_cols]
147 select_cols = [c for c in self._schema[typekey] if c not in exclude_cols]
148 sqlargs["cols"] = ", ".join(quote(c) for c in select_cols)
149 sql = self._get_dialect_option("view_template").strip().format(**sqlargs)
150
151 return {"name": topic, "type": typename, "md5": typehash,
152 "sql": sql, "table_name": table_name, "view_name": view_name}
153
154
155 def _make_type_data(self, msg, extra_cols=(), rootmsg=None):
156 """
157 Returns full data dictionary for message type, including table name and SQL.
158
159 @param rootmsg top message this message is nested under, if any
160 @param extra_cols additional table columns, as [(column name, column def)]
161 @return {"type": message type name as "pkg/Cls",
162 "table_name": message type table name,
163 "definition": message type definition,
164 "cols": [(column name, column type)],
165 "md5": message type definition MD5 hash, "sql": "CREATE TABLE .."}
166 """
167 rootmsg = rootmsg or msg
168 with api.TypeMeta.make(msg, root=rootmsg) as m:
169 typename, typehash = (m.typename, m.typehash)
170
171 cols = []
172 scalars = set(x for x in self._get_dialect_option("types") if x == api.scalar(x))
173 for path, value, subtype in api.iter_message_fields(msg, scalars=scalars):
174 coltype = self._make_column_type(subtype)
175 cols += [(".".join(path), coltype)]
176 cols.extend(extra_cols or [])
177 cols = list(zip(self._make_column_names([c for c, _ in cols]), [t for _, t in cols]))
178 namewidth = 2 + max(len(n) for n, _ in cols)
179 coldefs = ["%s %s" % (quote(n).ljust(namewidth), t) for n, t in cols]
180
181 pkgname, clsname = typename.split("/", 1)
182 nameargs = {"type": typename, "hash": typehash, "package": pkgname, "class": clsname}
183 table_name = self._make_entity_name("table", nameargs)
184
185 sqlargs = dict(nameargs, table=quote(table_name), cols="\n %s\n" % ",\n ".join(coldefs))
186 sql = self._get_dialect_option("table_template").strip().format(**sqlargs)
187 return {"type": typename, "md5": typehash,
188 "definition": api.TypeMeta.make(msg).definition,
189 "table_name": table_name, "cols": cols, "sql": sql}
190
191
192 def _make_topic_insert_sql(self, topic, msg):
193 """Returns ("INSERT ..", [args]) for inserting into topics-table."""
194 POSARG = self._get_dialect_option("posarg")
195 topickey = api.TypeMeta.make(msg, topic).topickey
196 tdata = self._topics[topickey]
197
198 sql = self._get_dialect_option("insert_topic").strip().replace("%s", POSARG)
199 args = [tdata[k] for k in ("name", "type", "md5", "table_name", "view_name")]
200 return sql, args
201
202
203 def _make_type_insert_sql(self, msg):
204 """Returns ("INSERT ..", [args]) for inserting into types-table."""
205 POSARG = self._get_dialect_option("posarg")
206 typekey = api.TypeMeta.make(msg).typekey
207 tdata = self._types[typekey]
208
209 sql = self._get_dialect_option("insert_type").strip().replace("%s", POSARG)
210 args = [tdata[k] for k in ("type", "definition", "md5", "table_name")]
211 return sql, args
212
213
214 def _make_message_insert_sql(self, topic, msg, extra_cols=()):
215 """
216 Returns ("INSERT ..", [args]) for inserting into message type table.
217
218 @param extra_cols list of additional table columns, as [(name, value)]
219 """
220 typekey = api.TypeMeta.make(msg, topic).typekey
221 table_name = self._types[typekey]["table_name"]
222 sql, cols, args = self._sql_cache.get(table_name), [], []
223
224 scalars = set(x for x in self._get_dialect_option("types") if x == api.scalar(x))
225 for p, v, t in api.iter_message_fields(msg, scalars=scalars):
226 if not sql: cols.append(".".join(p))
227 args.append(self._make_column_value(v, t))
228 args = tuple(args) + tuple(v for _, v in extra_cols)
229
230 if not sql:
231 POSARG = self._get_dialect_option("posarg")
232 if extra_cols: cols.extend(c for c, _ in extra_cols)
233 sql = "INSERT INTO %s (%s) VALUES (%s)" % \
234 (quote(table_name), ", ".join(map(quote, cols)),
235 ", ".join([POSARG] * len(args)))
236 self._sql_cache[table_name] = sql
237
238 return sql, args
239
240
241 def _make_update_sql(self, table, values, where=()):
242 """Returns ("UPDATE ..", [args])."""
243 POSARG = self._get_dialect_option("posarg")
244 sql, args, sets, filters = "UPDATE %s SET " % quote(table), [], [], []
245 for lst, vals in [(sets, values), (filters, where)]:
246 for k, v in vals.items() if isinstance(vals, dict) else vals:
247 lst.append("%s = %s" % (quote(k), POSARG))
248 args.append(self._make_column_value(v))
249 sql += ", ".join(sets) + (" WHERE " if filters else "") + " AND ".join(filters)
250 return sql, args
251
252
253 def _make_entity_name(self, category, args):
254 """
255 Returns valid unique name for table/view.
256
257 @param args format arguments for table/view name template
258 """
259 name = self._get_dialect_option("%s_name_template" % category).format(**args)
260 existing = set(sum(([x["table_name"], x.get("view_name")]
261 for dct in (self._topics, self._types)
262 for x in dct.values()), []))
263 return self._make_name("entity", name, existing)
264
265
266 def _make_name(self, category, name, existing=()):
267 """
268 Returns a valid unique name for table/view/column.
269
270 Replaces invalid characters and constrains length.
271 If name already exists, appends counter like " (2)".
272 """
273 MAXLEN_ARG = "maxlen_column" if "column" == category else "maxlen_entity"
274 MAXLEN = self._get_dialect_option(MAXLEN_ARG)
275 INVALID_RGX = self._get_dialect_option("invalid_char_regex")
276 INVALID_REPL = self._get_dialect_option("invalid_char_repl")
277 if not MAXLEN and not INVALID_RGX: return name
278
279 name1 = re.sub(INVALID_RGX, INVALID_REPL, name) if INVALID_RGX else name
280 name2 = ellipsize(name1, MAXLEN)
281 counter = 2
282 while name2 in existing:
283 suffix = " (%s)" % counter
284 name2 = ellipsize(name1, MAXLEN - len(suffix)) + suffix
285 counter += 1
286 return name2
287
288
289 def _make_column_names(self, col_names):
290 """Returns valid unique names for table columns."""
291 result = []
292 for name in col_names:
293 result.append(self._make_name("column", name, result))
294 return list(result)
295
296
297 def _make_column_value(self, value, typename=None):
298 """Returns column value suitable for inserting to database."""
299 if not typename: return value
300
301 v = value
302 if isinstance(v, (list, tuple)):
303 scalartype = api.scalar(typename)
304 if scalartype in api.ROS_TIME_TYPES:
305 v = [self._convert_time(x) for x in v]
306 elif scalartype not in api.ROS_BUILTIN_TYPES:
307 if self._nesting: v = []
308 else: v = [api.message_to_dict(x) for x in v]
309 else:
310 v = self._convert_column_value(v, typename)
311 elif api.is_ros_time(v):
312 v = self._convert_time_value(v, typename)
313 elif typename not in api.ROS_BUILTIN_TYPES:
314 v = json.dumps(api.message_to_dict(v))
315 else:
316 v = self._convert_column_value(v, typename)
317 return v
318
319
320 def _make_column_type(self, typename, fallback=None):
321 """
322 Returns column type for SQL.
323
324 @param fallback fallback typename to use for lookup if no mapping for typename
325 """
326 TYPES = self._get_dialect_option("types")
327 ARRAYTEMPLATE = self._get_dialect_option("arraytype_template")
328 DEFAULTTYPE = self._get_dialect_option("defaulttype")
329
330 scalartype = api.scalar(typename)
331 timetype = api.get_ros_time_category(scalartype)
332 coltype = TYPES.get(typename) or TYPES.get(api.canonical(typename, unbounded=True))
333
334 if not coltype and scalartype in TYPES:
335 coltype = ARRAYTEMPLATE.format(type=TYPES[scalartype])
336 if not coltype and timetype in TYPES:
337 if typename != scalartype:
338 coltype = ARRAYTEMPLATE.format(type=TYPES[timetype])
339 else:
340 coltype = TYPES[timetype]
341 if not coltype and fallback:
342 coltype = self._make_column_type(fallback)
343 if not coltype:
344 coltype = DEFAULTTYPE or quote(typename)
345 return coltype
346
347
348 def _convert_column_value(self, value, typename):
349 """Returns ROS value converted to dialect value."""
350 ADAPTERS = self._get_dialect_option("adapters")
351 if not ADAPTERS: return value
352
353 adapter, iterate = ADAPTERS.get(typename), False
354 if not adapter and isinstance(value, (list, tuple)):
355 adapter, iterate = ADAPTERS.get(api.scalar(typename)), True
356 if adapter:
357 value = [adapter(x) for x in value] if iterate else adapter(value)
358 return value
359
360
361 def _convert_time_value(self, value, typename):
362 """Returns ROS time/duration value converted to dialect value."""
363 adapter = self._get_dialect_option("adapters").get(typename)
364 if adapter:
365 try: is_int = issubclass(adapter, int)
366 except Exception: is_int = False
367 v = api.to_sec(value) if is_int else "%d.%09d" % api.to_sec_nsec(value)
368 result = adapter(v)
369 else:
370 result = api.to_decimal(value)
371 return result
372
373
374 def _get_dialect_option(self, option):
375 """Returns option for current SQL dialect, falling back to default dialect."""
376 return self.DIALECTS[self._dialect].get(option, self.DIALECTS[None].get(option))
377
378
379
380 DIALECTS = {
381
382 None: {
383 # CREATE TABLE template, args: table, cols, type, hash, package, class
384 "table_template": "CREATE TABLE IF NOT EXISTS {table} ({cols});",
385 # CREATE VIEW template, args: view, cols, table, topic, type, hash, package, class
386 "view_template": """
387DROP VIEW IF EXISTS {view};
388
389CREATE VIEW {view} AS
390SELECT {cols}
391FROM {table}
392WHERE _topic = {topic};""",
393 "table_name_template": "{type}", # args: type, hash, package, class
394 "view_name_template": "{topic}", # args: topic, type, hash, package, class
395 "types": {}, # Mapping between ROS and SQL common types
396 "adapters": {}, # Mapping between ROS types and callable converters
397 "defaulttype": None, # Fallback SQL type if no mapping for ROS type
398 "arraytype_template": "{type}[]", # Array type template, args: type
399 "maxlen_entity": 0, # Maximum table/view name length, 0 disables
400 "maxlen_column": 0, # Maximum column name length, 0 disables
401 "invalid_char_regex": None, # Regex for matching invalid characters in name
402 "invalid_char_repl": "__", # Replacement for invalid characters in name
403
404 "insert_topic": """
405INSERT INTO topics (name, type, md5, table_name, view_name)
406VALUES (%s, %s, %s, %s, %s);""",
407 "insert_type": """
408INSERT INTO types (type, definition, md5, table_name)
409VALUES (%s, %s, %s, %s);""",
410 "posarg": "%s",
411 },
412
413 "sqlite": {
414 "posarg": "?",
415 "base_schema": """
416CREATE TABLE IF NOT EXISTS messages (
417 id INTEGER PRIMARY KEY,
418 topic_id INTEGER NOT NULL,
419 timestamp INTEGER NOT NULL,
420 data BLOB NOT NULL,
421
422 topic TEXT NOT NULL,
423 type TEXT NOT NULL,
424 dt TIMESTAMP NOT NULL,
425 yaml TEXT NOT NULL
426);
427
428CREATE TABLE IF NOT EXISTS topics (
429 id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
430 name TEXT NOT NULL,
431 type TEXT NOT NULL,
432 serialization_format TEXT DEFAULT "cdr",
433 offered_qos_profiles TEXT DEFAULT "",
434
435 md5 TEXT NOT NULL,
436 table_name TEXT NOT NULL,
437 view_name TEXT
438);
439
440CREATE TABLE IF NOT EXISTS types (
441 id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
442 type TEXT NOT NULL,
443 definition TEXT NOT NULL,
444 md5 TEXT NOT NULL,
445 table_name TEXT NOT NULL,
446 nested_tables JSON
447);
448
449CREATE INDEX IF NOT EXISTS timestamp_idx ON messages (timestamp ASC);
450
451PRAGMA journal_mode = WAL;
452""",
453 "insert_message": """
454INSERT INTO messages (topic_id, timestamp, data, topic, type, dt, yaml)
455VALUES (:topic_id, :timestamp, :data, :topic, :type, :dt, :yaml)
456""",
457 },
458
459 "postgres": {
460 "types": {
461 "int8": "SMALLINT", "int16": "SMALLINT", "int32": "INTEGER",
462 "uint8": "SMALLINT", "uint16": "INTEGER", "uint32": "BIGINT",
463 "int64": "BIGINT", "uint64": "BIGINT", "bool": "BOOLEAN",
464 "string": "TEXT", "wstring": "TEXT", "uint8[]": "BYTEA",
465 "float32": "REAL", "float64": "DOUBLE PRECISION",
466 },
467 "defaulttype": "JSONB",
468 "maxlen_entity": 63,
469 "maxlen_column": 63,
470
471 "insert_topic": """
472INSERT INTO topics (name, type, md5, table_name, view_name)
473VALUES (%s, %s, %s, %s, %s)
474RETURNING id;""",
475 "insert_type": """
476INSERT INTO types (type, definition, md5, table_name)
477VALUES (%s, %s, %s, %s)
478RETURNING id;""",
479 "base_schema": """
480CREATE TABLE IF NOT EXISTS topics (
481 id BIGSERIAL PRIMARY KEY,
482 name TEXT NOT NULL,
483 type TEXT NOT NULL,
484 md5 TEXT NOT NULL,
485 table_name TEXT NOT NULL,
486 view_name TEXT NOT NULL
487);
488
489CREATE TABLE IF NOT EXISTS types (
490 id BIGSERIAL PRIMARY KEY,
491 type TEXT NOT NULL,
492 definition TEXT NOT NULL,
493 md5 TEXT NOT NULL,
494 table_name TEXT NOT NULL,
495 nested_tables JSON
496);""",
497 },
498
499 "clickhouse": {
500 "table_template": "CREATE TABLE IF NOT EXISTS {table} ({cols}) ENGINE = ENGINE;",
501 "types": {
502 "int8": "Int8", "int16": "Int16", "int32": "Int32",
503 "uint8": "UInt8", "uint16": "UInt16", "uint32": "UInt32",
504 "int64": "Int64", "uint64": "UInt64", "bool": "UInt8",
505 "float32": "Float32", "float64": "Float64", "uint8[]": "String",
506 "string": "String", "wstring": "String",
507 },
508 "defaulttype": "String",
509 "arraytype_template": "Array({type})",
510 },
511 }
512
513
514
516 KEYWORDS = [
517 "A", "ABORT", "ABS", "ABSOLUTE", "ACCESS", "ACTION", "ADA", "ADD", "ADMIN", "AFTER",
518 "AGGREGATE", "ALIAS", "ALL", "ALLOCATE", "ALSO", "ALTER", "ALWAYS", "ANALYSE", "ANALYZE",
519 "AND", "ANY", "ARE", "ARRAY", "AS", "ASC", "ASENSITIVE", "ASSERTION", "ASSIGNMENT",
520 "ASYMMETRIC", "AT", "ATOMIC", "ATTACH", "ATTRIBUTE", "ATTRIBUTES", "AUDIT",
521 "AUTHORIZATION", "AUTOINCREMENT", "AUTO_INCREMENT", "AVG", "AVG_ROW_LENGTH", "BACKUP",
522 "BACKWARD", "BEFORE", "BEGIN", "BERNOULLI", "BETWEEN", "BIGINT", "BINARY", "BIT", "BITVAR",
523 "BIT_LENGTH", "BLOB", "BOOL", "BOOLEAN", "BOTH", "BREADTH", "BREAK", "BROWSE", "BULK",
524 "BY", "C", "CACHE", "CALL", "CALLED", "CARDINALITY", "CASCADE", "CASCADED", "CASE", "CAST",
525 "CATALOG", "CATALOG_NAME", "CEIL", "CEILING", "CHAIN", "CHANGE", "CHAR", "CHARACTER",
526 "CHARACTERISTICS", "CHARACTERS", "CHARACTER_LENGTH", "CHARACTER_SET_CATALOG",
527 "CHARACTER_SET_NAME", "CHARACTER_SET_SCHEMA", "CHAR_LENGTH", "CHECK", "CHECKED",
528 "CHECKPOINT", "CHECKSUM", "CLASS", "CLASS_ORIGIN", "CLOB", "CLOSE", "CLUSTER", "CLUSTERED",
529 "COALESCE", "COBOL", "COLLATE", "COLLATION", "COLLATION_CATALOG", "COLLATION_NAME",
530 "COLLATION_SCHEMA", "COLLECT", "COLUMN", "COLUMNS", "COLUMN_NAME", "COMMAND_FUNCTION",
531 "COMMAND_FUNCTION_CODE", "COMMENT", "COMMIT", "COMMITTED", "COMPLETION", "COMPRESS",
532 "COMPUTE", "CONDITION", "CONDITION_NUMBER", "CONNECT", "CONNECTION", "CONNECTION_NAME",
533 "CONSTRAINT", "CONSTRAINTS", "CONSTRAINT_CATALOG", "CONSTRAINT_NAME", "CONSTRAINT_SCHEMA",
534 "CONSTRUCTOR", "CONTAINS", "CONTAINSTABLE", "CONTINUE", "CONVERSION", "CONVERT", "COPY",
535 "CORR", "CORRESPONDING", "COUNT", "COVAR_POP", "COVAR_SAMP", "CREATE", "CREATEDB",
536 "CREATEROLE", "CREATEUSER", "CROSS", "CSV", "CUBE", "CUME_DIST", "CURRENT", "CURRENT_DATE",
537 "CURRENT_DEFAULT_TRANSFORM_GROUP", "CURRENT_PATH", "CURRENT_ROLE", "CURRENT_TIME",
538 "CURRENT_TIMESTAMP", "CURRENT_TRANSFORM_GROUP_FOR_TYPE", "CURRENT_USER", "CURSOR",
539 "CURSOR_NAME", "CYCLE", "DATA", "DATABASE", "DATABASES", "DATE", "DATETIME",
540 "DATETIME_INTERVAL_CODE", "DATETIME_INTERVAL_PRECISION", "DAY", "DAYOFMONTH", "DAYOFWEEK",
541 "DAYOFYEAR", "DAY_HOUR", "DAY_MICROSECOND", "DAY_MINUTE", "DAY_SECOND", "DBCC",
542 "DEALLOCATE", "DEC", "DECIMAL", "DECLARE", "DEFAULT", "DEFAULTS", "DEFERRABLE", "DEFERRED",
543 "DEFINED", "DEFINER", "DEGREE", "DELAYED", "DELAY_KEY_WRITE", "DELETE", "DELIMITER",
544 "DELIMITERS", "DENSE_RANK", "DENY", "DEPTH", "DEREF", "DERIVED", "DESC", "DESCRIBE",
545 "DESCRIPTOR", "DESTROY", "DESTRUCTOR", "DETACH", "DETERMINISTIC", "DIAGNOSTICS",
546 "DICTIONARY", "DISABLE", "DISCONNECT", "DISK", "DISPATCH", "DISTINCT", "DISTINCTROW",
547 "DISTRIBUTED", "DIV", "DO", "DOMAIN", "DOUBLE", "DROP", "DUAL", "DUMMY", "DUMP", "DYNAMIC",
548 "DYNAMIC_FUNCTION", "DYNAMIC_FUNCTION_CODE", "EACH", "ELEMENT", "ELSE", "ELSEIF", "ENABLE",
549 "ENCLOSED", "ENCODING", "ENCRYPTED", "END", "END-EXEC", "ENUM", "EQUALS", "ERRLVL",
550 "ESCAPE", "ESCAPED", "EVERY", "EXCEPT", "EXCEPTION", "EXCLUDE", "EXCLUDING", "EXCLUSIVE",
551 "EXEC", "EXECUTE", "EXISTING", "EXISTS", "EXIT", "EXP", "EXPLAIN", "EXTERNAL", "EXTRACT",
552 "FALSE", "FETCH", "FIELDS", "FILE", "FILLFACTOR", "FILTER", "FINAL", "FIRST", "FLOAT",
553 "FLOAT4", "FLOAT8", "FLOOR", "FLUSH", "FOLLOWING", "FOR", "FORCE", "FOREIGN", "FORTRAN",
554 "FORWARD", "FOUND", "FREE", "FREETEXT", "FREETEXTTABLE", "FREEZE", "FROM", "FULL",
555 "FULLTEXT", "FUNCTION", "FUSION", "G", "GENERAL", "GENERATED", "GET", "GLOBAL", "GO",
556 "GOTO", "GRANT", "GRANTED", "GRANTS", "GREATEST", "GROUP", "GROUPING", "HANDLER", "HAVING",
557 "HEADER", "HEAP", "HIERARCHY", "HIGH_PRIORITY", "HOLD", "HOLDLOCK", "HOST", "HOSTS",
558 "HOUR", "HOUR_MICROSECOND", "HOUR_MINUTE", "HOUR_SECOND", "IDENTIFIED", "IDENTITY",
559 "IDENTITYCOL", "IDENTITY_INSERT", "IF", "IGNORE", "ILIKE", "IMMEDIATE", "IMMUTABLE",
560 "IMPLEMENTATION", "IMPLICIT", "IN", "INCLUDE", "INCLUDING", "INCREMENT", "INDEX",
561 "INDICATOR", "INFILE", "INFIX", "INHERIT", "INHERITS", "INITIAL", "INITIALIZE",
562 "INITIALLY", "INNER", "INOUT", "INPUT", "INSENSITIVE", "INSERT", "INSERT_ID", "INSTANCE",
563 "INSTANTIABLE", "INSTEAD", "INT", "INT1", "INT2", "INT3", "INT4", "INT8", "INTEGER",
564 "INTERSECT", "INTERSECTION", "INTERVAL", "INTO", "INVOKER", "IS", "ISAM", "ISNULL",
565 "ISOLATION", "ITERATE", "JOIN", "K", "KEY", "KEYS", "KEY_MEMBER", "KEY_TYPE", "KILL",
566 "LANCOMPILER", "LANGUAGE", "LARGE", "LAST", "LAST_INSERT_ID", "LATERAL", "LEAD", "LEADING",
567 "LEAST", "LEAVE", "LEFT", "LENGTH", "LESS", "LEVEL", "LIKE", "LIMIT", "LINENO", "LINES",
568 "LISTEN", "LN", "LOAD", "LOCAL", "LOCALTIME", "LOCALTIMESTAMP", "LOCATION", "LOCATOR",
569 "LOCK", "LOGIN", "LOGS", "LONG", "LONGBLOB", "LONGTEXT", "LOOP", "LOWER", "LOW_PRIORITY",
570 "M", "MAP", "MATCH", "MATCHED", "MAX", "MAXEXTENTS", "MAXVALUE", "MAX_ROWS", "MEDIUMBLOB",
571 "MEDIUMINT", "MEDIUMTEXT", "MEMBER", "MERGE", "MESSAGE_LENGTH", "MESSAGE_OCTET_LENGTH",
572 "MESSAGE_TEXT", "METHOD", "MIDDLEINT", "MIN", "MINUS", "MINUTE", "MINUTE_MICROSECOND",
573 "MINUTE_SECOND", "MINVALUE", "MIN_ROWS", "MLSLABEL", "MOD", "MODE", "MODIFIES", "MODIFY",
574 "MODULE", "MONTH", "MONTHNAME", "MORE", "MOVE", "MULTISET", "MUMPS", "MYISAM", "NAME",
575 "NAMES", "NATIONAL", "NATURAL", "NCHAR", "NCLOB", "NESTING", "NEW", "NEXT", "NO",
576 "NOAUDIT", "NOCHECK", "NOCOMPRESS", "NOCREATEDB", "NOCREATEROLE", "NOCREATEUSER",
577 "NOINHERIT", "NOLOGIN", "NONCLUSTERED", "NONE", "NORMALIZE", "NORMALIZED", "NOSUPERUSER",
578 "NOT", "NOTHING", "NOTIFY", "NOTNULL", "NOWAIT", "NO_WRITE_TO_BINLOG", "NULL", "NULLABLE",
579 "NULLIF", "NULLS", "NUMBER", "NUMERIC", "OBJECT", "OCTETS", "OCTET_LENGTH", "OF", "OFF",
580 "OFFLINE", "OFFSET", "OFFSETS", "OIDS", "OLD", "ON", "ONLINE", "ONLY", "OPEN",
581 "OPENDATASOURCE", "OPENQUERY", "OPENROWSET", "OPENXML", "OPERATION", "OPERATOR",
582 "OPTIMIZE", "OPTION", "OPTIONALLY", "OPTIONS", "OR", "ORDER", "ORDERING", "ORDINALITY",
583 "OTHERS", "OUT", "OUTER", "OUTFILE", "OUTPUT", "OVER", "OVERLAPS", "OVERLAY", "OVERRIDING",
584 "OWNER", "PACK_KEYS", "PAD", "PARAMETER", "PARAMETERS", "PARAMETER_MODE", "PARAMETER_NAME",
585 "PARAMETER_ORDINAL_POSITION", "PARAMETER_SPECIFIC_CATALOG", "PARAMETER_SPECIFIC_NAME",
586 "PARAMETER_SPECIFIC_SCHEMA", "PARTIAL", "PARTITION", "PASCAL", "PASSWORD", "PATH",
587 "PCTFREE", "PERCENT", "PERCENTILE_CONT", "PERCENTILE_DISC", "PERCENT_RANK", "PLACING",
588 "PLAN", "PLI", "POSITION", "POSTFIX", "POWER", "PRAGMA", "PRECEDING", "PRECISION",
589 "PREFIX", "PREORDER", "PREPARE", "PREPARED", "PRESERVE", "PRIMARY", "PRINT", "PRIOR",
590 "PRIVILEGES", "PROC", "PROCEDURAL", "PROCEDURE", "PROCESS", "PROCESSLIST", "PUBLIC",
591 "PURGE", "QUOTE", "RAID0", "RAISE", "RAISERROR", "RANGE", "RANK", "RAW", "READ", "READS",
592 "READTEXT", "REAL", "RECHECK", "RECONFIGURE", "RECURSIVE", "REF", "REFERENCES",
593 "REFERENCING", "REGEXP", "REGR_AVGX", "REGR_AVGY", "REGR_COUNT", "REGR_INTERCEPT",
594 "REGR_R2", "REGR_SLOPE", "REGR_SXX", "REGR_SXY", "REGR_SYY", "REINDEX", "RELATIVE",
595 "RELEASE", "RELOAD", "RENAME", "REPEAT", "REPEATABLE", "REPLACE", "REPLICATION", "REQUIRE",
596 "RESET", "RESIGNAL", "RESOURCE", "RESTART", "RESTORE", "RESTRICT", "RESULT", "RETURN",
597 "RETURNED_CARDINALITY", "RETURNED_LENGTH", "RETURNED_OCTET_LENGTH", "RETURNED_SQLSTATE",
598 "RETURNS", "REVOKE", "RIGHT", "RLIKE", "ROLE", "ROLLBACK", "ROLLUP", "ROUTINE",
599 "ROUTINE_CATALOG", "ROUTINE_NAME", "ROUTINE_SCHEMA", "ROW", "ROWCOUNT", "ROWGUIDCOL",
600 "ROWID", "ROWNUM", "ROWS", "ROW_COUNT", "ROW_NUMBER", "RULE", "SAVE", "SAVEPOINT", "SCALE",
601 "SCHEMA", "SCHEMAS", "SCHEMA_NAME", "SCOPE", "SCOPE_CATALOG", "SCOPE_NAME", "SCOPE_SCHEMA",
602 "SCROLL", "SEARCH", "SECOND", "SECOND_MICROSECOND", "SECTION", "SECURITY", "SELECT",
603 "SELF", "SENSITIVE", "SEPARATOR", "SEQUENCE", "SERIALIZABLE", "SERVER_NAME", "SESSION",
604 "SESSION_USER", "SET", "SETOF", "SETS", "SETUSER", "SHARE", "SHOW", "SHUTDOWN", "SIGNAL",
605 "SIMILAR", "SIMPLE", "SIZE", "SMALLINT", "SOME", "SONAME", "SOURCE", "SPACE", "SPATIAL",
606 "SPECIFIC", "SPECIFICTYPE", "SPECIFIC_NAME", "SQL", "SQLCA", "SQLCODE", "SQLERROR",
607 "SQLEXCEPTION", "SQLSTATE", "SQLWARNING", "SQL_BIG_RESULT", "SQL_BIG_SELECTS",
608 "SQL_BIG_TABLES", "SQL_CALC_FOUND_ROWS", "SQL_LOG_OFF", "SQL_LOG_UPDATE",
609 "SQL_LOW_PRIORITY_UPDATES", "SQL_SELECT_LIMIT", "SQL_SMALL_RESULT", "SQL_WARNINGS", "SQRT",
610 "SSL", "STABLE", "START", "STARTING", "STATE", "STATEMENT", "STATIC", "STATISTICS",
611 "STATUS", "STDDEV_POP", "STDDEV_SAMP", "STDIN", "STDOUT", "STORAGE", "STRAIGHT_JOIN",
612 "STRICT", "STRING", "STRUCTURE", "STYLE", "SUBCLASS_ORIGIN", "SUBLIST", "SUBMULTISET",
613 "SUBSTRING", "SUCCESSFUL", "SUM", "SUPERUSER", "SYMMETRIC", "SYNONYM", "SYSDATE", "SYSID",
614 "SYSTEM", "SYSTEM_USER", "TABLE", "TABLES", "TABLESAMPLE", "TABLESPACE", "TABLE_NAME",
615 "TEMP", "TEMPLATE", "TEMPORARY", "TERMINATE", "TERMINATED", "TEXT", "TEXTSIZE", "THAN",
616 "THEN", "TIES", "TIME", "TIMESTAMP", "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TINYBLOB",
617 "TINYINT", "TINYTEXT", "TO", "TOAST", "TOP", "TOP_LEVEL_COUNT", "TRAILING", "TRAN",
618 "TRANSACTION", "TRANSACTIONS_COMMITTED", "TRANSACTIONS_ROLLED_BACK", "TRANSACTION_ACTIVE",
619 "TRANSFORM", "TRANSFORMS", "TRANSLATE", "TRANSLATION", "TREAT", "TRIGGER",
620 "TRIGGER_CATALOG", "TRIGGER_NAME", "TRIGGER_SCHEMA", "TRIM", "TRUE", "TRUNCATE", "TRUSTED",
621 "TSEQUAL", "TYPE", "UESCAPE", "UID", "UNBOUNDED", "UNCOMMITTED", "UNDER", "UNDO",
622 "UNENCRYPTED", "UNION", "UNIQUE", "UNKNOWN", "UNLISTEN", "UNLOCK", "UNNAMED", "UNNEST",
623 "UNSIGNED", "UNTIL", "UPDATE", "UPDATETEXT", "UPPER", "USAGE", "USE", "USER",
624 "USER_DEFINED_TYPE_CATALOG", "USER_DEFINED_TYPE_CODE", "USER_DEFINED_TYPE_NAME",
625 "USER_DEFINED_TYPE_SCHEMA", "USING", "UTC_DATE", "UTC_TIME", "UTC_TIMESTAMP", "VACUUM",
626 "VALID", "VALIDATE", "VALIDATOR", "VALUE", "VALUES", "VARBINARY", "VARCHAR", "VARCHAR2",
627 "VARCHARACTER", "VARIABLE", "VARIABLES", "VARYING", "VAR_POP", "VAR_SAMP", "VERBOSE",
628 "VIEW", "VOLATILE", "WAITFOR", "WHEN", "WHENEVER", "WHERE", "WHILE", "WIDTH_BUCKET",
629 "WINDOW", "WITH", "WITHIN", "WITHOUT", "WORK", "WRITE", "WRITETEXT", "X509", "XOR", "YEAR",
630 "YEAR_MONTH", "ZEROFILL", "ZONE",
631 ]
632
633
634def quote(name, force=False):
635 """
636 Returns name in quotes and proper-escaped for SQL queries.
637
638 @param force quote even if name does not need quoting (starts with a letter,
639 contains only alphanumerics, and is not a reserved keyword)
640 """
641 result = name
642 if force or result.upper() in SqlMixin.KEYWORDS \
643 or re.search(r"(^[\W\d])|(?=\W)", result, re.U):
644 result = '"%s"' % result.replace('"', '""')
645 return result
646
647
648__all__ = ["SqlMixin", "quote"]
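
A few usage sketches for the mixin and helpers above follow; they assume grepros and its dependencies (PyYAML among them) are installed so that grepros.plugins.auto.sqlbase imports cleanly.

validate() reads WRITE_OPTIONS from self.args, which the concrete writer class is expected to provide, as SqlMixin.__init__() itself does not set it. A minimal wiring sketch, with a made-up DemoSqlSink subclass and a hand-built namespace standing in for the real sink machinery (the api helpers called during validation assume a detectable ROS1/ROS2 installation):

    import argparse

    from grepros.plugins.auto.sqlbase import SqlMixin

    class DemoSqlSink(SqlMixin):  # hypothetical subclass, for illustration only
        def __init__(self, args=None, **kwargs):
            super().__init__(args, **kwargs)
            # Real sinks receive self.args from their base class; faked here.
            self.args = argparse.Namespace(
                WRITE_OPTIONS={"dialect": "postgres", "nesting": True})

    sink = DemoSqlSink()
    print(sink.validate())   # -> True: "postgres" is a known dialect, no dialect-file given
    print(sink._dialect)     # -> postgres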
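The "dialect-file" write option lets _validate_dialect_file() load further dialects, or extra options for existing ones, from a YAML/JSON file: a mapping of dialect name to an options dictionary, where "adapters" values are dotted import paths resolved via import_item() and the result is merged into SqlMixin.DIALECTS with merge_dicts(). The sketch below builds such a definition directly in Python; the same mapping, written as YAML, is what a dialect file would contain (the "mydb" name and its type and adapter choices are made-up examples, not shipped with grepros):

    from grepros.common import import_item, merge_dicts
    from grepros.plugins.auto.sqlbase import SqlMixin

    # Same structure a "dialect-file" would hold: {dialect name: {option: value}}.
    custom = {
        "mydb": {
            "types": {"bool": "TINYINT", "string": "VARCHAR(255)", "float64": "DOUBLE"},
            "defaulttype": "TEXT",                 # fallback SQL type for unmapped ROS types
            "adapters": {"bool": "builtins.int"},  # dotted paths, resolved below
            "maxlen_entity": 64,                   # table/view names truncated to this length
        },
        "sqlite": {"maxlen_column": 64},           # extra options for an existing dialect
    }
    for opts in custom.values():                   # what _validate_dialect_file() does:
        for k, v in list(opts.get("adapters", {}).items()):
            opts["adapters"][k] = import_item(v)   # resolve adapter callables ...
    merge_dicts(SqlMixin.DIALECTS, custom)         # ... and merge into the known dialects

    WRITE_OPTIONS = {"dialect": "mydb"}            # now selectable like any bundled dialect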
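_make_update_sql() assembles a parameterized UPDATE statement from the dialect's positional-argument placeholder ("posarg") and quote(); the sqlite dialect uses "?". A sketch that sets the private _dialect attribute directly, which validate() would normally do:

    from grepros.plugins.auto.sqlbase import SqlMixin

    sql = SqlMixin()
    sql._dialect = "sqlite"

    stmt, args = sql._make_update_sql("topics", {"view_name": "my_view"}, {"id": 3})
    print(stmt)   # -> UPDATE topics SET view_name = ? WHERE id = ?
    print(args)   # -> ['my_view', 3]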
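_make_name() and _make_column_names() sanitize and deduplicate identifiers according to the dialect's maxlen and invalid-character options; postgres, for instance, caps entity and column names at 63 characters. A small sketch of the duplicate handling (the names here are short enough that no truncation occurs):

    from grepros.plugins.auto.sqlbase import SqlMixin

    sql = SqlMixin()
    sql._dialect = "postgres"   # maxlen_column = 63, no invalid-character regex

    print(sql._make_column_names(["id", "id", "data"]))
    # -> ['id', 'id (2)', 'data']: the repeated name receives a " (2)" suffix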
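Dialect options are looked up with _get_dialect_option(), which falls back to the None entry of DIALECTS, so a dialect only needs to override what differs from the defaults:

    from grepros.plugins.auto.sqlbase import SqlMixin

    sql = SqlMixin()
    sql._dialect = "clickhouse"

    print(sql._get_dialect_option("arraytype_template"))  # -> Array({type})  (overridden)
    print(sql._get_dialect_option("posarg"))              # -> %s  (inherited from defaults)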
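Finally, the module-level quote() helper double-quotes an identifier whenever it is a reserved keyword, starts with a digit or non-word character, contains other non-word characters, or when force=True is passed; embedded double quotes are doubled:

    from grepros.plugins.auto.sqlbase import quote

    print(quote("topics"))             # -> topics      (plain identifier, left as is)
    print(quote("table"))              # -> "table"     (TABLE is a reserved keyword)
    print(quote("pose.x"))             # -> "pose.x"    (contains a non-word character)
    print(quote("2d_points"))          # -> "2d_points" (starts with a digit)
    print(quote("topics", force=True)) # -> "topics"
    print(quote('say "hi"'))           # -> "say ""hi"""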