grepros 1.3.0
grep for ROS bag files and live topics
Loading...
Searching...
No Matches
sqlbase.py
Go to the documentation of this file.
1# -*- coding: utf-8 -*-
2"""
3Base class for producing SQL for topics and messages.
4
5------------------------------------------------------------------------------
6This file is part of grepros - grep for ROS bag files and live topics.
7Released under the BSD License.
8
9@author Erki Suurjaak
10@created 03.01.2022
11@modified 13.02.2026
12------------------------------------------------------------------------------
13"""
14
15import json
16import re
17
18import yaml
19
20from ... import api
21from ... common import ConsolePrinter, ellipsize, import_item, merge_dicts
22
23
24
26 """
27 Base class for producing SQL for topics and messages.
28
29 Can load additional SQL dialects or additional options for existing dialects
30 from a YAML/JSON file.
31 """
32
33
    ## Default SQL dialect used if dialect not specified
    DEFAULT_DIALECT = "sqlite"

    ## Constructor argument defaults; WRITE_OPTIONS may contain "dialect", "dialect-file", "nesting"
    DEFAULT_ARGS = dict(WRITE_OPTIONS={})
38
39
    def __init__(self, args=None, **kwargs):
        """
        @param   args                 arguments as namespace or dictionary, case-insensitive
        @param   args.write_options   ```
                                      {"dialect": SQL dialect if not default,
                                       "nesting": true|false to create nested type tables}
                                      ```
        @param   kwargs               any and all arguments as keyword overrides, case-insensitive
        """
        # NOTE(review): args/kwargs are not consumed here; presumably processed by the
        # main class this mixin is combined with — confirm against users of SqlMixin.
        self._topics = {}  # {(topic, typename, typehash): {name, table_name, view_name, sql, ..}}
        self._types = {}  # {(typename, typehash): {type, table_name, sql, ..}}
        self._schema = {}  # {(typename, typehash): {cols}}
        self._sql_cache = {}  # {table: "INSERT INTO table VALUES (%s, ..)"}
        self._dialect = None  # Name of SQL dialect in use, populated in validate()
        self._nesting = None  # "nesting" flag from WRITE_OPTIONS, populated in validate()
56
57 def validate(self):
58 """
59 Returns whether arguments are valid.
60
61 Verifies that "dialect-file" is valid and "dialect" contains supported value, if any.
62 """
63 write_options = getattr(self.args, "WRITE_OPTIONS", {})
64 self._dialect = write_options.get("dialect", (self._dialect or self.DEFAULT_DIALECT))
65 self._nesting = write_options.get("nesting")
66 return all([self._validate_dialect_file(), self._validate_dialect()])
67
68
69 def _validate_dialect_file(self):
70 """Returns whether "dialect-file" is valid in args.WRITE_OPTIONS."""
71 ok = True
72 write_options = getattr(self.args, "WRITE_OPTIONS", {})
73 if write_options.get("dialect-file"):
74 filename = write_options["dialect-file"]
75 try:
76 with open(filename, encoding="utf-8") as f:
77 dialects = yaml.safe_load(f.read())
78 if any(not isinstance(v, dict) for v in dialects.values()):
79 raise Exception("Each dialect must be a dictionary.")
80 for opts in dialects.values():
81 for k, v in list(opts.get("adapters", {}).items()):
82 try: opts["adapters"][k] = import_item(v)
83 except ImportError:
84 ok = False
85 ConsolePrinter.error("Error loading adapter %r for %r "
86 "in SQL dialect file %r.", v, k, filename)
87 merge_dicts(self.DIALECTSDIALECTS, dialects)
88 except Exception as e:
89 ok = False
90 ConsolePrinter.error("Error reading SQL dialect file %r: %s", filename, e)
91
92 # Populate ROS type aliases like "byte" and "char"
93 for opts in self.DIALECTSDIALECTS.values() if ok else ():
94 for rostype in list(opts.get("types", {})):
95 alias = api.get_type_alias(rostype)
96 if alias:
97 opts["types"][alias] = opts["types"][rostype]
98 if alias and rostype + "[]" in opts["types"]:
99 opts["types"][alias + "[]"] = opts["types"][rostype + "[]"]
100
101 return ok
102
103
104 def _validate_dialect(self):
105 """Returns whether "dialect" is valid in args.WRITE_OPTIONS."""
106 ok = True
107 write_options = getattr(self.args, "WRITE_OPTIONS", {})
108 if "dialect" in write_options \
109 and write_options["dialect"] not in tuple(filter(bool, self.DIALECTSDIALECTS)):
110 ok = False
111 ConsolePrinter.error("Unknown dialect for SQL: %r. Choose one of {%s}.",
112 write_options["dialect"],
113 "|".join(sorted(filter(bool, self.DIALECTSDIALECTS))))
114 return ok
115
116
117 def close(self):
118 """Clears data structures."""
119 self._topics.clear()
120 self._types.clear()
121 self._schema.clear()
122 self._sql_cache.clear()
123
124
125 def _make_topic_data(self, topic, msg, exclude_cols=()):
126 """
127 Returns full data dictionary for topic, including view name and SQL.
128
129 @param exclude_cols list of column names to exclude from view SELECT, if any
130 @return {"name": topic name, "type": message type name as "pkg/Cls",
131 "table_name": message type table name, "view_name": topic view name,
132 "md5": message type definition MD5 hash, "sql": "CREATE VIEW .."}
133 """
134 with api.TypeMeta.make(msg, topic) as m:
135 typename, typehash, typekey = (m.typename, m.typehash, m.typekey)
136
137 table_name = self._types[typekey]["table_name"]
138 pkgname, clsname = typename.split("/", 1)
139 nameargs = {"topic": topic, "type": typename, "hash": typehash,
140 "package": pkgname, "class": clsname}
141 view_name = self._make_entity_name("view", nameargs)
142
143 sqlargs = dict(nameargs, view=quote(view_name), table=quote(table_name, force=True),
144 topic=repr(topic), cols="*")
145 if exclude_cols:
146 exclude_cols = [x[0] if isinstance(x, (list, tuple)) else x for x in exclude_cols]
147 select_cols = [c for c in self._schema[typekey] if c not in exclude_cols]
148 sqlargs["cols"] = ", ".join(quote(c) for c in select_cols)
149 sql = self._get_dialect_option("view_template").strip().format(**sqlargs)
150
151 return {"name": topic, "type": typename, "md5": typehash,
152 "sql": sql, "table_name": table_name, "view_name": view_name}
153
154
155 def _make_type_data(self, msg, extra_cols=(), rootmsg=None):
156 """
157 Returns full data dictionary for message type, including table name and SQL.
158
159 @param rootmsg top message this message is nested under, if any
160 @param extra_cols additional table columns, as [(column name, column def)]
161 @return {"type": message type name as "pkg/Cls",
162 "table_name": message type table name,
163 "definition": message type definition,
164 "cols": [(column name, column type)],
165 "md5": message type definition MD5 hash, "sql": "CREATE TABLE .."}
166 """
167 rootmsg = rootmsg or msg
168 with api.TypeMeta.make(msg, root=rootmsg) as m:
169 typename, typehash = (m.typename, m.typehash)
170
171 cols = []
172 scalars = set(x for x in self._get_dialect_option("types") if x == api.scalar(x))
173 for path, value, subtype in api.iter_message_fields(msg, scalars=scalars):
174 coltype = self._make_column_type(subtype)
175 cols += [(".".join(path), coltype)]
176 cols.extend(extra_cols or [])
177 cols = list(zip(self._make_column_names([c for c, _ in cols]), [t for _, t in cols]))
178 namewidth = 2 + max(len(n) for n, _ in cols)
179 coldefs = ["%s %s" % (quote(n).ljust(namewidth), t) for n, t in cols]
180
181 pkgname, clsname = typename.split("/", 1)
182 nameargs = {"type": typename, "hash": typehash, "package": pkgname, "class": clsname}
183 table_name = self._make_entity_name("table", nameargs)
184
185 sqlargs = dict(nameargs, table=quote(table_name), cols="\n %s\n" % ",\n ".join(coldefs))
186 sql = self._get_dialect_option("table_template").strip().format(**sqlargs)
187 return {"type": typename, "md5": typehash,
188 "definition": api.TypeMeta.make(msg).definition,
189 "table_name": table_name, "cols": cols, "sql": sql}
190
191
192 def _make_topic_insert_sql(self, topic, msg):
193 """Returns ("INSERT ..", [args]) for inserting into topics-table."""
194 POSARG = self._get_dialect_option("posarg")
195 topickey = api.TypeMeta.make(msg, topic).topickey
196 tdata = self._topics[topickey]
197
198 sql = self._get_dialect_option("insert_topic").strip().replace("%s", POSARG)
199 args = [tdata[k] for k in ("name", "type", "md5", "table_name", "view_name")]
200 return sql, args
201
202
203 def _make_type_insert_sql(self, msg):
204 """Returns ("INSERT ..", [args]) for inserting into types-table."""
205 POSARG = self._get_dialect_option("posarg")
206 typekey = api.TypeMeta.make(msg).typekey
207 tdata = self._types[typekey]
208
209 sql = self._get_dialect_option("insert_type").strip().replace("%s", POSARG)
210 args = [tdata[k] for k in ("type", "definition", "md5", "table_name")]
211 return sql, args
212
213
214 def _make_message_insert_sql(self, topic, msg, extra_cols=()):
215 """
216 Returns ("INSERT ..", [args]) for inserting into message type table.
217
218 @param extra_cols list of additional table columns, as [(name, value)]
219 """
220 typekey = api.TypeMeta.make(msg, topic).typekey
221 table_name = self._types[typekey]["table_name"]
222 sql, cols, args = self._sql_cache.get(table_name), [], []
223
224 scalars = set(x for x in self._get_dialect_option("types") if x == api.scalar(x))
225 for p, v, t in api.iter_message_fields(msg, scalars=scalars):
226 if not sql: cols.append(".".join(p))
227 args.append(self._make_column_value(v, t))
228 args = tuple(args) + tuple(v for _, v in extra_cols)
229
230 if not sql:
231 POSARG = self._get_dialect_option("posarg")
232 if extra_cols: cols.extend(c for c, _ in extra_cols)
233 sql = "INSERT INTO %s (%s) VALUES (%s)" % \
234 (quote(table_name), ", ".join(map(quote, cols)),
235 ", ".join([POSARG] * len(args)))
236 self._sql_cache[table_name] = sql
237
238 return sql, args
239
240
241 def _make_update_sql(self, table, values, where=()):
242 """Returns ("UPDATE ..", [args])."""
243 POSARG = self._get_dialect_option("posarg")
244 sql, args, sets, filters = "UPDATE %s SET " % quote(table), [], [], []
245 for lst, vals in [(sets, values), (filters, where)]:
246 for k, v in vals.items() if isinstance(vals, dict) else vals:
247 lst.append("%s = %s" % (quote(k), POSARG))
248 args.append(self._make_column_value(v))
249 sql += ", ".join(sets) + (" WHERE " if filters else "") + " AND ".join(filters)
250 return sql, args
251
252
253 def _make_entity_name(self, category, args):
254 """
255 Returns valid unique name for table/view.
256
257 @param args format arguments for table/view name template
258 """
259 name = self._get_dialect_option("%s_name_template" % category).format(**args)
260 existing = set(sum(([x["table_name"], x.get("view_name")]
261 for dct in (self._topics, self._types)
262 for x in dct.values()), []))
263 return self._make_name("entity", name, existing)
264
265
    def _make_name(self, category, name, existing=()):
        """
        Returns a valid unique name for table/view/column.

        Replaces invalid characters and constrains length, per current dialect options.
        If name already exists, appends counter like " (2)".

        @param   category  "column" to apply column-name length limit, anything else
                           applies the table/view entity limit
        @param   existing  collection of names already in use, for uniqueness
        """
        MAXLEN_ARG = "maxlen_column" if "column" == category else "maxlen_entity"
        MAXLEN = self._get_dialect_option(MAXLEN_ARG)
        INVALID_RGX = self._get_dialect_option("invalid_char_regex")
        INVALID_REPL = self._get_dialect_option("invalid_char_repl")
        # No length limit and no character restrictions: name needs no processing
        if not MAXLEN and not INVALID_RGX: return name

        name1 = re.sub(INVALID_RGX, INVALID_REPL, name) if INVALID_RGX else name
        # NOTE(review): if only INVALID_RGX is set, MAXLEN is 0 here —
        # presumably ellipsize() treats a falsy limit as "no limit"; confirm.
        name2 = ellipsize(name1, MAXLEN)
        counter = 2
        # Append " (2)", " (3)" .. until unique, shortening the base to keep within limit
        while name2 in existing:
            suffix = " (%s)" % counter
            name2 = ellipsize(name1, MAXLEN - len(suffix)) + suffix
            counter += 1
        return name2
287
288
289 def _make_column_names(self, col_names):
290 """Returns valid unique names for table columns."""
291 result = []
292 for name in col_names:
293 result.append(self._make_name("column", name, result))
294 return list(result)
295
296
297 def _make_column_value(self, value, typename=None):
298 """Returns column value suitable for inserting to database."""
299 if not typename: return value
300
301 v = value
302 if isinstance(v, (list, tuple)):
303 scalartype = api.scalar(typename)
304 if scalartype in api.ROS_TIME_TYPES:
305 v = [self._convert_time(x) for x in v]
306 elif scalartype not in api.ROS_BUILTIN_TYPES:
307 if self._nesting: v = []
308 else: v = [api.message_to_dict(x) for x in v]
309 else:
310 v = self._convert_column_value(v, typename)
311 elif api.is_ros_time(v):
312 v = self._convert_time_value(v, typename)
313 elif typename not in api.ROS_BUILTIN_TYPES:
314 v = json.dumps(api.message_to_dict(v))
315 else:
316 v = self._convert_column_value(v, typename)
317 return v
318
319
320 def _make_column_type(self, typename, fallback=None):
321 """
322 Returns column type for SQL.
323
324 @param fallback fallback typename to use for lookup if no mapping for typename
325 """
326 TYPES = self._get_dialect_option("types")
327 ARRAYTEMPLATE = self._get_dialect_option("arraytype_template")
328 DEFAULTTYPE = self._get_dialect_option("defaulttype")
329
330 scalartype = api.scalar(typename)
331 timetype = api.get_ros_time_category(scalartype)
332 coltype = TYPES.get(typename) or TYPES.get(api.canonical(typename, unbounded=True))
333
334 if not coltype and scalartype in TYPES:
335 coltype = ARRAYTEMPLATE.format(type=TYPES[scalartype])
336 if not coltype and timetype in TYPES:
337 if typename != scalartype:
338 coltype = ARRAYTEMPLATE.format(type=TYPES[timetype])
339 else:
340 coltype = TYPES[timetype]
341 if not coltype and fallback:
342 coltype = self._make_column_type(fallback)
343 if not coltype:
344 coltype = DEFAULTTYPE or quote(typename)
345 return coltype
346
347
348 def _convert_column_value(self, value, typename):
349 """Returns ROS value converted to dialect value."""
350 ADAPTERS = self._get_dialect_option("adapters")
351 if not ADAPTERS: return value
352
353 adapter, iterate = ADAPTERS.get(typename), False
354 if not adapter and isinstance(value, (list, tuple)):
355 adapter, iterate = ADAPTERS.get(api.scalar(typename)), True
356 if adapter:
357 value = [adapter(x) for x in value] if iterate else adapter(value)
358 return value
359
360
    def _convert_time_value(self, value, typename):
        """
        Returns ROS time/duration value converted to dialect value.

        Uses the adapter registered for typename in dialect options if any,
        else falls back to converting the value to a decimal of seconds.
        """
        adapter = self._get_dialect_option("adapters").get(typename)
        if adapter:
            try: is_int = issubclass(adapter, int)
            except Exception: is_int = False  # Adapter is a callable, not a class
            # Integer target type: fractional part would be lost anyway
            if is_int: v = api.to_sec(value)
            else:
                # Build "seconds.nanoseconds" string, normalizing sign for negative durations
                secs, nsecs = api.to_sec_nsec(value)
                if nsecs < 0: secs, nsecs = secs - 1, 10**9 + nsecs  # Proper form is secs<0 nsecs>0
                if secs < 0 and nsecs:
                    # E.g. secs=-1 nsecs=500000000 (i.e. -0.5s) renders as "-0.500000000"
                    v = "-%d.%09d" % (abs(secs) - 1, 10**9 - nsecs)
                else: v = "%d.%09d" % (secs, nsecs)
            result = adapter(v)
        else:
            result = api.to_decimal(value)
        return result
378
379
380 def _get_dialect_option(self, option):
381 """Returns option for current SQL dialect, falling back to default dialect."""
382 return self.DIALECTSDIALECTS[self._dialect].get(option, self.DIALECTSDIALECTS[None].get(option))
383
384
385
    ## Supported SQL dialects and options; None key holds defaults shared by all dialects
    DIALECTS = {

        None: {
            # CREATE TABLE template, args: table, cols, type, hash, package, class
            "table_template": "CREATE TABLE IF NOT EXISTS {table} ({cols});",
            # CREATE VIEW template, args: view, cols, table, topic, type, hash, package, class
            "view_template": """
DROP VIEW IF EXISTS {view};

CREATE VIEW {view} AS
SELECT {cols}
FROM {table}
WHERE _topic = {topic};""",
            "table_name_template": "{type}", # args: type, hash, package, class
            "view_name_template": "{topic}", # args: topic, type, hash, package, class
            "types": {}, # Mapping between ROS and SQL common types
            "adapters": {}, # Mapping between ROS types and callable converters
            "defaulttype": None, # Fallback SQL type if no mapping for ROS type
            "arraytype_template": "{type}[]", # Array type template, args: type
            "maxlen_entity": 0, # Maximum table/view name length, 0 disables
            "maxlen_column": 0, # Maximum column name length, 0 disables
            "invalid_char_regex": None, # Regex for matching invalid characters in name
            "invalid_char_repl": "__", # Replacement for invalid characters in name

            "insert_topic": """
INSERT INTO topics (name, type, md5, table_name, view_name)
VALUES (%s, %s, %s, %s, %s);""",
            "insert_type": """
INSERT INTO types (type, definition, md5, table_name)
VALUES (%s, %s, %s, %s);""",
            # Positional-argument placeholder in INSERT statements
            "posarg": "%s",
        },

        "sqlite": {
            "posarg": "?",
            "base_schema": """
CREATE TABLE IF NOT EXISTS messages (
  id INTEGER PRIMARY KEY,
  topic_id INTEGER NOT NULL,
  timestamp INTEGER NOT NULL,
  data BLOB NOT NULL,

  topic TEXT NOT NULL,
  type TEXT NOT NULL,
  dt TIMESTAMP NOT NULL,
  yaml TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS topics (
  id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
  name TEXT NOT NULL,
  type TEXT NOT NULL,
  serialization_format TEXT DEFAULT "cdr",
  offered_qos_profiles TEXT DEFAULT "",

  md5 TEXT NOT NULL,
  table_name TEXT NOT NULL,
  view_name TEXT
);

CREATE TABLE IF NOT EXISTS types (
  id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
  type TEXT NOT NULL,
  definition TEXT NOT NULL,
  md5 TEXT NOT NULL,
  table_name TEXT NOT NULL,
  nested_tables JSON
);

CREATE INDEX IF NOT EXISTS timestamp_idx ON messages (timestamp ASC);

PRAGMA journal_mode = WAL;
""",
            "insert_message": """
INSERT INTO messages (topic_id, timestamp, data, topic, type, dt, yaml)
VALUES (:topic_id, :timestamp, :data, :topic, :type, :dt, :yaml)
""",
        },

        "postgres": {
            "types": {
                "int8": "SMALLINT", "int16": "SMALLINT", "int32": "INTEGER",
                "uint8": "SMALLINT", "uint16": "INTEGER", "uint32": "BIGINT",
                "int64": "BIGINT", "uint64": "BIGINT", "bool": "BOOLEAN",
                "string": "TEXT", "wstring": "TEXT", "uint8[]": "BYTEA",
                "float32": "REAL", "float64": "DOUBLE PRECISION",
            },
            "defaulttype": "JSONB",
            "maxlen_entity": 63,
            "maxlen_column": 63,

            "insert_topic": """
INSERT INTO topics (name, type, md5, table_name, view_name)
VALUES (%s, %s, %s, %s, %s)
RETURNING id;""",
            "insert_type": """
INSERT INTO types (type, definition, md5, table_name)
VALUES (%s, %s, %s, %s)
RETURNING id;""",
            "base_schema": """
CREATE TABLE IF NOT EXISTS topics (
  id BIGSERIAL PRIMARY KEY,
  name TEXT NOT NULL,
  type TEXT NOT NULL,
  md5 TEXT NOT NULL,
  table_name TEXT NOT NULL,
  view_name TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS types (
  id BIGSERIAL PRIMARY KEY,
  type TEXT NOT NULL,
  definition TEXT NOT NULL,
  md5 TEXT NOT NULL,
  table_name TEXT NOT NULL,
  nested_tables JSON
);""",
        },

        "clickhouse": {
            "table_template": "CREATE TABLE IF NOT EXISTS {table} ({cols}) ENGINE = ENGINE;",
            "types": {
                "int8": "Int8", "int16": "Int16", "int32": "Int32",
                "uint8": "UInt8", "uint16": "UInt16", "uint32": "UInt32",
                "int64": "Int64", "uint64": "UInt64", "bool": "UInt8",
                "float32": "Float32", "float64": "Float64", "uint8[]": "String",
                "string": "String", "wstring": "String",
            },
            "defaulttype": "String",
            "arraytype_template": "Array({type})",
        },
    }
518
519
520
    ## Words that get quoted if used as table/view/column names in SQL, see quote()
    KEYWORDS = [
        "A", "ABORT", "ABS", "ABSOLUTE", "ACCESS", "ACTION", "ADA", "ADD", "ADMIN", "AFTER",
        "AGGREGATE", "ALIAS", "ALL", "ALLOCATE", "ALSO", "ALTER", "ALWAYS", "ANALYSE", "ANALYZE",
        "AND", "ANY", "ARE", "ARRAY", "AS", "ASC", "ASENSITIVE", "ASSERTION", "ASSIGNMENT",
        "ASYMMETRIC", "AT", "ATOMIC", "ATTACH", "ATTRIBUTE", "ATTRIBUTES", "AUDIT",
        "AUTHORIZATION", "AUTOINCREMENT", "AUTO_INCREMENT", "AVG", "AVG_ROW_LENGTH", "BACKUP",
        "BACKWARD", "BEFORE", "BEGIN", "BERNOULLI", "BETWEEN", "BIGINT", "BINARY", "BIT", "BITVAR",
        "BIT_LENGTH", "BLOB", "BOOL", "BOOLEAN", "BOTH", "BREADTH", "BREAK", "BROWSE", "BULK",
        "BY", "C", "CACHE", "CALL", "CALLED", "CARDINALITY", "CASCADE", "CASCADED", "CASE", "CAST",
        "CATALOG", "CATALOG_NAME", "CEIL", "CEILING", "CHAIN", "CHANGE", "CHAR", "CHARACTER",
        "CHARACTERISTICS", "CHARACTERS", "CHARACTER_LENGTH", "CHARACTER_SET_CATALOG",
        "CHARACTER_SET_NAME", "CHARACTER_SET_SCHEMA", "CHAR_LENGTH", "CHECK", "CHECKED",
        "CHECKPOINT", "CHECKSUM", "CLASS", "CLASS_ORIGIN", "CLOB", "CLOSE", "CLUSTER", "CLUSTERED",
        "COALESCE", "COBOL", "COLLATE", "COLLATION", "COLLATION_CATALOG", "COLLATION_NAME",
        "COLLATION_SCHEMA", "COLLECT", "COLUMN", "COLUMNS", "COLUMN_NAME", "COMMAND_FUNCTION",
        "COMMAND_FUNCTION_CODE", "COMMENT", "COMMIT", "COMMITTED", "COMPLETION", "COMPRESS",
        "COMPUTE", "CONDITION", "CONDITION_NUMBER", "CONNECT", "CONNECTION", "CONNECTION_NAME",
        "CONSTRAINT", "CONSTRAINTS", "CONSTRAINT_CATALOG", "CONSTRAINT_NAME", "CONSTRAINT_SCHEMA",
        "CONSTRUCTOR", "CONTAINS", "CONTAINSTABLE", "CONTINUE", "CONVERSION", "CONVERT", "COPY",
        "CORR", "CORRESPONDING", "COUNT", "COVAR_POP", "COVAR_SAMP", "CREATE", "CREATEDB",
        "CREATEROLE", "CREATEUSER", "CROSS", "CSV", "CUBE", "CUME_DIST", "CURRENT", "CURRENT_DATE",
        "CURRENT_DEFAULT_TRANSFORM_GROUP", "CURRENT_PATH", "CURRENT_ROLE", "CURRENT_TIME",
        "CURRENT_TIMESTAMP", "CURRENT_TRANSFORM_GROUP_FOR_TYPE", "CURRENT_USER", "CURSOR",
        "CURSOR_NAME", "CYCLE", "DATA", "DATABASE", "DATABASES", "DATE", "DATETIME",
        "DATETIME_INTERVAL_CODE", "DATETIME_INTERVAL_PRECISION", "DAY", "DAYOFMONTH", "DAYOFWEEK",
        "DAYOFYEAR", "DAY_HOUR", "DAY_MICROSECOND", "DAY_MINUTE", "DAY_SECOND", "DBCC",
        "DEALLOCATE", "DEC", "DECIMAL", "DECLARE", "DEFAULT", "DEFAULTS", "DEFERRABLE", "DEFERRED",
        "DEFINED", "DEFINER", "DEGREE", "DELAYED", "DELAY_KEY_WRITE", "DELETE", "DELIMITER",
        "DELIMITERS", "DENSE_RANK", "DENY", "DEPTH", "DEREF", "DERIVED", "DESC", "DESCRIBE",
        "DESCRIPTOR", "DESTROY", "DESTRUCTOR", "DETACH", "DETERMINISTIC", "DIAGNOSTICS",
        "DICTIONARY", "DISABLE", "DISCONNECT", "DISK", "DISPATCH", "DISTINCT", "DISTINCTROW",
        "DISTRIBUTED", "DIV", "DO", "DOMAIN", "DOUBLE", "DROP", "DUAL", "DUMMY", "DUMP", "DYNAMIC",
        "DYNAMIC_FUNCTION", "DYNAMIC_FUNCTION_CODE", "EACH", "ELEMENT", "ELSE", "ELSEIF", "ENABLE",
        "ENCLOSED", "ENCODING", "ENCRYPTED", "END", "END-EXEC", "ENUM", "EQUALS", "ERRLVL",
        "ESCAPE", "ESCAPED", "EVERY", "EXCEPT", "EXCEPTION", "EXCLUDE", "EXCLUDING", "EXCLUSIVE",
        "EXEC", "EXECUTE", "EXISTING", "EXISTS", "EXIT", "EXP", "EXPLAIN", "EXTERNAL", "EXTRACT",
        "FALSE", "FETCH", "FIELDS", "FILE", "FILLFACTOR", "FILTER", "FINAL", "FIRST", "FLOAT",
        "FLOAT4", "FLOAT8", "FLOOR", "FLUSH", "FOLLOWING", "FOR", "FORCE", "FOREIGN", "FORTRAN",
        "FORWARD", "FOUND", "FREE", "FREETEXT", "FREETEXTTABLE", "FREEZE", "FROM", "FULL",
        "FULLTEXT", "FUNCTION", "FUSION", "G", "GENERAL", "GENERATED", "GET", "GLOBAL", "GO",
        "GOTO", "GRANT", "GRANTED", "GRANTS", "GREATEST", "GROUP", "GROUPING", "HANDLER", "HAVING",
        "HEADER", "HEAP", "HIERARCHY", "HIGH_PRIORITY", "HOLD", "HOLDLOCK", "HOST", "HOSTS",
        "HOUR", "HOUR_MICROSECOND", "HOUR_MINUTE", "HOUR_SECOND", "IDENTIFIED", "IDENTITY",
        "IDENTITYCOL", "IDENTITY_INSERT", "IF", "IGNORE", "ILIKE", "IMMEDIATE", "IMMUTABLE",
        "IMPLEMENTATION", "IMPLICIT", "IN", "INCLUDE", "INCLUDING", "INCREMENT", "INDEX",
        "INDICATOR", "INFILE", "INFIX", "INHERIT", "INHERITS", "INITIAL", "INITIALIZE",
        "INITIALLY", "INNER", "INOUT", "INPUT", "INSENSITIVE", "INSERT", "INSERT_ID", "INSTANCE",
        "INSTANTIABLE", "INSTEAD", "INT", "INT1", "INT2", "INT3", "INT4", "INT8", "INTEGER",
        "INTERSECT", "INTERSECTION", "INTERVAL", "INTO", "INVOKER", "IS", "ISAM", "ISNULL",
        "ISOLATION", "ITERATE", "JOIN", "K", "KEY", "KEYS", "KEY_MEMBER", "KEY_TYPE", "KILL",
        "LANCOMPILER", "LANGUAGE", "LARGE", "LAST", "LAST_INSERT_ID", "LATERAL", "LEAD", "LEADING",
        "LEAST", "LEAVE", "LEFT", "LENGTH", "LESS", "LEVEL", "LIKE", "LIMIT", "LINENO", "LINES",
        "LISTEN", "LN", "LOAD", "LOCAL", "LOCALTIME", "LOCALTIMESTAMP", "LOCATION", "LOCATOR",
        "LOCK", "LOGIN", "LOGS", "LONG", "LONGBLOB", "LONGTEXT", "LOOP", "LOWER", "LOW_PRIORITY",
        "M", "MAP", "MATCH", "MATCHED", "MAX", "MAXEXTENTS", "MAXVALUE", "MAX_ROWS", "MEDIUMBLOB",
        "MEDIUMINT", "MEDIUMTEXT", "MEMBER", "MERGE", "MESSAGE_LENGTH", "MESSAGE_OCTET_LENGTH",
        "MESSAGE_TEXT", "METHOD", "MIDDLEINT", "MIN", "MINUS", "MINUTE", "MINUTE_MICROSECOND",
        "MINUTE_SECOND", "MINVALUE", "MIN_ROWS", "MLSLABEL", "MOD", "MODE", "MODIFIES", "MODIFY",
        "MODULE", "MONTH", "MONTHNAME", "MORE", "MOVE", "MULTISET", "MUMPS", "MYISAM", "NAME",
        "NAMES", "NATIONAL", "NATURAL", "NCHAR", "NCLOB", "NESTING", "NEW", "NEXT", "NO",
        "NOAUDIT", "NOCHECK", "NOCOMPRESS", "NOCREATEDB", "NOCREATEROLE", "NOCREATEUSER",
        "NOINHERIT", "NOLOGIN", "NONCLUSTERED", "NONE", "NORMALIZE", "NORMALIZED", "NOSUPERUSER",
        "NOT", "NOTHING", "NOTIFY", "NOTNULL", "NOWAIT", "NO_WRITE_TO_BINLOG", "NULL", "NULLABLE",
        "NULLIF", "NULLS", "NUMBER", "NUMERIC", "OBJECT", "OCTETS", "OCTET_LENGTH", "OF", "OFF",
        "OFFLINE", "OFFSET", "OFFSETS", "OIDS", "OLD", "ON", "ONLINE", "ONLY", "OPEN",
        "OPENDATASOURCE", "OPENQUERY", "OPENROWSET", "OPENXML", "OPERATION", "OPERATOR",
        "OPTIMIZE", "OPTION", "OPTIONALLY", "OPTIONS", "OR", "ORDER", "ORDERING", "ORDINALITY",
        "OTHERS", "OUT", "OUTER", "OUTFILE", "OUTPUT", "OVER", "OVERLAPS", "OVERLAY", "OVERRIDING",
        "OWNER", "PACK_KEYS", "PAD", "PARAMETER", "PARAMETERS", "PARAMETER_MODE", "PARAMETER_NAME",
        "PARAMETER_ORDINAL_POSITION", "PARAMETER_SPECIFIC_CATALOG", "PARAMETER_SPECIFIC_NAME",
        "PARAMETER_SPECIFIC_SCHEMA", "PARTIAL", "PARTITION", "PASCAL", "PASSWORD", "PATH",
        "PCTFREE", "PERCENT", "PERCENTILE_CONT", "PERCENTILE_DISC", "PERCENT_RANK", "PLACING",
        "PLAN", "PLI", "POSITION", "POSTFIX", "POWER", "PRAGMA", "PRECEDING", "PRECISION",
        "PREFIX", "PREORDER", "PREPARE", "PREPARED", "PRESERVE", "PRIMARY", "PRINT", "PRIOR",
        "PRIVILEGES", "PROC", "PROCEDURAL", "PROCEDURE", "PROCESS", "PROCESSLIST", "PUBLIC",
        "PURGE", "QUOTE", "RAID0", "RAISE", "RAISERROR", "RANGE", "RANK", "RAW", "READ", "READS",
        "READTEXT", "REAL", "RECHECK", "RECONFIGURE", "RECURSIVE", "REF", "REFERENCES",
        "REFERENCING", "REGEXP", "REGR_AVGX", "REGR_AVGY", "REGR_COUNT", "REGR_INTERCEPT",
        "REGR_R2", "REGR_SLOPE", "REGR_SXX", "REGR_SXY", "REGR_SYY", "REINDEX", "RELATIVE",
        "RELEASE", "RELOAD", "RENAME", "REPEAT", "REPEATABLE", "REPLACE", "REPLICATION", "REQUIRE",
        "RESET", "RESIGNAL", "RESOURCE", "RESTART", "RESTORE", "RESTRICT", "RESULT", "RETURN",
        "RETURNED_CARDINALITY", "RETURNED_LENGTH", "RETURNED_OCTET_LENGTH", "RETURNED_SQLSTATE",
        "RETURNS", "REVOKE", "RIGHT", "RLIKE", "ROLE", "ROLLBACK", "ROLLUP", "ROUTINE",
        "ROUTINE_CATALOG", "ROUTINE_NAME", "ROUTINE_SCHEMA", "ROW", "ROWCOUNT", "ROWGUIDCOL",
        "ROWID", "ROWNUM", "ROWS", "ROW_COUNT", "ROW_NUMBER", "RULE", "SAVE", "SAVEPOINT", "SCALE",
        "SCHEMA", "SCHEMAS", "SCHEMA_NAME", "SCOPE", "SCOPE_CATALOG", "SCOPE_NAME", "SCOPE_SCHEMA",
        "SCROLL", "SEARCH", "SECOND", "SECOND_MICROSECOND", "SECTION", "SECURITY", "SELECT",
        "SELF", "SENSITIVE", "SEPARATOR", "SEQUENCE", "SERIALIZABLE", "SERVER_NAME", "SESSION",
        "SESSION_USER", "SET", "SETOF", "SETS", "SETUSER", "SHARE", "SHOW", "SHUTDOWN", "SIGNAL",
        "SIMILAR", "SIMPLE", "SIZE", "SMALLINT", "SOME", "SONAME", "SOURCE", "SPACE", "SPATIAL",
        "SPECIFIC", "SPECIFICTYPE", "SPECIFIC_NAME", "SQL", "SQLCA", "SQLCODE", "SQLERROR",
        "SQLEXCEPTION", "SQLSTATE", "SQLWARNING", "SQL_BIG_RESULT", "SQL_BIG_SELECTS",
        "SQL_BIG_TABLES", "SQL_CALC_FOUND_ROWS", "SQL_LOG_OFF", "SQL_LOG_UPDATE",
        "SQL_LOW_PRIORITY_UPDATES", "SQL_SELECT_LIMIT", "SQL_SMALL_RESULT", "SQL_WARNINGS", "SQRT",
        "SSL", "STABLE", "START", "STARTING", "STATE", "STATEMENT", "STATIC", "STATISTICS",
        "STATUS", "STDDEV_POP", "STDDEV_SAMP", "STDIN", "STDOUT", "STORAGE", "STRAIGHT_JOIN",
        "STRICT", "STRING", "STRUCTURE", "STYLE", "SUBCLASS_ORIGIN", "SUBLIST", "SUBMULTISET",
        "SUBSTRING", "SUCCESSFUL", "SUM", "SUPERUSER", "SYMMETRIC", "SYNONYM", "SYSDATE", "SYSID",
        "SYSTEM", "SYSTEM_USER", "TABLE", "TABLES", "TABLESAMPLE", "TABLESPACE", "TABLE_NAME",
        "TEMP", "TEMPLATE", "TEMPORARY", "TERMINATE", "TERMINATED", "TEXT", "TEXTSIZE", "THAN",
        "THEN", "TIES", "TIME", "TIMESTAMP", "TIMEZONE_HOUR", "TIMEZONE_MINUTE", "TINYBLOB",
        "TINYINT", "TINYTEXT", "TO", "TOAST", "TOP", "TOP_LEVEL_COUNT", "TRAILING", "TRAN",
        "TRANSACTION", "TRANSACTIONS_COMMITTED", "TRANSACTIONS_ROLLED_BACK", "TRANSACTION_ACTIVE",
        "TRANSFORM", "TRANSFORMS", "TRANSLATE", "TRANSLATION", "TREAT", "TRIGGER",
        "TRIGGER_CATALOG", "TRIGGER_NAME", "TRIGGER_SCHEMA", "TRIM", "TRUE", "TRUNCATE", "TRUSTED",
        "TSEQUAL", "TYPE", "UESCAPE", "UID", "UNBOUNDED", "UNCOMMITTED", "UNDER", "UNDO",
        "UNENCRYPTED", "UNION", "UNIQUE", "UNKNOWN", "UNLISTEN", "UNLOCK", "UNNAMED", "UNNEST",
        "UNSIGNED", "UNTIL", "UPDATE", "UPDATETEXT", "UPPER", "USAGE", "USE", "USER",
        "USER_DEFINED_TYPE_CATALOG", "USER_DEFINED_TYPE_CODE", "USER_DEFINED_TYPE_NAME",
        "USER_DEFINED_TYPE_SCHEMA", "USING", "UTC_DATE", "UTC_TIME", "UTC_TIMESTAMP", "VACUUM",
        "VALID", "VALIDATE", "VALIDATOR", "VALUE", "VALUES", "VARBINARY", "VARCHAR", "VARCHAR2",
        "VARCHARACTER", "VARIABLE", "VARIABLES", "VARYING", "VAR_POP", "VAR_SAMP", "VERBOSE",
        "VIEW", "VOLATILE", "WAITFOR", "WHEN", "WHENEVER", "WHERE", "WHILE", "WIDTH_BUCKET",
        "WINDOW", "WITH", "WITHIN", "WITHOUT", "WORK", "WRITE", "WRITETEXT", "X509", "XOR", "YEAR",
        "YEAR_MONTH", "ZEROFILL", "ZONE",
    ]
638
639
def quote(name, force=False):
    """
    Returns name in quotes and proper-escaped for SQL queries.

    @param   force  quote even if name does not need quoting (starts with a letter,
                    contains only alphanumerics, and is not a reserved keyword)
    """
    needs_quoting = force or name.upper() in SqlMixin.KEYWORDS \
                    or re.search(r"(^[\W\d])|(?=\W)", name, re.U)
    if needs_quoting:
        return '"%s"' % name.replace('"', '""')
    return name
652
653
# Public API of this module: the SQL-producing mixin and the identifier-quoting helper
__all__ = ["SqlMixin", "quote"]
__init__(self, args=None, **kwargs)
Definition sqlbase.py:40
dict DIALECTS
Supported SQL dialects and options.
Definition sqlbase.py:386
str DEFAULT_DIALECT
Default SQL dialect used if dialect not specified.
Definition sqlbase.py:34