# sybase/base.py
# Copyright (C) 2010-2021 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# get_select_precolumns(), limit_clause() implementation
# copyright (C) 2007 Fisch Asset Management
# AG http://www.fam.ch, with coding by Alexander Houben
# alexander.houben@thor-solutions.ch
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: sybase
    :name: Sybase

.. note::

    The Sybase dialect within SQLAlchemy **is not currently supported**.
    It is not tested within continuous integration and is likely to have
    many issues and caveats not currently handled. Consider using the
    `external dialect <https://github.com/gordthompson/sqlalchemy-sybase>`_
    instead.
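
A minimal connection sketch using the external dialect (the
``sybase+pyodbc`` driver name and the DSN below are illustrative
assumptions; consult that project's documentation for the exact URL form)::

    from sqlalchemy import create_engine

    engine = create_engine("sybase+pyodbc://user:password@some_dsn")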

.. deprecated:: 1.4 The internal Sybase dialect is deprecated and will be
   removed in a future version. Use the external dialect.

"""

import re

from sqlalchemy import exc
from sqlalchemy import schema as sa_schema
from sqlalchemy import types as sqltypes
from sqlalchemy import util
from sqlalchemy.engine import default
from sqlalchemy.engine import reflection
from sqlalchemy.sql import compiler
from sqlalchemy.sql import text
from sqlalchemy.types import BIGINT
from sqlalchemy.types import BINARY
from sqlalchemy.types import CHAR
from sqlalchemy.types import DATE
from sqlalchemy.types import DATETIME
from sqlalchemy.types import DECIMAL
from sqlalchemy.types import FLOAT
from sqlalchemy.types import INT  # noqa
from sqlalchemy.types import INTEGER
from sqlalchemy.types import NCHAR
from sqlalchemy.types import NUMERIC
from sqlalchemy.types import NVARCHAR
from sqlalchemy.types import REAL
from sqlalchemy.types import SMALLINT
from sqlalchemy.types import TEXT
from sqlalchemy.types import TIME
from sqlalchemy.types import TIMESTAMP
from sqlalchemy.types import Unicode
from sqlalchemy.types import VARBINARY
from sqlalchemy.types import VARCHAR

RESERVED_WORDS = set(
    [
        "add", "all", "alter", "and", "any", "as", "asc", "backup",
        "begin", "between", "bigint", "binary", "bit", "bottom", "break",
        "by", "call", "capability", "cascade", "case", "cast", "char",
        "char_convert", "character", "check", "checkpoint", "close",
        "comment", "commit", "connect", "constraint", "contains",
        "continue", "convert", "create", "cross", "cube", "current",
        "current_timestamp", "current_user", "cursor", "date", "dbspace",
        "deallocate", "dec", "decimal", "declare", "default", "delete",
        "deleting", "desc", "distinct", "do", "double", "drop", "dynamic",
        "else", "elseif", "encrypted", "end", "endif", "escape", "except",
        "exception", "exec", "execute", "existing", "exists", "externlogin",
        "fetch", "first", "float", "for", "force", "foreign", "forward",
        "from", "full", "goto", "grant", "group", "having", "holdlock",
        "identified", "if", "in", "index", "index_lparen", "inner", "inout",
        "insensitive", "insert", "inserting", "install", "instead", "int",
        "integer", "integrated", "intersect", "into", "iq", "is",
        "isolation", "join", "key", "lateral", "left", "like", "lock",
        "login", "long", "match", "membership", "message", "mode", "modify",
        "natural", "new", "no", "noholdlock", "not", "notify", "null",
        "numeric", "of", "off", "on", "open", "option", "options", "or",
        "order", "others", "out", "outer", "over", "passthrough",
        "precision", "prepare", "primary", "print", "privileges", "proc",
        "procedure", "publication", "raiserror", "readtext", "real",
        "reference", "references", "release", "remote", "remove", "rename",
        "reorganize", "resource", "restore", "restrict", "return", "revoke",
        "right", "rollback", "rollup", "save", "savepoint", "scroll",
        "select", "sensitive", "session", "set", "setuser", "share",
        "smallint", "some", "sqlcode", "sqlstate", "start", "stop",
        "subtrans", "subtransaction", "synchronize", "syntax_error",
        "table", "temporary", "then", "time", "timestamp", "tinyint", "to",
        "top", "tran", "trigger", "truncate", "tsequal", "unbounded",
        "union", "unique", "unknown", "unsigned", "update", "updating",
        "user", "using", "validate", "values", "varbinary", "varchar",
        "variable", "varying", "view", "wait", "waitfor", "when", "where",
        "while", "window", "with", "with_cube", "with_lparen",
        "with_rollup", "within", "work", "writetext",
    ]
)


class _SybaseUnitypeMixin(object):
    """these types appear to return a buffer object."""

    def result_processor(self, dialect, coltype):
        def process(value):
            if value is not None:
                return str(value)  # decode("ucs-2")
            else:
                return None

        return process


class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
    __visit_name__ = "UNICHAR"


class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
    __visit_name__ = "UNIVARCHAR"


class UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText):
    __visit_name__ = "UNITEXT"


class TINYINT(sqltypes.Integer):
    __visit_name__ = "TINYINT"


class BIT(sqltypes.TypeEngine):
    __visit_name__ = "BIT"


class MONEY(sqltypes.TypeEngine):
    __visit_name__ = "MONEY"


class SMALLMONEY(sqltypes.TypeEngine):
    __visit_name__ = "SMALLMONEY"


class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
    __visit_name__ = "UNIQUEIDENTIFIER"


class IMAGE(sqltypes.LargeBinary):
    __visit_name__ = "IMAGE"


class SybaseTypeCompiler(compiler.GenericTypeCompiler):
    def visit_large_binary(self, type_, **kw):
        return self.visit_IMAGE(type_)

    def visit_boolean(self, type_, **kw):
        return self.visit_BIT(type_)

    def visit_unicode(self, type_, **kw):
        return self.visit_NVARCHAR(type_)

    def visit_UNICHAR(self, type_, **kw):
        return "UNICHAR(%d)" % type_.length

    def visit_UNIVARCHAR(self, type_, **kw):
        return "UNIVARCHAR(%d)" % type_.length

    def visit_UNITEXT(self, type_, **kw):
        return "UNITEXT"

    def visit_TINYINT(self, type_, **kw):
        return "TINYINT"

    def visit_IMAGE(self, type_, **kw):
        return "IMAGE"

    def visit_BIT(self, type_, **kw):
        return "BIT"

    def visit_MONEY(self, type_, **kw):
        return "MONEY"

    def visit_SMALLMONEY(self, type_, **kw):
        return "SMALLMONEY"

    def visit_UNIQUEIDENTIFIER(self, type_, **kw):
        return "UNIQUEIDENTIFIER"


ischema_names = {
    "bigint": BIGINT,
    "int": INTEGER,
    "integer": INTEGER,
    "smallint": SMALLINT,
    "tinyint": TINYINT,
    "unsigned bigint": BIGINT,  # TODO: unsigned flags
    "unsigned int": INTEGER,  # TODO: unsigned flags
    "unsigned smallint": SMALLINT,  # TODO: unsigned flags
    "numeric": NUMERIC,
    "decimal": DECIMAL,
    "dec": DECIMAL,
    "float": FLOAT,
    "double": NUMERIC,  # TODO
    "double precision": NUMERIC,  # TODO
    "real": REAL,
    "smallmoney": SMALLMONEY,
    "money": MONEY,
    "smalldatetime": DATETIME,
    "datetime": DATETIME,
    "date": DATE,
    "time": TIME,
    "char": CHAR,
    "character": CHAR,
    "varchar": VARCHAR,
    "character varying": VARCHAR,
    "char varying": VARCHAR,
    "unichar": UNICHAR,
    "unicode character": UNIVARCHAR,
    "nchar": NCHAR,
    "national char": NCHAR,
    "national character": NCHAR,
    "nvarchar": NVARCHAR,
    "nchar varying": NVARCHAR,
    "national char varying": NVARCHAR,
    "national character varying": NVARCHAR,
    "text": TEXT,
    "unitext": UNITEXT,
    "binary": BINARY,
    "varbinary": VARBINARY,
    "image": IMAGE,
    "bit": BIT,
    # not in documentation for ASE 15.7
    "long varchar": TEXT,  # TODO
    "timestamp": TIMESTAMP,
    "uniqueidentifier": UNIQUEIDENTIFIER,
}


class SybaseInspector(reflection.Inspector):
    def __init__(self, conn):
        reflection.Inspector.__init__(self, conn)

    def get_table_id(self, table_name, schema=None):
        """Return the table id from `table_name` and `schema`."""
        return self.dialect.get_table_id(
            self.bind, table_name, schema, info_cache=self.info_cache
        )
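
    # Hypothetical usage sketch (names here are illustrative only): the
    # inspector for a Sybase engine is returned by ``sqlalchemy.inspect()``,
    # after which the cached table id can be looked up directly:
    #
    #     insp = sqlalchemy.inspect(engine)
    #     table_id = insp.get_table_id("some_table", schema="dbo")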


class SybaseExecutionContext(default.DefaultExecutionContext):
    _enable_identity_insert = False

    def set_ddl_autocommit(self, connection, value):
        """Must be implemented by subclasses to accommodate DDL executions.

        "connection" is the raw unwrapped DBAPI connection.  "value"
        is True or False.  when True, the connection should be configured
        such that a DDL can take place subsequently.  when False,
        a DDL has taken place and the connection should be resumed
        into non-autocommit mode.

        """
        raise NotImplementedError()
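
    # A minimal sketch of what a driver-specific subclass is expected to
    # provide; the ``autocommit`` attribute on the raw DBAPI connection is
    # an assumption that holds for pyodbc-style drivers, not necessarily
    # for every DBAPI:
    #
    #     class SybaseExecutionContext_example(SybaseExecutionContext):
    #         def set_ddl_autocommit(self, dbapi_connection, value):
    #             dbapi_connection.autocommit = value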

    def pre_exec(self):
        if self.isinsert:
            tbl = self.compiled.statement.table
            seq_column = tbl._autoincrement_column
            insert_has_sequence = seq_column is not None

            if insert_has_sequence:
                self._enable_identity_insert = (
                    seq_column.key in self.compiled_parameters[0]
                )
            else:
                self._enable_identity_insert = False

            if self._enable_identity_insert:
                self.cursor.execute(
                    "SET IDENTITY_INSERT %s ON"
                    % self.dialect.identifier_preparer.format_table(tbl)
                )

        if self.isddl:
            # TODO: to enhance this, we can detect "ddl in tran" on the
            # database settings.  this error message should be improved to
            # include a note about that.
            if not self.should_autocommit:
                raise exc.InvalidRequestError(
                    "The Sybase dialect only supports "
                    "DDL in 'autocommit' mode at this time."
                )

            self.root_connection.engine.logger.info(
                "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')"
            )

            self.set_ddl_autocommit(
                self.root_connection.connection.connection, True
            )

    def post_exec(self):
        if self.isddl:
            self.set_ddl_autocommit(self.root_connection, False)

        if self._enable_identity_insert:
            self.cursor.execute(
                "SET IDENTITY_INSERT %s OFF"
                % self.dialect.identifier_preparer.format_table(
                    self.compiled.statement.table
                )
            )

    def get_lastrowid(self):
        cursor = self.create_cursor()
        cursor.execute("SELECT @@identity AS lastrowid")
        lastrowid = cursor.fetchone()[0]
        cursor.close()
        return lastrowid


class SybaseSQLCompiler(compiler.SQLCompiler):
    ansi_bind_rules = True

    extract_map = util.update_copy(
        compiler.SQLCompiler.extract_map,
        {"doy": "dayofyear", "dow": "weekday", "milliseconds": "millisecond"},
    )

    def get_from_hint_text(self, table, text):
        return text

    def limit_clause(self, select, **kw):
        text = ""
        if select._limit_clause is not None:
            text += " ROWS LIMIT " + self.process(select._limit_clause, **kw)
        if select._offset_clause is not None:
            if select._limit_clause is None:
                text += " ROWS"
            text += " OFFSET " + self.process(select._offset_clause, **kw)
        return text
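
    # Illustrative rendering of the clause above: .limit(10).offset(5)
    # appends " ROWS LIMIT 10 OFFSET 5" to the SELECT, while an offset
    # without a limit appends " ROWS OFFSET 5".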

    def visit_extract(self, extract, **kw):
        field = self.extract_map.get(extract.field, extract.field)
        return 'DATEPART("%s", %s)' % (
            field,
            self.process(extract.expr, **kw),
        )

    def visit_now_func(self, fn, **kw):
        return "GETDATE()"

    def for_update_clause(self, select):
        # "FOR UPDATE" is only allowed on "DECLARE CURSOR"
        # which SQLAlchemy doesn't use
        return ""

    def order_by_clause(self, select, **kw):
        kw["literal_binds"] = True
        order_by = self.process(select._order_by_clause, **kw)

        # SybaseSQL only allows ORDER BY in subqueries if there is a LIMIT
        if order_by and (not self.is_subquery() or select._limit):
            return " ORDER BY " + order_by
        else:
            return ""

    def delete_table_clause(self, delete_stmt, from_table, extra_froms):
        """If we have extra froms make sure we render any alias as hint."""
        ashint = False
        if extra_froms:
            ashint = True
        return from_table._compiler_dispatch(
            self, asfrom=True, iscrud=True, ashint=ashint
        )

    def delete_extra_from_clause(
        self, delete_stmt, from_table, extra_froms, from_hints, **kw
    ):
        """Render the DELETE .. FROM clause specific to Sybase."""
        kw["asfrom"] = True
        return "FROM " + ", ".join(
            t._compiler_dispatch(self, fromhints=from_hints, **kw)
            for t in [from_table] + extra_froms
        )


class SybaseDDLCompiler(compiler.DDLCompiler):
    def get_column_specification(self, column, **kwargs):
        colspec = (
            self.preparer.format_column(column)
            + " "
            + self.dialect.type_compiler.process(
                column.type, type_expression=column
            )
        )

        if column.table is None:
            raise exc.CompileError(
                "The Sybase dialect requires Table-bound "
                "columns in order to generate DDL"
            )
        seq_col = column.table._autoincrement_column

        # install a IDENTITY Sequence if we have an implicit IDENTITY column
        if seq_col is column:
            sequence = (
                isinstance(column.default, sa_schema.Sequence)
                and column.default
            )
            if sequence:
                start, increment = sequence.start or 1, sequence.increment or 1
            else:
                start, increment = 1, 1
            if (start, increment) == (1, 1):
                colspec += " IDENTITY"
            else:
                # TODO: need correct syntax for this
                colspec += " IDENTITY(%s,%s)" % (start, increment)
        else:
            default = self.get_column_default_string(column)
            if default is not None:
                colspec += " DEFAULT " + default

            if column.nullable is not None:
                if not column.nullable or column.primary_key:
                    colspec += " NOT NULL"
                else:
                    colspec += " NULL"

        return colspec
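
    # Illustrative output of the method above (hypothetical columns): an
    # implicit autoincrement integer column renders roughly as
    # "id INTEGER IDENTITY", while a plain nullable column with a server
    # default renders as "name VARCHAR(30) DEFAULT 'x' NULL".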

    def visit_drop_index(self, drop):
        index = drop.element
        return "\nDROP INDEX %s.%s" % (
            self.preparer.quote_identifier(index.table.name),
            self._prepared_index_name(drop.element, include_schema=False),
        )


class SybaseIdentifierPreparer(compiler.IdentifierPreparer):
    reserved_words = RESERVED_WORDS


class SybaseDialect(default.DefaultDialect):
    name = "sybase"
    supports_unicode_statements = False
    supports_sane_rowcount = False
    supports_sane_multi_rowcount = False
    supports_statement_cache = True

    supports_native_boolean = False
    supports_unicode_binds = False
    postfetch_lastrowid = True

    colspecs = {}
    ischema_names = ischema_names

    type_compiler = SybaseTypeCompiler
    statement_compiler = SybaseSQLCompiler
    ddl_compiler = SybaseDDLCompiler
    preparer = SybaseIdentifierPreparer
    inspector = SybaseInspector

    construct_arguments = []

    def __init__(self, *args, **kwargs):
        util.warn_deprecated(
            "The Sybase dialect is deprecated and will be removed "
            "in a future version. This dialect is superseded by the external "
            "dialect https://github.com/gordthompson/sqlalchemy-sybase.",
            version="1.4",
        )
        super(SybaseDialect, self).__init__(*args, **kwargs)

    def _get_default_schema_name(self, connection):
        return connection.scalar(
            text("SELECT user_name() as user_name").columns(username=Unicode)
        )

    def initialize(self, connection):
        super(SybaseDialect, self).initialize(connection)
        if (
            self.server_version_info is not None
            and self.server_version_info < (15,)
        ):
            self.max_identifier_length = 30
        else:
            self.max_identifier_length = 255

    def get_table_id(self, connection, table_name, schema=None, **kw):
        """Fetch the id for schema.table_name.

        Several reflection methods require the table id.  The idea for using
        this method is that it can be fetched one time and cached for
        subsequent calls.

        """

        table_id = None
        if schema is None:
            schema = self.default_schema_name

        TABLEID_SQL = text(
            """
          SELECT o.id AS id
          FROM sysobjects o JOIN sysusers u ON o.uid=u.uid
          WHERE u.name = :schema_name
              AND o.name = :table_name
              AND o.type in ('U', 'V')
        """
        )

        if util.py2k:
            if isinstance(schema, unicode):  # noqa
                schema = schema.encode("ascii")
            if isinstance(table_name, unicode):  # noqa
                table_name = table_name.encode("ascii")
        result = connection.execute(
            TABLEID_SQL, schema_name=schema, table_name=table_name
        )
        table_id = result.scalar()
        if table_id is None:
            raise exc.NoSuchTableError(table_name)
        return table_id

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(
            connection, table_name, schema, info_cache=kw.get("info_cache")
        )

        COLUMN_SQL = text(
            """
          SELECT col.name AS name,
                 t.name AS type,
                 (col.status & 8) AS nullable,
                 (col.status & 128) AS autoincrement,
                 com.text AS 'default',
                 col.prec AS precision,
                 col.scale AS scale,
                 col.length AS length
          FROM systypes t, syscolumns col LEFT OUTER JOIN syscomments com ON
              col.cdefault = com.id
          WHERE col.usertype = t.usertype
              AND col.id = :table_id
          ORDER BY col.colid
        """
        )

        results = connection.execute(COLUMN_SQL, table_id=table_id)

        columns = []
        for (
            name,
            type_,
            nullable,
            autoincrement,
            default_,
            precision,
            scale,
            length,
        ) in results:
            col_info = self._get_column_info(
                name,
                type_,
                bool(nullable),
                bool(autoincrement),
                default_,
                precision,
                scale,
                length,
            )
            columns.append(col_info)

        return columns

    def _get_column_info(
        self,
        name,
        type_,
        nullable,
        autoincrement,
        default,
        precision,
        scale,
        length,
    ):
        coltype = self.ischema_names.get(type_, None)

        kwargs = {}

        if coltype in (NUMERIC, DECIMAL):
            args = (precision, scale)
        elif coltype == FLOAT:
            args = (precision,)
        elif coltype in (CHAR, VARCHAR, UNICHAR, UNIVARCHAR, NCHAR, NVARCHAR):
            args = (length,)
        else:
            args = ()

        if coltype:
            coltype = coltype(*args, **kwargs)
            # is this necessary
            # if is_array:
            #     coltype = ARRAY(coltype)
        else:
            util.warn(
                "Did not recognize type '%s' of column '%s'" % (type_, name)
            )
            coltype = sqltypes.NULLTYPE

        if default:
            default = default.replace("DEFAULT", "").strip()
            default = re.sub("^'(.*)'$", lambda m: m.group(1), default)
        else:
            default = None

        column_info = dict(
            name=name,
            type=coltype,
            nullable=nullable,
            default=default,
            autoincrement=autoincrement,
        )
        return column_info

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(
            connection, table_name, schema, info_cache=kw.get("info_cache")
        )

        table_cache = {}
        column_cache = {}
        foreign_keys = []

        table_cache[table_id] = {"name": table_name, "schema": schema}

        COLUMN_SQL = text(
            """
          SELECT c.colid AS id, c.name AS name
          FROM syscolumns c
          WHERE c.id = :table_id
        """
        )

        results = connection.execute(COLUMN_SQL, table_id=table_id)
        columns = {}
        for col in results:
            columns[col["id"]] = col["name"]
        column_cache[table_id] = columns

        REFCONSTRAINT_SQL = text(
            """
          SELECT o.name AS name, r.reftabid AS reftable_id,
            r.keycnt AS 'count',
            r.fokey1 AS fokey1, r.fokey2 AS fokey2, r.fokey3 AS fokey3,
            r.fokey4 AS fokey4, r.fokey5 AS fokey5, r.fokey6 AS fokey6,
            r.fokey7 AS fokey7, r.fokey8 AS fokey8, r.fokey9 AS fokey9,
            r.fokey10 AS fokey10, r.fokey11 AS fokey11, r.fokey12 AS fokey12,
            r.fokey13 AS fokey13, r.fokey14 AS fokey14, r.fokey15 AS fokey15,
            r.fokey16 AS fokey16,
            r.refkey1 AS refkey1, r.refkey2 AS refkey2, r.refkey3 AS refkey3,
            r.refkey4 AS refkey4, r.refkey5 AS refkey5, r.refkey6 AS refkey6,
            r.refkey7 AS refkey7, r.refkey8 AS refkey8, r.refkey9 AS refkey9,
            r.refkey10 AS refkey10, r.refkey11 AS refkey11,
            r.refkey12 AS refkey12, r.refkey13 AS refkey13,
            r.refkey14 AS refkey14, r.refkey15 AS refkey15,
            r.refkey16 AS refkey16
          FROM sysreferences r JOIN sysobjects o on r.tableid = o.id
          WHERE r.tableid = :table_id
        """
        )

        referential_constraints = connection.execute(
            REFCONSTRAINT_SQL, table_id=table_id
        ).fetchall()

        REFTABLE_SQL = text(
            """
          SELECT o.name AS name, u.name AS 'schema'
          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
          WHERE o.id = :table_id
        """
        )

        for r in referential_constraints:
            reftable_id = r["reftable_id"]

            if reftable_id not in table_cache:
                c = connection.execute(REFTABLE_SQL, table_id=reftable_id)
                reftable = c.fetchone()
                c.close()
                table_info = {"name": reftable["name"], "schema": None}
                if (
                    schema is not None
                    or reftable["schema"] != self.default_schema_name
                ):
                    table_info["schema"] = reftable["schema"]

                table_cache[reftable_id] = table_info
                results = connection.execute(COLUMN_SQL, table_id=reftable_id)
                reftable_columns = {}
                for col in results:
                    reftable_columns[col["id"]] = col["name"]
                column_cache[reftable_id] = reftable_columns

            reftable = table_cache[reftable_id]
            reftable_columns = column_cache[reftable_id]

            constrained_columns = []
            referred_columns = []
            for i in range(1, r["count"] + 1):
                constrained_columns.append(columns[r["fokey%i" % i]])
                referred_columns.append(reftable_columns[r["refkey%i" % i]])

            fk_info = {
                "constrained_columns": constrained_columns,
                "referred_schema": reftable["schema"],
                "referred_table": reftable["name"],
                "referred_columns": referred_columns,
                "name": r["name"],
            }

            foreign_keys.append(fk_info)

        return foreign_keys

    @reflection.cache
    def get_indexes(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(
            connection, table_name, schema, info_cache=kw.get("info_cache")
        )

        INDEX_SQL = text(
            """
          SELECT object_name(i.id) AS table_name,
                 i.keycnt AS 'count',
                 i.name AS name,
                 (i.status & 0x2) AS 'unique',
                 index_col(object_name(i.id), i.indid, 1) AS col_1,
                 index_col(object_name(i.id), i.indid, 2) AS col_2,
                 index_col(object_name(i.id), i.indid, 3) AS col_3,
                 index_col(object_name(i.id), i.indid, 4) AS col_4,
                 index_col(object_name(i.id), i.indid, 5) AS col_5,
                 index_col(object_name(i.id), i.indid, 6) AS col_6,
                 index_col(object_name(i.id), i.indid, 7) AS col_7,
                 index_col(object_name(i.id), i.indid, 8) AS col_8,
                 index_col(object_name(i.id), i.indid, 9) AS col_9,
                 index_col(object_name(i.id), i.indid, 10) AS col_10,
                 index_col(object_name(i.id), i.indid, 11) AS col_11,
                 index_col(object_name(i.id), i.indid, 12) AS col_12,
                 index_col(object_name(i.id), i.indid, 13) AS col_13,
                 index_col(object_name(i.id), i.indid, 14) AS col_14,
                 index_col(object_name(i.id), i.indid, 15) AS col_15,
                 index_col(object_name(i.id), i.indid, 16) AS col_16
          FROM sysindexes i, sysobjects o
          WHERE o.id = i.id
              AND o.id = :table_id
              AND (i.status & 2048) = 0
              AND i.indid BETWEEN 1 AND 254
        """
        )

        results = connection.execute(INDEX_SQL, table_id=table_id)
        indexes = []
        for r in results:
            column_names = []
            for i in range(1, r["count"]):
                column_names.append(r["col_%i" % (i,)])
            index_info = {
                "name": r["name"],
                "unique": bool(r["unique"]),
                "column_names": column_names,
            }
            indexes.append(index_info)

        return indexes

    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        table_id = self.get_table_id(
            connection, table_name, schema, info_cache=kw.get("info_cache")
        )

        PK_SQL = text(
            """
          SELECT object_name(i.id) AS table_name,
                 i.keycnt AS 'count',
                 i.name AS name,
                 index_col(object_name(i.id), i.indid, 1) AS pk_1,
                 index_col(object_name(i.id), i.indid, 2) AS pk_2,
                 index_col(object_name(i.id), i.indid, 3) AS pk_3,
                 index_col(object_name(i.id), i.indid, 4) AS pk_4,
                 index_col(object_name(i.id), i.indid, 5) AS pk_5,
                 index_col(object_name(i.id), i.indid, 6) AS pk_6,
                 index_col(object_name(i.id), i.indid, 7) AS pk_7,
                 index_col(object_name(i.id), i.indid, 8) AS pk_8,
                 index_col(object_name(i.id), i.indid, 9) AS pk_9,
                 index_col(object_name(i.id), i.indid, 10) AS pk_10,
                 index_col(object_name(i.id), i.indid, 11) AS pk_11,
                 index_col(object_name(i.id), i.indid, 12) AS pk_12,
                 index_col(object_name(i.id), i.indid, 13) AS pk_13,
                 index_col(object_name(i.id), i.indid, 14) AS pk_14,
                 index_col(object_name(i.id), i.indid, 15) AS pk_15,
                 index_col(object_name(i.id), i.indid, 16) AS pk_16
          FROM sysindexes i, sysobjects o
          WHERE o.id = i.id
              AND o.id = :table_id
              AND (i.status & 2048) = 2048
              AND i.indid BETWEEN 1 AND 254
        """
        )

        results = connection.execute(PK_SQL, table_id=table_id)
        pks = results.fetchone()
        results.close()

        constrained_columns = []
        if pks:
            for i in range(1, pks["count"] + 1):
                constrained_columns.append(pks["pk_%i" % (i,)])
            return {
                "constrained_columns": constrained_columns,
                "name": pks["name"],
            }
        else:
            return {"constrained_columns": [], "name": None}

    @reflection.cache
    def get_schema_names(self, connection, **kw):

        SCHEMA_SQL = text("SELECT u.name AS name FROM sysusers u")

        schemas = connection.execute(SCHEMA_SQL)

        return [s["name"] for s in schemas]

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name

        TABLE_SQL = text(
            """
          SELECT o.name AS name
          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
          WHERE u.name = :schema_name
              AND o.type = 'U'
        """
        )

        if util.py2k:
            if isinstance(schema, unicode):  # noqa
                schema = schema.encode("ascii")

        tables = connection.execute(TABLE_SQL, schema_name=schema)

        return [t["name"] for t in tables]

    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name

        VIEW_DEF_SQL = text(
            """
          SELECT c.text
          FROM syscomments c JOIN sysobjects o ON c.id = o.id
          WHERE o.name = :view_name
              AND o.type = 'V'
        """
        )

        if util.py2k:
            if isinstance(view_name, unicode):  # noqa
                view_name = view_name.encode("ascii")

        view = connection.execute(VIEW_DEF_SQL, view_name=view_name)

        return view.scalar()

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        if schema is None:
            schema = self.default_schema_name

        VIEW_SQL = text(
            """
          SELECT o.name AS name
          FROM sysobjects o JOIN sysusers u ON o.uid = u.uid
          WHERE u.name = :schema_name
              AND o.type = 'V'
        """
        )

        if util.py2k:
            if isinstance(schema, unicode):  # noqa
                schema = schema.encode("ascii")
        views = connection.execute(VIEW_SQL, schema_name=schema)

        return [v["name"] for v in views]

    def has_table(self, connection, table_name, schema=None):
        self._ensure_has_table_connection(connection)

        try:
            self.get_table_id(connection, table_name, schema)
        except exc.NoSuchTableError:
            return False
        else:
            return True