maxdb.py

SQLAlchemy — the classic Python ORM framework. Essential study material.
Format: PY
Page 1 of 3
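The excerpt below (page 1 of 3) picks up partway through the module's ischema_names type map and runs through the MaxDB execution context, result proxy, dialect, and compiler classes. For orientation, here is a minimal usage sketch that is not part of maxdb.py itself: it assumes the maxdb:// URL scheme served by this dialect and the sapdb DB-API that MaxDBDialect.dbapi() imports; the host, credentials, and database name are hypothetical placeholders.

# Usage sketch (not from maxdb.py).  Assumes the 'maxdb://' URL scheme
# handled by this dialect and the sapdb DB-API it imports; host,
# credentials, and database name are hypothetical placeholders.
from sqlalchemy import create_engine

engine = create_engine("maxdb://scott:tiger@localhost/testdb")

# MaxDBDialect.create_connect_args() turns the URL into keyword arguments
# for sapdb.dbapi.connect(); URL query parameters are passed through as-is.
conn = engine.connect()
print(conn.execute("SELECT CURRENT_SCHEMA FROM DUAL").scalar())
conn.close()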
    'integer': MaxInteger,
    'long binary': MaxBlob,
    'long unicode': MaxText,
    'long': MaxText,
    'long': MaxText,
    'smallint': MaxSmallInteger,
    'time': MaxTime,
    'timestamp': MaxTimestamp,
    'varchar': MaxString,
    }


class MaxDBExecutionContext(default.DefaultExecutionContext):
    def post_exec(self):
        # DB-API bug: if there were any functions as values,
        # then do another select and pull CURRVAL from the
        # autoincrement column's implicit sequence... ugh
        if self.compiled.isinsert and not self.executemany:
            table = self.compiled.statement.table
            index, serial_col = _autoserial_column(table)

            if serial_col and (not self.compiled._safeserial or
                               not(self._last_inserted_ids) or
                               self._last_inserted_ids[index] in (None, 0)):
                if table.schema:
                    sql = "SELECT %s.CURRVAL FROM DUAL" % (
                        self.compiled.preparer.format_table(table))
                else:
                    sql = "SELECT CURRENT_SCHEMA.%s.CURRVAL FROM DUAL" % (
                        self.compiled.preparer.format_table(table))

                if self.connection.engine._should_log_info:
                    self.connection.engine.logger.info(sql)

                rs = self.cursor.execute(sql)
                id = rs.fetchone()[0]

                if self.connection.engine._should_log_debug:
                    self.connection.engine.logger.debug([id])

                if not self._last_inserted_ids:
                    # This shouldn't ever be > 1?  Right?
                    self._last_inserted_ids = \
                      [None] * len(table.primary_key.columns)
                self._last_inserted_ids[index] = id

        super(MaxDBExecutionContext, self).post_exec()

    def get_result_proxy(self):
        if self.cursor.description is not None:
            for column in self.cursor.description:
                if column[1] in ('Long Binary', 'Long', 'Long Unicode'):
                    return MaxDBResultProxy(self)
        return engine_base.ResultProxy(self)


class MaxDBCachedColumnRow(engine_base.RowProxy):
    """A RowProxy that only runs result_processors once per column."""

    def __init__(self, parent, row):
        super(MaxDBCachedColumnRow, self).__init__(parent, row)
        self.columns = {}
        self._row = row
        self._parent = parent

    def _get_col(self, key):
        if key not in self.columns:
            self.columns[key] = self._parent._get_col(self._row, key)
        return self.columns[key]

    def __iter__(self):
        for i in xrange(len(self._row)):
            yield self._get_col(i)

    def __repr__(self):
        return repr(list(self))

    def __eq__(self, other):
        return ((other is self) or
                (other == tuple([self._get_col(key)
                                 for key in xrange(len(self._row))])))

    def __getitem__(self, key):
        if isinstance(key, slice):
            indices = key.indices(len(self._row))
            return tuple([self._get_col(i) for i in xrange(*indices)])
        else:
            return self._get_col(key)

    def __getattr__(self, name):
        try:
            return self._get_col(name)
        except KeyError:
            raise AttributeError(name)


class MaxDBResultProxy(engine_base.ResultProxy):
    _process_row = MaxDBCachedColumnRow


class MaxDBDialect(default.DefaultDialect):
    supports_alter = True
    supports_unicode_statements = True
    max_identifier_length = 32
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    preexecute_sequences = True

    # MaxDB-specific
    datetimeformat = 'internal'

    def __init__(self, _raise_known_sql_errors=False, **kw):
        super(MaxDBDialect, self).__init__(**kw)
        self._raise_known = _raise_known_sql_errors

        if self.dbapi is None:
            self.dbapi_type_map = {}
        else:
            self.dbapi_type_map = {
                'Long Binary': MaxBlob(),
                'Long byte_t': MaxBlob(),
                'Long Unicode': MaxText(),
                'Timestamp': MaxTimestamp(),
                'Date': MaxDate(),
                'Time': MaxTime(),
                datetime.datetime: MaxTimestamp(),
                datetime.date: MaxDate(),
                datetime.time: MaxTime(),
            }

    def dbapi(cls):
        from sapdb import dbapi as _dbapi
        return _dbapi
    dbapi = classmethod(dbapi)

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        return [], opts

    def type_descriptor(self, typeobj):
        if isinstance(typeobj, type):
            typeobj = typeobj()
        if isinstance(typeobj, sqltypes.Unicode):
            return typeobj.adapt(MaxUnicode)
        else:
            return sqltypes.adapt_type(typeobj, colspecs)

    def create_execution_context(self, connection, **kw):
        return MaxDBExecutionContext(self, connection, **kw)

    def do_execute(self, cursor, statement, parameters, context=None):
        res = cursor.execute(statement, parameters)
        if isinstance(res, int) and context is not None:
            context._rowcount = res

    def do_release_savepoint(self, connection, name):
        # Does MaxDB truly support RELEASE SAVEPOINT <id>?  All my attempts
        # produce "SUBTRANS COMMIT/ROLLBACK not allowed without SUBTRANS
        # BEGIN SQLSTATE: I7065"
        # Note that ROLLBACK TO works fine.  In theory, a RELEASE should
        # just free up some transactional resources early, before the overall
        # COMMIT/ROLLBACK so omitting it should be relatively ok.
        pass

    def get_default_schema_name(self, connection):
        try:
            return self._default_schema_name
        except AttributeError:
            name = self.identifier_preparer._normalize_name(
                connection.execute('SELECT CURRENT_SCHEMA FROM DUAL').scalar())
            self._default_schema_name = name
            return name

    def has_table(self, connection, table_name, schema=None):
        denormalize = self.identifier_preparer._denormalize_name
        bind = [denormalize(table_name)]
        if schema is None:
            sql = ("SELECT tablename FROM TABLES "
                   "WHERE TABLES.TABLENAME=? AND"
                   "  TABLES.SCHEMANAME=CURRENT_SCHEMA ")
        else:
            sql = ("SELECT tablename FROM TABLES "
                   "WHERE TABLES.TABLENAME = ? AND"
                   "  TABLES.SCHEMANAME=? ")
            bind.append(denormalize(schema))

        rp = connection.execute(sql, bind)
        found = bool(rp.fetchone())
        rp.close()
        return found

    def table_names(self, connection, schema):
        if schema is None:
            sql = (" SELECT TABLENAME FROM TABLES WHERE "
                   " SCHEMANAME=CURRENT_SCHEMA ")
            rs = connection.execute(sql)
        else:
            sql = (" SELECT TABLENAME FROM TABLES WHERE "
                   " SCHEMANAME=? ")
            matchname = self.identifier_preparer._denormalize_name(schema)
            rs = connection.execute(sql, matchname)
        normalize = self.identifier_preparer._normalize_name
        return [normalize(row[0]) for row in rs]

    def reflecttable(self, connection, table, include_columns):
        denormalize = self.identifier_preparer._denormalize_name
        normalize = self.identifier_preparer._normalize_name

        st = ('SELECT COLUMNNAME, MODE, DATATYPE, CODETYPE, LEN, DEC, '
              '  NULLABLE, "DEFAULT", DEFAULTFUNCTION '
              'FROM COLUMNS '
              'WHERE TABLENAME=? AND SCHEMANAME=%s '
              'ORDER BY POS')

        fk = ('SELECT COLUMNNAME, FKEYNAME, '
              '  REFSCHEMANAME, REFTABLENAME, REFCOLUMNNAME, RULE, '
              '  (CASE WHEN REFSCHEMANAME = CURRENT_SCHEMA '
              '   THEN 1 ELSE 0 END) AS in_schema '
              'FROM FOREIGNKEYCOLUMNS '
              'WHERE TABLENAME=? AND SCHEMANAME=%s '
              'ORDER BY FKEYNAME ')

        params = [denormalize(table.name)]
        if not table.schema:
            st = st % 'CURRENT_SCHEMA'
            fk = fk % 'CURRENT_SCHEMA'
        else:
            st = st % '?'
            fk = fk % '?'
            params.append(denormalize(table.schema))

        rows = connection.execute(st, params).fetchall()
        if not rows:
            raise exceptions.NoSuchTableError(table.fullname)

        include_columns = util.Set(include_columns or [])

        for row in rows:
            (name, mode, col_type, encoding, length, scale,
             nullable, constant_def, func_def) = row

            name = normalize(name)

            if include_columns and name not in include_columns:
                continue

            type_args, type_kw = [], {}
            if col_type == 'FIXED':
                type_args = length, scale
                # Convert FIXED(10) DEFAULT SERIAL to our Integer
                if (scale == 0 and
                    func_def is not None and func_def.startswith('SERIAL')):
                    col_type = 'INTEGER'
                    type_args = length,
            elif col_type in 'FLOAT':
                type_args = length,
            elif col_type in ('CHAR', 'VARCHAR'):
                type_args = length,
                type_kw['encoding'] = encoding
            elif col_type == 'LONG':
                type_kw['encoding'] = encoding

            try:
                type_cls = ischema_names[col_type.lower()]
                type_instance = type_cls(*type_args, **type_kw)
            except KeyError:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (col_type, name))
                type_instance = sqltypes.NullType

            col_kw = {'autoincrement': False}
            col_kw['nullable'] = (nullable == 'YES')
            col_kw['primary_key'] = (mode == 'KEY')

            if func_def is not None:
                if func_def.startswith('SERIAL'):
                    if col_kw['primary_key']:
                        # No special default- let the standard autoincrement
                        # support handle SERIAL pk columns.
                        col_kw['autoincrement'] = True
                    else:
                        # strip current numbering
                        col_kw['default'] = schema.PassiveDefault(
                            sql.text('SERIAL'))
                        col_kw['autoincrement'] = True
                else:
                    col_kw['default'] = schema.PassiveDefault(
                        sql.text(func_def))
            elif constant_def is not None:
                col_kw['default'] = schema.PassiveDefault(sql.text(
                    "'%s'" % constant_def.replace("'", "''")))

            table.append_column(schema.Column(name, type_instance, **col_kw))

        fk_sets = itertools.groupby(connection.execute(fk, params),
                                    lambda row: row.FKEYNAME)
        for fkeyname, fkey in fk_sets:
            fkey = list(fkey)
            if include_columns:
                key_cols = util.Set([r.COLUMNNAME for r in fkey])
                if key_cols != include_columns:
                    continue

            columns, referants = [], []
            quote = self.identifier_preparer._maybe_quote_identifier

            for row in fkey:
                columns.append(normalize(row.COLUMNNAME))
                if table.schema or not row.in_schema:
                    referants.append('.'.join(
                        [quote(normalize(row[c]))
                         for c in ('REFSCHEMANAME', 'REFTABLENAME',
                                   'REFCOLUMNNAME')]))
                else:
                    referants.append('.'.join(
                        [quote(normalize(row[c]))
                         for c in ('REFTABLENAME', 'REFCOLUMNNAME')]))

            constraint_kw = {'name': fkeyname.lower()}
            if fkey[0].RULE is not None:
                rule = fkey[0].RULE
                if rule.startswith('DELETE '):
                    rule = rule[7:]
                constraint_kw['ondelete'] = rule

            table_kw = {}
            if table.schema or not row.in_schema:
                table_kw['schema'] = normalize(fkey[0].REFSCHEMANAME)

            ref_key = schema._get_table_key(normalize(fkey[0].REFTABLENAME),
                                            table_kw.get('schema'))
            if ref_key not in table.metadata.tables:
                schema.Table(normalize(fkey[0].REFTABLENAME),
                             table.metadata,
                             autoload=True, autoload_with=connection,
                             **table_kw)

            constraint = schema.ForeignKeyConstraint(columns, referants,
                                                     **constraint_kw)
            table.append_constraint(constraint)

    def has_sequence(self, connection, name):
        # [ticket:726] makes this schema-aware.
        denormalize = self.identifier_preparer._denormalize_name
        sql = ("SELECT sequence_name FROM SEQUENCES "
               "WHERE SEQUENCE_NAME=? ")
        rp = connection.execute(sql, denormalize(name))
        found = bool(rp.fetchone())
        rp.close()
        return found


class MaxDBCompiler(compiler.DefaultCompiler):
    operators = compiler.DefaultCompiler.operators.copy()
    operators[sql_operators.mod] = lambda x, y: 'mod(%s, %s)' % (x, y)

    function_conversion = {
        'CURRENT_DATE': 'DATE',
        'CURRENT_TIME': 'TIME',
        'CURRENT_TIMESTAMP': 'TIMESTAMP',
        }

    # These functions must be written without parens when called with no
    # parameters.  e.g. 'SELECT DATE FROM DUAL' not 'SELECT DATE() FROM DUAL'
    bare_functions = util.Set([
        'CURRENT_SCHEMA', 'DATE', 'FALSE', 'SYSDBA', 'TIME', 'TIMESTAMP',
        'TIMEZONE', 'TRANSACTION', 'TRUE', 'USER', 'UID', 'USERGROUP',
        'UTCDATE', 'UTCDIFF'])

    def default_from(self):
        return ' FROM DUAL'
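The reflection path above (has_table(), table_names(), and reflecttable()) is driven by SQLAlchemy's table autoloading. Below is a small sketch of how it gets triggered, using the same autoload/autoload_with keywords that reflecttable() itself passes when loading foreign-key targets; the engine URL and table name are hypothetical placeholders.

# Sketch only: triggering MaxDBDialect.reflecttable() through autoloading.
# The URL and table name are hypothetical; columns, primary keys, and
# foreign keys are read from the MaxDB system tables COLUMNS and
# FOREIGNKEYCOLUMNS queried in reflecttable().
from sqlalchemy import MetaData, Table, create_engine

engine = create_engine("maxdb://scott:tiger@localhost/testdb")
meta = MetaData()

orders = Table("orders", meta, autoload=True, autoload_with=engine)
print([c.name for c in orders.columns])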
