

mssql.py

SQLAlchemy, the classic Python ORM framework; well worth studying. This file is part of its Microsoft SQL Server (MSSQL) dialect: a pyodbc execution context, the base MSSQLDialect class, and the pymssql dialect subclass.
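
For readers new to SQLAlchemy, here is a minimal sketch (not taken from this file) of the Table/engine API that dialect modules such as this one sit beneath. It assumes a reasonably recent SQLAlchemy release, uses an in-memory SQLite database so it runs without a server, and the table and column names are purely illustrative.

# Minimal SQLAlchemy sketch, independent of mssql.py: define a table, create it,
# insert one row, and query it back.  SQLite in memory needs no server.
from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String

engine = create_engine("sqlite://")          # in-memory SQLite database
metadata = MetaData()
users = Table(
    "users", metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)
metadata.create_all(engine)

conn = engine.connect()
conn.execute(users.insert(), {"name": "alice"})
for row in conn.execute(users.select()):
    print(row)

The remainder of the page is the mssql.py listing itself.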
class MSSQLExecutionContext_pyodbc(MSSQLExecutionContext):
    def pre_exec(self):
        """where appropriate, issue "select scope_identity()" in the same statement"""
        super(MSSQLExecutionContext_pyodbc, self).pre_exec()
        if self.compiled.isinsert and self.HASIDENT and (not self.IINSERT) \
                and len(self.parameters) == 1 and self.dialect.use_scope_identity:
            self.statement += "; select scope_identity()"

    def post_exec(self):
        if self.compiled.isinsert and self.HASIDENT and (not self.IINSERT) and self.dialect.use_scope_identity:
            # do nothing - id was fetched in dialect.do_execute()
            pass
        else:
            super(MSSQLExecutionContext_pyodbc, self).post_exec()


class MSSQLDialect(default.DefaultDialect):
    colspecs = {
        sqltypes.Unicode: MSNVarchar,
        sqltypes.Integer: MSInteger,
        sqltypes.Smallinteger: MSSmallInteger,
        sqltypes.Numeric: MSNumeric,
        sqltypes.Float: MSFloat,
        sqltypes.DateTime: MSDateTime,
        sqltypes.Date: MSDate,
        sqltypes.Time: MSTime,
        sqltypes.String: MSString,
        sqltypes.Binary: MSBinary,
        sqltypes.Boolean: MSBoolean,
        sqltypes.Text: MSText,
        sqltypes.CHAR: MSChar,
        sqltypes.NCHAR: MSNChar,
        sqltypes.TIMESTAMP: MSTimeStamp,
    }

    ischema_names = {
        'int': MSInteger,
        'bigint': MSBigInteger,
        'smallint': MSSmallInteger,
        'tinyint': MSTinyInteger,
        'varchar': MSString,
        'nvarchar': MSNVarchar,
        'char': MSChar,
        'nchar': MSNChar,
        'text': MSText,
        'ntext': MSText,
        'decimal': MSNumeric,
        'numeric': MSNumeric,
        'float': MSFloat,
        'datetime': MSDateTime,
        'smalldatetime': MSDate,
        'binary': MSBinary,
        'varbinary': MSBinary,
        'bit': MSBoolean,
        'real': MSFloat,
        'image': MSBinary,
        'timestamp': MSTimeStamp,
        'money': MSMoney,
        'smallmoney': MSSmallMoney,
        'uniqueidentifier': MSUniqueIdentifier,
        'sql_variant': MSVariant,
    }

    def __new__(cls, dbapi=None, *args, **kwargs):
        if cls != MSSQLDialect:
            return super(MSSQLDialect, cls).__new__(cls, *args, **kwargs)
        if dbapi:
            dialect = dialect_mapping.get(dbapi.__name__)
            return dialect(*args, **kwargs)
        else:
            return object.__new__(cls, *args, **kwargs)

    def __init__(self, auto_identity_insert=True, **params):
        super(MSSQLDialect, self).__init__(**params)
        self.auto_identity_insert = auto_identity_insert
        self.text_as_varchar = False
        self.use_scope_identity = False
        self.has_window_funcs = False
        self.set_default_schema_name("dbo")

    def dbapi(cls, module_name=None):
        if module_name:
            try:
                dialect_cls = dialect_mapping[module_name]
                return dialect_cls.import_dbapi()
            except KeyError:
                raise exceptions.InvalidRequestError("Unsupported MSSQL module '%s' requested (must be adodbapi, pymssql or pyodbc)" % module_name)
        else:
            for dialect_cls in [MSSQLDialect_pyodbc, MSSQLDialect_pymssql, MSSQLDialect_adodbapi]:
                try:
                    return dialect_cls.import_dbapi()
                except ImportError, e:
                    pass
            else:
                raise ImportError('No DBAPI module detected for MSSQL - please install pyodbc, pymssql, or adodbapi')
    dbapi = classmethod(dbapi)
    def create_connect_args(self, url):
        opts = url.translate_connect_args(username='user')
        opts.update(url.query)
        if 'auto_identity_insert' in opts:
            self.auto_identity_insert = bool(int(opts.pop('auto_identity_insert')))
        if 'query_timeout' in opts:
            self.query_timeout = int(opts.pop('query_timeout'))
        if 'text_as_varchar' in opts:
            self.text_as_varchar = bool(int(opts.pop('text_as_varchar')))
        if 'use_scope_identity' in opts:
            self.use_scope_identity = bool(int(opts.pop('use_scope_identity')))
        if 'has_window_funcs' in opts:
            self.has_window_funcs = bool(int(opts.pop('has_window_funcs')))
        return self.make_connect_string(opts)

    def create_execution_context(self, *args, **kwargs):
        return MSSQLExecutionContext(self, *args, **kwargs)

    def type_descriptor(self, typeobj):
        newobj = sqltypes.adapt_type(typeobj, self.colspecs)
        # Some types need to know about the dialect
        if isinstance(newobj, (MSText, MSNVarchar)):
            newobj.dialect = self
        return newobj

    def last_inserted_ids(self):
        return self.context.last_inserted_ids

    def get_default_schema_name(self, connection):
        return self.schema_name

    def set_default_schema_name(self, schema_name):
        self.schema_name = schema_name

    def do_execute(self, cursor, statement, params, context=None, **kwargs):
        if params == {}:
            params = ()
        try:
            super(MSSQLDialect, self).do_execute(cursor, statement, params, context=context, **kwargs)
        finally:
            if context.IINSERT:
                cursor.execute("SET IDENTITY_INSERT %s OFF" % self.identifier_preparer.format_table(context.compiled.statement.table))

    def do_executemany(self, cursor, statement, params, context=None, **kwargs):
        try:
            super(MSSQLDialect, self).do_executemany(cursor, statement, params, context=context, **kwargs)
        finally:
            if context.IINSERT:
                cursor.execute("SET IDENTITY_INSERT %s OFF" % self.identifier_preparer.format_table(context.compiled.statement.table))

    def _execute(self, c, statement, parameters):
        try:
            if parameters == {}:
                parameters = ()
            c.execute(statement, parameters)
            self.context.rowcount = c.rowcount
            c.DBPROP_COMMITPRESERVE = "Y"
        except Exception, e:
            raise exceptions.DBAPIError.instance(statement, parameters, e)

    def table_names(self, connection, schema):
        from sqlalchemy.databases import information_schema as ischema
        return ischema.table_names(connection, schema)

    def raw_connection(self, connection):
        """Pull the raw pymssql connection out -- sensitive to "pool.ConnectionFairy" and pymssql.pymssqlCnx classes"""
        try:
            # TODO: probably want to move this to individual dialect subclasses to
            # save on the exception throw + simplify
            return connection.connection.__dict__['_pymssqlCnx__cnx']
        except:
            return connection.connection.adoConn

    def uppercase_table(self, t):
        # convert all names to uppercase -- fixes refs to INFORMATION_SCHEMA for case-sensitive DBs, and won't matter for case-insensitive ones
        t.name = t.name.upper()
        if t.schema:
            t.schema = t.schema.upper()
        for c in t.columns:
            c.name = c.name.upper()
        return t
    def has_table(self, connection, tablename, schema=None):
        import sqlalchemy.databases.information_schema as ischema

        current_schema = schema or self.get_default_schema_name(connection)
        columns = self.uppercase_table(ischema.columns)
        s = sql.select([columns],
                       current_schema
                           and sql.and_(columns.c.table_name==tablename, columns.c.table_schema==current_schema)
                           or columns.c.table_name==tablename,
                       )
        c = connection.execute(s)
        row = c.fetchone()
        return row is not None

    def reflecttable(self, connection, table, include_columns):
        import sqlalchemy.databases.information_schema as ischema

        # Get base columns
        if table.schema is not None:
            current_schema = table.schema
        else:
            current_schema = self.get_default_schema_name(connection)

        columns = self.uppercase_table(ischema.columns)
        s = sql.select([columns],
                       current_schema
                           and sql.and_(columns.c.table_name==table.name, columns.c.table_schema==current_schema)
                           or columns.c.table_name==table.name,
                       order_by=[columns.c.ordinal_position])
        c = connection.execute(s)
        found_table = False
        while True:
            row = c.fetchone()
            if row is None:
                break
            found_table = True
            (name, type, nullable, charlen, numericprec, numericscale, default) = (
                row[columns.c.column_name],
                row[columns.c.data_type],
                row[columns.c.is_nullable] == 'YES',
                row[columns.c.character_maximum_length],
                row[columns.c.numeric_precision],
                row[columns.c.numeric_scale],
                row[columns.c.column_default]
            )
            if include_columns and name not in include_columns:
                continue

            args = []
            for a in (charlen, numericprec, numericscale):
                if a is not None:
                    args.append(a)
            coltype = self.ischema_names.get(type, None)
            if coltype == MSString and charlen == -1:
                coltype = MSText()
            else:
                if coltype is None:
                    util.warn("Did not recognize type '%s' of column '%s'" %
                              (type, name))
                    coltype = sqltypes.NULLTYPE
                elif coltype in (MSNVarchar, AdoMSNVarchar) and charlen == -1:
                    args[0] = None
                coltype = coltype(*args)
            colargs = []
            if default is not None:
                colargs.append(schema.PassiveDefault(sql.text(default)))
            table.append_column(schema.Column(name, coltype, nullable=nullable, autoincrement=False, *colargs))

        if not found_table:
            raise exceptions.NoSuchTableError(table.name)

        # We also run an sp_columns to check for identity columns:
        cursor = connection.execute("sp_columns @table_name = '%s', @table_owner = '%s'" % (table.name, current_schema))
        ic = None
        while True:
            row = cursor.fetchone()
            if row is None:
                break
            col_name, type_name = row[3], row[5]
            if type_name.endswith("identity"):
                ic = table.c[col_name]
                ic.autoincrement = True
                # set up a pseudo-sequence to represent the identity attribute -
                # we interpret this at table.create() time as the identity attribute
                ic.sequence = schema.Sequence(ic.name + '_identity')
                # MSSQL: only one identity per table allowed
                cursor.close()
                break
        if not ic is None:
            try:
                cursor = connection.execute("select ident_seed(?), ident_incr(?)", table.fullname, table.fullname)
                row = cursor.fetchone()
                cursor.close()
                if not row is None:
                    ic.sequence.start = int(row[0])
                    ic.sequence.increment = int(row[1])
            except:
                # ignoring it, works just like before
                pass

        # Add constraints
        RR = self.uppercase_table(ischema.ref_constraints)                # information_schema.referential_constraints
        TC = self.uppercase_table(ischema.constraints)                    # information_schema.table_constraints
        C  = self.uppercase_table(ischema.pg_key_constraints).alias('C')  # information_schema.constraint_column_usage: the constrained column
        R  = self.uppercase_table(ischema.pg_key_constraints).alias('R')  # information_schema.constraint_column_usage: the referenced column

        # Primary key constraints
        s = sql.select([C.c.column_name, TC.c.constraint_type],
                       sql.and_(TC.c.constraint_name == C.c.constraint_name,
                                C.c.table_name == table.name))
        c = connection.execute(s)
        for row in c:
            if 'PRIMARY' in row[TC.c.constraint_type.name]:
                table.primary_key.add(table.c[row[0]])

        # Foreign key constraints
        s = sql.select([C.c.column_name,
                        R.c.table_schema, R.c.table_name, R.c.column_name,
                        RR.c.constraint_name, RR.c.match_option, RR.c.update_rule, RR.c.delete_rule],
                       sql.and_(C.c.table_name == table.name,
                                C.c.table_schema == current_schema,
                                C.c.constraint_name == RR.c.constraint_name,
                                R.c.constraint_name == RR.c.unique_constraint_name,
                                C.c.ordinal_position == R.c.ordinal_position
                                ),
                       order_by=[RR.c.constraint_name, R.c.ordinal_position])
        rows = connection.execute(s).fetchall()

        # group rows by constraint ID, to handle multi-column FKs
        fknm, scols, rcols = (None, [], [])
        for r in rows:
            scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r
            if rfknm != fknm:
                if fknm:
                    table.append_constraint(schema.ForeignKeyConstraint(scols, ['%s.%s' % (t, c) for (s, t, c) in rcols], fknm))
                fknm, scols, rcols = (rfknm, [], [])
            if (not scol in scols):
                scols.append(scol)
            if (not (rschema, rtbl, rcol) in rcols):
                rcols.append((rschema, rtbl, rcol))
        if fknm and scols:
            table.append_constraint(schema.ForeignKeyConstraint(scols, ['%s.%s' % (t, c) for (s, t, c) in rcols], fknm))
class MSSQLDialect_pymssql(MSSQLDialect):
    supports_sane_rowcount = False
    max_identifier_length = 30

    def import_dbapi(cls):
        import pymssql as module
        # pymssql doesn't have a Binary method.  we use string
        # TODO: monkeypatching here is less than ideal
        module.Binary = lambda st: str(st)
        return module
    import_dbapi = classmethod(import_dbapi)

    colspecs = MSSQLDialect.colspecs.copy()
    colspecs[sqltypes.Date] = MSDate_pymssql

    ischema_names = MSSQLDialect.ischema_names.copy()
    ischema_names['smalldatetime'] = MSDate_pymssql

    def __init__(self, **params):
        super(MSSQLDialect_pymssql, self).__init__(**params)
        self.use_scope_identity = True

        # pymssql understands only ascii
        if self.convert_unicode:
            self.encoding = params.get('encoding', 'ascii')
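
The dialect-specific options handled above (auto_identity_insert, query_timeout, text_as_varchar, use_scope_identity, has_window_funcs) are popped from url.query in create_connect_args(), so they can be supplied as query-string flags on the database URL. The sketch below is not part of mssql.py: the DSN name and credentials are made up, the exact URL form depends on the SQLAlchemy release and DBAPI in use, and each flag is parsed with bool(int(...)), so pass 0 or 1.

# Usage sketch only (hypothetical DSN and credentials); the query-string flags
# map onto the options consumed by MSSQLDialect.create_connect_args() above.
from sqlalchemy import create_engine

engine = create_engine(
    "mssql://scott:tiger@mydsn"
    "?use_scope_identity=1&text_as_varchar=1&has_window_funcs=1&query_timeout=30"
)
conn = engine.connect()
print(conn.execute("select @@version").scalar())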
