

firebird.py

SQLAlchemy: the classic Python ORM framework. Essential reading for anyone learning the library.
Language: Python
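The listing below is the second half of SQLAlchemy's Firebird dialect module (firebird.py). It picks up mid-method inside the FBDialect class (the tail of a table-name listing helper) and runs through reflection, the statement compiler, the schema generator and dropper, the default runner, and the identifier preparer. Before the code, here is a minimal usage sketch of how this dialect is typically driven from application code. It assumes a SQLAlchemy 0.4-era installation with the kinterbasdb DBAPI; the connection URL, credentials, and the 'employees' table name are placeholders, not taken from the listing.

# Minimal usage sketch (assumptions: SQLAlchemy 0.4.x + kinterbasdb; the URL,
# credentials, and table name below are illustrative placeholders).
from sqlalchemy import create_engine, MetaData, Table, select

# A firebird:// URL routes create_engine() to the dialect defined in this file.
engine = create_engine('firebird://sysdba:masterkey@localhost/path/to/test.fdb')

meta = MetaData()

# autoload=True exercises FBDialect.reflecttable(): it queries the rdb$* system
# tables for columns, primary/foreign keys, and generator-backed primary keys.
employees = Table('employees', meta, autoload=True, autoload_with=engine)

# FBCompiler.get_select_precolumns() renders limit/offset as
# "SELECT FIRST 10 SKIP 5 ..." instead of a trailing LIMIT/OFFSET clause.
result = engine.execute(select([employees]).limit(10).offset(5))
for row in result:
    print row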
        return [self._normalize_name(row[0]) for row in connection.execute(s)]

    def has_table(self, connection, table_name, schema=None):
        """Return ``True`` if the given table exists, ignoring the `schema`."""

        tblqry = """
        SELECT 1 FROM rdb$database
        WHERE EXISTS (SELECT rdb$relation_name
                      FROM rdb$relations
                      WHERE rdb$relation_name=?)
        """
        c = connection.execute(tblqry, [self._denormalize_name(table_name)])
        row = c.fetchone()
        if row is not None:
            return True
        else:
            return False

    def has_sequence(self, connection, sequence_name):
        """Return ``True`` if the given sequence (generator) exists."""

        genqry = """
        SELECT 1 FROM rdb$database
        WHERE EXISTS (SELECT rdb$generator_name
                      FROM rdb$generators
                      WHERE rdb$generator_name=?)
        """
        c = connection.execute(genqry, [self._denormalize_name(sequence_name)])
        row = c.fetchone()
        if row is not None:
            return True
        else:
            return False

    def is_disconnect(self, e):
        if isinstance(e, self.dbapi.OperationalError):
            return 'Unable to complete network request to host' in str(e)
        elif isinstance(e, self.dbapi.ProgrammingError):
            return 'Invalid connection state' in str(e)
        else:
            return False

    def reflecttable(self, connection, table, include_columns):
        # Query to extract the details of all the fields of the given table
        tblqry = """
        SELECT DISTINCT r.rdb$field_name AS fname,
                        r.rdb$null_flag AS null_flag,
                        t.rdb$type_name AS ftype,
                        f.rdb$field_sub_type AS stype,
                        f.rdb$field_length AS flen,
                        f.rdb$field_precision AS fprec,
                        f.rdb$field_scale AS fscale,
                        COALESCE(r.rdb$default_source, f.rdb$default_source) AS fdefault
        FROM rdb$relation_fields r
             JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
             JOIN rdb$types t ON t.rdb$type=f.rdb$field_type AND t.rdb$field_name='RDB$FIELD_TYPE'
        WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
        ORDER BY r.rdb$field_position
        """
        # Query to extract the PK/FK constrained fields of the given table
        keyqry = """
        SELECT se.rdb$field_name AS fname
        FROM rdb$relation_constraints rc
             JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        """
        # Query to extract the details of each UK/FK of the given table
        fkqry = """
        SELECT rc.rdb$constraint_name AS cname,
               cse.rdb$field_name AS fname,
               ix2.rdb$relation_name AS targetrname,
               se.rdb$field_name AS targetfname
        FROM rdb$relation_constraints rc
             JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
             JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
             JOIN rdb$index_segments cse ON cse.rdb$index_name=ix1.rdb$index_name
             JOIN rdb$index_segments se ON se.rdb$index_name=ix2.rdb$index_name AND se.rdb$field_position=cse.rdb$field_position
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        ORDER BY se.rdb$index_name, se.rdb$field_position
        """
        # Heuristic-query to determine the generator associated to a PK field
        genqry = """
        SELECT trigdep.rdb$depended_on_name AS fgenerator
        FROM rdb$dependencies tabdep
             JOIN rdb$dependencies trigdep ON (tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
                                               AND trigdep.rdb$depended_on_type=14
                                               AND trigdep.rdb$dependent_type=2)
             JOIN rdb$triggers trig ON (trig.rdb$trigger_name=tabdep.rdb$dependent_name)
        WHERE tabdep.rdb$depended_on_name=?
          AND tabdep.rdb$depended_on_type=0
          AND trig.rdb$trigger_type=1
          AND tabdep.rdb$field_name=?
          AND (SELECT count(*)
               FROM rdb$dependencies trigdep2
               WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2
        """

        tablename = self._denormalize_name(table.name)

        # get primary key fields
        c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
        pkfields = [self._normalize_name(r['fname']) for r in c.fetchall()]

        # get all of the fields for this table
        c = connection.execute(tblqry, [tablename])
        found_table = False
        while True:
            row = c.fetchone()
            if row is None:
                break
            found_table = True

            name = self._normalize_name(row['fname'])
            if include_columns and name not in include_columns:
                continue
            args = [name]

            kw = {}
            # get the data type
            coltype = ischema_names.get(row['ftype'].rstrip())
            if coltype is None:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (str(row['ftype']), name))
                coltype = sqltypes.NULLTYPE
            else:
                coltype = coltype(row)
            args.append(coltype)

            # is it a primary key?
            kw['primary_key'] = name in pkfields

            # is it nullable?
            kw['nullable'] = not bool(row['null_flag'])

            # does it have a default value?
            if row['fdefault'] is not None:
                # the value comes down as "DEFAULT 'value'"
                assert row['fdefault'].startswith('DEFAULT ')
                defvalue = row['fdefault'][8:]
                args.append(schema.PassiveDefault(sql.text(defvalue)))

            col = schema.Column(*args, **kw)
            if kw['primary_key']:
                # if the PK is a single field, try to see if it's linked to
                # a sequence (generator) through a trigger
                if len(pkfields) == 1:
                    genc = connection.execute(genqry, [tablename, row['fname']])
                    genr = genc.fetchone()
                    if genr is not None:
                        col.sequence = schema.Sequence(self._normalize_name(genr['fgenerator']))

            table.append_column(col)

        if not found_table:
            raise exceptions.NoSuchTableError(table.name)

        # get the foreign keys
        c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
        fks = {}
        while True:
            row = c.fetchone()
            if not row:
                break

            cname = self._normalize_name(row['cname'])
            try:
                fk = fks[cname]
            except KeyError:
                fks[cname] = fk = ([], [])
            rname = self._normalize_name(row['targetrname'])
            schema.Table(rname, table.metadata, autoload=True, autoload_with=connection)
            fname = self._normalize_name(row['fname'])
            refspec = rname + '.' + self._normalize_name(row['targetfname'])
            fk[0].append(fname)
            fk[1].append(refspec)

        for name, value in fks.iteritems():
            table.append_constraint(schema.ForeignKeyConstraint(value[0], value[1], name=name))

    def do_execute(self, cursor, statement, parameters, **kwargs):
        # kinterbasdb does not accept None, but wants an empty list
        # when there are no arguments.
        cursor.execute(statement, parameters or [])

    def do_rollback(self, connection):
        # Use the retaining feature, which keeps the transaction going
        connection.rollback(True)

    def do_commit(self, connection):
        # Use the retaining feature, which keeps the transaction going
        connection.commit(True)


class FBCompiler(sql.compiler.DefaultCompiler):
    """Firebird-specific idiosyncrasies."""

    # Firebird lacks a built-in modulo operator, but there is
    # an equivalent function in the ib_udf library.
    operators = sql.compiler.DefaultCompiler.operators.copy()
    operators.update({
        sql.operators.mod: lambda x, y: "mod(%s, %s)" % (x, y)
        })

    def visit_alias(self, alias, asfrom=False, **kwargs):
        # Override to not use the AS keyword, which FB 1.5 does not like
        if asfrom:
            return self.process(alias.original, asfrom=True, **kwargs) + " " + self.preparer.format_alias(alias, self._anonymize(alias.name))
        else:
            return self.process(alias.original, **kwargs)

    def function_argspec(self, func):
        if func.clauses:
            return self.process(func.clause_expr)
        else:
            return ""

    def default_from(self):
        return " FROM rdb$database"

    def visit_sequence(self, seq):
        return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)

    def get_select_precolumns(self, select):
        """Called when building a ``SELECT`` statement, position is just
        before the column list. Firebird puts the limit and offset right
        after the ``SELECT``...
        """
        result = ""
        if select._limit:
            result += "FIRST %d " % select._limit
        if select._offset:
            result += "SKIP %d " % select._offset
        if select._distinct:
            result += "DISTINCT "
        return result

    def limit_clause(self, select):
        """Already taken care of in the `get_select_precolumns` method."""
        return ""

    LENGTH_FUNCTION_NAME = 'char_length'

    def function_string(self, func):
        """Substitute the ``length`` function.

        On newer FB there is a ``char_length`` function, while older
        ones need the ``strlen`` UDF.
        """
        if func.name == 'length':
            return self.LENGTH_FUNCTION_NAME + '%(expr)s'
        return super(FBCompiler, self).function_string(func)


class FBSchemaGenerator(sql.compiler.SchemaGenerator):
    """Firebird syntactic idiosyncrasies."""

    def get_column_specification(self, column, **kwargs):
        colspec = self.preparer.format_column(column)
        colspec += " " + column.type.dialect_impl(self.dialect, _for_ddl=column).get_col_spec()

        default = self.get_column_default_string(column)
        if default is not None:
            colspec += " DEFAULT " + default

        if not column.nullable or column.primary_key:
            colspec += " NOT NULL"

        return colspec

    def visit_sequence(self, sequence):
        """Generate a ``CREATE GENERATOR`` statement for the sequence."""

        self.append("CREATE GENERATOR %s" % self.preparer.format_sequence(sequence))
        self.execute()


class FBSchemaDropper(sql.compiler.SchemaDropper):
    """Firebird syntactic idiosyncrasies."""

    def visit_sequence(self, sequence):
        """Generate a ``DROP GENERATOR`` statement for the sequence."""

        self.append("DROP GENERATOR %s" % self.preparer.format_sequence(sequence))
        self.execute()


class FBDefaultRunner(base.DefaultRunner):
    """Firebird-specific idiosyncrasies."""

    def visit_sequence(self, seq):
        """Get the next value from the sequence using ``gen_id()``."""

        return self.execute_string("SELECT gen_id(%s, 1) FROM rdb$database" % \
            self.dialect.identifier_preparer.format_sequence(seq))


RESERVED_WORDS = util.Set(
    ["action", "active", "add", "admin", "after", "all", "alter", "and", "any",
     "as", "asc", "ascending", "at", "auto", "autoddl", "avg", "based", "basename",
     "base_name", "before", "begin", "between", "bigint", "blob", "blobedit", "buffer",
     "by", "cache", "cascade", "case", "cast", "char", "character", "character_length",
     "char_length", "check", "check_point_len", "check_point_length", "close", "collate",
     "collation", "column", "commit", "committed", "compiletime", "computed", "conditional",
     "connect", "constraint", "containing", "continue", "count", "create", "cstring",
     "current", "current_connection", "current_date", "current_role", "current_time",
     "current_timestamp", "current_transaction", "current_user", "cursor", "database",
     "date", "day", "db_key", "debug", "dec", "decimal", "declare", "default", "delete",
     "desc", "descending", "describe", "descriptor", "disconnect", "display", "distinct",
     "do", "domain", "double", "drop", "echo", "edit", "else", "end", "entry_point",
     "escape", "event", "exception", "execute", "exists", "exit", "extern", "external",
     "extract", "fetch", "file", "filter", "float", "for", "foreign", "found", "free_it",
     "from", "full", "function", "gdscode", "generator", "gen_id", "global", "goto",
     "grant", "group", "group_commit_", "group_commit_wait",
"having", "help", "hour",     "if", "immediate", "in", "inactive", "index", "indicator", "init", "inner", "input",     "input_type", "insert", "int", "integer", "into", "is", "isolation", "isql", "join",     "key", "lc_messages", "lc_type", "left", "length", "lev", "level", "like", "logfile",     "log_buffer_size", "log_buf_size", "long", "manual", "max", "maximum", "maximum_segment",     "max_segment", "merge", "message", "min", "minimum", "minute", "module_name", "month",     "names", "national", "natural", "nchar", "no", "noauto", "not", "null", "numeric",     "num_log_buffers", "num_log_bufs", "octet_length", "of", "on", "only", "open", "option",     "or", "order", "outer", "output", "output_type", "overflow", "page", "pagelength",     "pages", "page_size", "parameter", "password", "plan", "position", "post_event",     "precision", "prepare", "primary", "privileges", "procedure", "protected", "public",     "quit", "raw_partitions", "rdb$db_key", "read", "real", "record_version", "recreate",     "references", "release", "release", "reserv", "reserving", "restrict", "retain",     "return", "returning_values", "returns", "revoke", "right", "role", "rollback",     "row_count", "runtime", "savepoint", "schema", "second", "segment", "select",     "set", "shadow", "shared", "shell", "show", "singular", "size", "smallint",     "snapshot", "some", "sort", "sqlcode", "sqlerror", "sqlwarning", "stability",     "starting", "starts", "statement", "static", "statistics", "sub_type", "sum",     "suspend", "table", "terminator", "then", "time", "timestamp", "to", "transaction",     "translate", "translation", "trigger", "trim", "type", "uncommitted", "union",     "unique", "update", "upper", "user", "using", "value", "values", "varchar",     "variable", "varying", "version", "view", "wait", "wait_time", "weekday", "when",     "whenever", "where", "while", "with", "work", "write", "year", "yearday" ])class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):    """Install Firebird specific reserved words."""    reserved_words = RESERVED_WORDS    def __init__(self, dialect):        super(FBIdentifierPreparer,self).__init__(dialect, omit_schema=True)dialect = FBDialectdialect.statement_compiler = FBCompilerdialect.schemagenerator = FBSchemaGeneratordialect.schemadropper = FBSchemaDropperdialect.defaultrunner = FBDefaultRunnerdialect.preparer = FBIdentifierPreparer
