亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? sad_sse2.asm

?? xvid MPEG-4 編解碼源代碼
?? ASM
字號:
;/*****************************************************************************
; *
; *  XVID MPEG-4 VIDEO CODEC
; *  sse2 sum of absolute difference
; *
; *  Copyright(C) 2002 Dmitry Rozhdestvensky
; *
; *  This file is part of XviD, a free MPEG-4 video encoder/decoder
; *
; *  XviD is free software; you can redistribute it and/or modify it
; *  under the terms of the GNU General Public License as published by
; *  the Free Software Foundation; either version 2 of the License, or
; *  (at your option) any later version.
; *
; *  This program is distributed in the hope that it will be useful,
; *  but WITHOUT ANY WARRANTY; without even the implied warranty of
; *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
; *  GNU General Public License for more details.
; *
; *  You should have received a copy of the GNU General Public License
; *  along with this program; if not, write to the Free Software
; *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
; *
; *  Under section 8 of the GNU General Public License, the copyright
; *  holders of XVID explicitly forbid distribution in the following
; *  countries:
; *
; *    - Japan
; *    - United States of America
; *
; *  Linking XviD statically or dynamically with other modules is making a
; *  combined work based on XviD.  Thus, the terms and conditions of the
; *  GNU General Public License cover the whole combination.
; *
; *  As a special exception, the copyright holders of XviD give you
; *  permission to link XviD with independent modules that communicate with
; *  XviD solely through the VFW1.1 and DShow interfaces, regardless of the
; *  license terms of these independent modules, and to copy and distribute
; *  the resulting combined work under terms of your choice, provided that
; *  every copy of the combined work is accompanied by a complete copy of
; *  the source code of XviD (the version of XviD used to produce the
; *  combined work), being distributed under the terms of the GNU General
; *  Public License plus this exception.  An independent module is a module
; *  which is not derived from or based on XviD.
; *
; *  Note that people who make modified versions of XviD are not obligated
; *  to grant this special exception for their modified versions; it is
; *  their choice whether to do so.  The GNU General Public License gives
; *  permission to release a modified version without this exception; this
; *  exception also makes it possible to release a modified version which
; *  carries forward this exception.
; *
; * $Id: sad_sse2.asm,v 1.7 2002/11/17 00:32:06 edgomez Exp $
; *
; ****************************************************************************/

; Syntax: NASM, 32-bit x86, cdecl (all arguments on the stack, result in eax).
; Requires SSE2 (psadbw/pshufd on xmm registers).

bits 32

; Declare an exported symbol, prepending an underscore when the platform's
; object format requires C-name mangling (PREFIX defined at build time).
%macro cglobal 1
	%ifdef PREFIX
		global _%1
		%define %1 _%1
	%else
		global %1
	%endif
%endmacro

; Build-time switches for debugging the alignment auto-detection.
%define sad_debug 0             ;1=unaligned 2=ref unaligned 3=aligned 0=autodetect
%define dev_debug 2             ;1=unaligned 2=aligned 0=autodetect
%define test_stride_alignment 0 ;test stride for alignment while autodetect
%define early_return 0          ;use early return in sad

section .data

align 64
buffer  times 4*8 dd 0          ;8 128-bit words (row scratch for the unaligned dev path)
zero    times 4   dd 0          ;one 128-bit zero constant

section .text

cglobal  sad16_sse2
cglobal  dev16_sse2

;===========================================================================
;               General macros for SSE2 code
;===========================================================================

; load_stride <stride-reg>
; Derives the row-step constants used by all 16x16 loops:
;   ebx = stride (caller-loaded), ecx = stride*3, edx = stride*4.
%macro load_stride 1
                mov     ecx,%1
                add     ecx,ecx
                mov     edx,ecx
                add     ecx,%1          ;stride*3
                add     edx,edx         ;stride*4
%endmacro

; sad8lines <pointer-reg>
; Accumulates psadbw of 8 consecutive rows at [%1] into xmm0..xmm7
; (one register per row), advancing %1 by 8 rows.  The other operand of
; each psadbw must already be loaded into the xmm registers.
%macro sad8lines 1
                psadbw  xmm0,[%1]
                psadbw  xmm1,[%1+ebx]
                psadbw  xmm2,[%1+ebx*2]
                psadbw  xmm3,[%1+ecx]
                add     %1,edx
                psadbw  xmm4,[%1]
                psadbw  xmm5,[%1+ebx]
                psadbw  xmm6,[%1+ebx*2]
                psadbw  xmm7,[%1+ecx]
                add     %1,edx
%endmacro

; after_sad <dest-reg>
; Summarizes 0th and 4th words of all xmm registers (psadbw leaves one
; 16-bit sum per 64-bit half) into a single scalar in %1.
%macro after_sad 1
                paddusw xmm0,xmm1
                paddusw xmm2,xmm3
                paddusw xmm4,xmm5
                paddusw xmm6,xmm7
                paddusw xmm0,xmm2
                paddusw xmm4,xmm6
                paddusw xmm4,xmm0
                pshufd  xmm5,xmm4,11111110b     ;bring the high qword's sum down
                paddusw xmm5,xmm4
                pextrw  %1,xmm5,0       ;less latency than movd
%endmacro

; restore <1|0>
; Restores the callee-saved registers pushed in the prologue;
; argument 1 additionally pops ebp (used by the *_ul branches).
%macro restore 1
%if %1=1
                pop ebp
%endif
                pop edi
                pop esi
                pop ebx
%endmacro

;===========================================================================
;
; uint32_t sad16_sse2 (const uint8_t * const cur,
;					const uint8_t * const ref,
;					const uint32_t stride,
;					const uint32_t best_sad);
;
; Returns the 16x16 sum of absolute differences between cur and ref in eax.
; Dispatches at runtime to one of three branches depending on the 16-byte
; alignment of cur/ref (both aligned, one aligned, neither aligned).
; best_sad is only consulted when early_return=1.
;
;===========================================================================

align 16
sad16_sse2
                push    ebx
                push    esi
                push    edi

                mov     ebx,[esp + 3*4 + 12]    ;stride
%if sad_debug<>0
                mov     edi,[esp + 3*4 + 4]
                mov     esi,[esp + 3*4 + 8]
%endif
%if sad_debug=1
                jmp     sad16_sse2_ul
%endif
%if sad_debug=2
                jmp     sad16_sse2_semial
%endif
%if sad_debug=3
                jmp     sad16_sse2_al
%endif

%if test_stride_alignment<>0
                test    ebx,15
                jnz     sad16_sse2_ul
%endif
                ; Route the aligned pointer (if any) to edi and the other
                ; one to esi, using cmov so flags from the test survive.
                mov     edi,[esp + 3*4 + 4]     ;cur (most likely aligned)
                test    edi,15
                cmovz   esi,[esp + 3*4 + 8]     ;load esi if edi is aligned
                cmovnz  esi,edi                 ;move to esi and load edi
                cmovnz  edi,[esp + 3*4 + 8]     ;if not
                jnz     esi_unaligned
                test    esi,15
                jnz     near sad16_sse2_semial
                jmp     sad16_sse2_al
esi_unaligned:  test    edi,15
                jnz     near sad16_sse2_ul
                jmp     sad16_sse2_semial

;===========================================================================
;       Branch requires 16-byte alignment of esi and edi and stride
;===========================================================================

; sad16x8_al <dest-reg> : SAD of 8 rows, both pointers aligned.
%macro sad16x8_al 1
                movdqa  xmm0,[esi]
                movdqa  xmm1,[esi+ebx]
                movdqa  xmm2,[esi+ebx*2]
                movdqa  xmm3,[esi+ecx]
                add     esi,edx
                movdqa  xmm4,[esi]
                movdqa  xmm5,[esi+ebx]
                movdqa  xmm6,[esi+ebx*2]
                movdqa  xmm7,[esi+ecx]
                add     esi,edx
                sad8lines edi
                after_sad %1
%endmacro

align 16
sad16_sse2_al
                load_stride ebx
                sad16x8_al eax
%if early_return=1
                cmp     eax,[esp + 3*4 + 16]    ;best_sad
                jg      continue_al
%endif
                sad16x8_al ebx
                add     eax,ebx
continue_al:    restore 0
                ret

;===========================================================================
;       Branch requires 16-byte alignment of the edi and stride only
;===========================================================================

; sad16x8_semial <dest-reg> : SAD of 8 rows, only edi aligned.
%macro sad16x8_semial 1
                movdqu  xmm0,[esi]
                movdqu  xmm1,[esi+ebx]
                movdqu  xmm2,[esi+ebx*2]
                movdqu  xmm3,[esi+ecx]
                add     esi,edx
                movdqu  xmm4,[esi]
                movdqu  xmm5,[esi+ebx]
                movdqu  xmm6,[esi+ebx*2]
                movdqu  xmm7,[esi+ecx]
                add     esi,edx
                sad8lines edi
                after_sad %1
%endmacro

align 16
sad16_sse2_semial
                load_stride ebx
                sad16x8_semial eax
%if early_return=1
                cmp     eax,[esp + 3*4 + 16]    ;best_sad
                jg      cont_semial
%endif
                sad16x8_semial ebx
                add     eax,ebx
cont_semial:    restore 0
                ret

;===========================================================================
;               Branch does not require alignment, even stride
;===========================================================================

; sad16x4_ul <dest-reg> : SAD of 4 rows, neither pointer aligned
; (reg-reg psadbw, so both blocks are loaded with movdqu first).
%macro sad16x4_ul 1
                movdqu  xmm0,[esi]
                movdqu  xmm1,[esi+ebx]
                movdqu  xmm2,[esi+ebx*2]
                movdqu  xmm3,[esi+ecx]
                add     esi,edx
                movdqu  xmm4,[edi]
                movdqu  xmm5,[edi+ebx]
                movdqu  xmm6,[edi+ebx*2]
                movdqu  xmm7,[edi+ecx]
                add     edi,edx
                psadbw  xmm4,xmm0
                psadbw  xmm5,xmm1
                psadbw  xmm6,xmm2
                psadbw  xmm7,xmm3
                paddusw xmm4,xmm5
                paddusw xmm6,xmm7
                paddusw xmm4,xmm6
                pshufd  xmm7,xmm4,11111110b
                paddusw xmm7,xmm4
                pextrw  %1,xmm7,0
%endmacro

align 16
sad16_sse2_ul
                load_stride ebx
                push ebp                        ;extra accumulator => offsets below use 4*4
                sad16x4_ul eax
%if early_return=1
                cmp     eax,[esp + 4*4 + 16]    ;best_sad
                jg      continue_ul
%endif
                sad16x4_ul ebp
                add     eax,ebp
%if early_return=1
                cmp     eax,[esp + 4*4 + 16]    ;best_sad
                jg      continue_ul
%endif
                sad16x4_ul ebp
                add     eax,ebp
%if early_return=1
                cmp     eax,[esp + 4*4 + 16]    ;best_sad
                jg      continue_ul
%endif
                sad16x4_ul ebp
                add     eax,ebp
continue_ul:    restore 1
                ret

;===========================================================================
;
; uint32_t dev16_sse2(const uint8_t * const cur,
;					const uint32_t stride);
;
; Returns in eax the 16x16 "deviation": sum of |pixel - mean| over the
; block, where mean is the (truncated) average of the 256 pixels.
; experimental!
;
;===========================================================================

align 16
dev16_sse2
                push    ebx
                push    esi
                push    edi
                push    ebp

                mov     esi, [esp + 4*4 + 4]      ; cur
                mov     ebx, [esp + 4*4 + 8]      ; stride
                mov     edi, buffer               ; scratch for the unaligned path
%if dev_debug=1
                jmp     dev16_sse2_ul
%endif
%if dev_debug=2
                ; NOTE(review): this forced jump leaves edi = buffer (all
                ; zeros) while dev16_sse2_al reads the mean pass from edi;
                ; the autodetect path below sets edi = esi first.  Looks
                ; inconsistent for the dev_debug=2 build — confirm intent.
                jmp     dev16_sse2_al
%endif
                test    esi,15
                jnz     near dev16_sse2_ul
%if test_stride_alignment=1
                test    ebx,15
                jnz     dev16_sse2_ul
%endif
                mov     edi,esi
                jmp     dev16_sse2_al

;===========================================================================
;               Branch requires alignment of both the cur and stride
;===========================================================================

; make_mean
; eax/ebp hold the two half-block pixel sums; combine them, take the mean
; (sum/256 lives in ah), and replicate the mean byte through all 4 bytes
; of eax so it can be broadcast into an xmm register.
%macro make_mean 0
                add     eax,ebp         ;mean 16-bit
                mov     al,ah           ;eax= {0 0 mean/256 mean/256}
                mov     ebp,eax
                shl     ebp,16
                or      eax,ebp
%endmacro

; sad_mean16x8_al <destination>,<0=zero|1=mean from eax>,<source>
; Fills xmm0..xmm7 with either zero (to sum pixels) or the replicated
; mean byte (to sum |pixel-mean|), then SADs 8 rows from <source>.
%macro sad_mean16x8_al 3
%if %2=0
                pxor    xmm0,xmm0
%else
                movd    xmm0,eax
                pshufd  xmm0,xmm0,0
%endif
                movdqa  xmm1,xmm0
                movdqa  xmm2,xmm0
                movdqa  xmm3,xmm0
                movdqa  xmm4,xmm0
                movdqa  xmm5,xmm0
                movdqa  xmm6,xmm0
                movdqa  xmm7,xmm0
                sad8lines %3
                after_sad %1
%endmacro

align 16
dev16_sse2_al
                load_stride ebx
                sad_mean16x8_al eax,0,esi       ;pass 1: pixel sums (vs zero)
                sad_mean16x8_al ebp,0,esi
                make_mean
                sad_mean16x8_al ebp,1,edi       ;pass 2: SAD against the mean
                sad_mean16x8_al eax,1,edi
                add eax,ebp
                restore 1
                ret

;===========================================================================
;               Branch does not require alignment
;===========================================================================

; sad_mean16x8_ul <pointer-reg>,<dest-reg>
; Sums 8 unaligned rows against zero while also copying each row into the
; aligned scratch 'buffer', so the mean pass can reread them aligned.
; NOTE(review): the mean pass below still walks the copy with the image
; stride (ebx) rather than 16, so rows 1..7 are only reread correctly when
; stride == 16; consistent with the "experimental!" warning — verify.
%macro sad_mean16x8_ul 2
                pxor    xmm7,xmm7
                movdqu  xmm0,[%1]
                movdqu  xmm1,[%1+ebx]
                movdqu  xmm2,[%1+ebx*2]
                movdqu  xmm3,[%1+ecx]
                add     %1,edx
                movdqa  [buffer+16*0],xmm0
                movdqa  [buffer+16*1],xmm1
                movdqa  [buffer+16*2],xmm2
                movdqa  [buffer+16*3],xmm3
                movdqu  xmm4,[%1]
                movdqu  xmm5,[%1+ebx]
                movdqu  xmm6,[%1+ebx*2]
                movdqa  [buffer+16*4],xmm4
                movdqa  [buffer+16*5],xmm5
                movdqa  [buffer+16*6],xmm6
                psadbw  xmm0,xmm7
                psadbw  xmm1,xmm7
                psadbw  xmm2,xmm7
                psadbw  xmm3,xmm7
                psadbw  xmm4,xmm7
                psadbw  xmm5,xmm7
                psadbw  xmm6,xmm7
                movdqu  xmm7,[%1+ecx]
                movdqa  [buffer+16*7],xmm7
                psadbw  xmm7,[zero]     ;xmm7 is busy, use the memory constant
                add     %1,edx
                after_sad %2
%endmacro

align 16
dev16_sse2_ul
                load_stride ebx
                sad_mean16x8_ul esi,eax         ;pass 1: pixel sums (vs zero)
                sad_mean16x8_ul esi,ebp
                make_mean
                sad_mean16x8_al ebp,1,edi       ;pass 2: SAD against the mean
                sad_mean16x8_al eax,1,edi
                add     eax,ebp
                restore 1
                ret

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
2020国产精品| 欧美成人精品高清在线播放 | 欧美三级日韩在线| 国产成人啪免费观看软件| 日韩高清一级片| 全国精品久久少妇| 精品制服美女久久| 国产揄拍国内精品对白| 国产高清不卡二三区| 成人黄色小视频| 91成人看片片| 这里是久久伊人| 精品第一国产综合精品aⅴ| 久久久噜噜噜久久中文字幕色伊伊 | 国产精品91xxx| 福利一区福利二区| 91麻豆产精品久久久久久| 欧美三级日本三级少妇99| 欧美r级在线观看| 中文av一区二区| 亚洲一区影音先锋| 久久99久久久久| 成人听书哪个软件好| 欧美三级午夜理伦三级中视频| 日韩国产成人精品| 国产福利不卡视频| 91黄色免费观看| 久久青草国产手机看片福利盒子 | 亚洲女爱视频在线| 亚洲二区视频在线| 玖玖九九国产精品| 91亚洲精品一区二区乱码| 欧美高清性hdvideosex| 久久精品男人天堂av| 亚洲精品日日夜夜| 精品一区二区三区欧美| 在线观看欧美日本| 久久影音资源网| 亚洲国产成人porn| 福利一区二区在线| 日韩亚洲欧美一区二区三区| 国产精品国产三级国产| 蜜臀va亚洲va欧美va天堂| 一本到不卡精品视频在线观看 | 中文字幕一区二区三区四区| 婷婷国产在线综合| 国产成人自拍在线| 日韩一区和二区| 亚洲精品写真福利| 国产99精品国产| 91精品国产高清一区二区三区 | 成人免费毛片高清视频| 欧美怡红院视频| 久久久天堂av| 奇米888四色在线精品| 色域天天综合网| 国产精品福利在线播放| 国产精品亚洲成人| 欧美精品一区二区三区四区| 日韩精品欧美成人高清一区二区| 午夜欧美在线一二页| 狠狠色丁香九九婷婷综合五月| 国内精品第一页| 91精品国产福利在线观看| 亚洲男人都懂的| 成人午夜精品在线| 久久久午夜精品| 韩国女主播一区| 欧美一区二区视频在线观看2022| 日韩无一区二区| 日本亚洲最大的色成网站www| 天天免费综合色| 欧美性猛交xxxx黑人交| 亚洲色图自拍偷拍美腿丝袜制服诱惑麻豆| 欧美国产精品一区二区三区| 国产在线观看一区二区| 日韩视频在线永久播放| 午夜精品久久一牛影视| 欧美日韩精品高清| 免费高清不卡av| 日韩精品中文字幕在线一区| 麻豆视频观看网址久久| 欧美精品一区二区三区四区| 国产高清精品在线| 国产精品国产馆在线真实露脸 | 久久久久久久国产精品影院| 国产麻豆9l精品三级站| 国产欧美一区二区精品婷婷| 成人aa视频在线观看| 国产精品理伦片| 91福利社在线观看| 日韩av中文字幕一区二区三区| 国产91富婆露脸刺激对白| 日本一区二区三区在线观看| 国产成人精品亚洲777人妖| 欧美国产乱子伦| 欧美日韩美少妇| 精品亚洲免费视频| 亚洲女爱视频在线| 日韩欧美另类在线| av日韩在线网站| 日韩二区三区在线观看| 国产女人水真多18毛片18精品视频| 日韩精彩视频在线观看| 精品国产一区久久| 97超碰欧美中文字幕| 天堂av在线一区| 国产欧美一区二区精品性| 91成人在线免费观看| 国模娜娜一区二区三区| 一区二区三区**美女毛片| 91精品国产入口| 91首页免费视频| 久久国产福利国产秒拍| 亚洲三级久久久| 亚洲精品在线一区二区| 欧美色大人视频| av在线不卡免费看| 韩国av一区二区| 日韩精品一二三四| 夜夜嗨av一区二区三区中文字幕 | 亚洲高清久久久| 欧美国产成人在线| 91精品国产福利在线观看| 91小宝寻花一区二区三区| 麻豆国产91在线播放| 亚洲最大色网站| 国产精品嫩草影院com| 欧美狂野另类xxxxoooo| 成人毛片在线观看| 国产综合成人久久大片91| 日韩成人伦理电影在线观看| 一区二区三区日韩在线观看| 国产精品激情偷乱一区二区∴| 91丝袜国产在线播放| 国产在线麻豆精品观看| 日一区二区三区| 亚洲一区二区四区蜜桃| 18欧美乱大交hd1984| 欧美高清在线一区| 久久精品在线观看| 欧美mv日韩mv亚洲| 欧美一级片在线| 欧美高清你懂得| 欧美日韩精品一区二区| 欧美性一二三区| 精品视频资源站| 欧美高清www午色夜在线视频| 精品在线观看免费| 天使萌一区二区三区免费观看| 精品嫩草影院久久| 欧美tickling挠脚心丨vk| 欧美不卡视频一区| 日韩一区二区不卡| 欧美成人三级电影在线| 久久综合久久久久88| 精品国产成人在线影院| 久久久99精品久久| 中日韩av电影| 亚洲女爱视频在线| 亚洲二区视频在线| 久久精品国产第一区二区三区| 中文字幕一区二区视频| 国产精品美女久久久久久久 | 国产精品一区二区在线观看不卡 | 亚洲日本中文字幕区| 亚洲激情成人在线| 亚洲大片在线观看| 
日本美女视频一区二区| 蜜桃久久精品一区二区| 国产精品一区二区久久精品爱涩| 亚洲国产一区二区a毛片| 亚洲午夜成aⅴ人片| 麻豆国产精品官网| 成人黄色在线网站| 欧美日韩精品欧美日韩精品一综合| 国产综合久久久久久久久久久久 | 欧美偷拍一区二区| 91高清视频免费看| 日韩精品中文字幕一区二区三区| 色天天综合久久久久综合片| 欧美一区二区在线不卡| 久久品道一品道久久精品| 国产精品乱人伦| 香蕉久久一区二区不卡无毒影院 | 在线不卡欧美精品一区二区三区| 国产九色sp调教91| 在线视频国内自拍亚洲视频| 欧美大尺度电影在线| 国产精品福利影院| 丝袜亚洲另类丝袜在线| 成人免费看黄yyy456| 欧美一区二区精品久久911| 亚洲欧洲日产国产综合网| 日本欧美在线看| 99国产精品国产精品毛片| 日韩一区二区三区在线| 一片黄亚洲嫩模| 成人美女视频在线看| 久久欧美一区二区|