亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? pixel-sse2.asm

?? DM642 H.264 codec
?? ASM
字號:
;*****************************************************************************
;* pixel-sse2.asm: h264 encoder library
;*****************************************************************************
;* Copyright (C) 2005 x264 project
;*
;* Authors: Alex Izvorski <aizvorksi@gmail.com>
;*
;* This program is free software; you can redistribute it and/or modify
;* it under the terms of the GNU General Public License as published by
;* the Free Software Foundation; either version 2 of the License, or
;* (at your option) any later version.
;*
;* This program is distributed in the hope that it will be useful,
;* but WITHOUT ANY WARRANTY; without even the implied warranty of
;* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;* GNU General Public License for more details.
;*
;* You should have received a copy of the GNU General Public License
;* along with this program; if not, write to the Free Software
;* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111, USA.
;*****************************************************************************

BITS 64

;=============================================================================
; Macros and other preprocessor constants
;=============================================================================

%include "amd64inc.asm"

SECTION .rodata align=16

; mask used by SUM_MM_SSE2 to keep only the low word of each dword lane
pd_0000ffff: times 4 dd 0x0000ffff

SECTION .text

cglobal x264_pixel_sad_16x16_sse2
cglobal x264_pixel_sad_16x8_sse2
cglobal x264_pixel_ssd_16x16_sse2
cglobal x264_pixel_ssd_16x8_sse2
cglobal x264_pixel_satd_8x4_sse2
cglobal x264_pixel_satd_8x8_sse2
cglobal x264_pixel_satd_16x8_sse2
cglobal x264_pixel_satd_8x16_sse2
cglobal x264_pixel_satd_16x16_sse2

; All functions share the SysV AMD64 argument layout:
;   rdi = pix1, esi = stride1, rdx = pix2, ecx = stride2
; (int strides are sign-extended to 64-bit before use)

; accumulate the SAD of four 16-byte rows into xmm0
%macro SAD_INC_4x16P_SSE2 0
    movdqu  xmm1,   [rdx]
    movdqu  xmm2,   [rdx+rcx]
    lea     rdx,    [rdx+2*rcx]
    movdqu  xmm3,   [rdx]
    movdqu  xmm4,   [rdx+rcx]
    psadbw  xmm1,   [rdi]
    psadbw  xmm2,   [rdi+rsi]
    lea     rdi,    [rdi+2*rsi]
    psadbw  xmm3,   [rdi]
    psadbw  xmm4,   [rdi+rsi]
    lea     rdi,    [rdi+2*rsi]
    lea     rdx,    [rdx+2*rcx]
    paddw   xmm1,   xmm2
    paddw   xmm3,   xmm4
    paddw   xmm0,   xmm1
    paddw   xmm0,   xmm3
%endmacro

%macro SAD_START_SSE2 0
;   mov     rdi, rdi            ; pix1
    movsxd  rsi, esi            ; stride1
;   mov     rdx, rdx            ; pix2
    movsxd  rcx, ecx            ; stride2
%endmacro

; fold the two 64-bit psadbw halves of xmm0 and return the sum in eax
%macro SAD_END_SSE2 0
    movdqa  xmm1, xmm0
    psrldq  xmm0,  8
    paddw   xmm0, xmm1
    movd    eax,  xmm0
    ret
%endmacro

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_sad_16x16_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
; Fully unrolled and software-pipelined: loads of pix2 are interleaved with
; psadbw/paddw of earlier rows to hide memory latency.
x264_pixel_sad_16x16_sse2:
    SAD_START_SSE2
    movdqu xmm0, [rdx]
    movdqu xmm1, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    movdqu xmm2, [rdx]
    movdqu xmm3, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    psadbw xmm0, [rdi]
    psadbw xmm1, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm4, [rdx]
    paddw  xmm0, xmm1
    psadbw xmm2, [rdi]
    psadbw xmm3, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm5, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm2, xmm3
    movdqu xmm6, [rdx]
    movdqu xmm7, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm0, xmm2
    psadbw xmm4, [rdi]
    psadbw xmm5, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm1, [rdx]
    paddw  xmm4, xmm5
    psadbw xmm6, [rdi]
    psadbw xmm7, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm2, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm6, xmm7
    movdqu xmm3, [rdx]
    paddw  xmm0, xmm4
    movdqu xmm4, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm0, xmm6
    psadbw xmm1, [rdi]
    psadbw xmm2, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm5, [rdx]
    paddw  xmm1, xmm2
    psadbw xmm3, [rdi]
    psadbw xmm4, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm6, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm3, xmm4
    movdqu xmm7, [rdx]
    paddw  xmm0, xmm1
    movdqu xmm1, [rdx+rcx]
    paddw  xmm0, xmm3
    psadbw xmm5, [rdi]
    psadbw xmm6, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    paddw  xmm5, xmm6
    psadbw xmm7, [rdi]
    psadbw xmm1, [rdi+rsi]
    paddw  xmm7, xmm1
    paddw  xmm0, xmm5
    paddw  xmm0, xmm7
    SAD_END_SSE2

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_sad_16x8_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_sad_16x8_sse2:
    SAD_START_SSE2
    pxor    xmm0,   xmm0
    SAD_INC_4x16P_SSE2
    SAD_INC_4x16P_SSE2
    SAD_END_SSE2

; accumulate the SSD of two 16-byte rows into xmm0 (xmm7 must be zero)
%macro SSD_INC_2x16P_SSE2 0
    movdqu  xmm1,   [rdi]
    movdqu  xmm2,   [rdx]
    movdqu  xmm3,   [rdi+rsi]
    movdqu  xmm4,   [rdx+rcx]

    ; |a-b| via two saturating subtractions OR'd together
    movdqa  xmm5,   xmm1
    movdqa  xmm6,   xmm3
    psubusb xmm1,   xmm2
    psubusb xmm3,   xmm4
    psubusb xmm2,   xmm5
    psubusb xmm4,   xmm6
    por     xmm1,   xmm2
    por     xmm3,   xmm4

    ; widen to 16-bit and square-accumulate with pmaddwd
    movdqa  xmm2,   xmm1
    movdqa  xmm4,   xmm3
    punpcklbw xmm1, xmm7
    punpckhbw xmm2, xmm7
    punpcklbw xmm3, xmm7
    punpckhbw xmm4, xmm7
    pmaddwd xmm1,   xmm1
    pmaddwd xmm2,   xmm2
    pmaddwd xmm3,   xmm3
    pmaddwd xmm4,   xmm4

    lea     rdi,    [rdi+2*rsi]
    lea     rdx,    [rdx+2*rcx]

    paddd   xmm1,   xmm2
    paddd   xmm3,   xmm4
    paddd   xmm0,   xmm1
    paddd   xmm0,   xmm3
%endmacro

%macro SSD_INC_8x16P_SSE2 0
    SSD_INC_2x16P_SSE2
    SSD_INC_2x16P_SSE2
    SSD_INC_2x16P_SSE2
    SSD_INC_2x16P_SSE2
%endmacro

%macro SSD_START_SSE2 0
;   mov     rdi, rdi            ; pix1
    movsxd  rsi, esi            ; stride1
;   mov     rdx, rdx            ; pix2
    movsxd  rcx, ecx            ; stride2
    pxor    xmm7,   xmm7        ; zero
    pxor    xmm0,   xmm0        ; mm0 holds the sum
%endmacro

; horizontal-add the four dword partial sums in xmm0, return in eax
%macro SSD_END_SSE2 0
    movdqa  xmm1,   xmm0
    psrldq  xmm1,    8
    paddd   xmm0,   xmm1
    movdqa  xmm1,   xmm0
    psrldq  xmm1,    4
    paddd   xmm0,   xmm1
    movd    eax,    xmm0
    ret
%endmacro

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_ssd_16x16_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_ssd_16x16_sse2:
    SSD_START_SSE2
    SSD_INC_8x16P_SSE2
    SSD_INC_8x16P_SSE2
    SSD_END_SSE2

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_ssd_16x8_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_ssd_16x8_sse2:
    SSD_START_SSE2
    SSD_INC_8x16P_SSE2
    SSD_END_SSE2

; %1=(row2, row0) %2=(row3, row1) %3=junk
; output in %1=(row3, row0) and %3=(row2, row1)
%macro HADAMARD4x4_SSE2 3
    movdqa     %3, %1
    paddw      %1, %2
    psubw      %3, %2
    movdqa     %2, %1
    punpcklqdq %1, %3
    punpckhqdq %2, %3
    movdqa     %3, %1
    paddw      %1, %2
    psubw      %3, %2
%endmacro

;;; two HADAMARD4x4_SSE2 running side-by-side
%macro HADAMARD4x4_TWO_SSE2 6    ; a02 a13 junk1 b02 b13 junk2 (1=4 2=5 3=6)
    movdqa     %3, %1
    movdqa     %6, %4
    paddw      %1, %2
    paddw      %4, %5
    psubw      %3, %2
    psubw      %6, %5
    movdqa     %2, %1
    movdqa     %5, %4
    punpcklqdq %1, %3
    punpcklqdq %4, %6
    punpckhqdq %2, %3
    punpckhqdq %5, %6
    movdqa     %3, %1
    movdqa     %6, %4
    paddw      %1, %2
    paddw      %4, %5
    psubw      %3, %2
    psubw      %6, %5
%endmacro

%macro TRANSPOSE4x4_TWIST_SSE2 3    ; %1=(row3, row0) %2=(row2, row1) %3=junk, output in %1 and %2
    movdqa     %3, %1
    punpcklwd  %1, %2
    punpckhwd  %2, %3             ; backwards because the high quadwords are already swapped
    movdqa     %3, %1
    punpckldq  %1, %2
    punpckhdq  %3, %2
    movdqa     %2, %1
    punpcklqdq %1, %3
    punpckhqdq %2, %3
%endmacro

;;; two TRANSPOSE4x4_TWIST_SSE2 running side-by-side
%macro TRANSPOSE4x4_TWIST_TWO_SSE2 6    ; a02 a13 junk1 b02 b13 junk2 (1=4 2=5 3=6)
    movdqa     %3, %1
    movdqa     %6, %4
    punpcklwd  %1, %2
    punpcklwd  %4, %5
    punpckhwd  %2, %3
    punpckhwd  %5, %6
    movdqa     %3, %1
    movdqa     %6, %4
    punpckldq  %1, %2
    punpckldq  %4, %5
    punpckhdq  %3, %2
    punpckhdq  %6, %5
    movdqa     %2, %1
    movdqa     %5, %4
    punpcklqdq %1, %3
    punpcklqdq %4, %6
    punpckhqdq %2, %3
    punpckhqdq %5, %6
%endmacro

;;; loads the difference of two 4x4 blocks into xmm0,xmm1 and xmm4,xmm5 in interleaved-row order
;;; destroys xmm2, 3
;;; the value in xmm7 doesn't matter: it's only subtracted from itself
%macro LOAD4x8_DIFF_SSE2 0
    movq      xmm0, [rdi]
    movq      xmm4, [rdx]
    punpcklbw xmm0, xmm7
    punpcklbw xmm4, xmm7
    psubw     xmm0, xmm4

    movq      xmm1, [rdi+rsi]
    movq      xmm5, [rdx+rcx]
    lea       rdi,  [rdi+2*rsi]
    lea       rdx,  [rdx+2*rcx]
    punpcklbw xmm1, xmm7
    punpcklbw xmm5, xmm7
    psubw     xmm1, xmm5

    movq       xmm2, [rdi]
    movq       xmm4, [rdx]
    punpcklbw  xmm2, xmm7
    punpcklbw  xmm4, xmm7
    psubw      xmm2, xmm4
    movdqa     xmm4, xmm0
    punpcklqdq xmm0, xmm2        ; rows 0 and 2
    punpckhqdq xmm4, xmm2        ; next 4x4 rows 0 and 2

    movq       xmm3, [rdi+rsi]
    movq       xmm5, [rdx+rcx]
    lea        rdi,  [rdi+2*rsi]
    lea        rdx,  [rdx+2*rcx]
    punpcklbw  xmm3, xmm7
    punpcklbw  xmm5, xmm7
    psubw      xmm3, xmm5
    movdqa     xmm5, xmm1
    punpcklqdq xmm1, xmm3        ; rows 1 and 3
    punpckhqdq xmm5, xmm3        ; next 4x4 rows 1 and 3
%endmacro

; abs via max(x, 0-x), then saturating-accumulate into the sum register
%macro SUM4x4_SSE2 4    ; 02 13 junk sum
    pxor    %3, %3
    psubw   %3, %1
    pmaxsw  %1, %3
    pxor    %3, %3
    psubw   %3, %2
    pmaxsw  %2, %3
    paddusw %4, %1
    paddusw %4, %2
%endmacro

;;; two SUM4x4_SSE2 running side-by-side
%macro SUM4x4_TWO_SSE2 7    ; a02 a13 junk1 b02 b13 junk2 (1=4 2=5 3=6) sum
    pxor    %3, %3
    pxor    %6, %6
    psubw   %3, %1
    psubw   %6, %4
    pmaxsw  %1, %3
    pmaxsw  %4, %6
    pxor    %3, %3
    pxor    %6, %6
    psubw   %3, %2
    psubw   %6, %5
    pmaxsw  %2, %3
    pmaxsw  %5, %6
    paddusw %1, %2
    paddusw %4, %5
    paddusw %7, %1
    paddusw %7, %4
%endmacro

%macro SUM_MM_SSE2 2    ; sum junk
    ; each column sum of SATD is necessarily even, so we don't lose any precision by shifting first.
    psrlw   %1, 1
    movdqa  %2, %1
    psrldq  %1, 2
    paddusw %1, %2
    pand    %1, [pd_0000ffff GLOBAL]
    movdqa  %2, %1
    psrldq  %1, 4
    paddd   %1, %2
    movdqa  %2, %1
    psrldq  %1, 8
    paddd   %1, %2
    movd    eax,%1
%endmacro

; SATD of two 4x4 blocks (one 4x8 strip): diff, Hadamard, transpose,
; Hadamard again, then sum of absolute values into xmm6
%macro SATD_TWO_SSE2 0
    LOAD4x8_DIFF_SSE2
    HADAMARD4x4_TWO_SSE2        xmm0, xmm1, xmm2, xmm4, xmm5, xmm3
    TRANSPOSE4x4_TWIST_TWO_SSE2 xmm0, xmm2, xmm1, xmm4, xmm3, xmm5
    HADAMARD4x4_TWO_SSE2        xmm0, xmm2, xmm1, xmm4, xmm3, xmm5
    SUM4x4_TWO_SSE2             xmm0, xmm1, xmm2, xmm4, xmm5, xmm3, xmm6
%endmacro

%macro SATD_START 0
;   mov     rdi, rdi            ; pix1
    movsxd  rsi, esi            ; stride1
;   mov     rdx, rdx            ; pix2
    movsxd  rcx, ecx            ; stride2
    pxor    xmm6, xmm6          ; xmm6 accumulates the SATD
%endmacro

%macro SATD_END 0
    SUM_MM_SSE2  xmm6, xmm7
    ret
%endmacro

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_16x16_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_16x16_sse2:
    SATD_START
    mov     r8,  rdi            ; save block origins: second 8-wide column
    mov     r9,  rdx            ; restarts from here

    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2

    lea     rdi, [r8+8]
    lea     rdx, [r9+8]

    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2

    SATD_END

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_8x16_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_8x16_sse2:
    SATD_START
    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_END

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_16x8_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_16x8_sse2:
    SATD_START
    mov     r8,  rdi
    mov     r9,  rdx

    SATD_TWO_SSE2
    SATD_TWO_SSE2

    lea     rdi, [r8+8]
    lea     rdx, [r9+8]

    SATD_TWO_SSE2
    SATD_TWO_SSE2

    SATD_END

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_8x8_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_8x8_sse2:
    SATD_START
    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_END

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_8x4_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_8x4_sse2:
    SATD_START
    SATD_TWO_SSE2
    SATD_END

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
亚洲mv在线观看| 国产亚洲一区二区三区四区 | 日韩精品成人一区二区三区| 精品国产99国产精品| 欧美性xxxxxx少妇| 日韩激情av在线| 亚洲国产精品久久人人爱| 日韩一区欧美小说| 欧美在线观看视频一区二区三区| 国产高清在线观看免费不卡| 国产一区二区导航在线播放| 国产精品美女久久久久aⅴ国产馆| 欧美成人一区二区三区| 国产v综合v亚洲欧| 久久91精品国产91久久小草| 日韩av中文字幕一区二区三区| 一区二区三区四区不卡在线 | 日韩午夜三级在线| 精品视频123区在线观看| 欧美又粗又大又爽| 色伊人久久综合中文字幕| 国产成人免费av在线| 狠狠久久亚洲欧美| 国产九九视频一区二区三区| 国产精品一品二品| av亚洲精华国产精华精华 | 亚洲精品综合在线| 欧美激情在线一区二区三区| 久久精品一区蜜桃臀影院| 欧美激情在线看| 欧美极品xxx| 最新中文字幕一区二区三区| 一区二区欧美精品| 老司机免费视频一区二区三区| 亚洲国产毛片aaaaa无费看| 亚洲人成在线观看一区二区| 国产亚洲精品bt天堂精选| 国产精品剧情在线亚洲| 亚洲一区在线视频| 亚洲午夜电影网| 亚洲国产wwwccc36天堂| 美女精品自拍一二三四| 春色校园综合激情亚洲| 成人一级片网址| 欧美日韩精品一区二区三区| 91精品婷婷国产综合久久性色| 欧美精品丝袜中出| 久久精品在线观看| 亚洲精品国产无天堂网2021| 久久成人精品无人区| 国产河南妇女毛片精品久久久| 91福利在线播放| 国产欧美一区二区精品婷婷| 亚洲一区二区三区四区在线免费观看 | 欧美狂野另类xxxxoooo| 久久久久久久久97黄色工厂| 亚洲色图在线看| 久久99精品一区二区三区三区| 成人伦理片在线| 日韩精品一区二区三区视频| 亚洲色图丝袜美腿| 国产乱码精品一区二区三区五月婷| 色视频成人在线观看免| 国产精品少妇自拍| 日韩高清不卡一区二区| 91小视频免费看| 国产欧美日韩在线看| 日日夜夜精品视频天天综合网| youjizz国产精品| 久久免费偷拍视频| 麻豆国产欧美日韩综合精品二区| 丁香天五香天堂综合| 日韩欧美中文一区二区| 一区二区三区在线高清| 国产精品一二三区| 91精品在线一区二区| 亚洲一区二区精品久久av| proumb性欧美在线观看| 久久青草国产手机看片福利盒子| 一区二区三区中文字幕| 97精品电影院| 国产精品久久二区二区| 国产高清一区日本| 久久久久久夜精品精品免费| 一区二区三区在线播| 99久久精品久久久久久清纯| 国产欧美精品日韩区二区麻豆天美| 久久www免费人成看片高清| 欧美一区二区精品| 日韩av不卡在线观看| 91精品国产入口| 日本中文在线一区| 91久久精品一区二区二区| 国产精品久久久久7777按摩| 99久久精品国产导航| 亚洲免费视频中文字幕| 欧美一区二区三区免费在线看| 久久99精品久久久久久国产越南| 国产人成一区二区三区影院| 91麻豆视频网站| 蜜臀av性久久久久蜜臀aⅴ流畅| 久久久久久久久岛国免费| 日本伦理一区二区| 日韩成人免费电影| 中文字幕一区二区三区在线不卡 | 亚洲成人久久影院| 26uuu国产在线精品一区二区| 不卡av在线免费观看| 亚洲成av人片www| 久久久精品黄色| 欧美色国产精品| 粗大黑人巨茎大战欧美成人| 亚洲一二三四区不卡| 国产无一区二区| 欧美日韩精品一区二区三区蜜桃 | 99精品在线免费| 日本欧美在线看| 亚洲欧美日韩在线不卡| 欧美成人精品1314www| 色偷偷成人一区二区三区91 | 久久久久亚洲蜜桃| 欧美日本一区二区在线观看| 成熟亚洲日本毛茸茸凸凹| 日韩成人一区二区| 亚洲精品日韩综合观看成人91| 日韩欧美成人激情| 欧美日韩成人激情| 一本久久综合亚洲鲁鲁五月天| 久久精品久久综合| 亚洲va韩国va欧美va| 亚洲黄色免费电影| 国产欧美日韩另类一区| 精品国精品国产尤物美女| 欧美日韩一区二区在线观看视频| 国产91精品入口| 国产99一区视频免费| 黑人精品欧美一区二区蜜桃| 日韩在线播放一区二区| 亚洲国产综合人成综合网站| 亚洲视频 欧洲视频| 国产精品亲子乱子伦xxxx裸| 久久女同精品一区二区| 精品毛片乱码1区2区3区| 制服视频三区第一页精品| 欧美日韩另类国产亚洲欧美一级| 91福利区一区二区三区| 色婷婷狠狠综合| 欧美在线free| 欧美精品亚洲二区| 制服丝袜在线91| 欧美一区二区高清| 欧美一区二区三区婷婷月色| 8v天堂国产在线一区二区| 欧美日韩国产电影| 7777精品伊人久久久大香线蕉 | 制服丝袜国产精品| 555www色欧美视频| 日韩片之四级片| 精品蜜桃在线看| 
国产夜色精品一区二区av| 国产欧美精品国产国产专区| 国产精品丝袜91| 一区二区三区欧美亚洲| 亚洲a一区二区| 麻豆精品新av中文字幕| 国产一区二三区| 成人av免费在线播放| 在线日韩一区二区| 日韩一卡二卡三卡国产欧美| 精品免费一区二区三区| 国产精品入口麻豆九色| 亚洲视频图片小说| 丝袜美腿亚洲色图| 韩国女主播一区二区三区| 成人av在线播放网址| 欧美伊人精品成人久久综合97| 日韩一二三区不卡| 久久久99精品久久| 亚洲乱码日产精品bd| 天天影视网天天综合色在线播放| 久久精品国产第一区二区三区| 国产美女在线观看一区| 91在线视频18| 欧美一区二区三区四区视频| 中文av字幕一区| 亚洲在线视频一区| 国产精品18久久久| 欧美日韩三级一区二区| 久久网站热最新地址| 亚洲专区一二三| 盗摄精品av一区二区三区| 欧美三级资源在线| 欧美国产精品劲爆| 日韩电影免费在线观看网站| 国产成人精品亚洲777人妖| 7799精品视频| 日韩美女精品在线| 国产精品综合av一区二区国产馆| 在线观看免费视频综合| 国产精品污污网站在线观看|