

pixel-sse2.asm

DM642 H.264 codec
ASM
;*****************************************************************************
;* pixel-sse2.asm: h264 encoder library
;*****************************************************************************
;* Copyright (C) 2005 x264 project
;*
;* Authors: Alex Izvorski <aizvorksi@gmail.com>
;*
;* This program is free software; you can redistribute it and/or modify
;* it under the terms of the GNU General Public License as published by
;* the Free Software Foundation; either version 2 of the License, or
;* (at your option) any later version.
;*
;* This program is distributed in the hope that it will be useful,
;* but WITHOUT ANY WARRANTY; without even the implied warranty of
;* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;* GNU General Public License for more details.
;*
;* You should have received a copy of the GNU General Public License
;* along with this program; if not, write to the Free Software
;* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111, USA.
;*****************************************************************************

BITS 64

;=============================================================================
; Macros and other preprocessor constants
;=============================================================================

%include "amd64inc.asm"

SECTION .rodata align=16
pd_0000ffff: times 4 dd 0x0000ffff

SECTION .text

cglobal x264_pixel_sad_16x16_sse2
cglobal x264_pixel_sad_16x8_sse2
cglobal x264_pixel_ssd_16x16_sse2
cglobal x264_pixel_ssd_16x8_sse2
cglobal x264_pixel_satd_8x4_sse2
cglobal x264_pixel_satd_8x8_sse2
cglobal x264_pixel_satd_16x8_sse2
cglobal x264_pixel_satd_8x16_sse2
cglobal x264_pixel_satd_16x16_sse2

%macro SAD_INC_4x16P_SSE2 0
    movdqu  xmm1,   [rdx]
    movdqu  xmm2,   [rdx+rcx]
    lea     rdx,    [rdx+2*rcx]
    movdqu  xmm3,   [rdx]
    movdqu  xmm4,   [rdx+rcx]
    psadbw  xmm1,   [rdi]
    psadbw  xmm2,   [rdi+rsi]
    lea     rdi,    [rdi+2*rsi]
    psadbw  xmm3,   [rdi]
    psadbw  xmm4,   [rdi+rsi]
    lea     rdi,    [rdi+2*rsi]
    lea     rdx,    [rdx+2*rcx]
    paddw   xmm1,   xmm2
    paddw   xmm3,   xmm4
    paddw   xmm0,   xmm1
    paddw   xmm0,   xmm3
%endmacro

%macro SAD_START_SSE2 0
;   mov     rdi, rdi            ; pix1
    movsxd  rsi, esi            ; stride1
;   mov     rdx, rdx            ; pix2
    movsxd  rcx, ecx            ; stride2
%endmacro

%macro SAD_END_SSE2 0
    movdqa  xmm1, xmm0
    psrldq  xmm0,  8
    paddw   xmm0, xmm1
    movd    eax,  xmm0
    ret
%endmacro

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_sad_16x16_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_sad_16x16_sse2:
    SAD_START_SSE2
    movdqu xmm0, [rdx]
    movdqu xmm1, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    movdqu xmm2, [rdx]
    movdqu xmm3, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    psadbw xmm0, [rdi]
    psadbw xmm1, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm4, [rdx]
    paddw  xmm0, xmm1
    psadbw xmm2, [rdi]
    psadbw xmm3, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm5, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm2, xmm3
    movdqu xmm6, [rdx]
    movdqu xmm7, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm0, xmm2
    psadbw xmm4, [rdi]
    psadbw xmm5, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm1, [rdx]
    paddw  xmm4, xmm5
    psadbw xmm6, [rdi]
    psadbw xmm7, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm2, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm6, xmm7
    movdqu xmm3, [rdx]
    paddw  xmm0, xmm4
    movdqu xmm4, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm0, xmm6
    psadbw xmm1, [rdi]
    psadbw xmm2, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm5, [rdx]
    paddw  xmm1, xmm2
    psadbw xmm3, [rdi]
    psadbw xmm4, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    movdqu xmm6, [rdx+rcx]
    lea    rdx,  [rdx+2*rcx]
    paddw  xmm3, xmm4
    movdqu xmm7, [rdx]
    paddw  xmm0, xmm1
    movdqu xmm1, [rdx+rcx]
    paddw  xmm0, xmm3
    psadbw xmm5, [rdi]
    psadbw xmm6, [rdi+rsi]
    lea    rdi,  [rdi+2*rsi]
    paddw  xmm5, xmm6
    psadbw xmm7, [rdi]
    psadbw xmm1, [rdi+rsi]
    paddw  xmm7, xmm1
    paddw  xmm0, xmm5
    paddw  xmm0, xmm7
    SAD_END_SSE2

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_sad_16x8_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_sad_16x8_sse2:
    SAD_START_SSE2
    pxor    xmm0,   xmm0
    SAD_INC_4x16P_SSE2
    SAD_INC_4x16P_SSE2
    SAD_END_SSE2

%macro SSD_INC_2x16P_SSE2 0
    movdqu  xmm1,   [rdi]
    movdqu  xmm2,   [rdx]
    movdqu  xmm3,   [rdi+rsi]
    movdqu  xmm4,   [rdx+rcx]

    movdqa  xmm5,   xmm1
    movdqa  xmm6,   xmm3
    psubusb xmm1,   xmm2
    psubusb xmm3,   xmm4
    psubusb xmm2,   xmm5
    psubusb xmm4,   xmm6
    por     xmm1,   xmm2
    por     xmm3,   xmm4

    movdqa  xmm2,   xmm1
    movdqa  xmm4,   xmm3
    punpcklbw xmm1, xmm7
    punpckhbw xmm2, xmm7
    punpcklbw xmm3, xmm7
    punpckhbw xmm4, xmm7
    pmaddwd xmm1,   xmm1
    pmaddwd xmm2,   xmm2
    pmaddwd xmm3,   xmm3
    pmaddwd xmm4,   xmm4

    lea     rdi,    [rdi+2*rsi]
    lea     rdx,    [rdx+2*rcx]

    paddd   xmm1,   xmm2
    paddd   xmm3,   xmm4
    paddd   xmm0,   xmm1
    paddd   xmm0,   xmm3
%endmacro

%macro SSD_INC_8x16P_SSE2 0
    SSD_INC_2x16P_SSE2
    SSD_INC_2x16P_SSE2
    SSD_INC_2x16P_SSE2
    SSD_INC_2x16P_SSE2
%endmacro

%macro SSD_START_SSE2 0
;   mov     rdi, rdi            ; pix1
    movsxd  rsi, esi            ; stride1
;   mov     rdx, rdx            ; pix2
    movsxd  rcx, ecx            ; stride2
    pxor    xmm7,   xmm7        ; zero
    pxor    xmm0,   xmm0        ; mm0 holds the sum
%endmacro

%macro SSD_END_SSE2 0
    movdqa  xmm1,   xmm0
    psrldq  xmm1,    8
    paddd   xmm0,   xmm1
    movdqa  xmm1,   xmm0
    psrldq  xmm1,    4
    paddd   xmm0,   xmm1
    movd    eax,    xmm0
    ret
%endmacro

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_ssd_16x16_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_ssd_16x16_sse2:
    SSD_START_SSE2
    SSD_INC_8x16P_SSE2
    SSD_INC_8x16P_SSE2
    SSD_END_SSE2

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_ssd_16x8_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_ssd_16x8_sse2:
    SSD_START_SSE2
    SSD_INC_8x16P_SSE2
    SSD_END_SSE2

; %1=(row2, row0) %2=(row3, row1) %3=junk
; output in %1=(row3, row0) and %3=(row2, row1)
%macro HADAMARD4x4_SSE2 3
    movdqa     %3, %1
    paddw      %1, %2
    psubw      %3, %2
    movdqa     %2, %1
    punpcklqdq %1, %3
    punpckhqdq %2, %3
    movdqa     %3, %1
    paddw      %1, %2
    psubw      %3, %2
%endmacro

;;; two HADAMARD4x4_SSE2 running side-by-side
%macro HADAMARD4x4_TWO_SSE2 6    ; a02 a13 junk1 b02 b13 junk2 (1=4 2=5 3=6)
    movdqa     %3, %1
    movdqa     %6, %4
    paddw      %1, %2
    paddw      %4, %5
    psubw      %3, %2
    psubw      %6, %5
    movdqa     %2, %1
    movdqa     %5, %4
    punpcklqdq %1, %3
    punpcklqdq %4, %6
    punpckhqdq %2, %3
    punpckhqdq %5, %6
    movdqa     %3, %1
    movdqa     %6, %4
    paddw      %1, %2
    paddw      %4, %5
    psubw      %3, %2
    psubw      %6, %5
%endmacro

%macro TRANSPOSE4x4_TWIST_SSE2 3    ; %1=(row3, row0) %2=(row2, row1) %3=junk, output in %1 and %2
    movdqa     %3, %1
    punpcklwd  %1, %2
    punpckhwd  %2, %3             ; backwards because the high quadwords are already swapped
    movdqa     %3, %1
    punpckldq  %1, %2
    punpckhdq  %3, %2
    movdqa     %2, %1
    punpcklqdq %1, %3
    punpckhqdq %2, %3
%endmacro

;;; two TRANSPOSE4x4_TWIST_SSE2 running side-by-side
%macro TRANSPOSE4x4_TWIST_TWO_SSE2 6    ; a02 a13 junk1 b02 b13 junk2 (1=4 2=5 3=6)
    movdqa     %3, %1
    movdqa     %6, %4
    punpcklwd  %1, %2
    punpcklwd  %4, %5
    punpckhwd  %2, %3
    punpckhwd  %5, %6
    movdqa     %3, %1
    movdqa     %6, %4
    punpckldq  %1, %2
    punpckldq  %4, %5
    punpckhdq  %3, %2
    punpckhdq  %6, %5
    movdqa     %2, %1
    movdqa     %5, %4
    punpcklqdq %1, %3
    punpcklqdq %4, %6
    punpckhqdq %2, %3
    punpckhqdq %5, %6
%endmacro

;;; loads the difference of two 4x4 blocks into xmm0,xmm1 and xmm4,xmm5 in interleaved-row order
;;; destroys xmm2, 3
;;; the value in xmm7 doesn't matter: it's only subtracted from itself
%macro LOAD4x8_DIFF_SSE2 0
    movq      xmm0, [rdi]
    movq      xmm4, [rdx]
    punpcklbw xmm0, xmm7
    punpcklbw xmm4, xmm7
    psubw     xmm0, xmm4

    movq      xmm1, [rdi+rsi]
    movq      xmm5, [rdx+rcx]
    lea       rdi,  [rdi+2*rsi]
    lea       rdx,  [rdx+2*rcx]
    punpcklbw xmm1, xmm7
    punpcklbw xmm5, xmm7
    psubw     xmm1, xmm5

    movq       xmm2, [rdi]
    movq       xmm4, [rdx]
    punpcklbw  xmm2, xmm7
    punpcklbw  xmm4, xmm7
    psubw      xmm2, xmm4
    movdqa     xmm4, xmm0
    punpcklqdq xmm0, xmm2        ; rows 0 and 2
    punpckhqdq xmm4, xmm2        ; next 4x4 rows 0 and 2

    movq       xmm3, [rdi+rsi]
    movq       xmm5, [rdx+rcx]
    lea        rdi,  [rdi+2*rsi]
    lea        rdx,  [rdx+2*rcx]
    punpcklbw  xmm3, xmm7
    punpcklbw  xmm5, xmm7
    psubw      xmm3, xmm5
    movdqa     xmm5, xmm1
    punpcklqdq xmm1, xmm3        ; rows 1 and 3
    punpckhqdq xmm5, xmm3        ; next 4x4 rows 1 and 3
%endmacro

%macro SUM4x4_SSE2 4    ; 02 13 junk sum
    pxor    %3, %3
    psubw   %3, %1
    pmaxsw  %1, %3
    pxor    %3, %3
    psubw   %3, %2
    pmaxsw  %2, %3
    paddusw %4, %1
    paddusw %4, %2
%endmacro

;;; two SUM4x4_SSE2 running side-by-side
%macro SUM4x4_TWO_SSE2 7    ; a02 a13 junk1 b02 b13 junk2 (1=4 2=5 3=6) sum
    pxor    %3, %3
    pxor    %6, %6
    psubw   %3, %1
    psubw   %6, %4
    pmaxsw  %1, %3
    pmaxsw  %4, %6
    pxor    %3, %3
    pxor    %6, %6
    psubw   %3, %2
    psubw   %6, %5
    pmaxsw  %2, %3
    pmaxsw  %5, %6
    paddusw %1, %2
    paddusw %4, %5
    paddusw %7, %1
    paddusw %7, %4
%endmacro

%macro SUM_MM_SSE2 2    ; sum junk
    ; each column sum of SATD is necessarily even, so we don't lose any precision by shifting first.
    psrlw   %1, 1
    movdqa  %2, %1
    psrldq  %1, 2
    paddusw %1, %2
    pand    %1, [pd_0000ffff GLOBAL]
    movdqa  %2, %1
    psrldq  %1, 4
    paddd   %1, %2
    movdqa  %2, %1
    psrldq  %1, 8
    paddd   %1, %2
    movd    eax,%1
%endmacro

%macro SATD_TWO_SSE2 0
    LOAD4x8_DIFF_SSE2
    HADAMARD4x4_TWO_SSE2        xmm0, xmm1, xmm2, xmm4, xmm5, xmm3
    TRANSPOSE4x4_TWIST_TWO_SSE2 xmm0, xmm2, xmm1, xmm4, xmm3, xmm5
    HADAMARD4x4_TWO_SSE2        xmm0, xmm2, xmm1, xmm4, xmm3, xmm5
    SUM4x4_TWO_SSE2             xmm0, xmm1, xmm2, xmm4, xmm5, xmm3, xmm6
%endmacro

%macro SATD_START 0
;   mov     rdi, rdi            ; pix1
    movsxd  rsi, esi            ; stride1
;   mov     rdx, rdx            ; pix2
    movsxd  rcx, ecx            ; stride2
    pxor    xmm6, xmm6
%endmacro

%macro SATD_END 0
    SUM_MM_SSE2  xmm6, xmm7
    ret
%endmacro

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_16x16_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_16x16_sse2:
    SATD_START
    mov     r8,  rdi
    mov     r9,  rdx

    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2

    lea     rdi, [r8+8]
    lea     rdx, [r9+8]

    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2

    SATD_END

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_8x16_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_8x16_sse2:
    SATD_START

    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2
    SATD_TWO_SSE2

    SATD_END

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_16x8_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_16x8_sse2:
    SATD_START
    mov     r8,  rdi
    mov     r9,  rdx

    SATD_TWO_SSE2
    SATD_TWO_SSE2

    lea     rdi, [r8+8]
    lea     rdx, [r9+8]

    SATD_TWO_SSE2
    SATD_TWO_SSE2

    SATD_END

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_8x8_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_8x8_sse2:
    SATD_START

    SATD_TWO_SSE2
    SATD_TWO_SSE2

    SATD_END

ALIGN 16
;-----------------------------------------------------------------------------
;   int x264_pixel_satd_8x4_sse2 (uint8_t *, int, uint8_t *, int )
;-----------------------------------------------------------------------------
x264_pixel_satd_8x4_sse2:
    SATD_START

    SATD_TWO_SSE2

    SATD_END
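Note on what the listing computes: these are the SIMD pixel-comparison primitives an H.264 encoder uses for motion estimation and mode decision. SAD (sum of absolute differences) is built on psadbw, SSD (sum of squared differences) on pmaddwd, and SATD on a 4x4 Hadamard transform of the differences followed by a sum of absolute transform coefficients. All routines take (uint8_t *pix1, int stride1, uint8_t *pix2, int stride2) in the System V AMD64 convention, which is why the 32-bit strides arrive in esi/ecx and are sign-extended with movsxd. The following is a minimal scalar sketch in C of what two of the routines compute; the function names (sad_16x16_c, satd_4x4_c) are hypothetical and this is not the x264 reference code, only an illustration of the arithmetic the SSE2 paths vectorise.

/* Illustrative scalar sketch, not part of this file. */
#include <stdint.h>
#include <stdlib.h>

/* Sum of absolute differences over a 16x16 block
 * (what x264_pixel_sad_16x16_sse2 accumulates with psadbw). */
static int sad_16x16_c( uint8_t *pix1, int stride1, uint8_t *pix2, int stride2 )
{
    int sum = 0;
    for( int y = 0; y < 16; y++, pix1 += stride1, pix2 += stride2 )
        for( int x = 0; x < 16; x++ )
            sum += abs( pix1[x] - pix2[x] );
    return sum;
}

/* SATD of one 4x4 block: take the 4x4 difference, apply a horizontal and a
 * vertical 4-point Hadamard butterfly, sum the absolute coefficients, halve.
 * The SSE2 code handles two such blocks per SATD_TWO_SSE2 invocation, one in
 * xmm0/xmm1 and one in xmm4/xmm5. */
static int satd_4x4_c( uint8_t *pix1, int stride1, uint8_t *pix2, int stride2 )
{
    int d[4][4], tmp[4][4], sum = 0;
    for( int y = 0; y < 4; y++, pix1 += stride1, pix2 += stride2 )
        for( int x = 0; x < 4; x++ )
            d[y][x] = pix1[x] - pix2[x];
    for( int y = 0; y < 4; y++ )   /* horizontal butterfly (rows) */
    {
        int s01 = d[y][0] + d[y][1], d01 = d[y][0] - d[y][1];
        int s23 = d[y][2] + d[y][3], d23 = d[y][2] - d[y][3];
        tmp[y][0] = s01 + s23; tmp[y][2] = s01 - s23;
        tmp[y][1] = d01 + d23; tmp[y][3] = d01 - d23;
    }
    for( int x = 0; x < 4; x++ )   /* vertical butterfly (columns) + abs sum */
    {
        int s01 = tmp[0][x] + tmp[1][x], d01 = tmp[0][x] - tmp[1][x];
        int s23 = tmp[2][x] + tmp[3][x], d23 = tmp[2][x] - tmp[3][x];
        sum += abs( s01 + s23 ) + abs( s01 - s23 )
             + abs( d01 + d23 ) + abs( d01 - d23 );
    }
    /* sum / 2 corresponds to the psrlw by 1 in SUM_MM_SSE2, which the asm can
     * apply before the horizontal add because each column sum is even. */
    return sum / 2;
}

In the assembly, the two Hadamard passes are the two HADAMARD4x4_TWO_SSE2 invocations with the TRANSPOSE4x4_TWIST_TWO_SSE2 in between, the absolute values come from the psubw/pmaxsw pairs in SUM4x4_TWO_SSE2, and the final horizontal reduction and halving happen in SUM_MM_SSE2.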
