亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? mem_transfer_mmx.asm

?? xvid MPEG-4 編解碼源代碼
?? ASM
字號:
;/**************************************************************************
; *
; *	XVID MPEG-4 VIDEO CODEC
; *	mmx 8bit<->16bit transfers
; *
; *  This file is part of XviD, a free MPEG-4 video encoder/decoder
; *
; *  XviD is free software; you can redistribute it and/or modify it
; *  under the terms of the GNU General Public License as published by
; *  the Free Software Foundation; either version 2 of the License, or
; *  (at your option) any later version.
; *
; *  This program is distributed in the hope that it will be useful,
; *  but WITHOUT ANY WARRANTY; without even the implied warranty of
; *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
; *  GNU General Public License for more details.
; *
; *  You should have received a copy of the GNU General Public License
; *  along with this program; if not, write to the Free Software
; *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
; *
; *  Under section 8 of the GNU General Public License, the copyright
; *  holders of XVID explicitly forbid distribution in the following
; *  countries:
; *
; *    - Japan
; *    - United States of America
; *
; *  Linking XviD statically or dynamically with other modules is making a
; *  combined work based on XviD.  Thus, the terms and conditions of the
; *  GNU General Public License cover the whole combination.
; *
; *  As a special exception, the copyright holders of XviD give you
; *  permission to link XviD with independent modules that communicate with
; *  XviD solely through the VFW1.1 and DShow interfaces, regardless of the
; *  license terms of these independent modules, and to copy and distribute
; *  the resulting combined work under terms of your choice, provided that
; *  every copy of the combined work is accompanied by a complete copy of
; *  the source code of XviD (the version of XviD used to produce the
; *  combined work), being distributed under the terms of the GNU General
; *  Public License plus this exception.  An independent module is a module
; *  which is not derived from or based on XviD.
; *
; *  Note that people who make modified versions of XviD are not obligated
; *  to grant this special exception for their modified versions; it is
; *  their choice whether to do so.  The GNU General Public License gives
; *  permission to release a modified version without this exception; this
; *  exception also makes it possible to release a modified version which
; *  carries forward this exception.
; *
; * $Id: mem_transfer_mmx.asm,v 1.8 2002/11/17 00:51:11 edgomez Exp $
; *
; *************************************************************************/

;/**************************************************************************
; *
; *	History:
; *
; * 04.06.2002  speed enhancement (unroll+overlap). -Skal-
; *             + added transfer_8to16sub2_mmx/xmm
; * 07.01.2002	merge functions from compensate_mmx; rename functions
; *	07.11.2001	initial version; (c)2001 peter ross <pross@xvid.org>
; *
; *************************************************************************/

; NOTE(review): 32-bit x86, NASM syntax, MMX (plus one SSE-integer pavgb
; variant). All functions use cdecl: arguments read from the stack relative
; to esp, results returned via memory side effects only. Only ebx/esi among
; callee-saved registers are ever used, and they are pushed/popped around
; use. None of the functions executes emms — presumably the caller handles
; the MMX/FPU state transition; verify against the call sites.

bits 32

; Declare a global symbol, prefixing an underscore when the platform's
; object format requires it (PREFIX defined at build time).
%macro cglobal 1
	%ifdef PREFIX
		global _%1
		%define %1 _%1
	%else
		global %1
	%endif
%endmacro

section .text

cglobal transfer_8to16copy_mmx
cglobal transfer_16to8copy_mmx
cglobal transfer_8to16sub_mmx
cglobal transfer_8to16sub2_mmx
cglobal transfer_8to16sub2_xmm
cglobal transfer_16to8add_mmx
cglobal transfer8x8_copy_mmx

;===========================================================================
;
; void transfer_8to16copy_mmx(int16_t * const dst,
;				const uint8_t * const src,
;				uint32_t stride);
;
; Widens an 8x8 block of 8-bit pixels into 64 contiguous 16-bit words.
; Each macro invocation handles two source rows (16 pixels -> 32 bytes of
; output, hence the %1*32 output offsets). mm7 must be zero (unpack source).
;
;===========================================================================

%macro COPY_8_TO_16 1
  movq mm0, [eax]                   ; row 0 (8 pixels)
  movq mm1, [eax+edx]               ; row 1
  movq mm2, mm0
  movq mm3, mm1
  punpcklbw mm0, mm7                ; zero-extend low 4 pixels of row 0
  movq [ecx+%1*32], mm0
  punpcklbw mm1, mm7                ; low 4 pixels of row 1
  movq [ecx+%1*32+16], mm1
  punpckhbw mm2, mm7                ; high 4 pixels of row 0
  punpckhbw mm3, mm7                ; high 4 pixels of row 1
  lea eax,[eax+2*edx]               ; advance src two rows
  movq [ecx+%1*32+8], mm2
  movq [ecx+%1*32+24], mm3
%endmacro

align 16
transfer_8to16copy_mmx:
  mov ecx, [esp+ 4] ; Dst
  mov eax, [esp+ 8] ; Src
  mov edx, [esp+12] ; Stride
  pxor mm7,mm7                      ; zero for byte->word unpacking
  COPY_8_TO_16 0
  COPY_8_TO_16 1
  COPY_8_TO_16 2
  COPY_8_TO_16 3
  ret

;===========================================================================
;
; void transfer_16to8copy_mmx(uint8_t * const dst,
;				const int16_t * const src,
;				uint32_t stride);
;
; Narrows 64 contiguous 16-bit words back to an 8x8 byte block, saturating
; each word to [0,255] (packuswb). Two destination rows per macro call.
;
;===========================================================================

%macro COPY_16_TO_8 1
  movq mm0, [eax+%1*32]
  movq mm1, [eax+%1*32+8]
  packuswb mm0, mm1                 ; 8 words -> 8 saturated bytes (row 0)
  movq [ecx], mm0
  movq mm2, [eax+%1*32+16]
  movq mm3, [eax+%1*32+24]
  packuswb mm2, mm3                 ; row 1
  movq [ecx+edx], mm2
%endmacro

align 16
transfer_16to8copy_mmx:
  mov ecx, [esp+ 4] ; Dst
  mov eax, [esp+ 8] ; Src
  mov edx, [esp+12] ; Stride
  COPY_16_TO_8 0
  lea ecx,[ecx+2*edx]
  COPY_16_TO_8 1
  lea ecx,[ecx+2*edx]
  COPY_16_TO_8 2
  lea ecx,[ecx+2*edx]
  COPY_16_TO_8 3
  ret

;===========================================================================
;
; void transfer_8to16sub_mmx(int16_t * const dct,
;				uint8_t * const cur,
;				const uint8_t * const ref,
;				const uint32_t stride);
;
; dct = cur - ref (widened to 16-bit, saturated subtract), and — note the
; side effect — cur is overwritten with ref ("movq [eax], mm4" below).
; Registers: ecx=dct, eax=cur, ebx=ref, edx=stride, mm7=0.
;
;===========================================================================
;/**************************************************************************
; *
; *	History:
; *
; * 27.12.2001	renamed from 'compensate' to 'transfer_8to16sub'
; * 02.12.2001  loop unrolled, code runs 10% faster now (Isibaar)
; * 30.11.2001  16 pixels are processed per iteration (Isibaar)
; * 30.11.2001	.text missing
; *	06.11.2001	inital version; (c)2001 peter ross <pross@xvid.org>
; *
; *************************************************************************/

%macro COPY_8_TO_16_SUB 1
  movq mm0, [eax]      ; cur
  movq mm2, [eax+edx]
  movq mm1, mm0
  movq mm3, mm2
  punpcklbw mm0, mm7
  punpcklbw mm2, mm7
  movq mm4, [ebx]      ; ref
	punpckhbw mm1, mm7
	punpckhbw mm3, mm7
  movq mm5, [ebx+edx]  ; ref
  movq mm6, mm4
  movq [eax], mm4                   ; cur <- ref (compensation copy)
  movq [eax+edx], mm5
  punpcklbw mm4, mm7
  punpckhbw mm6, mm7
  psubsw mm0, mm4                   ; cur - ref, low half row 0
  psubsw mm1, mm6                   ; high half row 0
  movq mm6, mm5
  punpcklbw mm5, mm7
  punpckhbw mm6, mm7
  psubsw mm2, mm5                   ; row 1
  lea eax,[eax+2*edx]
  psubsw mm3, mm6
  lea ebx,[ebx+2*edx]
  movq [ecx+%1*32+ 0], mm0 ; dst
	movq [ecx+%1*32+ 8], mm1
	movq [ecx+%1*32+16], mm2
	movq [ecx+%1*32+24], mm3
%endmacro

align 16
transfer_8to16sub_mmx:
  mov ecx, [esp  + 4] ; Dst
  mov eax, [esp  + 8] ; Cur
  push ebx                          ; callee-saved; +4 to later esp offsets
  mov ebx, [esp+4+12] ; Ref
  mov edx, [esp+4+16] ; Stride
  pxor mm7, mm7
  COPY_8_TO_16_SUB 0
  COPY_8_TO_16_SUB 1
  COPY_8_TO_16_SUB 2
  COPY_8_TO_16_SUB 3
  pop ebx
  ret

;===========================================================================
;
; void transfer_8to16sub2_mmx(int16_t * const dct,
;				uint8_t * const cur,
;				const uint8_t * ref1,
;				const uint8_t * ref2,
;				const uint32_t stride);
;
; dct = cur - avg(ref1, ref2). Unlike transfer_8to16sub_mmx, cur is NOT
; modified here. Registers: ecx=dct, eax=cur, ebx=ref1, esi=ref2,
; edx=stride, mm7=0.
;
;===========================================================================

%macro COPY_8_TO_16_SUB2_MMX 1
  movq mm0, [eax]      ; cur
  movq mm2, [eax+edx]

  ; mm4 <- (ref1+ref2)/2, truncating.
  ; NOTE(review): the original comment claimed "(ref1+ref2+1)/2", but
  ; paddusw+psrlw adds no +1 rounding bias — only the _xmm variant's
  ; pavgb rounds. Left as-is to preserve behavior; confirm intent.
  movq mm4, [ebx]      ; ref1
  movq mm1, [esi]      ; ref2
  movq mm6, mm4
  movq mm3, mm1
  punpcklbw mm4, mm7
  punpcklbw mm1, mm7
  punpckhbw mm6, mm7
  punpckhbw mm3, mm7
  paddusw mm4, mm1
  paddusw mm6, mm3
  psrlw mm4,1
  psrlw mm6,1
  packuswb mm4, mm6

  ; mm5 <- (ref1+ref2)/2 for the second row (see rounding note above)
  movq mm5, [ebx+edx]  ; ref1
  movq mm1, [esi+edx]  ; ref2
  movq mm6, mm5
  movq mm3, mm1
  punpcklbw mm5, mm7
  punpcklbw mm1, mm7
  punpckhbw mm6, mm7
  punpckhbw mm3, mm7
  paddusw mm5, mm1
  paddusw mm6, mm3
  lea esi,[esi+2*edx]
  psrlw mm5,1
  psrlw mm6,1
  packuswb mm5, mm6

  ; widen cur and subtract the averaged reference
  movq mm1, mm0
  movq mm3, mm2
  punpcklbw mm0, mm7
  punpcklbw mm2, mm7
	punpckhbw mm1, mm7
	punpckhbw mm3, mm7
  movq mm6, mm4
  punpcklbw mm4, mm7
  punpckhbw mm6, mm7
  psubsw mm0, mm4
  psubsw mm1, mm6
  movq mm6, mm5
  punpcklbw mm5, mm7
  punpckhbw mm6, mm7
  psubsw mm2, mm5
  lea eax,[eax+2*edx]
  psubsw mm3, mm6
  lea ebx,[ebx+2*edx]
  movq [ecx+%1*32+ 0], mm0 ; dst
	movq [ecx+%1*32+ 8], mm1
	movq [ecx+%1*32+16], mm2
	movq [ecx+%1*32+24], mm3
%endmacro

align 16
transfer_8to16sub2_mmx:
  mov ecx, [esp  + 4] ; Dst
  mov eax, [esp  + 8] ; Cur
  push ebx
  mov ebx, [esp+4+12] ; Ref1
  push esi                          ; two pushes => +8 to esp offsets below
  mov esi, [esp+8+16] ; Ref2
  mov edx, [esp+8+20] ; Stride
  pxor mm7, mm7
  COPY_8_TO_16_SUB2_MMX 0
  COPY_8_TO_16_SUB2_MMX 1
  COPY_8_TO_16_SUB2_MMX 2
  COPY_8_TO_16_SUB2_MMX 3
  pop esi
  pop ebx
  ret

;===========================================================================
;
; void transfer_8to16sub2_xmm(int16_t * const dct,
;				uint8_t * const cur,
;				const uint8_t * ref1,
;				const uint8_t * ref2,
;				const uint32_t stride);
;
; Same contract as transfer_8to16sub2_mmx, but uses the SSE integer pavgb,
; which computes the ROUNDED average (ref1+ref2+1)/2 in one instruction —
; so this variant's result can differ from the _mmx one by 1 per pixel.
; Requires SSE-integer support ("xmm" in XviD naming).
;
;===========================================================================

%macro COPY_8_TO_16_SUB2_SSE 1
  movq mm0, [eax]      ; cur
  movq mm2, [eax+edx]
  movq mm1, mm0
  movq mm3, mm2
  punpcklbw mm0, mm7
  punpcklbw mm2, mm7
  movq mm4, [ebx]      ; ref1
  pavgb mm4, [esi]     ; ref2      ; mm4 = rounded avg(ref1, ref2), row 0
	punpckhbw mm1, mm7
	punpckhbw mm3, mm7
  movq mm5, [ebx+edx]  ; ref
  pavgb mm5, [esi+edx] ; ref2     ; row 1
  movq mm6, mm4
  punpcklbw mm4, mm7
  punpckhbw mm6, mm7
  psubsw mm0, mm4
  psubsw mm1, mm6
  lea esi,[esi+2*edx]
  movq mm6, mm5
  punpcklbw mm5, mm7
  punpckhbw mm6, mm7
  psubsw mm2, mm5
  lea eax,[eax+2*edx]
  psubsw mm3, mm6
  lea ebx,[ebx+2*edx]
  movq [ecx+%1*32+ 0], mm0 ; dst
	movq [ecx+%1*32+ 8], mm1
	movq [ecx+%1*32+16], mm2
	movq [ecx+%1*32+24], mm3
%endmacro

align 16
transfer_8to16sub2_xmm:
  mov ecx, [esp  + 4] ; Dst
  mov eax, [esp  + 8] ; Cur
  push ebx
  mov ebx, [esp+4+12] ; Ref1
  push esi
  mov esi, [esp+8+16] ; Ref2
  mov edx, [esp+8+20] ; Stride
  pxor mm7, mm7
  COPY_8_TO_16_SUB2_SSE 0
  COPY_8_TO_16_SUB2_SSE 1
  COPY_8_TO_16_SUB2_SSE 2
  COPY_8_TO_16_SUB2_SSE 3
  pop esi
  pop ebx
  ret

;===========================================================================
;
; void transfer_16to8add_mmx(uint8_t * const dst,
;				const int16_t * const src,
;				uint32_t stride);
;
; dst += src: widens dst pixels to 16-bit, adds the 16-bit residual with
; signed saturation (paddsw), then packs back with unsigned saturation.
; Registers: ecx=dst, eax=src, edx=stride, mm7=0.
;
;===========================================================================

%macro COPY_16_TO_8_ADD 1
  movq mm0, [ecx]                   ; dst row 0
  movq mm2, [ecx+edx]               ; dst row 1
  movq mm1, mm0
  movq mm3, mm2
  punpcklbw mm0, mm7
  punpcklbw mm2, mm7
  punpckhbw mm1, mm7
  punpckhbw mm3, mm7
  paddsw mm0, [eax+%1*32+ 0]        ; add residual
  paddsw mm1, [eax+%1*32+ 8]
  paddsw mm2, [eax+%1*32+16]
  paddsw mm3, [eax+%1*32+24]
  packuswb mm0, mm1                 ; clamp to [0,255] and store
  movq [ecx], mm0
  packuswb mm2, mm3
  movq [ecx+edx], mm2
%endmacro

align 16
transfer_16to8add_mmx:
  mov ecx, [esp+ 4] ; Dst
  mov eax, [esp+ 8] ; Src
  mov edx, [esp+12] ; Stride
  pxor mm7, mm7
  COPY_16_TO_8_ADD 0
  lea ecx,[ecx+2*edx]
  COPY_16_TO_8_ADD 1
  lea ecx,[ecx+2*edx]
  COPY_16_TO_8_ADD 2
  lea ecx,[ecx+2*edx]
  COPY_16_TO_8_ADD 3
  ret

;===========================================================================
;
; void transfer8x8_copy_mmx(uint8_t * const dst,
;				const uint8_t * const src,
;				const uint32_t stride);
;
; Plain 8x8 byte-block copy, two rows per macro call; src and dst share
; the same stride. Registers: ecx=dst, eax=src, edx=stride.
;
;===========================================================================

%macro COPY_8_TO_8 0
  movq mm0, [eax]
  movq mm1, [eax+edx]
  movq [ecx], mm0
  lea eax,[eax+2*edx]               ; advance src; dst advanced by caller
  movq [ecx+edx], mm1
%endmacro

align 16
transfer8x8_copy_mmx:
  mov ecx, [esp+ 4] ; Dst
  mov eax, [esp+ 8] ; Src
  mov edx, [esp+12] ; Stride
  COPY_8_TO_8
  lea ecx,[ecx+2*edx]
  COPY_8_TO_8
  lea ecx,[ecx+2*edx]
  COPY_8_TO_8
  lea ecx,[ecx+2*edx]
  COPY_8_TO_8
  ret

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
天天综合色天天综合色h| 亚洲国产精品久久人人爱| 91小视频在线免费看| 全国精品久久少妇| 国产精品久久久久久久久久久免费看| 在线免费观看视频一区| 国产精品一区二区三区99| 亚洲一区二区在线播放相泽| 国产片一区二区| 欧美一区二区观看视频| 日本韩国欧美一区二区三区| 国产精品综合av一区二区国产馆| 一区二区三区丝袜| 国产欧美日韩卡一| 欧美zozo另类异族| 欧美喷潮久久久xxxxx| a级精品国产片在线观看| 久久国产成人午夜av影院| 一区二区三区免费| 国产精品乱子久久久久| 日韩美女视频在线| 亚洲柠檬福利资源导航| 日韩欧美在线不卡| 欧美伦理电影网| 欧美一a一片一级一片| 99久久精品免费看国产| 国产成人精品免费视频网站| 狠狠色丁香九九婷婷综合五月| 午夜久久久久久久久| 亚洲精品中文在线观看| 国产精品三级电影| 国产视频亚洲色图| 久久久久久久电影| 久久奇米777| 精品国产a毛片| 日韩午夜激情电影| 精品入口麻豆88视频| 91精品久久久久久蜜臀| 欧美三级视频在线播放| 在线亚洲+欧美+日本专区| 色香色香欲天天天影视综合网| 国产成人亚洲精品狼色在线| 国产盗摄一区二区三区| 国产精品综合视频| 成人sese在线| 成人aa视频在线观看| 成人免费av网站| gogogo免费视频观看亚洲一| 不卡av在线网| 色欧美片视频在线观看| 欧美揉bbbbb揉bbbbb| 欧美日韩成人高清| 91精品久久久久久蜜臀| 精品久久久久久久久久久久包黑料| 欧美一级生活片| 精品国产乱码91久久久久久网站| 久久综合精品国产一区二区三区| 久久精品视频网| 国产精品白丝在线| 亚洲一区二区三区小说| 亚洲.国产.中文慕字在线| 秋霞av亚洲一区二区三| 国产一区二区三区免费看| 不卡av免费在线观看| 欧美性受xxxx| 日韩欧美的一区| 国产三级久久久| 亚洲欧美日韩国产另类专区| 亚洲福中文字幕伊人影院| 日本欧美一区二区| 成人性生交大合| 欧美男生操女生| 久久免费看少妇高潮| 一色屋精品亚洲香蕉网站| 亚洲国产另类精品专区| 精品午夜久久福利影院| 99久久夜色精品国产网站| 欧美女孩性生活视频| 国产视频一区二区在线| 亚洲一二三四在线| 久久aⅴ国产欧美74aaa| 色综合久久88色综合天天免费| 欧美日韩综合在线免费观看| 国产亚洲欧美在线| 亚洲自拍偷拍综合| 国产麻豆9l精品三级站| 欧美午夜精品一区二区蜜桃| 亚洲精品一线二线三线无人区| 日本一区二区三级电影在线观看 | 欧洲一区在线电影| 精品国产不卡一区二区三区| 亚洲精品日韩专区silk| 国内精品免费在线观看| 在线欧美日韩国产| 国产清纯美女被跳蛋高潮一区二区久久w| 一区二区三区电影在线播| 国产一区二区三区蝌蚪| 欧美剧在线免费观看网站 | 91小视频在线观看| 日韩欧美一二三区| 亚洲精品成人a在线观看| 狂野欧美性猛交blacked| 在线观看亚洲精品| 中文乱码免费一区二区| 另类综合日韩欧美亚洲| 欧美性猛交xxxxxxxx| 国产精品国产馆在线真实露脸| 久久国产乱子精品免费女| 欧美亚男人的天堂| 亚洲毛片av在线| 99久久精品免费看| 久久精品亚洲麻豆av一区二区| 蜜臀99久久精品久久久久久软件| 色狠狠综合天天综合综合| 欧美经典一区二区| 国精产品一区一区三区mba视频| 欧美群妇大交群中文字幕| 亚洲激情在线播放| 91美女片黄在线| 国产精品电影一区二区三区| 国产98色在线|日韩| 精品国产乱码久久久久久蜜臀| 丝袜美腿高跟呻吟高潮一区| 欧美影视一区二区三区| 亚洲色图清纯唯美| 99久久精品国产一区二区三区 | 精品国产乱码久久久久久影片| 日韩不卡在线观看日韩不卡视频| 在线免费观看不卡av| 亚洲精品中文在线| 色丁香久综合在线久综合在线观看| 欧美高清在线精品一区| 国产成人av在线影院| 久久久久久久久久电影| 久久国产精品露脸对白| 精品sm捆绑视频| 狠狠久久亚洲欧美| 日本一区二区三区久久久久久久久不 | 五月天视频一区| 在线不卡一区二区| 奇米777欧美一区二区| 宅男噜噜噜66一区二区66| 天堂一区二区在线免费观看| 91精品婷婷国产综合久久竹菊| 日韩经典中文字幕一区| 日韩午夜激情电影| 国产在线一区二区综合免费视频| www精品美女久久久tv| 日本免费在线视频不卡一不卡二| 7777精品伊人久久久大香线蕉完整版| 亚洲成av人**亚洲成av**| 欧美日韩不卡一区| 经典三级在线一区| 国产精品午夜在线观看| 99re成人在线| 香蕉乱码成人久久天堂爱免费| 欧美一区二区三级| 国产精品综合av一区二区国产馆| 国产精品毛片高清在线完整版| 
99久久er热在这里只有精品15| 亚洲国产va精品久久久不卡综合 | 欧美精品一区二区三区在线播放| 韩国av一区二区三区四区| 国产性天天综合网| 99在线精品视频| 亚洲福中文字幕伊人影院| 日韩免费成人网| 成人黄色电影在线| 亚洲国产美女搞黄色| 欧美成人艳星乳罩| k8久久久一区二区三区| 午夜私人影院久久久久| 精品久久国产字幕高潮| 99久久久精品| 蜜桃在线一区二区三区| 国产精品嫩草影院com| 欧美亚洲综合另类| 激情综合五月天| 中文字幕在线一区免费| 91精品免费在线| 成人av网在线| 青草av.久久免费一区| 国产日产欧美一区二区视频| 欧美在线影院一区二区| 国产精品亚洲第一 | 丁香另类激情小说| 日韩精品久久理论片| 国产精品久久影院| 欧美成人高清电影在线| 9人人澡人人爽人人精品| 久久不见久久见免费视频7| 曰韩精品一区二区| 欧美精品一区二区三区久久久 | 国产激情一区二区三区四区 | 男人的天堂久久精品| 中文字幕一区三区| 久久综合久久综合久久综合| 欧美在线免费视屏| 波多野结衣欧美| 国产激情视频一区二区在线观看|