diff --git a/.gitignore b/.gitignore index 7c4f0f6cd5f0d2e3e45eea2fde9d2e1aa55affb4..24e1c5d1970968679002f804ad7eb4132331094c 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,5 @@ job_api_fuzz_test* *_lnk.def TAGS acvp_app +test/wycheproof/wycheproof +!wycheproof.c diff --git a/Makefile b/Makefile index e4a298d5ba2bd255922215ccbbd168d5ef60be2b..9da44c69c48686061ad500f17fb14406ff6c1efb 100644 --- a/Makefile +++ b/Makefile @@ -30,16 +30,21 @@ all: $(MAKE) -C lib $(MAKE) -C test +ifeq ($(ARCH),x86_64) + $(MAKE) -C test/wycheproof +endif $(MAKE) -C perf clean: $(MAKE) -C lib clean $(MAKE) -C test clean + $(MAKE) -C test/wycheproof clean $(MAKE) -C perf clean style: $(MAKE) -C lib style $(MAKE) -C test style + $(MAKE) -C test/wycheproof style $(MAKE) -C perf style install: @@ -54,9 +59,6 @@ help: doxy: $(MAKE) -C lib doxy -README: README.md - pandoc -f markdown -t plain $< -o $@ - .PHONY: TAGS TAGS: find ./ -name "*.[ch]" -print | etags - @@ -72,14 +74,14 @@ TAGS: # -I FILE -- File containing words to be ignored # CODESPELL ?= codespell -CS_IGNORE_WORDS ?= iinclude,struc,fo,ue,od,ba +CS_IGNORE_WORDS ?= iinclude,struc,fo,ue,od,ba,padd .PHONY: spellcheck spellcheck: $(CODESPELL) -d -L $(CS_IGNORE_WORDS) \ -S "*.obj,*.o,*.a,*.so,*.lib,*~,*.so,*.so.*,*.d,ipsec_perf" \ -S "ipsec_MB_testapp,ipsec_xvalid_test" \ - ./lib ./perf ./test README README.md SECURITY.md CONTRIBUTING \ + ./lib ./perf ./test README.md SECURITY.md CONTRIBUTING \ Makefile win_x64.mak ReleaseNotes.txt LICENSE $(CS_EXTRA_OPTS) # cppcheck analysis check diff --git a/README.md b/README.md index 900fe6f0e9d1b20ef938bd9e762a5dabbb3772da..b187d42cec3c4d5909c6ae29e2308351f954b23b 100644 --- a/README.md +++ b/README.md @@ -67,9 +67,9 @@ Table 1. List of supported cipher algorithms and their implementations. | AES128-CTR | N | Y by8 | Y by8 | N | N | Y by16 | N | | AES192-CTR | N | Y by8 | Y by8 | N | N | Y by16 | N | | AES256-CTR | N | Y by8 | Y by8 | N | N | Y by16 | N | -| AES128-ECB | N | Y by4 | Y by4 | N | N | N | N | -| AES192-ECB | N | Y by4 | Y by4 | N | N | N | N | -| AES256-ECB | N | Y by4 | Y by4 | N | N | N | N | +| AES128-ECB | N | Y(1) | Y by8 | Y(10) | N | Y by16 | N | +| AES192-ECB | N | Y(1) | Y by8 | Y(10) | N | Y by16 | N | +| AES256-ECB | N | Y(1) | Y by8 | Y(10) | N | Y by16 | N | | NULL | Y | N | N | N | N | N | N | | AES128-DOCSIS | N | Y(2) | Y(4) | N | Y(7) | Y(8) | N | | AES256-DOCSIS | N | Y(2) | Y(4) | N | Y(7) | Y(8) | N | @@ -79,7 +79,7 @@ Table 1. List of supported cipher algorithms and their implementations. | KASUMI-F8 | Y | N | N | N | N | N | N | | ZUC-EEA3 | N | Y x4 | Y x4 | Y x8 | Y x16 | Y x16 | Y x4 | | ZUC-EEA3-256 | N | Y x4 | Y x4 | Y x8 | Y x16 | Y x16 | Y x4 | -| SNOW3G-UEA2 | N | Y | Y | Y | Y x16 | Y x16 | Y | +| SNOW3G-UEA2 | N | Y x4 | Y | Y | Y x16 | Y x16 | Y | | AES128-CBCS(9) | N | Y(1) | Y(3) | N | N | Y(6) | N | | Chacha20 | N | Y | Y | Y | Y | N | N | | Chacha20 AEAD | N | Y | Y | Y | Y | N | N | @@ -97,6 +97,8 @@ Notes: (7) - same as AES128-CBC for AVX, combines cipher and CRC32 (8) - decryption is by16 and encryption is x16 (9) - currently 1:9 crypt:skip pattern supported +(10) - by default, decryption and encryption are AVX by8. + On CPUs supporting VAES, decryption and encryption are AVX2-VAES by16. Legend: ` byY` - single buffer Y blocks at a time @@ -122,6 +124,11 @@ Table 2. List of supported integrity algorithms and their implementations. 
| HMAC-SHA2-256_128 | N | Y(2)x4 | Y x4 | Y x8 | Y x16 | N | N | | HMAC-SHA2-384_192 | N | Y x2 | Y x2 | Y x4 | Y x8 | N | N | | HMAC-SHA2-512_256 | N | Y x2 | Y x2 | Y x4 | Y x8 | N | N | +| SHA1 | N | Y(2)x4 | Y x4 | Y x8 | Y x16 | N | N | +| SHA2-224 | N | Y(2)x4 | Y x4 | Y x8 | Y x16 | N | N | +| SHA2-256 | N | Y(2)x4 | Y x4 | Y x8 | Y x16 | N | N | +| SHA2-384 | N | Y x2 | Y x2 | Y x4 | Y x8 | N | N | +| SHA2-512 | N | Y x2 | Y x2 | Y x4 | Y x8 | N | N | | AES128-GMAC | N | Y by8 | Y by8 | Y by8 | Y by8 | Y by48 | N | | AES192-GMAC | N | Y by8 | Y by8 | Y by8 | Y by8 | Y by48 | N | | AES256-GMAC | N | Y by8 | Y by8 | Y by8 | Y by8 | Y by48 | N | @@ -159,8 +166,8 @@ Notes: - CRC8: WIMAX OFDMA HCS - CRC7: FP header - CRC6: IUUP header -(7) - used only with PON-AES128-CTR cipher -(8) - x16 for init keystream generation, then by32 +(7) - used only with PON-AES128-CTR cipher +(8) - x4/x16 for init keystream generation, then by4/by32 Legend: ` byY`- single buffer Y blocks at a time @@ -246,17 +253,19 @@ Legacy or to be avoided algorithms listed in the table below are implemented in the library in order to support legacy applications. Please use corresponding alternative algorithms instead. ``` -+-------------------------------------------------------------+ -| # | Algorithm | Recommendation | Alternative | -|---+--------------------+----------------+-------------------| -| 1 | DES encryption | Avoid | AES encryption | -|---+--------------------+----------------+-------------------| -| 2 | 3DES encryption | Avoid | AES encryption | -|---+--------------------+----------------+-------------------| -| 3 | HMAC-MD5 integrity | Legacy | HMAC-SHA1 | -|---+--------------------+----------------+-------------------| -| 4 | AES-ECB encryption | Avoid | AES-CBC, AES-CNTR | -+-------------------------------------------------------------+ ++--------------------------------------------------------------+ +| # | Algorithm | Recommendation | Alternative | +|---+---------------------+----------------+-------------------| +| 1 | DES encryption | Avoid | AES encryption | +|---+---------------------+----------------+-------------------| +| 2 | 3DES encryption | Avoid | AES encryption | +|---+---------------------+----------------+-------------------| +| 3 | HMAC-MD5 integrity | Legacy | HMAC-SHA256 | +|---+---------------------+----------------+-------------------| +| 4 | AES-ECB encryption | Avoid | AES-CBC, AES-CNTR | +|---+---------------------+----------------+-------------------| +| 5 | HMAC-SHA1 integrity | Avoid | HMAC-SHA256 | ++--------------------------------------------------------------+ ``` Multi-Buffer Crypto for IPsec Library depends on C library and it is recommended to use its latest version. @@ -282,8 +291,15 @@ security updates. **Note:** There is just one branch used in the project. All development is done on the main branch. +5\. Documentation +=============== + +Full documentation can be found at: https://intel.github.io/intel-ipsec-mb + +To generate documentation locally, run: +`> make doxy` -5\. Compilation +6\. Compilation =============== Linux (64-bit only) @@ -308,8 +324,8 @@ or Build with debugging information: `> make DEBUG=y` -Build with AESNI emulation support (disabled by default): - > make AESNI_EMU=y +Build with AESNI emulation support (disabled by default): +`> make AESNI_EMU=y` **Note:** Building with debugging information is not advised for production use. 
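As a quick illustration of the job API behind the tables above (a minimal sketch, not part of this patch; the enum and field names are assumed from `intel-ipsec-mb.h`), a plain SHA-256 digest such as the ones newly listed in Table 2 can be requested roughly as follows:
```
/* Minimal sketch: plain SHA-256 digest via the multi-buffer job API.
 * Field and enum names assumed from intel-ipsec-mb.h; error handling trimmed. */
#include <stdio.h>
#include <string.h>
#include <intel-ipsec-mb.h>

int main(void)
{
        const uint8_t msg[] = "example message";
        uint8_t digest[32];                       /* SHA-256 digest size */
        IMB_MGR *mgr = alloc_mb_mgr(0);
        IMB_JOB *job;

        if (mgr == NULL)
                return 1;
        init_mb_mgr_auto(mgr, NULL);              /* picks SSE/AVX/AVX2/AVX512 */

        job = IMB_GET_NEXT_JOB(mgr);
        memset(job, 0, sizeof(*job));
        job->hash_alg = IMB_AUTH_SHA_256;         /* plain (non-HMAC) SHA-256 */
        job->cipher_mode = IMB_CIPHER_NULL;       /* hash-only job */
        job->cipher_direction = IMB_DIR_ENCRYPT;
        job->chain_order = IMB_ORDER_HASH_CIPHER;
        job->src = msg;
        job->hash_start_src_offset_in_bytes = 0;
        job->msg_len_to_hash_in_bytes = sizeof(msg) - 1;
        job->auth_tag_output = digest;
        job->auth_tag_output_len_in_bytes = sizeof(digest);

        /* submit may return NULL until enough jobs are queued; flush drains */
        job = IMB_SUBMIT_JOB(mgr);
        while (job == NULL)
                job = IMB_FLUSH_JOB(mgr);

        if (job->status != IMB_STATUS_COMPLETED) {
                free_mb_mgr(mgr);
                return 1;
        }
        printf("digest[0] = 0x%02x\n", digest[0]);
        free_mb_mgr(mgr);
        return 0;
}
```
The same pattern should apply to the other hash algorithms in the table by selecting a different `job->hash_alg`.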
@@ -343,7 +359,8 @@ Build without safety features: - SAFE_DATA clears sensitive information stored temporarily on stack, registers or internal data structures - SAFE_PARAM adds extra checks on input parameters - SAFE_LOOKUP uses constant-time lookups (enabled by default) -- SAFE_OPTIONS additional option to disable all safe options. disable to turn off SAFE_DATA, SAFE_PARAM, SAFE_LOOKUP (enabled by default) +- SAFE_OPTIONS additional option to disable all safe options. Enabled by default. + Disable to turn off: SAFE_DATA, SAFE_PARAM and SAFE_LOOKUP. `> nmake /f win_x64.mak SAFE_DATA=n SAFE_PARAM=n` `> nmake /f win_x64.mak SAFE_OPTIONS=n` @@ -351,8 +368,8 @@ Build without safety features: Build with debugging information: `> nmake /f win_x64.mak DEBUG=y` -Build with AESNI emulation support (disabled by default): - > nmake /f win_x64.mak AESNI_EMU=y +Build with AESNI emulation support (disabled by default): +`> nmake /f win_x64.mak AESNI_EMU=y` **Note:** Building with debugging information is not advised for production use. @@ -413,7 +430,7 @@ Build with debugging information: For more build options and their explanation run: `> gmake help` -6\. Security Considerations & Options for Increased Security +7\. Security Considerations & Options for Increased Security ============================================================ ### Security Considerations @@ -483,9 +500,10 @@ algorithms listed above may be susceptible to timing attacks which could expose the cryptographic key. ### SAFE_OPTIONS -SAFE_OPTIONS is a parameter that can be used to disable -all other safe options(SAFE_DATA, SAFE_PARAM, SAFE_LOOKUP). By just -setting this parameter (e.g. SAFE_OPTIONS=n). +SAFE_OPTIONS is a parameter that can be used to disable/enable +all supported safe options (i.e. SAFE_DATA, SAFE_PARAM, SAFE_LOOKUP). +It is set to `y` by default and all safe options are enabled. +`SAFE_OPTIONS=n` disables all safe options. ### Security API **Force clearing/zeroing of memory** @@ -502,7 +520,7 @@ The library GCM and GMAC implementation provides flexibility as to tag size sele As explained in [NIST Special Publication 800-38D](https://csrc.nist.gov/publications/detail/sp/800-38d/final) section 5.2.1.2 and Appendix C, using tag sizes shorter than 96 bits can be insecure. Please refer to the aforementioned sections to understand the details, trade offs and mitigations of using shorter tag sizes. -7\. Installation +8\. Installation ================ Linux (64-bit only) @@ -559,7 +577,7 @@ If there is no need to run ldconfig at install stage please use NOLDCONFIG=y opt If library was compiled as an archive (not a default option) then install it using SHARED=n option: `> sudo gmake install SHARED=n` -8\. Backwards compatibility +9\. Backwards compatibility =========================== In version 0.54, some symbols have been renamed to avoid too generic names (such as cipher modes or @@ -575,7 +593,7 @@ For applications which face symbol conflicts due to these old generic names, they should be compiled with the flag -DNO_COMPAT_IMB_API_053, which will not export the old symbols. -9\. Disclaimer (ZUC, KASUMI, SNOW3G) +10\. Disclaimer (ZUC, KASUMI, SNOW3G) ==================================== Please note that cryptographic material, such as ciphering algorithms, may be diff --git a/docs/index.html b/docs/index.html index 61b40c7a6007131278cef39b63b85e7564334b57..867ea8d08482b147735604bf5a0708c133bdbbf5 100644 --- a/docs/index.html +++ b/docs/index.html @@ -14,6 +14,6 @@
For more information, please visit the ipsec-mb GitLab project. -
+ diff --git a/lib/Makefile b/lib/Makefile index 4168b45dd09dc2304016af6caffbfb74c21ddb33..f4c61c3d6a49f560e8272e09866b95d8ba82bffd 100644 --- a/lib/Makefile +++ b/lib/Makefile @@ -96,21 +96,28 @@ CC_HAS_CET = $(and $(shell $(CC) --target-help 2> /dev/null | grep -m1 -e "-z ib CET_LDFLAGS=-r -z ibt -z shstk endif # MINGW endif # x86_64 -CFLAGS := -DNO_COMPAT_IMB_API_053 $(EXTRA_CFLAGS) $(INCLUDES) \ +CFLAGS := -fPIC -DNO_COMPAT_IMB_API_053 $(EXTRA_CFLAGS) $(INCLUDES) \ -W -Wall -Wextra -Wmissing-declarations -Wpointer-arith \ -Wcast-qual -Wundef -Wwrite-strings \ -Wformat -Wformat-security \ -Wunreachable-code -Wmissing-noreturn -Wsign-compare -Wno-endif-labels \ -Wstrict-prototypes -Wmissing-prototypes -Wold-style-definition \ - -fno-strict-overflow -fno-delete-null-pointer-checks -fwrapv + -fno-delete-null-pointer-checks -fwrapv + +# -fno-strict-overflow is not supported by clang +ifneq ($(CC),clang) +CFLAGS += -fno-strict-overflow +endif ifeq ($(MINGW),0) CFLAGS += -DLINUX endif +ifeq ($(ARCH),x86_64) ifeq ($(CC_HAS_CET),1) CFLAGS += -fcf-protection=full endif +endif # x86_64 ASM_INCLUDE_DIRS := . @@ -227,7 +234,6 @@ endif # CC # so or static build ifeq ($(SHARED),y) -CFLAGS += -fPIC ifneq ($(MINGW),0) LIBNAME = $(LIB).dll else @@ -235,17 +241,20 @@ LIBNAME = $(LIB).so.$(VERSION) LDFLAGS += -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now endif # MINGW LIBPERM = 0755 +ifeq ($(ARCH),x86_64) ifeq ($(CC_HAS_CET),1) LDFLAGS += -fcf-protection=full -Wl,-z,ibt -Wl,-z,shstk -Wl,-z,cet-report=error endif +endif else -CFLAGS += -fPIE LIBNAME = $(LIB).a LIBPERM = 0644 LDFLAGS += -g +ifeq ($(ARCH),x86_64) ifeq ($(CC_HAS_CET),1) LDFLAGS += -fcf-protection=full endif +endif endif # shared # warning messages @@ -291,14 +300,28 @@ asm_generic_lib_objs := \ else c_lib_objs := \ mb_mgr_avx.o \ + mb_mgr_avx_t1.o \ + mb_mgr_avx_t2.o \ mb_mgr_avx2.o \ + mb_mgr_avx2_t1.o \ + mb_mgr_avx2_t2.o \ mb_mgr_avx512.o \ + mb_mgr_avx512_t1.o \ + mb_mgr_avx512_t2.o \ mb_mgr_sse.o \ + mb_mgr_sse_t1.o \ + mb_mgr_sse_t2.o \ + mb_mgr_sse_t3.o \ alloc.o \ aes_xcbc_expand_key.o \ md5_one_block.o \ sha_sse.o \ + sha_mb_sse.o \ + sha_ni_mb_sse.o \ sha_avx.o \ + sha_mb_avx.o \ + sha_mb_avx2.o \ + sha_mb_avx512.o \ des_key.o \ des_basic.o \ version.o \ @@ -318,7 +341,9 @@ c_lib_objs := \ snow3g_tables.o \ snow3g_iv.o \ mb_mgr_auto.o \ - error.o + error.o \ + ooo_mgr_reset.o \ + self_test.o ifeq ($(AESNI_EMU), y) c_lib_objs := $(c_lib_objs) \ @@ -421,6 +446,9 @@ asm_sse_lib_objs := \ aes128_ecb_by4_sse.o \ aes192_ecb_by4_sse.o \ aes256_ecb_by4_sse.o \ + aes128_ecb_by8_sse.o \ + aes192_ecb_by8_sse.o \ + aes256_ecb_by8_sse.o \ aes128_cntr_ccm_by8_sse.o \ aes256_cntr_ccm_by8_sse.o \ aes_cfb_sse.o \ @@ -439,7 +467,9 @@ asm_sse_lib_objs := \ sha512_x2_sse.o \ sha256_mult_sse.o \ sha1_ni_x2_sse.o \ + sha1_ni_x1_sse.o \ sha256_ni_x2_sse.o \ + sha256_ni_x1_sse.o \ zuc_x4_sse.o \ zuc_x4_gfni_sse.o \ mb_mgr_aes128_cbc_enc_flush_x4_sse.o \ @@ -486,6 +516,8 @@ asm_sse_lib_objs := \ mb_mgr_hmac_sha256_submit_ni_sse.o \ mb_mgr_zuc_submit_flush_sse.o \ mb_mgr_zuc_submit_flush_gfni_sse.o \ + mb_mgr_snow3g_uea2_submit_flush_x4_sse.o \ + mb_mgr_snow3g_uia2_submit_flush_x4_sse.o \ ethernet_fcs_sse.o \ crc16_x25_sse.o \ crc32_sctp_sse.o \ @@ -498,7 +530,7 @@ asm_sse_lib_objs := \ crc32_iuup_sse.o \ crc32_wimax_sse.o \ chacha20_sse.o \ - memcpy_sse.o \ + memcpy_sse.o \ snow_v_sse.o \ snow3g_uia2_by4_sse.o @@ -518,9 +550,9 @@ asm_avx_lib_objs := \ aes256_cntr_by8_avx.o \ aes128_cntr_ccm_by8_avx.o \ aes256_cntr_ccm_by8_avx.o \ - aes128_ecb_by4_avx.o \ - 
aes192_ecb_by4_avx.o \ - aes256_ecb_by4_avx.o \ + aes128_ecb_by8_avx.o \ + aes192_ecb_by8_avx.o \ + aes256_ecb_by8_avx.o \ aes_cfb_avx.o \ aes128_cbc_mac_x8_avx.o \ aes256_cbc_mac_x8_avx.o \ @@ -587,6 +619,9 @@ asm_avx2_lib_objs := \ sha256_oct_avx2.o \ sha512_x4_avx2.o \ zuc_x8_avx2.o \ + aes128_ecb_vaes_avx2.o \ + aes192_ecb_vaes_avx2.o \ + aes256_ecb_vaes_avx2.o \ mb_mgr_hmac_md5_flush_avx2.o \ mb_mgr_hmac_md5_submit_avx2.o \ mb_mgr_hmac_sha1_flush_avx2.o \ @@ -610,6 +645,7 @@ asm_avx512_lib_objs := \ sha256_x16_avx512.o \ sha512_x8_avx512.o \ des_x16_avx512.o \ + aes_ecb_vaes_avx512.o \ aes_cntr_api_by16_vaes_avx512.o \ aes_cntr_bit_api_by16_vaes_avx512.o \ aes_cntr_ccm_api_by16_vaes_avx512.o \ @@ -757,8 +793,8 @@ else ln -f -s $(LIB).so.$(SO_VERSION) $(LIB_DIR)/$(LIB).so endif else - $(AR) -qcs $@ $^ -endif # SHARED + $(AR) -qcs $@ $(target_obj_files) +endif ifeq ($(SAFE_PARAM), n) @echo "NOTE:" $(SAFE_PARAM_MSG1) $(SAFE_PARAM_MSG2) endif @@ -841,10 +877,52 @@ ifeq ($(CC_HAS_CET),1) mv $@.tmp $@ endif -$(OBJ_DIR)/%.o:sse/%.c +$(OBJ_DIR)/%.o:sse_t1/%.c $(CC) -MMD $(OPT_SSE) -c $(CFLAGS) $< -o $@ -$(OBJ_DIR)/%.o:sse/%.asm +$(OBJ_DIR)/%.o:sse_t1/%.asm +ifeq ($(USE_YASM),y) + $(YASM) $(YASM_FLAGS) $< -o $@ +else + $(NASM) -MD $(@:.o=.d) -MT $@ -o $@ $(NASM_FLAGS) $< +endif +ifeq ($(CC_HAS_CET),1) + $(LD) $(CET_LDFLAGS) -o $@.tmp $@ + mv $@.tmp $@ +endif + +$(OBJ_DIR)/%.o:sse_t2/%.c + $(CC) -MMD $(OPT_SSE) -c $(CFLAGS) $< -o $@ + +$(OBJ_DIR)/%.o:sse_t2/%.asm +ifeq ($(USE_YASM),y) + $(YASM) $(YASM_FLAGS) $< -o $@ +else + $(NASM) -MD $(@:.o=.d) -MT $@ -o $@ $(NASM_FLAGS) $< +endif +ifeq ($(CC_HAS_CET),1) + $(LD) $(CET_LDFLAGS) -o $@.tmp $@ + mv $@.tmp $@ +endif + +$(OBJ_DIR)/%.o:sse_t3/%.c + $(CC) -MMD $(OPT_SSE) -c $(CFLAGS) $< -o $@ + +$(OBJ_DIR)/%.o:sse_t3/%.asm +ifeq ($(USE_YASM),y) + $(YASM) $(YASM_FLAGS) $< -o $@ +else + $(NASM) -MD $(@:.o=.d) -MT $@ -o $@ $(NASM_FLAGS) $< +endif +ifeq ($(CC_HAS_CET),1) + $(LD) $(CET_LDFLAGS) -o $@.tmp $@ + mv $@.tmp $@ +endif + +$(OBJ_DIR)/%.o:avx_t1/%.c + $(CC) -MMD $(OPT_AVX) -c $(CFLAGS) $< -o $@ + +$(OBJ_DIR)/%.o:avx_t1/%.asm ifeq ($(USE_YASM),y) $(YASM) $(YASM_FLAGS) $< -o $@ else @@ -855,10 +933,10 @@ ifeq ($(CC_HAS_CET),1) mv $@.tmp $@ endif -$(OBJ_DIR)/%.o:avx/%.c +$(OBJ_DIR)/%.o:avx_t2/%.c $(CC) -MMD $(OPT_AVX) -c $(CFLAGS) $< -o $@ -$(OBJ_DIR)/%.o:avx/%.asm +$(OBJ_DIR)/%.o:avx_t2/%.asm ifeq ($(USE_YASM),y) $(YASM) $(YASM_FLAGS) $< -o $@ else @@ -869,10 +947,38 @@ ifeq ($(CC_HAS_CET),1) mv $@.tmp $@ endif -$(OBJ_DIR)/%.o:avx2/%.c +$(OBJ_DIR)/%.o:avx2_t1/%.c $(CC) -MMD $(OPT_AVX2) -c $(CFLAGS) $< -o $@ -$(OBJ_DIR)/%.o:avx2/%.asm +$(OBJ_DIR)/%.o:avx2_t1/%.asm +ifeq ($(USE_YASM),y) + $(YASM) $(YASM_FLAGS) $< -o $@ +else + $(NASM) -MD $(@:.o=.d) -MT $@ -o $@ $(NASM_FLAGS) $< +endif +ifeq ($(CC_HAS_CET),1) + $(LD) $(CET_LDFLAGS) -o $@.tmp $@ + mv $@.tmp $@ +endif + +$(OBJ_DIR)/%.o:avx2_t2/%.c + $(CC) -MMD $(OPT_AVX2) -c $(CFLAGS) $< -o $@ + +$(OBJ_DIR)/%.o:avx2_t2/%.asm +ifeq ($(USE_YASM),y) + $(YASM) $(YASM_FLAGS) $< -o $@ +else + $(NASM) -MD $(@:.o=.d) -MT $@ -o $@ $(NASM_FLAGS) $< +endif +ifeq ($(CC_HAS_CET),1) + $(LD) $(CET_LDFLAGS) -o $@.tmp $@ + mv $@.tmp $@ +endif + +$(OBJ_DIR)/%.o:avx512_t1/%.c + $(CC) -MMD $(OPT_AVX512) -c $(CFLAGS) $< -o $@ + +$(OBJ_DIR)/%.o:avx512_t1/%.asm ifeq ($(USE_YASM),y) $(YASM) $(YASM_FLAGS) $< -o $@ else @@ -883,10 +989,10 @@ ifeq ($(CC_HAS_CET),1) mv $@.tmp $@ endif -$(OBJ_DIR)/%.o:avx512/%.c +$(OBJ_DIR)/%.o:avx512_t2/%.c $(CC) -MMD $(OPT_AVX512) -c $(CFLAGS) $< -o $@ -$(OBJ_DIR)/%.o:avx512/%.asm 
+$(OBJ_DIR)/%.o:avx512_t2/%.asm ifeq ($(USE_YASM),y) $(YASM) $(YASM_FLAGS) $< -o $@ else $(NASM) -MD $(@:.o=.d) -MT $@ -o $@ $(NASM_FLAGS) $< endif diff --git a/lib/aarch64/mb_mgr_code_aarch64.h b/lib/aarch64/mb_mgr_code_aarch64.h index 27b9e40dbd3516fe5257db9bcb74fed628bcf824..45b4545e2d9702f8c60b6a746cadd16fa8365255 100644 --- a/lib/aarch64/mb_mgr_code_aarch64.h +++ b/lib/aarch64/mb_mgr_code_aarch64.h @@ -330,6 +330,13 @@ is_job_invalid(IMB_MGR *state, const IMB_JOB *job) 4, /* IMB_AUTH_CRC6_IUUP_HEADER */ }; + // TODO: is_job_invalid() changed on x86; the aarch64 version should follow + if (job->cipher_direction != IMB_DIR_DECRYPT && + job->cipher_direction != IMB_DIR_ENCRYPT && + job->cipher_mode != IMB_CIPHER_NULL) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_DIR); + return 1; + } switch (job->cipher_mode) { case IMB_CIPHER_NULL: /* diff --git a/lib/aarch64/mb_mgr_zuc_submit_flush_aarch64.c b/lib/aarch64/mb_mgr_zuc_submit_flush_aarch64.c index 92ffb24cca447ca35c94058048b4dd777c6359c2..66787469a66fa882a3ba25e188d82352ff0137d6 100644 --- a/lib/aarch64/mb_mgr_zuc_submit_flush_aarch64.c +++ b/lib/aarch64/mb_mgr_zuc_submit_flush_aarch64.c @@ -50,23 +50,18 @@ static void expand_from_6_to_8_bytes(uint8_t *pOutput, const uint8_t *pInput) 0x3f000000000, 0xfc0000000000}; uint8_t inputarr[8] = {0}; uint64_t num64bit; - uint64_t result = 0; int i; - // store 6 bytes input to 8 bytes array + // store the 6 input bytes reversed, so the 48-bit big-endian value loads as a uint64_t for (i = 0; i <= 5; i++) - inputarr[i] = *(pInput + i); + inputarr[i] = *(pInput + (5 - i)); - // cast 8 bytes array to uint64 number + // load the 8-byte array as a uint64_t; the 48-bit input occupies bits 0-47 num64bit = *(uint64_t *)(&inputarr[0]); - result = num64bit & bit_mask[0]; - - for (i = 1; i <= 7; i++) { - result |= ((num64bit & bit_mask[i]) << 2 * i); + for (i = 0; i <= 7; i++) { + *(pOutput + i) = (num64bit & bit_mask[7 - i]) >> (48 - 6 * (i + 1)); } - - *(uint64_t *)pOutput = result; } static void zuc_mb_mgr_insert_eea3_job(MB_MGR_ZUC_OOO *state, IMB_JOB *job, ZUC_TYPE zuc) diff --git a/lib/api_doxygen.conf b/lib/api_doxygen.conf index 46d6d7cb709f676b7772e9d680bbcd43ce67be37..9c7a10663aec7cd57af63bdf9e9fe38224988e95 100644 --- a/lib/api_doxygen.conf +++ b/lib/api_doxygen.conf @@ -18,7 +18,7 @@ PROJECT_NUMBER = # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. -PROJECT_BRIEF = +PROJECT_BRIEF = "Documentation of IPSec Multi-Buffer library" # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 @@ -114,7 +114,7 @@ INLINE_INHERITED_MEMB = NO # shortest path that makes the file name unique will be used # The default value is: YES. -FULL_PATH_NAMES = YES +FULL_PATH_NAMES = NO # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. # Stripping is only done if one of the specified strings matches the left-hand @@ -204,12 +204,6 @@ TAB_SIZE = 8 ALIASES = -# This tag can be used to specify a number of word-keyword mappings (TCL only). -# A mapping has the form "name=value". For example adding "class=itcl::class" -# will allow you to use the command class in the itcl::class meaning. - -TCL_SUBST = - # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all @@ -745,7 +739,7 @@ WARN_LOGFILE = # spaces.
See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. -INPUT = ./ipsec-mb.h +INPUT = ./ipsec-mb.h ../README.md # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses @@ -894,7 +888,7 @@ FILTER_SOURCE_PATTERNS = # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. -USE_MDFILE_AS_MAINPAGE = +USE_MDFILE_AS_MAINPAGE = README.md #--------------------------------------------------------------------------- # Configuration options related to source browsing @@ -982,25 +976,6 @@ USE_HTAGS = NO VERBATIM_HEADERS = YES -# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the -# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the -# cost of reduced performance. This can be particularly helpful with template -# rich C++ code for which doxygen's built-in parser lacks the necessary type -# information. -# Note: The availability of this option depends on whether or not doxygen was -# generated with the -Duse-libclang=ON option for CMake. -# The default value is: NO. - -CLANG_ASSISTED_PARSING = NO - -# If clang assisted parsing is enabled you can provide the compiler with command -# line options that you would normally use when invoking the compiler. Note that -# the include paths will already be set by doxygen for the files and directories -# specified with INPUT and INCLUDE_PATH. -# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. - -CLANG_OPTIONS = - #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- @@ -1012,13 +987,6 @@ CLANG_OPTIONS = ALPHABETICAL_INDEX = YES -# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in -# which the alphabetical index list will be split. -# Minimum value: 1, maximum value: 20, default value: 5. -# This tag requires that the tag ALPHABETICAL_INDEX is set to YES. - -COLS_IN_ALPHA_INDEX = 5 - # In case all classes in a project start with a common prefix, all classes will # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag # can be used to specify a prefix (or a list of prefixes) that should be ignored @@ -1373,7 +1341,7 @@ ECLIPSE_DOC_ID = org.doxygen.Project # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. -DISABLE_INDEX = NO +DISABLE_INDEX = YES # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag @@ -1390,7 +1358,7 @@ DISABLE_INDEX = NO # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. -GENERATE_TREEVIEW = NO +GENERATE_TREEVIEW = YES # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. @@ -1714,16 +1682,6 @@ LATEX_BATCHMODE = NO LATEX_HIDE_INDICES = NO -# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source -# code with syntax highlighting in the LaTeX output. -# -# Note that which sources are shown also depends on other settings such as -# SOURCE_BROWSER. -# The default value is: NO. -# This tag requires that the tag GENERATE_LATEX is set to YES. 
- -LATEX_SOURCE_CODE = NO - # The LATEX_BIB_STYLE tag can be used to specify the style to use for the # bibliography, e.g. plainnat, or ieeetr. See # http://en.wikipedia.org/wiki/BibTeX and \cite for more info. @@ -1796,16 +1754,6 @@ RTF_STYLESHEET_FILE = RTF_EXTENSIONS_FILE = -# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code -# with syntax highlighting in the RTF output. -# -# Note that which sources are shown also depends on other settings such as -# SOURCE_BROWSER. -# The default value is: NO. -# This tag requires that the tag GENERATE_RTF is set to YES. - -RTF_SOURCE_CODE = NO - #--------------------------------------------------------------------------- # Configuration options related to the man page output #--------------------------------------------------------------------------- @@ -1895,15 +1843,6 @@ GENERATE_DOCBOOK = NO DOCBOOK_OUTPUT = docbook -# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the -# program listings (including syntax highlighting and cross-referencing -# information) to the DOCBOOK output. Note that enabling this will significantly -# increase the size of the DOCBOOK output. -# The default value is: NO. -# This tag requires that the tag GENERATE_DOCBOOK is set to YES. - -DOCBOOK_PROGRAMLISTING = NO - #--------------------------------------------------------------------------- # Configuration options for the AutoGen Definitions output #--------------------------------------------------------------------------- @@ -2082,15 +2021,6 @@ EXTERNAL_PAGES = YES # Configuration options related to the dot tool #--------------------------------------------------------------------------- -# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram -# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to -# NO turns the diagrams off. Note that this option also works with HAVE_DOT -# disabled, but it is recommended to install and use dot, since it yields more -# powerful graphs. -# The default value is: YES. - -CLASS_DIAGRAMS = YES - # You can include diagrams made with dia in doxygen documentation. Doxygen will # then run dia to produce the diagram and insert it in the documentation. The # DIA_PATH tag allows you to specify the directory where the dia binary resides. diff --git a/lib/avx/aes128_ecb_by4_avx.asm b/lib/avx/aes128_ecb_by4_avx.asm deleted file mode 100644 index fbe29765a1460b6747c0da0d884e50a0f123d3ae..0000000000000000000000000000000000000000 --- a/lib/avx/aes128_ecb_by4_avx.asm +++ /dev/null @@ -1,672 +0,0 @@ -;; -;; Copyright (c) 2019-2022, Intel Corporation -;; -;; Redistribution and use in source and binary forms, with or without -;; modification, are permitted provided that the following conditions are met: -;; -;; * Redistributions of source code must retain the above copyright notice, -;; this list of conditions and the following disclaimer. -;; * Redistributions in binary form must reproduce the above copyright -;; notice, this list of conditions and the following disclaimer in the -;; documentation and/or other materials provided with the distribution. -;; * Neither the name of Intel Corporation nor the names of its contributors -;; may be used to endorse or promote products derived from this software -;; without specific prior written permission. 
-;; -;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -;; - -; routine to do AES ECB encrypt/decrypt on 16n bytes doing AES by 4 - -; XMM registers are clobbered. Saving/restoring must be done at a higher level - -; void aes_ecb_x_y_avx(void *in, -; UINT128 keys[], -; void *out, -; UINT64 len_bytes); -; -; x = direction (enc/dec) -; y = key size (128/192/256) -; arg 1: IN: pointer to input (cipher text) -; arg 2: KEYS: pointer to keys -; arg 3: OUT: pointer to output (plain text) -; arg 4: LEN: length in bytes (multiple of 16) -; - -%include "include/os.asm" -%include "include/clear_regs.asm" - -%ifndef AES_ECB_ENC_256 -%ifndef AES_ECB_ENC_192 -%ifndef AES_ECB_ENC_128 -%define AES_ECB_ENC_128 aes_ecb_enc_128_avx -%define AES_ECB_DEC_128 aes_ecb_dec_128_avx -%endif -%endif -%endif - -%ifdef LINUX -%define IN rdi -%define KEYS rsi -%define OUT rdx -%define LEN rcx -%else -%define IN rcx -%define KEYS rdx -%define OUT r8 -%define LEN r9 -%endif - -%define IDX rax -%define TMP IDX -%define XDATA0 xmm0 -%define XDATA1 xmm1 -%define XDATA2 xmm2 -%define XDATA3 xmm3 -%define XKEY0 xmm4 -%define XKEY2 xmm5 -%define XKEY4 xmm6 -%define XKEY6 xmm7 -%define XKEY10 xmm8 -%define XKEY_A xmm9 -%define XKEY_B xmm10 - -mksection .text - -%macro AES_ECB 2 -%define %%NROUNDS %1 ; [in] Number of AES rounds, numerical value -%define %%DIR %2 ; [in] Direction (encrypt/decrypt) - -%ifidn %%DIR, ENC -%define AES vaesenc -%define AES_LAST vaesenclast -%else ; DIR = DEC -%define AES vaesdec -%define AES_LAST vaesdeclast -%endif - mov TMP, LEN - and TMP, 3*16 - jz %%initial_4 - cmp TMP, 2*16 - jb %%initial_1 - ja %%initial_3 - -%%initial_2: - ; load plain/cipher text - vmovdqu XDATA0, [IN + 0*16] - vmovdqu XDATA1, [IN + 1*16] - - vmovdqa XKEY0, [KEYS + 0*16] - - vpxor XDATA0, XKEY0 ; 0. ARK - vpxor XDATA1, XKEY0 - - vmovdqa XKEY2, [KEYS + 2*16] - - AES XDATA0, [KEYS + 1*16] ; 1. ENC - AES XDATA1, [KEYS + 1*16] - - mov IDX, 2*16 - - AES XDATA0, XKEY2 ; 2. ENC - AES XDATA1, XKEY2 - - vmovdqa XKEY4, [KEYS + 4*16] - - AES XDATA0, [KEYS + 3*16] ; 3. ENC - AES XDATA1, [KEYS + 3*16] - - AES XDATA0, XKEY4 ; 4. ENC - AES XDATA1, XKEY4 - - vmovdqa XKEY6, [KEYS + 6*16] - - AES XDATA0, [KEYS + 5*16] ; 5. ENC - AES XDATA1, [KEYS + 5*16] - - AES XDATA0, XKEY6 ; 6. ENC - AES XDATA1, XKEY6 - - vmovdqa XKEY_B, [KEYS + 8*16] - - AES XDATA0, [KEYS + 7*16] ; 7. ENC - AES XDATA1, [KEYS + 7*16] - - AES XDATA0, XKEY_B ; 8. ENC - AES XDATA1, XKEY_B - - vmovdqa XKEY10, [KEYS + 10*16] - - AES XDATA0, [KEYS + 9*16] ; 9. ENC - AES XDATA1, [KEYS + 9*16] - -%if %%NROUNDS >= 12 - AES XDATA0, XKEY10 ; 10. ENC - AES XDATA1, XKEY10 - - AES XDATA0, [KEYS + 11*16] ; 11. ENC - AES XDATA1, [KEYS + 11*16] -%endif - -%if %%NROUNDS == 14 - AES XDATA0, [KEYS + 12*16] ; 12. 
ENC - AES XDATA1, [KEYS + 12*16] - - AES XDATA0, [KEYS + 13*16] ; 13. ENC - AES XDATA1, [KEYS + 13*16] -%endif - -%if %%NROUNDS == 10 - AES_LAST XDATA0, XKEY10 ; 10. ENC - AES_LAST XDATA1, XKEY10 -%elif %%NROUNDS == 12 - AES_LAST XDATA0, [KEYS + 12*16] ; 12. ENC - AES_LAST XDATA1, [KEYS + 12*16] -%else - AES_LAST XDATA0, [KEYS + 14*16] ; 14. ENC - AES_LAST XDATA1, [KEYS + 14*16] -%endif - vmovdqu [OUT + 0*16], XDATA0 - vmovdqu [OUT + 1*16], XDATA1 - - cmp LEN, 2*16 - je %%done - jmp %%main_loop - - align 16 -%%initial_1: - ; load plain/cipher text - vmovdqu XDATA0, [IN + 0*16] - - vmovdqa XKEY0, [KEYS + 0*16] - - vpxor XDATA0, XKEY0 ; 0. ARK - - vmovdqa XKEY2, [KEYS + 2*16] - - AES XDATA0, [KEYS + 1*16] ; 1. ENC - - mov IDX, 1*16 - - AES XDATA0, XKEY2 ; 2. ENC - - vmovdqa XKEY4, [KEYS + 4*16] - - AES XDATA0, [KEYS + 3*16] ; 3. ENC - - AES XDATA0, XKEY4 ; 4. ENC - - vmovdqa XKEY6, [KEYS + 6*16] - - AES XDATA0, [KEYS + 5*16] ; 5. ENC - - AES XDATA0, XKEY6 ; 6. ENC - - vmovdqa XKEY_B, [KEYS + 8*16] - - AES XDATA0, [KEYS + 7*16] ; 7. ENC - - AES XDATA0, XKEY_B ; 8. ENC - - vmovdqa XKEY10, [KEYS + 10*16] - - AES XDATA0, [KEYS + 9*16] ; 9. ENC - -%if %%NROUNDS >= 12 - AES XDATA0, XKEY10 ; 10. ENC - - AES XDATA0, [KEYS + 11*16] ; 11. ENC -%endif - -%if %%NROUNDS == 14 - AES XDATA0, [KEYS + 12*16] ; 12. ENC - - AES XDATA0, [KEYS + 13*16] ; 13. ENC -%endif - -%if %%NROUNDS == 10 - - AES_LAST XDATA0, XKEY10 ; 10. ENC -%elif %%NROUNDS == 12 - AES_LAST XDATA0, [KEYS + 12*16] ; 12. ENC -%else - AES_LAST XDATA0, [KEYS + 14*16] ; 14. ENC -%endif - - vmovdqu [OUT + 0*16], XDATA0 - - cmp LEN, 1*16 - je %%done - jmp %%main_loop - -%%initial_3: - ; load plain/cipher text - vmovdqu XDATA0, [IN + 0*16] - vmovdqu XDATA1, [IN + 1*16] - vmovdqu XDATA2, [IN + 2*16] - - vmovdqa XKEY0, [KEYS + 0*16] - - vmovdqa XKEY_A, [KEYS + 1*16] - - vpxor XDATA0, XKEY0 ; 0. ARK - vpxor XDATA1, XKEY0 - vpxor XDATA2, XKEY0 - - vmovdqa XKEY2, [KEYS + 2*16] - - AES XDATA0, XKEY_A ; 1. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - - vmovdqa XKEY_A, [KEYS + 3*16] - mov IDX, 3*16 - - AES XDATA0, XKEY2 ; 2. ENC - AES XDATA1, XKEY2 - AES XDATA2, XKEY2 - - vmovdqa XKEY4, [KEYS + 4*16] - - AES XDATA0, XKEY_A ; 3. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - - vmovdqa XKEY_A, [KEYS + 5*16] - - AES XDATA0, XKEY4 ; 4. ENC - AES XDATA1, XKEY4 - AES XDATA2, XKEY4 - - vmovdqa XKEY6, [KEYS + 6*16] - - AES XDATA0, XKEY_A ; 5. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - - vmovdqa XKEY_A, [KEYS + 7*16] - - AES XDATA0, XKEY6 ; 6. ENC - AES XDATA1, XKEY6 - AES XDATA2, XKEY6 - - vmovdqa XKEY_B, [KEYS + 8*16] - - AES XDATA0, XKEY_A ; 7. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - - vmovdqa XKEY_A, [KEYS + 9*16] - - AES XDATA0, XKEY_B ; 8. ENC - AES XDATA1, XKEY_B - AES XDATA2, XKEY_B - - vmovdqa XKEY_B, [KEYS + 10*16] - - AES XDATA0, XKEY_A ; 9. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - -%if %%NROUNDS >= 12 - vmovdqa XKEY_A, [KEYS + 11*16] - - AES XDATA0, XKEY_B ; 10. ENC - AES XDATA1, XKEY_B - AES XDATA2, XKEY_B - - vmovdqa XKEY_B, [KEYS + 12*16] - - AES XDATA0, XKEY_A ; 11. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - -%endif - -%if %%NROUNDS == 14 - vmovdqa XKEY_A, [KEYS + 13*16] - - AES XDATA0, XKEY_B ; 12. ENC - AES XDATA1, XKEY_B - AES XDATA2, XKEY_B - - vmovdqa XKEY_B, [KEYS + 14*16] - - AES XDATA0, XKEY_A ; 13. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A -%endif - - AES_LAST XDATA0, XKEY_B ; 10/12/14. 
ENC (depending on key size) - AES_LAST XDATA1, XKEY_B - AES_LAST XDATA2, XKEY_B - - vmovdqu [OUT + 0*16], XDATA0 - vmovdqu [OUT + 1*16], XDATA1 - vmovdqu [OUT + 2*16], XDATA2 - - cmp LEN, 3*16 - je %%done - jmp %%main_loop - - align 16 -%%initial_4: - ; load plain/cipher text - vmovdqu XDATA0, [IN + 0*16] - vmovdqu XDATA1, [IN + 1*16] - vmovdqu XDATA2, [IN + 2*16] - vmovdqu XDATA3, [IN + 3*16] - - vmovdqa XKEY0, [KEYS + 0*16] - - vmovdqa XKEY_A, [KEYS + 1*16] - - vpxor XDATA0, XKEY0 ; 0. ARK - vpxor XDATA1, XKEY0 - vpxor XDATA2, XKEY0 - vpxor XDATA3, XKEY0 - - vmovdqa XKEY2, [KEYS + 2*16] - - AES XDATA0, XKEY_A ; 1. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - - vmovdqa XKEY_A, [KEYS + 3*16] - - mov IDX, 4*16 - - AES XDATA0, XKEY2 ; 2. ENC - AES XDATA1, XKEY2 - AES XDATA2, XKEY2 - AES XDATA3, XKEY2 - - vmovdqa XKEY4, [KEYS + 4*16] - - AES XDATA0, XKEY_A ; 3. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - - vmovdqa XKEY_A, [KEYS + 5*16] - - AES XDATA0, XKEY4 ; 4. ENC - AES XDATA1, XKEY4 - AES XDATA2, XKEY4 - AES XDATA3, XKEY4 - - vmovdqa XKEY6, [KEYS + 6*16] - - AES XDATA0, XKEY_A ; 5. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - - vmovdqa XKEY_A, [KEYS + 7*16] - - AES XDATA0, XKEY6 ; 6. ENC - AES XDATA1, XKEY6 - AES XDATA2, XKEY6 - AES XDATA3, XKEY6 - - vmovdqa XKEY_B, [KEYS + 8*16] - - AES XDATA0, XKEY_A ; 7. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - - vmovdqa XKEY_A, [KEYS + 9*16] - - AES XDATA0, XKEY_B ; 8. ENC - AES XDATA1, XKEY_B - AES XDATA2, XKEY_B - AES XDATA3, XKEY_B - - vmovdqa XKEY_B, [KEYS + 10*16] - - AES XDATA0, XKEY_A ; 9. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - -%if %%NROUNDS >= 12 - vmovdqa XKEY_A, [KEYS + 11*16] - - AES XDATA0, XKEY_B ; 10. ENC - AES XDATA1, XKEY_B - AES XDATA2, XKEY_B - AES XDATA3, XKEY_B - - vmovdqa XKEY_B, [KEYS + 12*16] - - AES XDATA0, XKEY_A ; 11. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A -%endif - -%if %%NROUNDS == 14 - vmovdqa XKEY_A, [KEYS + 13*16] - - AES XDATA0, XKEY_B ; 12. ENC - AES XDATA1, XKEY_B - AES XDATA2, XKEY_B - AES XDATA3, XKEY_B - - vmovdqa XKEY_B, [KEYS + 14*16] - - AES XDATA0, XKEY_A ; 13. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A -%endif - - AES_LAST XDATA0, XKEY_B ; 10/12/14. ENC (depending on key size) - AES_LAST XDATA1, XKEY_B - AES_LAST XDATA2, XKEY_B - AES_LAST XDATA3, XKEY_B - - vmovdqu [OUT + 0*16], XDATA0 - vmovdqu [OUT + 1*16], XDATA1 - vmovdqu [OUT + 2*16], XDATA2 - vmovdqu [OUT + 3*16], XDATA3 - - cmp LEN, 4*16 - jz %%done - jmp %%main_loop - - align 16 -%%main_loop: - ; load plain/cipher text - vmovdqu XDATA0, [IN + IDX + 0*16] - vmovdqu XDATA1, [IN + IDX + 1*16] - vmovdqu XDATA2, [IN + IDX + 2*16] - vmovdqu XDATA3, [IN + IDX + 3*16] - - vmovdqa XKEY_A, [KEYS + 1*16] - - vpxor XDATA0, XKEY0 ; 0. ARK - vpxor XDATA1, XKEY0 - vpxor XDATA2, XKEY0 - vpxor XDATA3, XKEY0 - - add IDX, 4*16 - - AES XDATA0, XKEY_A ; 1. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - - vmovdqa XKEY_A, [KEYS + 3*16] - - AES XDATA0, XKEY2 ; 2. ENC - AES XDATA1, XKEY2 - AES XDATA2, XKEY2 - AES XDATA3, XKEY2 - - AES XDATA0, XKEY_A ; 3. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - - vmovdqa XKEY_A, [KEYS + 5*16] - - AES XDATA0, XKEY4 ; 4. ENC - AES XDATA1, XKEY4 - AES XDATA2, XKEY4 - AES XDATA3, XKEY4 - - AES XDATA0, XKEY_A ; 5. 
ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - - vmovdqa XKEY_A, [KEYS + 7*16] - - AES XDATA0, XKEY6 ; 6. ENC - AES XDATA1, XKEY6 - AES XDATA2, XKEY6 - AES XDATA3, XKEY6 - - vmovdqa XKEY_B, [KEYS + 8*16] - - AES XDATA0, XKEY_A ; 7. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - - vmovdqa XKEY_A, [KEYS + 9*16] - - AES XDATA0, XKEY_B ; 8. ENC - AES XDATA1, XKEY_B - AES XDATA2, XKEY_B - AES XDATA3, XKEY_B - - vmovdqa XKEY_B, [KEYS + 10*16] - - AES XDATA0, XKEY_A ; 9. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A - -%if %%NROUNDS >= 12 - vmovdqa XKEY_A, [KEYS + 11*16] - - AES XDATA0, XKEY_B ; 10. ENC - AES XDATA1, XKEY_B - AES XDATA2, XKEY_B - AES XDATA3, XKEY_B - - vmovdqa XKEY_B, [KEYS + 12*16] - - AES XDATA0, XKEY_A ; 11. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A -%endif - -%if %%NROUNDS == 14 - vmovdqa XKEY_A, [KEYS + 13*16] - - AES XDATA0, XKEY_B ; 12. ENC - AES XDATA1, XKEY_B - AES XDATA2, XKEY_B - AES XDATA3, XKEY_B - - vmovdqa XKEY_B, [KEYS + 14*16] - - AES XDATA0, XKEY_A ; 13. ENC - AES XDATA1, XKEY_A - AES XDATA2, XKEY_A - AES XDATA3, XKEY_A -%endif - - AES_LAST XDATA0, XKEY_B ; 10/12/14. ENC (depending on key size) - AES_LAST XDATA1, XKEY_B - AES_LAST XDATA2, XKEY_B - AES_LAST XDATA3, XKEY_B - - vmovdqu [OUT + IDX + 0*16 - 4*16], XDATA0 - vmovdqu [OUT + IDX + 1*16 - 4*16], XDATA1 - vmovdqu [OUT + IDX + 2*16 - 4*16], XDATA2 - vmovdqu [OUT + IDX + 3*16 - 4*16], XDATA3 - - cmp IDX, LEN - jne %%main_loop - -%%done: - -%ifdef SAFE_DATA - clear_all_xmms_avx_asm -%endif ;; SAFE_DATA - - ret - -%endmacro - -;; -;; AES-ECB 128 functions -;; -%ifdef AES_ECB_ENC_128 -align 16 -MKGLOBAL(AES_ECB_ENC_128,function,internal) -AES_ECB_ENC_128: - - AES_ECB 10, ENC - -align 16 -MKGLOBAL(AES_ECB_DEC_128,function,internal) -AES_ECB_DEC_128: - - AES_ECB 10, DEC - -%endif - -;; -;; AES-ECB 192 functions -;; -%ifdef AES_ECB_ENC_192 -align 16 -MKGLOBAL(AES_ECB_ENC_192,function,internal) -AES_ECB_ENC_192: - - AES_ECB 12, ENC - -align 16 -MKGLOBAL(AES_ECB_DEC_192,function,internal) -AES_ECB_DEC_192: - - AES_ECB 12, DEC - -%endif - -;; -;; AES-ECB 256 functions -;; -%ifdef AES_ECB_ENC_256 -align 16 -MKGLOBAL(AES_ECB_ENC_256,function,internal) -AES_ECB_ENC_256: - - AES_ECB 14, ENC - -align 16 -MKGLOBAL(AES_ECB_DEC_256,function,internal) -AES_ECB_DEC_256: - - AES_ECB 14, DEC - -%endif - -mksection stack-noexec diff --git a/lib/avx/zuc_x4_avx.asm b/lib/avx/zuc_x4_avx.asm deleted file mode 100755 index 025578d074d187e4cf8316876107ae38ad657d55..0000000000000000000000000000000000000000 --- a/lib/avx/zuc_x4_avx.asm +++ /dev/null @@ -1,1758 +0,0 @@ -;; -;; Copyright (c) 2009-2022, Intel Corporation -;; -;; Redistribution and use in source and binary forms, with or without -;; modification, are permitted provided that the following conditions are met: -;; -;; * Redistributions of source code must retain the above copyright notice, -;; this list of conditions and the following disclaimer. -;; * Redistributions in binary form must reproduce the above copyright -;; notice, this list of conditions and the following disclaimer in the -;; documentation and/or other materials provided with the distribution. -;; * Neither the name of Intel Corporation nor the names of its contributors -;; may be used to endorse or promote products derived from this software -;; without specific prior written permission. 
-;; -;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -;; - -%include "include/os.asm" -%include "include/reg_sizes.asm" -%include "include/zuc_sbox.inc" -%include "include/memcpy.asm" -%include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" -%include "include/const.inc" - -%ifdef LINUX -%define arg1 rdi -%define arg2 rsi -%define arg3 rdx -%define arg4 rcx -%define arg5 r8 -%else -%define arg1 rcx -%define arg2 rdx -%define arg3 r8 -%define arg4 r9 -%define arg5 qword [rsp + 40] -%endif - -%define APPEND(a,b) a %+ b - -mksection .rodata -default rel - -align 16 -Ek_d: -dd 0x0044D700, 0x0026BC00, 0x00626B00, 0x00135E00, -dd 0x00578900, 0x0035E200, 0x00713500, 0x0009AF00 -dd 0x004D7800, 0x002F1300, 0x006BC400, 0x001AF100, -dd 0x005E2600, 0x003C4D00, 0x00789A00, 0x0047AC00 - -; Constants to be used to initialize the LFSR registers -; This table contains four different sets of constants: -; 0-63 bytes: Encryption -; 64-127 bytes: Authentication with tag size = 4 -; 128-191 bytes: Authentication with tag size = 8 -; 192-255 bytes: Authentication with tag size = 16 -align 16 -EK256_d64: -dd 0x00220000, 0x002F0000, 0x00240000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 -dd 0x00220000, 0x002F0000, 0x00250000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 -dd 0x00230000, 0x002F0000, 0x00240000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 -dd 0x00230000, 0x002F0000, 0x00250000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 - -align 16 -shuf_mask_key: -dd 0x00FFFFFF, 0x01FFFFFF, 0x02FFFFFF, 0x03FFFFFF, -dd 0x04FFFFFF, 0x05FFFFFF, 0x06FFFFFF, 0x07FFFFFF, -dd 0x08FFFFFF, 0x09FFFFFF, 0x0AFFFFFF, 0x0BFFFFFF, -dd 0x0CFFFFFF, 0x0DFFFFFF, 0x0EFFFFFF, 0x0FFFFFFF, - -align 16 -shuf_mask_iv: -dd 0xFFFFFF00, 0xFFFFFF01, 0xFFFFFF02, 0xFFFFFF03, -dd 0xFFFFFF04, 0xFFFFFF05, 0xFFFFFF06, 0xFFFFFF07, -dd 0xFFFFFF08, 0xFFFFFF09, 0xFFFFFF0A, 0xFFFFFF0B, -dd 0xFFFFFF0C, 0xFFFFFF0D, 0xFFFFFF0E, 0xFFFFFF0F, - -align 16 -shuf_mask_iv_17_19: -db 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0x02, 0xFF - -align 16 -clear_iv_mask: -db 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00 - -align 16 -shuf_mask_iv_20_23: -db 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0x02, 0xFF, 0xFF, 0xFF, 0x03, 0xFF - 
-align 16 -mask31: -dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF - -align 16 -bit_reverse_table_l: -db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f - -align 16 -bit_reverse_table_h: -db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 - -align 16 -bit_reverse_and_table: -db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f - -align 16 -data_mask_64bits: -dd 0xffffffff, 0xffffffff, 0x00000000, 0x00000000 - -align 16 -swap_mask: -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c - -align 16 -S1_S0_shuf: -db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F - -align 16 -S0_S1_shuf: -db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E - -align 16 -rev_S1_S0_shuf: -db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F - -align 16 -rev_S0_S1_shuf: -db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 - -align 16 -rot8_mod32: -db 0x03, 0x00, 0x01, 0x02, 0x07, 0x04, 0x05, 0x06, -db 0x0B, 0x08, 0x09, 0x0A, 0x0F, 0x0C, 0x0D, 0x0E - -align 16 -rot16_mod32: -db 0x02, 0x03, 0x00, 0x01, 0x06, 0x07, 0x04, 0x05, -db 0x0A, 0x0B, 0x08, 0x09, 0x0E, 0x0F, 0x0C, 0x0D - -align 16 -rot24_mod32: -db 0x01, 0x02, 0x03, 0x00, 0x05, 0x06, 0x07, 0x04, -db 0x09, 0x0A, 0x0B, 0x08, 0x0D, 0x0E, 0x0F, 0x0C - -align 16 -broadcast_word: -db 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01 -db 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01 - -align 16 -all_ffs: -dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff - -align 16 -all_threes: -dw 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003 - -align 16 -all_fffcs: -dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc - -align 16 -all_0fs: -dw 0x000f, 0x000f, 0x000f, 0x000f, 0x000f, 0x000f, 0x000f, 0x000f - -align 16 -all_10s: -dw 0x0010, 0x0010, 0x0010, 0x0010, 0x0010, 0x0010, 0x0010, 0x0010 - -align 16 -bit_mask_table: -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x80 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc0 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe0 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf0 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf8 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe - -align 16 -shuf_mask_dw0_0_dw1_0: -db 0x00, 0x01, 0x02, 0x03, 0xff, 0xff, 0xff, 0xff -db 0x04, 0x05, 0x06, 0x07, 0xff, 0xff, 0xff, 0xff - -align 16 -shuf_mask_dw2_0_dw3_0: -db 0x08, 0x09, 0x0a, 0x0b, 0xff, 0xff, 0xff, 0xff -db 0x0c, 0x0d, 0x0e, 0x0f, 0xff, 0xff, 0xff, 0xff - -; Stack frame for ZucCipher function -struc STACK -_keystr_save resq 2*4 ; Space for 4 keystreams -_rsp_save: resq 1 ; Space for rsp pointer -_gpr_save: resq 2 ; Space for GP registers -_rem_bytes_save resq 1 ; Space for number of remaining bytes -endstruc - -mksection .text -align 64 - -%define MASK31 
xmm12 - -%define OFS_R1 (16*16) -%define OFS_R2 (OFS_R1 + 16) -%define OFS_X0 (OFS_R2 + 16) -%define OFS_X1 (OFS_X0 + 16) -%define OFS_X2 (OFS_X1 + 16) - -%ifidn __OUTPUT_FORMAT__, win64 - %define XMM_STORAGE 16*10 - %define GP_STORAGE 8*8 -%else - %define XMM_STORAGE 0 - %define GP_STORAGE 6*8 -%endif - -%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE -%define GP_OFFSET XMM_STORAGE - -%macro FUNC_SAVE 0 - mov r11, rsp - sub rsp, VARIABLE_OFFSET - and rsp, ~15 - -%ifidn __OUTPUT_FORMAT__, win64 - ; xmm6:xmm15 need to be maintained for Windows - vmovdqa [rsp + 0*16], xmm6 - vmovdqa [rsp + 1*16], xmm7 - vmovdqa [rsp + 2*16], xmm8 - vmovdqa [rsp + 3*16], xmm9 - vmovdqa [rsp + 4*16], xmm10 - vmovdqa [rsp + 5*16], xmm11 - vmovdqa [rsp + 6*16], xmm12 - vmovdqa [rsp + 7*16], xmm13 - vmovdqa [rsp + 8*16], xmm14 - vmovdqa [rsp + 9*16], xmm15 - mov [rsp + GP_OFFSET + 48], rdi - mov [rsp + GP_OFFSET + 56], rsi -%endif - mov [rsp + GP_OFFSET], r12 - mov [rsp + GP_OFFSET + 8], r13 - mov [rsp + GP_OFFSET + 16], r14 - mov [rsp + GP_OFFSET + 24], r15 - mov [rsp + GP_OFFSET + 32], rbx - mov [rsp + GP_OFFSET + 40], r11 ;; rsp pointer -%endmacro - -%macro FUNC_RESTORE 0 - -%ifidn __OUTPUT_FORMAT__, win64 - vmovdqa xmm6, [rsp + 0*16] - vmovdqa xmm7, [rsp + 1*16] - vmovdqa xmm8, [rsp + 2*16] - vmovdqa xmm9, [rsp + 3*16] - vmovdqa xmm10, [rsp + 4*16] - vmovdqa xmm11, [rsp + 5*16] - vmovdqa xmm12, [rsp + 6*16] - vmovdqa xmm13, [rsp + 7*16] - vmovdqa xmm14, [rsp + 8*16] - vmovdqa xmm15, [rsp + 9*16] - mov rdi, [rsp + GP_OFFSET + 48] - mov rsi, [rsp + GP_OFFSET + 56] -%endif - mov r12, [rsp + GP_OFFSET] - mov r13, [rsp + GP_OFFSET + 8] - mov r14, [rsp + GP_OFFSET + 16] - mov r15, [rsp + GP_OFFSET + 24] - mov rbx, [rsp + GP_OFFSET + 32] - mov rsp, [rsp + GP_OFFSET + 40] -%endmacro - -%macro TRANSPOSE4_U32 6 -%define %%r0 %1 -%define %%r1 %2 -%define %%r2 %3 -%define %%r3 %4 -%define %%t0 %5 -%define %%t1 %6 - - vshufps %%t0, %%r0, %%r1, 0x44 ; t0 = {b1 b0 a1 a0} - vshufps %%r0, %%r0, %%r1, 0xEE ; r0 = {b3 b2 a3 a2} - vshufps %%t1, %%r2, %%r3, 0x44 ; t1 = {d1 d0 c1 c0} - vshufps %%r2, %%r2, %%r3, 0xEE ; r2 = {d3 d2 c3 c2} - - vshufps %%r1, %%t0, %%t1, 0xDD ; r1 = {d1 c1 b1 a1} - vshufps %%r3, %%r0, %%r2, 0xDD ; r3 = {d3 c3 b3 a3} - vshufps %%r2, %%r0, %%r2, 0x88 ; r2 = {d2 c2 b2 a2} - vshufps %%r0, %%t0, %%t1, 0x88 ; r0 = {d0 c0 b0 a0} -%endmacro - -;; -;; make_u31() -;; -%macro make_u31 4 - -%define %%Rt %1 -%define %%Ke %2 -%define %%Ek %3 -%define %%Iv %4 - xor %%Rt, %%Rt - shrd %%Rt, %%Iv, 8 - shrd %%Rt, %%Ek, 15 - shrd %%Rt, %%Ke, 9 -%endmacro - -; -; bits_reorg4() -; -; params -; %1 - round number -; %2 - XMM register storing X3 -; rax - LFSR pointer -; uses -; -; return -; -%macro bits_reorg4 2-3 -%define %%STATE %1 ; [in] ZUC state -%define %%ROUND_NUM %2 ; [in] Round number -%define %%X3 %3 ; [out] XMM register containing X3 of all lanes - ; - ; - ; xmm15 = LFSR_S15 - ; xmm14 = LFSR_S14 - ; xmm11 = LFSR_S11 - ; xmm9 = LFSR_S9 - ; xmm7 = LFSR_S7 - ; xmm5 = LFSR_S5 - ; xmm2 = LFSR_S2 - ; xmm0 = LFSR_S0 - ; - vmovdqa xmm15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm14, [%%STATE + ((14 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm11, [%%STATE + ((11 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm9, [%%STATE + (( 9 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm7, [%%STATE + (( 7 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm5, [%%STATE + (( 5 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm2, [%%STATE + (( 2 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*16] - - vpxor xmm1, xmm1 - vpslld xmm15, 1 - vpblendw 
xmm3, xmm14, xmm1, 0xAA - vpblendw xmm15, xmm3, xmm15, 0xAA - - vmovdqa [%%STATE + OFS_X0], xmm15 ; BRC_X0 - vpslld xmm11, 16 - vpsrld xmm9, 15 - vpor xmm11, xmm9 - vmovdqa [%%STATE + OFS_X1], xmm11 ; BRC_X1 - vpslld xmm7, 16 - vpsrld xmm5, 15 - vpor xmm7, xmm5 - vmovdqa [%%STATE + OFS_X2], xmm7 ; BRC_X2 -%if (%0 == 3) - vpslld xmm2, 16 - vpsrld xmm0, 15 - vpor %%X3, xmm2, xmm0 -%endif -%endmacro - -; -; rot_mod32() -; -; uses xmm7 -; -%macro rot_mod32 3 -%if (%3 == 8) - vpshufb %1, %2, [rel rot8_mod32] -%elif (%3 == 16) - vpshufb %1, %2, [rel rot16_mod32] -%elif (%3 == 24) - vpshufb %1, %2, [rel rot24_mod32] -%else - vpslld %1, %2, %3 - vpsrld xmm7, %2, (32 - %3) - - vpor %1, xmm7 -%endif -%endmacro - -; -; nonlin_fun4() -; -; return -; W value, updates F_R1[] / F_R2[] -; -%macro nonlin_fun4 1-2 -%define %%STATE %1 ; [in] ZUC state -%define %%W %2 ; [out] XMM register to contain W for all lanes - -%if (%0 == 2) - vmovdqa %%W, [%%STATE + OFS_X0] - vpxor %%W, [%%STATE + OFS_R1] - vpaddd %%W, [%%STATE + OFS_R2] ; W = (BRC_X0 ^ F_R1) + F_R2 -%endif - - vmovdqa xmm1, [%%STATE + OFS_R1] - vmovdqa xmm2, [%%STATE + OFS_R2] - vpaddd xmm1, [%%STATE + OFS_X1] ; W1 = F_R1 + BRC_X1 - vpxor xmm2, [%%STATE + OFS_X2] ; W2 = F_R2 ^ BRC_X2 - - vpslld xmm3, xmm1, 16 - vpsrld xmm4, xmm1, 16 - vpslld xmm5, xmm2, 16 - vpsrld xmm6, xmm2, 16 - vpor xmm1, xmm3, xmm6 - vpor xmm2, xmm4, xmm5 - - rot_mod32 xmm3, xmm1, 2 - rot_mod32 xmm4, xmm1, 10 - rot_mod32 xmm5, xmm1, 18 - rot_mod32 xmm6, xmm1, 24 - vpxor xmm1, xmm3 - vpxor xmm1, xmm4 - vpxor xmm1, xmm5 - vpxor xmm1, xmm6 ; XMM1 = U = L1(P) - - rot_mod32 xmm3, xmm2, 8 - rot_mod32 xmm4, xmm2, 14 - rot_mod32 xmm5, xmm2, 22 - rot_mod32 xmm6, xmm2, 30 - vpxor xmm2, xmm3 - vpxor xmm2, xmm4 - vpxor xmm2, xmm5 - vpxor xmm2, xmm6 ; XMM2 = V = L2(Q) - - ; Shuffle U and V to have all S0 lookups in XMM1 and all S1 lookups in XMM2 - - ; Compress all S0 and S1 input values in each register - vpshufb xmm1, [rel S0_S1_shuf] ; S0: Bytes 0-7, S1: Bytes 8-15 - vpshufb xmm2, [rel S1_S0_shuf] ; S1: Bytes 0-7, S0: Bytes 8-15 - - vshufpd xmm3, xmm1, xmm2, 0x2 ; All S0 input values - vshufpd xmm4, xmm2, xmm1, 0x2 ; All S1 input values - - ; Compute S0 and S1 values - S0_comput_AVX xmm3, xmm1, xmm2 - S1_comput_AVX xmm4, xmm1, xmm2, xmm5 - - ; Need to shuffle back xmm1 & xmm2 before storing output - ; (revert what was done before S0 and S1 computations) - vshufpd xmm1, xmm3, xmm4, 0x2 - vshufpd xmm2, xmm4, xmm3, 0x2 - - vpshufb xmm1, [rel rev_S0_S1_shuf] - vpshufb xmm2, [rel rev_S1_S0_shuf] - - vmovdqa [%%STATE + OFS_R1], xmm1 - vmovdqa [%%STATE + OFS_R2], xmm2 -%endmacro - -; -; store16B_kstr4() -; -%macro store16B_kstr4 4 -%define %%DATA16B_L0 %1 ; [in] 16 bytes of keystream for lane 0 -%define %%DATA16B_L1 %2 ; [in] 16 bytes of keystream for lane 1 -%define %%DATA16B_L2 %3 ; [in] 16 bytes of keystream for lane 2 -%define %%DATA16B_L3 %4 ; [in] 16 bytes of keystream for lane 3 - - mov rcx, [rsp] - mov rdx, [rsp + 8] - mov r8, [rsp + 16] - mov r9, [rsp + 24] - vmovdqu [rcx], %%DATA16B_L0 - vmovdqu [rdx], %%DATA16B_L1 - vmovdqu [r8], %%DATA16B_L2 - vmovdqu [r9], %%DATA16B_L3 -%endmacro - -; -; store4B_kstr4() -; -; params -; -; %1 - XMM register with OFS_X3 -; return -; -%macro store4B_kstr4 1 - mov rcx, [rsp] - mov rdx, [rsp + 8] - mov r8, [rsp + 16] - mov r9, [rsp + 24] - vpextrd [r9], %1, 3 - vpextrd [r8], %1, 2 - vpextrd [rdx], %1, 1 - vmovd [rcx], %1 - add rcx, 4 - add rdx, 4 - add r8, 4 - add r9, 4 - mov [rsp], rcx - mov [rsp + 8], rdx - mov [rsp + 16], r8 - mov [rsp + 24], r9 
-%endmacro - -; -; add_mod31() -; add two 32-bit args and reduce mod (2^31-1) -; params -; %1 - arg1/res -; %2 - arg2 -; uses -; xmm2 -; return -; %1 -%macro add_mod31 2 - vpaddd %1, %2 - vpsrld xmm2, %1, 31 - vpand %1, MASK31 - vpaddd %1, xmm2 -%endmacro - -; -; rot_mod31() -; rotate (mult by pow of 2) 32-bit arg and reduce mod (2^31-1) -; params -; %1 - arg -; %2 - # of bits -; uses -; xmm2 -; return -; %1 -%macro rot_mod31 2 - - vpslld xmm2, %1, %2 - vpsrld %1, %1, (31 - %2) - - vpor %1, xmm2 - vpand %1, MASK31 -%endmacro - -; -; lfsr_updt4() -; -%macro lfsr_updt4 3 -%define %%STATE %1 ; [in] ZUC state -%define %%ROUND_NUM %2 ; [in] Round number -%define %%W %3 ; [in/clobbered] XMM register to contain W for all lanes - ; - ; xmm1 = LFSR_S0 - ; xmm4 = LFSR_S4 - ; xmm10 = LFSR_S10 - ; xmm13 = LFSR_S13 - ; xmm15 = LFSR_S15 - ; - vmovdqa xmm1, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm4, [%%STATE + (( 4 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm10, [%%STATE + ((10 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm13, [%%STATE + ((13 + %%ROUND_NUM) % 16)*16] - vmovdqa xmm15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*16] - - ; Calculate LFSR feedback - add_mod31 %%W, xmm1 - rot_mod31 xmm1, 8 - add_mod31 %%W, xmm1 - rot_mod31 xmm4, 20 - add_mod31 %%W, xmm4 - rot_mod31 xmm10, 21 - add_mod31 %%W, xmm10 - rot_mod31 xmm13, 17 - add_mod31 %%W, xmm13 - rot_mod31 xmm15, 15 - add_mod31 %%W, xmm15 - - vmovdqa [%%STATE + (( 0 + %%ROUND_NUM) % 16)*16], %%W - - ; LFSR_S16 = (LFSR_S15++) = eax -%endmacro - -; -; Initialize LFSR registers for a single lane, for ZUC-128 -; -; This macro initializes 4 LFSR registers at a time. -; so it needs to be called four times. -; -; From spec, s_i (LFSR) registers need to be loaded as follows: -; -; For 0 <= i <= 15, let s_i= k_i || d_i || iv_i. -; Where k_i is each byte of the key, d_i is a 15-bit constant -; and iv_i is each byte of the IV. 
-; -%macro INIT_LFSR_128 7 -%define %%KEY %1 ;; [in] XMM register containing 16-byte key -%define %%IV %2 ;; [in] XMM register containing 16-byte IV -%define %%SHUF_KEY %3 ;; [in] Shuffle key mask -%define %%SHUF_IV %4 ;; [in] Shuffle key mask -%define %%EKD_MASK %5 ;; [in] Shuffle key mask -%define %%LFSR %6 ;; [out] XMM register to contain initialized LFSR regs -%define %%XTMP %7 ;; [clobbered] XMM temporary register - - vpshufb %%LFSR, %%KEY, %%SHUF_KEY - vpsrld %%LFSR, 1 - vpshufb %%XTMP, %%IV, %%SHUF_IV - vpor %%LFSR, %%XTMP - vpor %%LFSR, %%EKD_MASK - -%endmacro - -; -; Initialize LFSR registers for a single lane, for ZUC-256 -; -%macro INIT_LFSR_256 9 -%define %%KEY %1 ;; [in] Key pointer -%define %%IV %2 ;; [in] IV pointer -%define %%LFSR0_3 %3 ;; [out] XMM register to contain initialized LFSR regs 0-3 -%define %%LFSR4_7 %4 ;; [out] XMM register to contain initialized LFSR regs 4-7 -%define %%LFSR8_11 %5 ;; [out] XMM register to contain initialized LFSR regs 8-11 -%define %%LFSR12_15 %6 ;; [out] XMM register to contain initialized LFSR regs 12-15 -%define %%XTMP %7 ;; [clobbered] XMM temporary register -%define %%TMP %8 ;; [clobbered] GP temporary register -%define %%CONSTANTS %9 ;; [in] Address to constants - - ; s0 - s3 - vpxor %%LFSR0_3, %%LFSR0_3 - vpinsrb %%LFSR0_3, [%%KEY], 3 ; s0 - vpinsrb %%LFSR0_3, [%%KEY + 1], 7 ; s1 - vpinsrb %%LFSR0_3, [%%KEY + 2], 11 ; s2 - vpinsrb %%LFSR0_3, [%%KEY + 3], 15 ; s3 - - vpsrld %%LFSR0_3, 1 - - vpor %%LFSR0_3, [%%CONSTANTS] ; s0 - s3 - - vpinsrb %%LFSR0_3, [%%KEY + 21], 1 ; s0 - vpinsrb %%LFSR0_3, [%%KEY + 16], 0 ; s0 - - vpinsrb %%LFSR0_3, [%%KEY + 22], 5 ; s1 - vpinsrb %%LFSR0_3, [%%KEY + 17], 4 ; s1 - - vpinsrb %%LFSR0_3, [%%KEY + 23], 9 ; s2 - vpinsrb %%LFSR0_3, [%%KEY + 18], 8 ; s2 - - vpinsrb %%LFSR0_3, [%%KEY + 24], 13 ; s3 - vpinsrb %%LFSR0_3, [%%KEY + 19], 12 ; s3 - - ; s4 - s7 - vpxor %%LFSR4_7, %%LFSR4_7 - vpinsrb %%LFSR4_7, [%%KEY + 4], 3 ; s4 - vpinsrb %%LFSR4_7, [%%IV], 7 ; s5 - vpinsrb %%LFSR4_7, [%%IV + 1], 11 ; s6 - vpinsrb %%LFSR4_7, [%%IV + 10], 15 ; s7 - - vpsrld %%LFSR4_7, 1 - - vpinsrb %%LFSR4_7, [%%KEY + 25], 1 ; s4 - vpinsrb %%LFSR4_7, [%%KEY + 20], 0 ; s4 - - vpinsrb %%LFSR4_7, [%%KEY + 5], 5 ; s5 - vpinsrb %%LFSR4_7, [%%KEY + 26], 4 ; s5 - - vpinsrb %%LFSR4_7, [%%KEY + 6], 9 ; s6 - vpinsrb %%LFSR4_7, [%%KEY + 27], 8 ; s6 - - vpinsrb %%LFSR4_7, [%%KEY + 7], 13 ; s7 - vpinsrb %%LFSR4_7, [%%IV + 2], 12 ; s7 - - vpor %%LFSR4_7, [%%CONSTANTS + 16] ; s4 - s7 - - vmovd %%XTMP, [%%IV + 17] - vpshufb %%XTMP, [rel shuf_mask_iv_17_19] - vpand %%XTMP, [rel clear_iv_mask] - - vpor %%LFSR4_7, %%XTMP - - ; s8 - s11 - vpxor %%LFSR8_11, %%LFSR8_11 - vpinsrb %%LFSR8_11, [%%KEY + 8], 3 ; s8 - vpinsrb %%LFSR8_11, [%%KEY + 9], 7 ; s9 - vpinsrb %%LFSR8_11, [%%IV + 5], 11 ; s10 - vpinsrb %%LFSR8_11, [%%KEY + 11], 15 ; s11 - - vpsrld %%LFSR8_11, 1 - - vpinsrb %%LFSR8_11, [%%IV + 3], 1 ; s8 - vpinsrb %%LFSR8_11, [%%IV + 11], 0 ; s8 - - vpinsrb %%LFSR8_11, [%%IV + 12], 5 ; s9 - vpinsrb %%LFSR8_11, [%%IV + 4], 4 ; s9 - - vpinsrb %%LFSR8_11, [%%KEY + 10], 9 ; s10 - vpinsrb %%LFSR8_11, [%%KEY + 28], 8 ; s10 - - vpinsrb %%LFSR8_11, [%%IV + 6], 13 ; s11 - vpinsrb %%LFSR8_11, [%%IV + 13], 12 ; s11 - - vpor %%LFSR8_11, [%%CONSTANTS + 32] ; s8 - s11 - - vmovd %%XTMP, [%%IV + 20] - vpshufb %%XTMP, [rel shuf_mask_iv_20_23] - vpand %%XTMP, [rel clear_iv_mask] - - vpor %%LFSR8_11, %%XTMP - - ; s12 - s15 - vpxor %%LFSR12_15, %%LFSR12_15 - vpinsrb %%LFSR12_15, [%%KEY + 12], 3 ; s12 - vpinsrb %%LFSR12_15, [%%KEY + 13], 7 ; s13 - vpinsrb %%LFSR12_15, 
[%%KEY + 14], 11 ; s14 - vpinsrb %%LFSR12_15, [%%KEY + 15], 15 ; s15 - - vpsrld %%LFSR12_15, 1 - - vpinsrb %%LFSR12_15, [%%IV + 7], 1 ; s12 - vpinsrb %%LFSR12_15, [%%IV + 14], 0 ; s12 - - vpinsrb %%LFSR12_15, [%%IV + 15], 5 ; s13 - vpinsrb %%LFSR12_15, [%%IV + 8], 4 ; s13 - - vpinsrb %%LFSR12_15, [%%IV + 16], 9 ; s14 - vpinsrb %%LFSR12_15, [%%IV + 9], 8 ; s14 - - vpinsrb %%LFSR12_15, [%%KEY + 30], 13 ; s15 - vpinsrb %%LFSR12_15, [%%KEY + 29], 12 ; s15 - - vpor %%LFSR12_15, [%%CONSTANTS + 48] ; s12 - s15 - - movzx DWORD(%%TMP), byte [%%IV + 24] - and DWORD(%%TMP), 0x0000003f - shl DWORD(%%TMP), 16 - vmovd %%XTMP, DWORD(%%TMP) - - movzx DWORD(%%TMP), byte [%%KEY + 31] - shl DWORD(%%TMP), 12 - and DWORD(%%TMP), 0x000f0000 ; high nibble of K_31 - vpinsrd %%XTMP, DWORD(%%TMP), 2 - - movzx DWORD(%%TMP), byte [%%KEY + 31] - shl DWORD(%%TMP), 16 - and DWORD(%%TMP), 0x000f0000 ; low nibble of K_31 - vpinsrd %%XTMP, DWORD(%%TMP), 3 - - vpor %%LFSR12_15, %%XTMP -%endmacro - -%macro ZUC_INIT_4 1 -%define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) - -%ifdef LINUX - %define pKe rdi - %define pIv rsi - %define pState rdx - %define tag_sz rcx ; Only used in ZUC-256 -%else - %define pKe rcx - %define pIv rdx - %define pState r8 - %define tag_sz r9 ; Only used in ZUC-256 -%endif - - FUNC_SAVE - - mov rax, pState - - ; Zero out R1-R2 (only lower 128 bits) - vpxor xmm0, xmm0 -%assign I 0 -%rep 2 - vmovdqa [pState + OFS_R1 + I*16], xmm0 -%assign I (I + 1) -%endrep - -%if %%KEY_SIZE == 128 - - ;; Load key and IVs -%assign off 0 -%assign i 4 -%assign j 8 -%rep 4 - mov r9, [pKe + off] - vmovdqu APPEND(xmm,i), [r9] - ; Read 16 bytes of IV - vmovdqa APPEND(xmm,j), [pIv + off*4] -%assign off (off + 8) -%assign i (i + 1) -%assign j (j + 1) -%endrep - - ;;; Initialize all LFSR registers in four steps: - ;;; first, registers 0-3, then registers 4-7, 8-11, 12-15 -%assign off 0 -%rep 4 - ; Set read-only registers for shuffle masks for key, IV and Ek_d for 8 registers - vmovdqa xmm13, [rel shuf_mask_key + off] - vmovdqa xmm14, [rel shuf_mask_iv + off] - vmovdqa xmm15, [rel Ek_d + off] - - ; Set 4xLFSR registers for all packets -%assign idx 0 -%assign i 4 -%assign j 8 -%rep 4 - INIT_LFSR_128 APPEND(xmm,i), APPEND(xmm,j), xmm13, xmm14, xmm15, APPEND(xmm, idx), xmm12 -%assign idx (idx + 1) -%assign i (i + 1) -%assign j (j + 1) -%endrep - - ; Store 4xLFSR registers in memory (reordering first, - ; so all SX registers are together) - TRANSPOSE4_U32 xmm0, xmm1, xmm2, xmm3, xmm13, xmm14 - -%assign i 0 -%rep 4 - vmovdqa [pState + 4*off + 16*i], APPEND(xmm, i) -%assign i (i+1) -%endrep - -%assign off (off + 16) -%endrep - -%else ;; %%KEY_SIZE == 256 - - ; Get pointer to constants (depending on tag size, this will point at - ; constants for encryption, authentication with 4-byte, 8-byte or 16-byte tags) - lea r13, [rel EK256_d64] - bsf DWORD(tag_sz), DWORD(tag_sz) - dec DWORD(tag_sz) - shl DWORD(tag_sz), 6 - add r13, tag_sz - - ;;; Initialize all LFSR registers -%assign off 0 -%rep 4 - ;; Load key and IV for each packet - mov r12, [pKe + off] - lea r10, [pIv + off*4] - - ; Initialize S0-15 for each packet - INIT_LFSR_256 r12, r10, xmm0, xmm1, xmm2, xmm3, xmm4, r11, r13 - -%assign i 0 -%rep 4 - vmovdqa [pState + 64*i + 2*off], APPEND(xmm, i) -%assign i (i+1) -%endrep - -%assign off (off + 8) -%endrep - - ; Read, transpose and store, so all S_X from the 4 packets are in the same register -%assign off 0 -%rep 4 - -%assign i 0 -%rep 4 - vmovdqa APPEND(xmm, i), [pState + 16*i + off] -%assign i (i+1) -%endrep - - TRANSPOSE4_U32 
xmm0, xmm1, xmm2, xmm3, xmm14, xmm15 - -%assign i 0 -%rep 4 - vmovdqa [pState + 16*i + off], APPEND(xmm, i) -%assign i (i+1) -%endrep - -%assign off (off + 64) -%endrep -%endif ;; %%KEY_SIZE == 256 - - ; Load read-only registers - vmovdqa xmm12, [rel mask31] - - ; Shift LFSR 32-times, update state variables -%assign N 0 -%rep 32 - bits_reorg4 rax, N - nonlin_fun4 rax, xmm0 - vpsrld xmm0,1 ; Shift out LSB of W - lfsr_updt4 rax, N, xmm0 ; W (xmm0) used in LFSR update - not set to zero -%assign N N+1 -%endrep - - ; And once more, initial round from keygen phase = 33 times - bits_reorg4 rax, 0 - nonlin_fun4 rax - vpxor xmm0, xmm0 - lfsr_updt4 rax, 0, xmm0 - - FUNC_RESTORE - - ret -%endmacro - -MKGLOBAL(asm_ZucInitialization_4_avx,function,internal) -asm_ZucInitialization_4_avx: - ZUC_INIT_4 128 - -MKGLOBAL(asm_Zuc256Initialization_4_avx,function,internal) -asm_Zuc256Initialization_4_avx: - ZUC_INIT_4 256 - -; This macro reorder the LFSR registers -; after N rounds (1 <= N <= 15), since the registers -; are shifted every round -; -; The macro clobbers XMM0-15 -; -%macro REORDER_LFSR 2 -%define %%STATE %1 -%define %%NUM_ROUNDS %2 - -%if %%NUM_ROUNDS != 16 -%assign %%i 0 -%rep 16 - vmovdqa APPEND(xmm,%%i), [%%STATE + 16*%%i] -%assign %%i (%%i+1) -%endrep - -%assign %%i 0 -%assign %%j %%NUM_ROUNDS -%rep 16 - vmovdqa [%%STATE + 16*%%i], APPEND(xmm,%%j) -%assign %%i (%%i+1) -%assign %%j ((%%j+1) % 16) -%endrep -%endif ;; %%NUM_ROUNDS != 16 - -%endmacro - -; -; Generate N*4 bytes of keystream -; for 4 buffers (where N is number of rounds) -; -%macro KEYGEN_4_AVX 1 -%define %%NUM_ROUNDS %1 ; [in] Number of 4-byte rounds - -%ifdef LINUX - %define pState rdi - %define pKS rsi -%else - %define pState rcx - %define pKS rdx -%endif - - FUNC_SAVE - - ; Store 4 keystream pointers on the stack - ; and reserve memory for storing keystreams for all 4 buffers - mov r10, rsp - sub rsp, (4*8 + %%NUM_ROUNDS * 16) - and rsp, -16 - -%assign i 0 -%rep 2 - vmovdqa xmm0, [pKS + 16*i] - vmovdqa [rsp + 16*i], xmm0 -%assign i (i+1) -%endrep - - ; Load state pointer in RAX - mov rax, pState - - ; Load read-only registers - vmovdqa xmm12, [rel mask31] - - ; Generate N*4B of keystream in N rounds -%assign N 1 -%rep %%NUM_ROUNDS - bits_reorg4 rax, N, xmm10 - nonlin_fun4 rax, xmm0 - ; OFS_X3 XOR W (xmm0) and store in stack - vpxor xmm10, xmm0 - vmovdqa [rsp + 4*8 + (N-1)*16], xmm10 - vpxor xmm0, xmm0 - lfsr_updt4 rax, N, xmm0 -%assign N N+1 -%endrep - -%if (%%NUM_ROUNDS == 4) - ;; Load all OFS_X3 -%assign i 0 -%rep 4 - vmovdqa APPEND(xmm,i), [rsp + 4*8 + i*16] -%assign i (i+1) -%endrep - - TRANSPOSE4_U32 xmm0, xmm1, xmm2, xmm3, xmm4, xmm5 - - store16B_kstr4 xmm0, xmm1, xmm2, xmm3 -%else ;; NUM_ROUNDS != 4 -%assign idx 0 -%rep %%NUM_ROUNDS - vmovdqa APPEND(xmm, idx), [rsp + 4*8 + idx*16] - store4B_kstr4 APPEND(xmm, idx) -%assign idx (idx + 1) -%endrep -%endif ;; NUM_ROUNDS == 4 - - ;; Clear stack frame containing keystream information -%ifdef SAFE_DATA - vpxor xmm0, xmm0 -%assign i 0 -%rep (2+%%NUM_ROUNDS) - vmovdqa [rsp + i*16], xmm0 -%assign i (i+1) -%endrep -%endif - - ;; Reorder memory for LFSR registers, as not all 16 rounds - ;; will be completed (can be 4 or 2) - REORDER_LFSR rax, %%NUM_ROUNDS - - ;; Restore rsp pointer to value before pushing keystreams - mov rsp, r10 - - FUNC_RESTORE - -%endmacro - -; -;; void asm_ZucGenKeystream16B_4_avx(state4_t *pSta, u32* pKeyStr[4]); -;; -;; WIN64 -;; RCX - pSta -;; RDX - pKeyStr -;; -;; LIN64 -;; RDI - pSta -;; RSI - pKeyStr -;; 
-MKGLOBAL(asm_ZucGenKeystream16B_4_avx,function,internal) -asm_ZucGenKeystream16B_4_avx: - - KEYGEN_4_AVX 4 - - ret - -; -;; void asm_ZucGenKeystream8B_4_avx(state4_t *pSta, u32* pKeyStr[4]); -;; -;; WIN64 -;; RCX - pSta -;; RDX - pKeyStr -;; -;; LIN64 -;; RDI - pSta -;; RSI - pKeyStr -;; -MKGLOBAL(asm_ZucGenKeystream8B_4_avx,function,internal) -asm_ZucGenKeystream8B_4_avx: - - KEYGEN_4_AVX 2 - - ret - -; -;; void asm_ZucGenKeystream4B_4_avx(state4_t *pSta, u32* pKeyStr[4]); -;; -;; WIN64 -;; RCX - pSta -;; RDX - pKeyStr -;; -;; LIN64 -;; RDI - pSta -;; RSI - pKeyStr -;; -MKGLOBAL(asm_ZucGenKeystream4B_4_avx,function,internal) -asm_ZucGenKeystream4B_4_avx: - - KEYGEN_4_AVX 1 - - ret - -;; -;; Encrypt N*4B bytes on all 4 buffers -;; where N is number of rounds (up to 4) -;; In final call, an array of final bytes is read -;; from memory and only these final bytes are of -;; plaintext are read and XOR'ed. -;; -%macro CIPHERNx4B_4 4 -%define %%NROUNDS %1 -%define %%INITIAL_ROUND %2 -%define %%OFFSET %3 -%define %%LAST_CALL %4 - -%ifdef LINUX -%define %%TMP1 r8 -%define %%TMP2 r9 -%else -%define %%TMP1 rdi -%define %%TMP2 rsi -%endif - ; Load read-only registers - vmovdqa xmm12, [rel mask31] - - ; Generate N*4B of keystream in N rounds -%assign %%N 1 -%assign %%round (%%INITIAL_ROUND + %%N) -%rep %%NROUNDS - bits_reorg4 rax, %%round, xmm10 - nonlin_fun4 rax, xmm0 - ; OFS_XR XOR W (xmm0) and store in stack - vpxor xmm10, xmm0 - vmovdqa [rsp + _keystr_save + (%%N-1)*16], xmm10 - vpxor xmm0, xmm0 - lfsr_updt4 rax, %%round, xmm0 -%assign %%N (%%N + 1) -%assign %%round (%%round + 1) -%endrep - -%assign %%N 0 -%assign %%idx 4 -%rep %%NROUNDS - vmovdqa APPEND(xmm, %%idx), [rsp + _keystr_save + %%N*16] -%assign %%N (%%N + 1) -%assign %%idx (%%idx+1) -%endrep - - TRANSPOSE4_U32 xmm4, xmm5, xmm6, xmm7, xmm8, xmm9 - - vmovdqa xmm15, [rel swap_mask] - - ;; XOR Input buffer with keystream in rounds of 16B - mov r12, [pIn] - mov r13, [pIn + 8] - mov r14, [pIn + 16] - mov r15, [pIn + 24] -%if (%%LAST_CALL == 4) - ;; Save GP registers - mov [rsp + _gpr_save], %%TMP1 - mov [rsp + _gpr_save + 8], %%TMP2 - - ;; Read in r10 the word containing the number of final bytes to read for each lane - movzx r10d, word [rsp + _rem_bytes_save] - simd_load_avx_16_1 xmm0, r12 + %%OFFSET, r10 - movzx r10d, word [rsp + _rem_bytes_save + 2] - simd_load_avx_16_1 xmm1, r13 + %%OFFSET, r10 - movzx r10d, word [rsp + _rem_bytes_save + 4] - simd_load_avx_16_1 xmm2, r14 + %%OFFSET, r10 - movzx r10d, word [rsp + _rem_bytes_save + 6] - simd_load_avx_16_1 xmm3, r15 + %%OFFSET, r10 -%else - vmovdqu xmm0, [r12 + %%OFFSET] - vmovdqu xmm1, [r13 + %%OFFSET] - vmovdqu xmm2, [r14 + %%OFFSET] - vmovdqu xmm3, [r15 + %%OFFSET] -%endif - - vpshufb xmm4, xmm15 - vpshufb xmm5, xmm15 - vpshufb xmm6, xmm15 - vpshufb xmm7, xmm15 - - vpxor xmm4, xmm0 - vpxor xmm5, xmm1 - vpxor xmm6, xmm2 - vpxor xmm7, xmm3 - - mov r12, [pOut] - mov r13, [pOut + 8] - mov r14, [pOut + 16] - mov r15, [pOut + 24] - -%if (%%LAST_CALL == 1) - movzx r10d, word [rsp + _rem_bytes_save] - simd_store_avx r12, xmm4, r10, %%TMP1, %%TMP2, %%OFFSET - movzx r10d, word [rsp + _rem_bytes_save + 2] - simd_store_avx r13, xmm5, r10, %%TMP1, %%TMP2, %%OFFSET - movzx r10d, word [rsp + _rem_bytes_save + 4] - simd_store_avx r14, xmm6, r10, %%TMP1, %%TMP2, %%OFFSET - movzx r10d, word [rsp + _rem_bytes_save + 6] - simd_store_avx r15, xmm7, r10, %%TMP1, %%TMP2, %%OFFSET - - ; Restore registers - mov %%TMP1, [rsp + _gpr_save] - mov %%TMP2, [rsp + _gpr_save + 8] -%else - vmovdqu [r12 + %%OFFSET], xmm4 
- vmovdqu [r13 + %%OFFSET], xmm5 - vmovdqu [r14 + %%OFFSET], xmm6 - vmovdqu [r15 + %%OFFSET], xmm7 -%endif -%endmacro - -;; -;; void asm_ZucCipher_4_avx(state16_t *pSta, u64 *pIn[4], -;; u64 *pOut[4], u16 *length[4], u64 min_length); -;; -;; WIN64 -;; RCX - pSta -;; RDX - pIn -;; R8 - pOut -;; R9 - lengths -;; rsp + 40 - min_length -;; -;; LIN64 -;; RDI - pSta -;; RSI - pIn -;; RDX - pOut -;; RCX - lengths -;; R8 - min_length -;; -MKGLOBAL(asm_ZucCipher_4_avx,function,internal) -asm_ZucCipher_4_avx: - -%ifdef LINUX - %define pState rdi - %define pIn rsi - %define pOut rdx - %define lengths rcx - %define arg5 r8 - - %define nrounds r8 -%else - %define pState rcx - %define pIn rdx - %define pOut r8 - %define lengths r9 - %define arg5 [rsp + 40] - - %define nrounds rdi -%endif - -%define min_length r10 -%define buf_idx r11 - - mov min_length, arg5 - - or min_length, min_length - jz exit_cipher - - FUNC_SAVE - - ;; Convert all lengths from UINT16_MAX (indicating that lane is not valid) to min length - vmovd xmm0, DWORD(min_length) - vpshufb xmm0, [rel broadcast_word] - vmovq xmm1, [lengths] - vpcmpeqw xmm2, xmm2 ;; Get all ff's in XMM register - vpcmpeqw xmm3, xmm1, xmm2 ;; Mask with FFFF in NULL jobs - - vpand xmm4, xmm3, xmm0 ;; Length of valid job in all NULL jobs - vpxor xmm2, xmm3 ;; Mask with 0000 in NULL jobs - vpand xmm1, xmm2 ;; Zero out lengths of NULL jobs - vpor xmm1, xmm4 ;; XMM1 contain updated lengths - - ; Round up to nearest multiple of 4 bytes - vpaddw xmm0, [rel all_threes] - vpand xmm0, [rel all_fffcs] - - ; Calculate remaining bytes to encrypt after function call - vpsubw xmm2, xmm1, xmm0 - vpxor xmm3, xmm3 - vpcmpgtw xmm4, xmm2, xmm3 ;; Mask with FFFF in lengths > 0 - ; Set to zero the lengths of the lanes which are going to be completed (lengths < 0) - vpand xmm2, xmm4 - vmovq [lengths], xmm2 ; Update in memory the final updated lengths - - ; Calculate number of bytes to encrypt after rounds of 16 bytes (up to 15 bytes), - ; for each lane, and store it in stack to be used in the last round - vpsubw xmm1, xmm2 ; Bytes to encrypt in all lanes - vpand xmm1, [rel all_0fs] ; Number of final bytes (up to 15 bytes) for each lane - vpcmpeqw xmm2, xmm1, xmm3 ;; Mask with FFFF in lengths == 0 - vpand xmm2, [rel all_10s] ;; 16 in positions where lengths was 0 - vpor xmm1, xmm2 ;; Number of final bytes (up to 16 bytes) for each lane - - ; Allocate stack frame to store keystreams (16*4 bytes), number of final bytes (8 bytes), - ; space for rsp (8 bytes) and 2 GP registers (16 bytes) that will be clobbered later - mov rax, rsp - sub rsp, STACK_size - and rsp, -16 - xor buf_idx, buf_idx - vmovq [rsp + _rem_bytes_save], xmm1 - mov [rsp + _rsp_save], rax - - ; Load state pointer in RAX - mov rax, pState - -loop_cipher64: - cmp min_length, 64 - jl exit_loop_cipher64 - -%assign round_off 0 -%rep 4 - CIPHERNx4B_4 4, round_off, buf_idx, 0 - - add buf_idx, 16 - sub min_length, 16 -%assign round_off (round_off + 4) -%endrep - jmp loop_cipher64 -exit_loop_cipher64: - - ; Check if there are more bytes left to encrypt - mov r15, min_length - add r15, 3 - shr r15, 2 ;; number of rounds left (round up length to nearest multiple of 4B) - jz exit_final_rounds - - cmp r15, 8 - je _num_final_rounds_is_8 - jb _final_rounds_is_1_7 - - ; Final blocks 9-16 - cmp r15, 12 - je _num_final_rounds_is_12 - ja _final_rounds_is_13_16 - - ; Final blocks 9-11 - cmp r15, 10 - je _num_final_rounds_is_10 - jb _num_final_rounds_is_9 - ja _num_final_rounds_is_11 - -_final_rounds_is_13_16: - cmp r15, 16 - je 
_num_final_rounds_is_16 - cmp r15, 14 - je _num_final_rounds_is_14 - jb _num_final_rounds_is_13 - ja _num_final_rounds_is_15 - -_final_rounds_is_1_7: - cmp r15, 4 - je _num_final_rounds_is_4 - jl _final_rounds_is_1_3 - - ; Final blocks 5-7 - cmp r15, 6 - je _num_final_rounds_is_6 - jb _num_final_rounds_is_5 - ja _num_final_rounds_is_7 - -_final_rounds_is_1_3: - cmp r15, 2 - je _num_final_rounds_is_2 - ja _num_final_rounds_is_3 - - ; Perform encryption of last bytes (<= 63 bytes) and reorder LFSR registers -%assign I 1 -%rep 4 -APPEND(_num_final_rounds_is_,I): - CIPHERNx4B_4 I, 0, buf_idx, 1 - REORDER_LFSR rax, I - add buf_idx, (I*4) - jmp exit_final_rounds -%assign I (I + 1) -%endrep - -%assign I 5 -%rep 4 -APPEND(_num_final_rounds_is_,I): - CIPHERNx4B_4 4, 0, buf_idx, 0 - add buf_idx, 16 - CIPHERNx4B_4 (I-4), 4, buf_idx, 1 - add buf_idx, ((I-4)*4) - REORDER_LFSR rax, I - jmp exit_final_rounds -%assign I (I + 1) -%endrep - -%assign I 9 -%rep 4 -APPEND(_num_final_rounds_is_,I): - CIPHERNx4B_4 4, 0, buf_idx, 0 - add buf_idx, 16 - CIPHERNx4B_4 4, 4, buf_idx, 0 - add buf_idx, 16 - CIPHERNx4B_4 (I-8), 8, buf_idx, 1 - add buf_idx, ((I-8)*4) - REORDER_LFSR rax, I - jmp exit_final_rounds -%assign I (I + 1) -%endrep - -%assign I 13 -%rep 4 -APPEND(_num_final_rounds_is_,I): - CIPHERNx4B_4 4, 0, buf_idx, 0 - add buf_idx, 16 - CIPHERNx4B_4 4, 4, buf_idx, 0 - add buf_idx, 16 - CIPHERNx4B_4 4, 8, buf_idx, 0 - add buf_idx, 16 - CIPHERNx4B_4 (I-12), 12, buf_idx, 1 - add buf_idx, ((I-12)*4) - REORDER_LFSR rax, I - jmp exit_final_rounds -%assign I (I + 1) -%endrep - -exit_final_rounds: - ;; update in/out pointers - vmovq xmm0, buf_idx - vpshufd xmm0, xmm0, 0x44 - vpaddq xmm1, xmm0, [pIn] - vpaddq xmm2, xmm0, [pIn + 16] - vmovdqa [pIn], xmm1 - vmovdqa [pIn + 16], xmm2 - vpaddq xmm1, xmm0, [pOut] - vpaddq xmm2, xmm0, [pOut + 16] - vmovdqa [pOut], xmm1 - vmovdqa [pOut + 16], xmm2 - - ;; Clear stack frame containing keystream information -%ifdef SAFE_DATA - vpxor xmm0, xmm0 -%assign i 0 -%rep 4 - vmovdqa [rsp + _keystr_save + i*16], xmm0 -%assign i (i+1) -%endrep -%endif - ; Restore rsp - mov rsp, [rsp + _rsp_save] - - FUNC_RESTORE - -exit_cipher: - - ret - -; -; Processes 16 bytes of data and updates the digest -; -%macro DIGEST_16_BYTES 12 -%define %%KS %1 ; [in] Pointer to 24-byte keystream -%define %%BIT_REV_L %2 ; [in] Bit reverse low table (XMM) -%define %%BIT_REV_H %3 ; [in] Bit reverse high table (XMM) -%define %%BIT_REV_AND %4 ; [in] Bit reverse and table (XMM) -%define %%XDIGEST %5 ; [in/out] Temporary digest (XMM) -%define %%XTMP1 %6 ; [clobbered] Temporary XMM register -%define %%XTMP2 %7 ; [clobbered] Temporary XMM register -%define %%XTMP3 %8 ; [clobbered] Temporary XMM register -%define %%XTMP4 %9 ; [clobbered] Temporary XMM register -%define %%KS_L %10 ; [clobbered] Temporary XMM register -%define %%KS_H %11 ; [clobbered] Temporary XMM register -%define %%OFF %12 ; [in] Offset into KS - - vpand %%XTMP2, %%XTMP1, %%BIT_REV_AND - - vpandn %%XTMP3, %%BIT_REV_AND, %%XTMP1 - vpsrld %%XTMP3, 4 - - vpshufb %%XTMP4, %%BIT_REV_H, %%XTMP2 - vpshufb %%XTMP1, %%BIT_REV_L, %%XTMP3 - vpor %%XTMP4, %%XTMP1 ;; %%XTMP4 - bit reverse data bytes - - ;; ZUC authentication part - ;; - 4x32 data bits - ;; - set up KS - vpshufd %%KS_L, [%%KS + %%OFF + (0*4)], 0x61 - vpshufd %%KS_H, [%%KS + %%OFF + (2*4)], 0x61 - - ;; - set up DATA - ; Data bytes [31:0 0s 63:32 0s] - vpshufb %%XTMP1, %%XTMP4, [rel shuf_mask_dw0_0_dw1_0] - - ; Data bytes [95:64 0s 127:96 0s] - vpshufb %%XTMP3, %%XTMP4, [rel shuf_mask_dw2_0_dw3_0] - - ;; 
- clmul - ;; - xor the results from 4 32-bit words together - vpclmulqdq %%XTMP2, %%XTMP1, %%KS_L, 0x11 - vpclmulqdq %%XTMP1, %%KS_L, 0x00 - vpclmulqdq %%XTMP4, %%XTMP3, %%KS_H, 0x00 - vpclmulqdq %%XTMP3, %%KS_H, 0x11 - - vpxor %%XTMP2, %%XTMP1 - vpxor %%XTMP4, %%XTMP3 - vpxor %%XDIGEST, %%XTMP2 - vpxor %%XDIGEST, %%XTMP4 -%endmacro - -%macro REMAINDER 18 -%define %%T %1 ; [in] Pointer to authentication tag -%define %%KS %2 ; [in/clobbered] Pointer to 32-byte keystream -%define %%DATA %3 ; [in/clobbered] Pointer to input data -%define %%N_BITS %4 ; [in/clobbered] Number of bits to digest -%define %%N_BYTES %5 ; [clobbered] Number of bytes to digest -%define %%TMP %6 ; [clobbered] Temporary GP register -%define %%TMP2 %7 ; [clobbered] Temporary GP register -%define %%TMP3 %8 ; [clobbered] Temporary GP register -%define %%BIT_REV_L %9 ; [in] Bit reverse low table (XMM) -%define %%BIT_REV_H %10 ; [in] Bit reverse high table (XMM) -%define %%BIT_REV_AND %11 ; [in] Bit reverse and table (XMM) -%define %%XDIGEST %12 ; [clobbered] Temporary digest (XMM) -%define %%XTMP1 %13 ; [clobbered] Temporary XMM register -%define %%XTMP2 %14 ; [clobbered] Temporary XMM register -%define %%XTMP3 %15 ; [clobbered] Temporary XMM register -%define %%XTMP4 %16 ; [clobbered] Temporary XMM register -%define %%KS_L %17 ; [clobbered] Temporary XMM register -%define %%KS_H %18 ; [clobbered] Temporary XMM register - - FUNC_SAVE - - vpxor %%XDIGEST, %%XDIGEST - - ; Length between 1 and 255 bits - test %%N_BITS, 128 - jz %%Eia3RoundsAVX_dq_end - - ;; read up to 16 bytes of data and reverse bits - vmovdqu %%XTMP1, [%%DATA] - DIGEST_16_BYTES %%KS, %%BIT_REV_L, %%BIT_REV_H, %%BIT_REV_AND, \ - %%XDIGEST, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, \ - %%KS_L, %%KS_H, 0 - - add %%DATA, 16 - add %%KS, 16 - sub %%N_BITS, 128 -%%Eia3RoundsAVX_dq_end: - - or %%N_BITS, %%N_BITS - jz %%Eia3RoundsAVX_end - - ; Get number of bytes - lea %%N_BYTES, [%%N_BITS + 7] - shr %%N_BYTES, 3 - - ;; read up to 16 bytes of data, zero bits not needed if partial byte and bit-reverse - simd_load_avx_16_1 %%XTMP1, %%DATA, %%N_BYTES - ; check if there is a partial byte (less than 8 bits in last byte) - mov %%TMP, %%N_BITS - and %%TMP, 0x7 - shl %%TMP, 4 - lea %%TMP2, [rel bit_mask_table] - add %%TMP2, %%TMP - - ; Get mask to clear last bits - vmovdqa %%XTMP2, [%%TMP2] - - ; Shift left 16-N bytes to have the last byte always at the end of the XMM register - ; to apply mask, then restore by shifting right same amount of bytes - mov %%TMP2, 16 - sub %%TMP2, %%N_BYTES - XVPSLLB %%XTMP1, %%TMP2, %%XTMP3, %%TMP - vpand %%XTMP1, %%XTMP2 - XVPSRLB %%XTMP1, %%TMP2, %%XTMP3, %%TMP - - DIGEST_16_BYTES %%KS, %%BIT_REV_L, %%BIT_REV_H, %%BIT_REV_AND, \ - %%XDIGEST, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, \ - %%KS_L, %%KS_H, 0 - -%%Eia3RoundsAVX_end: - -%define %%TAG DWORD(%%TMP) - ;; - update T - mov %%TAG, [%%T] - vmovq %%TMP2, %%XDIGEST - shr %%TMP2, 32 - xor %%TAG, DWORD(%%TMP2) - - ;; XOR with keyStr[n_bits] (Z_length, from spec) - - ; Read keyStr[N_BITS / 32] - mov %%TMP2, %%N_BITS - shr %%TMP2, 5 - mov %%TMP3, [%%KS + %%TMP2*4] - - ; Rotate left by N_BITS % 32 - mov %%TMP2, rcx ; Save RCX - mov rcx, %%N_BITS - and rcx, 0x1F - rol %%TMP3, cl - mov rcx, %%TMP2 ; Restore RCX - - ; XOR with previous digest calculation - xor %%TAG, DWORD(%%TMP3) - - ;; XOR with keyStr[L-1] - - ; Read keyStr[L - 1] (last double word of keyStr) - mov %%TMP2, %%N_BITS - add %%TMP2, (31 + 64 - 32) ; (32 is subtracted here to get L - 1) - shr %%TMP2, 5 ; L - 1 - ; XOR with previous digest 
calculation - xor %%TAG, [%%KS + %%TMP2 * 4] - - bswap %%TAG - mov [%%T], %%TAG - - FUNC_RESTORE - -%endmacro - -;; -;; extern void asm_Eia3Remainder_avx(void *T, const void *ks, const void *data, uint64_t n_bits) -;; -;; Returns authentication update value to be XOR'ed with current authentication tag -;; -;; @param [in] T (digest pointer) -;; @param [in] KS (key stream pointer) -;; @param [in] DATA (data pointer) -;; @param [in] N_BITS (number of bits to digest) -;; -align 64 -MKGLOBAL(asm_Eia3Remainder_avx,function,internal) -asm_Eia3Remainder_avx: - -%define T arg1 -%define KS arg2 -%define DATA arg3 -%define N_BITS arg4 - - vmovdqa xmm0, [rel bit_reverse_table_l] - vmovdqa xmm1, [rel bit_reverse_table_h] - vmovdqa xmm2, [rel bit_reverse_and_table] - - REMAINDER T, KS, DATA, N_BITS, r12, r13, r14, r15, \ - xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ - xmm8, xmm9 - - ret - -%macro EIA3_ROUND 15 -%define %%T %1 ; [in] Pointer to authentication tag -%define %%KS %2 ; [in/clobbered] Pointer to 32-byte keystream -%define %%DATA %3 ; [in/clobbered] Pointer to input data -%define %%TMP %4 ; [clobbered] Temporary GP register -%define %%BIT_REV_L %5 ; [in] Bit reverse low table (XMM) -%define %%BIT_REV_H %6 ; [in] Bit reverse high table (XMM) -%define %%BIT_REV_AND %7 ; [in] Bit reverse and table (XMM) -%define %%XDIGEST %8 ; [clobbered] Temporary digest (XMM) -%define %%XTMP1 %9 ; [clobbered] Temporary XMM register -%define %%XTMP2 %10 ; [clobbered] Temporary XMM register -%define %%XTMP3 %11 ; [clobbered] Temporary XMM register -%define %%XTMP4 %12 ; [clobbered] Temporary XMM register -%define %%KS_L %13 ; [clobbered] Temporary XMM register -%define %%KS_H %14 ; [clobbered] Temporary XMM register -%define %%NUM_16B_ROUNDS %15 ; [in] Number of 16-byte rounds - - vpxor %%XDIGEST, %%XDIGEST - -%assign %%OFF 0 -%rep %%NUM_16B_ROUNDS - vmovdqu %%XTMP1, [%%DATA + %%OFF] - - DIGEST_16_BYTES %%KS, %%BIT_REV_L, %%BIT_REV_H, %%BIT_REV_AND, \ - %%XDIGEST, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, \ - %%KS_L, %%KS_H, %%OFF - -%assign %%OFF (%%OFF + 16) -%endrep - - ;; - update T - vmovq %%TMP, %%XDIGEST - shr %%TMP, 32 - xor [%%T], DWORD(%%TMP) - -%endmacro - -;; -;;extern void asm_Eia3Round32B_avx(void *T, const void *KS, const void *DATA) -;; -;; Updates authentication tag T based on keystream KS and DATA. -;; - it processes 32 bytes of DATA -;; - reads data in 16 byte chunks and bit reverses them -;; - reads and re-arranges KS -;; - employs clmul for the XOR & ROL part -;; - copies top 32 bytes of KS to bottom (for the next round) -;; -;; @param [in] T (digest pointer) -;; @param [in] KS (key stream pointer) -;; @param [in] DATA (data pointer) -;; -align 64 -MKGLOBAL(asm_Eia3Round32B_avx,function,internal) -asm_Eia3Round32B_avx: - -%define T arg1 -%define KS arg2 -%define DATA arg3 - - FUNC_SAVE - - vmovdqa xmm0, [bit_reverse_table_l] - vmovdqa xmm1, [bit_reverse_table_h] - vmovdqa xmm2, [bit_reverse_and_table] - - EIA3_ROUND T, KS, DATA, r11, \ - xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ - xmm8, xmm9, 2 - - ;; Copy last 32 bytes of KS to the front - vmovdqa xmm0, [KS + 32] - vmovdqa xmm1, [KS + 48] - vmovdqa [KS], xmm0 - vmovdqa [KS + 16], xmm1 - - FUNC_RESTORE - - ret - -;; -;;extern void asm_Eia3Round16B_avx(void *T, const void *KS, const void *DATA) -;; -;; Updates authentication tag T based on keystream KS and DATA. 
-;; - it processes 16 bytes of DATA -;; - reads data in 16 byte chunks and bit reverses them -;; - reads and re-arranges KS -;; - employs clmul for the XOR & ROL part -;; - copies top 16 bytes of KS to bottom (for the next round) -;; -;; @param [in] T (digest pointer) -;; @param [in] KS (key stream pointer) -;; @param [in] DATA (data pointer) -;; -align 64 -MKGLOBAL(asm_Eia3Round16B_avx,function,internal) -asm_Eia3Round16B_avx: - -%define T arg1 -%define KS arg2 -%define DATA arg3 - - FUNC_SAVE - - vmovdqa xmm0, [bit_reverse_table_l] - vmovdqa xmm1, [bit_reverse_table_h] - vmovdqa xmm2, [bit_reverse_and_table] - - EIA3_ROUND T, KS, DATA, r11, \ - xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ - xmm8, xmm9, 1 - - ;; Copy last 16 bytes of KS to the front - vmovdqa xmm0, [KS + 16] - vmovdqa [KS], xmm0 - - FUNC_RESTORE - - ret - -;---------------------------------------------------------------------------------------- -;---------------------------------------------------------------------------------------- - -mksection stack-noexec diff --git a/lib/avx2/zuc_x8_avx2.asm b/lib/avx2/zuc_x8_avx2.asm deleted file mode 100755 index a9598f01d9083857a2d4acc16f39f62308e86c8f..0000000000000000000000000000000000000000 --- a/lib/avx2/zuc_x8_avx2.asm +++ /dev/null @@ -1,1406 +0,0 @@ -;; -;; Copyright (c) 2020-2022, Intel Corporation -;; -;; Redistribution and use in source and binary forms, with or without -;; modification, are permitted provided that the following conditions are met: -;; -;; * Redistributions of source code must retain the above copyright notice, -;; this list of conditions and the following disclaimer. -;; * Redistributions in binary form must reproduce the above copyright -;; notice, this list of conditions and the following disclaimer in the -;; documentation and/or other materials provided with the distribution. -;; * Neither the name of Intel Corporation nor the names of its contributors -;; may be used to endorse or promote products derived from this software -;; without specific prior written permission. -;; -;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-;; - -%include "include/os.asm" -%include "include/reg_sizes.asm" -%include "include/zuc_sbox.inc" -%include "include/transpose_avx2.asm" -%include "include/memcpy.asm" -%include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" -%define APPEND(a,b) a %+ b - -mksection .rodata -default rel - -align 32 -Ek_d: -dd 0x0044D700, 0x0026BC00, 0x00626B00, 0x00135E00, 0x00578900, 0x0035E200, 0x00713500, 0x0009AF00 -dd 0x004D7800, 0x002F1300, 0x006BC400, 0x001AF100, 0x005E2600, 0x003C4D00, 0x00789A00, 0x0047AC00 - -align 16 -EK256_d64: -dd 0x00220000, 0x002F0000, 0x00240000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 - -align 16 -EK256_EIA3_4: -dd 0x00220000, 0x002F0000, 0x00250000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 - -align 16 -EK256_EIA3_8: -dd 0x00230000, 0x002F0000, 0x00240000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 - -align 16 -EK256_EIA3_16: -dd 0x00230000, 0x002F0000, 0x00250000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 - -align 32 -shuf_mask_key: -dd 0x00FFFFFF, 0x01FFFFFF, 0x02FFFFFF, 0x03FFFFFF, 0x04FFFFFF, 0x05FFFFFF, 0x06FFFFFF, 0x07FFFFFF, -dd 0x08FFFFFF, 0x09FFFFFF, 0x0AFFFFFF, 0x0BFFFFFF, 0x0CFFFFFF, 0x0DFFFFFF, 0x0EFFFFFF, 0x0FFFFFFF, - -align 32 -shuf_mask_iv: -dd 0xFFFFFF00, 0xFFFFFF01, 0xFFFFFF02, 0xFFFFFF03, 0xFFFFFF04, 0xFFFFFF05, 0xFFFFFF06, 0xFFFFFF07, -dd 0xFFFFFF08, 0xFFFFFF09, 0xFFFFFF0A, 0xFFFFFF0B, 0xFFFFFF0C, 0xFFFFFF0D, 0xFFFFFF0E, 0xFFFFFF0F, - -align 16 -shuf_mask_iv_17_19: -db 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0x02, 0xFF - -align 16 -clear_iv_mask: -db 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00 - -align 16 -shuf_mask_iv_20_23: -db 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0x02, 0xFF, 0xFF, 0xFF, 0x03, 0xFF - -align 32 -mask31: -dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, -dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, - -align 32 -swap_mask: -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c - -align 32 -S1_S0_shuf: -db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F -db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F - -align 32 -S0_S1_shuf: -db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, -db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, - -align 32 -rev_S1_S0_shuf: -db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F -db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F - -align 32 -rev_S0_S1_shuf: -db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 -db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 
- -align 32 -rot8_mod32: -db 0x03, 0x00, 0x01, 0x02, 0x07, 0x04, 0x05, 0x06, -db 0x0B, 0x08, 0x09, 0x0A, 0x0F, 0x0C, 0x0D, 0x0E -db 0x03, 0x00, 0x01, 0x02, 0x07, 0x04, 0x05, 0x06, -db 0x0B, 0x08, 0x09, 0x0A, 0x0F, 0x0C, 0x0D, 0x0E - -align 32 -rot16_mod32: -db 0x02, 0x03, 0x00, 0x01, 0x06, 0x07, 0x04, 0x05, -db 0x0A, 0x0B, 0x08, 0x09, 0x0E, 0x0F, 0x0C, 0x0D -db 0x02, 0x03, 0x00, 0x01, 0x06, 0x07, 0x04, 0x05, -db 0x0A, 0x0B, 0x08, 0x09, 0x0E, 0x0F, 0x0C, 0x0D - -align 32 -rot24_mod32: -db 0x01, 0x02, 0x03, 0x00, 0x05, 0x06, 0x07, 0x04, -db 0x09, 0x0A, 0x0B, 0x08, 0x0D, 0x0E, 0x0F, 0x0C -db 0x01, 0x02, 0x03, 0x00, 0x05, 0x06, 0x07, 0x04, -db 0x09, 0x0A, 0x0B, 0x08, 0x0D, 0x0E, 0x0F, 0x0C - -align 16 -broadcast_word: -db 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01 -db 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01 - -align 16 -all_ffs: -dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff - -align 16 -all_threes: -dw 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003 - -align 16 -all_fffcs: -dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc - -align 16 -all_1fs: -dw 0x001f, 0x001f, 0x001f, 0x001f, 0x001f, 0x001f, 0x001f, 0x001f - -align 16 -all_20s: -dw 0x0020, 0x0020, 0x0020, 0x0020, 0x0020, 0x0020, 0x0020, 0x0020 - -mksection .text -align 64 - -%define MASK31 ymm12 - -%define OFS_R1 (16*(2*16)) -%define OFS_R2 (OFS_R1 + (2*16)) -%define OFS_X0 (OFS_R2 + (2*16)) -%define OFS_X1 (OFS_X0 + (2*16)) -%define OFS_X2 (OFS_X1 + (2*16)) - -%ifidn __OUTPUT_FORMAT__, win64 - %define XMM_STORAGE 16*10 - %define GP_STORAGE 8*8 -%else - %define XMM_STORAGE 0 - %define GP_STORAGE 6*8 -%endif - -%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE -%define GP_OFFSET XMM_STORAGE - -%macro FUNC_SAVE 0 - mov r11, rsp - sub rsp, VARIABLE_OFFSET - and rsp, ~15 - -%ifidn __OUTPUT_FORMAT__, win64 - ; xmm6:xmm15 need to be maintained for Windows - vmovdqa [rsp + 0*16], xmm6 - vmovdqa [rsp + 1*16], xmm7 - vmovdqa [rsp + 2*16], xmm8 - vmovdqa [rsp + 3*16], xmm9 - vmovdqa [rsp + 4*16], xmm10 - vmovdqa [rsp + 5*16], xmm11 - vmovdqa [rsp + 6*16], xmm12 - vmovdqa [rsp + 7*16], xmm13 - vmovdqa [rsp + 8*16], xmm14 - vmovdqa [rsp + 9*16], xmm15 - mov [rsp + GP_OFFSET + 48], rdi - mov [rsp + GP_OFFSET + 56], rsi -%endif - mov [rsp + GP_OFFSET], r12 - mov [rsp + GP_OFFSET + 8], r13 - mov [rsp + GP_OFFSET + 16], r14 - mov [rsp + GP_OFFSET + 24], r15 - mov [rsp + GP_OFFSET + 32], rbx - mov [rsp + GP_OFFSET + 40], r11 ;; rsp pointer -%endmacro - -%macro FUNC_RESTORE 0 - -%ifidn __OUTPUT_FORMAT__, win64 - vmovdqa xmm6, [rsp + 0*16] - vmovdqa xmm7, [rsp + 1*16] - vmovdqa xmm8, [rsp + 2*16] - vmovdqa xmm9, [rsp + 3*16] - vmovdqa xmm10, [rsp + 4*16] - vmovdqa xmm11, [rsp + 5*16] - vmovdqa xmm12, [rsp + 6*16] - vmovdqa xmm13, [rsp + 7*16] - vmovdqa xmm14, [rsp + 8*16] - vmovdqa xmm15, [rsp + 9*16] - mov rdi, [rsp + GP_OFFSET + 48] - mov rsi, [rsp + GP_OFFSET + 56] -%endif - mov r12, [rsp + GP_OFFSET] - mov r13, [rsp + GP_OFFSET + 8] - mov r14, [rsp + GP_OFFSET + 16] - mov r15, [rsp + GP_OFFSET + 24] - mov rbx, [rsp + GP_OFFSET + 32] - mov rsp, [rsp + GP_OFFSET + 40] -%endmacro - -; This macro reorder the LFSR registers -; after N rounds (1 <= N <= 15), since the registers -; are shifted every round -; -; The macro clobbers YMM0-15 -; -%macro REORDER_LFSR 2 -%define %%STATE %1 -%define %%NUM_ROUNDS %2 - -%if %%NUM_ROUNDS != 16 -%assign i 0 -%rep 16 - vmovdqa APPEND(ymm,i), [%%STATE + 32*i] -%assign i (i+1) -%endrep - -%assign i 0 -%assign j %%NUM_ROUNDS -%rep 16 - vmovdqa [%%STATE + 32*i], 
APPEND(ymm,j) -%assign i (i+1) -%assign j ((j+1) % 16) -%endrep -%endif ;; %%NUM_ROUNDS != 16 - -%endmacro - -;; -;; make_u31() -;; -%macro make_u31 4 - -%define %%Rt %1 -%define %%Ke %2 -%define %%Ek %3 -%define %%Iv %4 - xor %%Rt, %%Rt - shrd %%Rt, %%Iv, 8 - shrd %%Rt, %%Ek, 15 - shrd %%Rt, %%Ke, 9 -%endmacro - -; -; bits_reorg8() -; -%macro bits_reorg8 2-3 -%define %%STATE %1 ; [in] ZUC state -%define %%ROUND_NUM %2 ; [in] Round number -%define %%X3 %3 ; [out] YMM register containing X3 of all lanes - ; - ; ymm15 = LFSR_S15 - ; ymm14 = LFSR_S14 - ; ymm11 = LFSR_S11 - ; ymm9 = LFSR_S9 - ; ymm7 = LFSR_S7 - ; ymm5 = LFSR_S5 - ; ymm2 = LFSR_S2 - ; ymm0 = LFSR_S0 - ; - vmovdqa ymm15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm14, [%%STATE + ((14 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm11, [%%STATE + ((11 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm9, [%%STATE + (( 9 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm7, [%%STATE + (( 7 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm5, [%%STATE + (( 5 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm2, [%%STATE + (( 2 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*32] - - vpxor ymm1, ymm1 - vpslld ymm15, 1 - vpblendw ymm3, ymm14, ymm1, 0xAA - vpblendw ymm15, ymm3, ymm15, 0xAA - - vmovdqa [%%STATE + OFS_X0], ymm15 ; BRC_X0 - vpslld ymm11, 16 - vpsrld ymm9, 15 - vpor ymm11, ymm9 - vmovdqa [%%STATE + OFS_X1], ymm11 ; BRC_X1 - vpslld ymm7, 16 - vpsrld ymm5, 15 - vpor ymm7, ymm5 - vmovdqa [%%STATE + OFS_X2], ymm7 ; BRC_X2 -%if (%0 == 3) - vpslld ymm2, 16 - vpsrld ymm0, 15 - vpor %%X3, ymm2, ymm0 ; Store BRC_X3 in YMM register -%endif -%endmacro - -; -; rot_mod32() -; -; uses ymm7 -; -%macro rot_mod32 3 -%if (%3 == 8) - vpshufb %1, %2, [rel rot8_mod32] -%elif (%3 == 16) - vpshufb %1, %2, [rel rot16_mod32] -%elif (%3 == 24) - vpshufb %1, %2, [rel rot24_mod32] -%else - vpslld %1, %2, %3 - vpsrld ymm7, %2, (32 - %3) - - vpor %1, ymm7 -%endif -%endmacro - -; -; nonlin_fun8() -; -; return -; W value, updates F_R1[] / F_R2[] -; -%macro nonlin_fun8 1-2 -%define %%STATE %1 ; [in] ZUC state -%define %%W %2 ; [out] YMM register to contain W for all lanes - -%if (%0 == 2) - vmovdqa %%W, [%%STATE + OFS_X0] - vpxor %%W, [%%STATE + OFS_R1] - vpaddd %%W, [%%STATE + OFS_R2] ; W = (BRC_X0 ^ F_R1) + F_R2 -%endif - - vmovdqa ymm1, [%%STATE + OFS_R1] - vmovdqa ymm2, [%%STATE + OFS_R2] - vpaddd ymm1, [%%STATE + OFS_X1] ; W1 = F_R1 + BRC_X1 - vpxor ymm2, [%%STATE + OFS_X2] ; W2 = F_R2 ^ BRC_X2 - - vpslld ymm3, ymm1, 16 - vpsrld ymm4, ymm1, 16 - vpslld ymm5, ymm2, 16 - vpsrld ymm6, ymm2, 16 - vpor ymm1, ymm3, ymm6 - vpor ymm2, ymm4, ymm5 - - rot_mod32 ymm3, ymm1, 2 - rot_mod32 ymm4, ymm1, 10 - rot_mod32 ymm5, ymm1, 18 - rot_mod32 ymm6, ymm1, 24 - vpxor ymm1, ymm3 - vpxor ymm1, ymm4 - vpxor ymm1, ymm5 - vpxor ymm1, ymm6 ; XMM1 = U = L1(P) - - rot_mod32 ymm3, ymm2, 8 - rot_mod32 ymm4, ymm2, 14 - rot_mod32 ymm5, ymm2, 22 - rot_mod32 ymm6, ymm2, 30 - vpxor ymm2, ymm3 - vpxor ymm2, ymm4 - vpxor ymm2, ymm5 - vpxor ymm2, ymm6 ; XMM2 = V = L2(Q) - - ; Shuffle U and V to have all S0 lookups in XMM1 and all S1 lookups in XMM2 - - ; Compress all S0 and S1 input values in each register - vpshufb ymm1, [rel S0_S1_shuf] ; S0: Bytes 0-7,16-23 S1: Bytes 8-15,24-31 - vpshufb ymm2, [rel S1_S0_shuf] ; S1: Bytes 0-7,16-23 S0: Bytes 8-15,24-31 - - vshufpd ymm3, ymm1, ymm2, 0xA ; All S0 input values - vshufpd ymm4, ymm2, ymm1, 0xA ; All S1 input values - - ; Compute S0 and S1 values - S0_comput_AVX2 ymm3, ymm1, ymm2 - S1_comput_AVX2 ymm4, ymm1, ymm2, ymm5 - - ; Need to shuffle back ymm1 & 
ymm2 before storing output - ; (revert what was done before S0 and S1 computations) - vshufpd ymm1, ymm3, ymm4, 0xA - vshufpd ymm2, ymm4, ymm3, 0xA - - vpshufb ymm1, [rel rev_S0_S1_shuf] - vpshufb ymm2, [rel rev_S1_S0_shuf] - - vmovdqa [%%STATE + OFS_R1], ymm1 - vmovdqa [%%STATE + OFS_R2], ymm2 -%endmacro - -; -; store32B_kstr8() -; -%macro store32B_kstr8 8 -%define %%DATA32B_L0 %1 ; [in] 32 bytes of keystream for lane 0 -%define %%DATA32B_L1 %2 ; [in] 32 bytes of keystream for lane 1 -%define %%DATA32B_L2 %3 ; [in] 32 bytes of keystream for lane 2 -%define %%DATA32B_L3 %4 ; [in] 32 bytes of keystream for lane 3 -%define %%DATA32B_L4 %5 ; [in] 32 bytes of keystream for lane 4 -%define %%DATA32B_L5 %6 ; [in] 32 bytes of keystream for lane 5 -%define %%DATA32B_L6 %7 ; [in] 32 bytes of keystream for lane 6 -%define %%DATA32B_L7 %8 ; [in] 32 bytes of keystream for lane 7 - - mov rcx, [rsp] - mov rdx, [rsp + 8] - mov r8, [rsp + 16] - mov r9, [rsp + 24] - vmovdqu [rcx], %%DATA32B_L0 - vmovdqu [rdx], %%DATA32B_L1 - vmovdqu [r8], %%DATA32B_L2 - vmovdqu [r9], %%DATA32B_L3 - - mov rcx, [rsp + 32] - mov rdx, [rsp + 40] - mov r8, [rsp + 48] - mov r9, [rsp + 56] - vmovdqu [rcx], %%DATA32B_L4 - vmovdqu [rdx], %%DATA32B_L5 - vmovdqu [r8], %%DATA32B_L6 - vmovdqu [r9], %%DATA32B_L7 - -%endmacro - -; -; store4B_kstr8() -; -; params -; -; %1 - YMM register with OFS_X3 -; return -; -%macro store4B_kstr8 1 - mov rcx, [rsp] - mov rdx, [rsp + 8] - mov r8, [rsp + 16] - mov r9, [rsp + 24] - vpextrd [r9], XWORD(%1), 3 - vpextrd [r8], XWORD(%1), 2 - vpextrd [rdx], XWORD(%1), 1 - vmovd [rcx], XWORD(%1) - add rcx, 4 - add rdx, 4 - add r8, 4 - add r9, 4 - mov [rsp], rcx - mov [rsp + 8], rdx - mov [rsp + 16], r8 - mov [rsp + 24], r9 - - vextracti128 XWORD(%1), %1, 1 - mov rcx, [rsp + 32] - mov rdx, [rsp + 40] - mov r8, [rsp + 48] - mov r9, [rsp + 56] - vpextrd [r9], XWORD(%1), 3 - vpextrd [r8], XWORD(%1), 2 - vpextrd [rdx], XWORD(%1), 1 - vmovd [rcx], XWORD(%1) - add rcx, 4 - add rdx, 4 - add r8, 4 - add r9, 4 - mov [rsp + 32], rcx - mov [rsp + 40], rdx - mov [rsp + 48], r8 - mov [rsp + 56], r9 - -%endmacro - -; -; add_mod31() -; add two 32-bit args and reduce mod (2^31-1) -; params -; %1 - arg1/res -; %2 - arg2 -; uses -; ymm2 -; return -; %1 -%macro add_mod31 2 - vpaddd %1, %2 - vpsrld ymm2, %1, 31 - vpand %1, MASK31 - vpaddd %1, ymm2 -%endmacro - -; -; rot_mod31() -; rotate (mult by pow of 2) 32-bit arg and reduce mod (2^31-1) -; params -; %1 - arg -; %2 - # of bits -; uses -; ymm2 -; return -; %1 -%macro rot_mod31 2 - - vpslld ymm2, %1, %2 - vpsrld %1, %1, (31 - %2) - - vpor %1, ymm2 - vpand %1, MASK31 -%endmacro - -; -; lfsr_updt8() -; -; -%macro lfsr_updt8 3 -%define %%STATE %1 ; [in] ZUC state -%define %%ROUND_NUM %2 ; [in] Round number -%define %%W %3 ; [in/clobbered] YMM register to contain W for all lanes - ; - ; ymm1 = LFSR_S0 - ; ymm4 = LFSR_S4 - ; ymm10 = LFSR_S10 - ; ymm13 = LFSR_S13 - ; ymm15 = LFSR_S15 - ; - vmovdqa ymm1, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm4, [%%STATE + (( 4 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm10, [%%STATE + ((10 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm13, [%%STATE + ((13 + %%ROUND_NUM) % 16)*32] - vmovdqa ymm15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*32] - - ; Calculate LFSR feedback - add_mod31 %%W, ymm1 - rot_mod31 ymm1, 8 - add_mod31 %%W, ymm1 - rot_mod31 ymm4, 20 - add_mod31 %%W, ymm4 - rot_mod31 ymm10, 21 - add_mod31 %%W, ymm10 - rot_mod31 ymm13, 17 - add_mod31 %%W, ymm13 - rot_mod31 ymm15, 15 - add_mod31 %%W, ymm15 - - vmovdqa [%%STATE + (( 0 + %%ROUND_NUM) % 
16)*32], %%W - - ; LFSR_S16 = (LFSR_S15++) = eax -%endmacro - -; -; Initialize LFSR registers for a single lane, for ZUC-128 -; -; This macro initializes 8 LFSR registers at time. -; so it needs to be called twice. -; -; From spec, s_i (LFSR) registers need to be loaded as follows: -; -; For 0 <= i <= 15, let s_i= k_i || d_i || iv_i. -; Where k_i is each byte of the key, d_i is a 15-bit constant -; and iv_i is each byte of the IV. -; -%macro INIT_LFSR_128 7 -%define %%KEY %1 ;; [in] Key pointer -%define %%IV %2 ;; [in] IV pointer -%define %%SHUF_KEY %3 ;; [in] Shuffle key mask -%define %%SHUF_IV %4 ;; [in] Shuffle key mask -%define %%EKD_MASK %5 ;; [in] Shuffle key mask -%define %%LFSR %6 ;; [out] YMM register to contain initialized LFSR regs -%define %%YTMP %7 ;; [clobbered] YMM temporary register - - vbroadcastf128 %%LFSR, [%%KEY] - vbroadcastf128 %%YTMP, [%%IV] - vpshufb %%LFSR, %%SHUF_KEY - vpsrld %%LFSR, 1 - vpshufb %%YTMP, %%SHUF_IV - vpor %%LFSR, %%YTMP - vpor %%LFSR, %%EKD_MASK - -%endmacro - -; -; Initialize LFSR registers for a single lane, for ZUC-256 -; -%macro INIT_LFSR_256 8 -%define %%KEY %1 ;; [in] Key pointer -%define %%IV %2 ;; [in] IV pointer -%define %%LFSR0_7 %3 ;; [out] YMM register to contain initialized LFSR regs 0-7 -%define %%LFSR8_15 %4 ;; [out] YMM register to contain initialized LFSR regs 8-15 -%define %%XTMP %5 ;; [clobbered] XMM temporary register -%define %%XTMP2 %6 ;; [clobbered] XMM temporary register -%define %%TMP %7 ;; [clobbered] GP temporary register -%define %%CONSTANTS %8 ;; [in] Address to constants - - ; s0 - s7 - vpxor %%LFSR0_7, %%LFSR0_7 - vpinsrb XWORD(%%LFSR0_7), [%%KEY], 3 ; s0 - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 1], 7 ; s1 - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 2], 11 ; s2 - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 3], 15 ; s3 - - vpsrld XWORD(%%LFSR0_7), 1 - - vpor XWORD(%%LFSR0_7), [%%CONSTANTS] ; s0 - s3 - - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 21], 1 ; s0 - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 16], 0 ; s0 - - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 22], 5 ; s1 - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 17], 4 ; s1 - - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 23], 9 ; s2 - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 18], 8 ; s2 - - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 24], 13 ; s3 - vpinsrb XWORD(%%LFSR0_7), [%%KEY + 19], 12 ; s3 - - vpxor %%XTMP, %%XTMP - vpinsrb %%XTMP, [%%KEY + 4], 3 ; s4 - vpinsrb %%XTMP, [%%IV], 7 ; s5 - vpinsrb %%XTMP, [%%IV + 1], 11 ; s6 - vpinsrb %%XTMP, [%%IV + 10], 15 ; s7 - - vpsrld %%XTMP, 1 - - vpinsrb %%XTMP, [%%KEY + 25], 1 ; s4 - vpinsrb %%XTMP, [%%KEY + 20], 0 ; s4 - - vpinsrb %%XTMP, [%%KEY + 5], 5 ; s5 - vpinsrb %%XTMP, [%%KEY + 26], 4 ; s5 - - vpinsrb %%XTMP, [%%KEY + 6], 9 ; s6 - vpinsrb %%XTMP, [%%KEY + 27], 8 ; s6 - - vpinsrb %%XTMP, [%%KEY + 7], 13 ; s7 - vpinsrb %%XTMP, [%%IV + 2], 12 ; s7 - - vpor %%XTMP, [%%CONSTANTS + 16] ; s4 - s7 - - vmovd %%XTMP2, [%%IV + 17] - vpshufb %%XTMP2, [rel shuf_mask_iv_17_19] - vpand %%XTMP2, [rel clear_iv_mask] - - vpor %%XTMP, %%XTMP2 - - vinserti128 %%LFSR0_7, %%XTMP, 1 - - ; s8 - s15 - vpxor %%LFSR8_15, %%LFSR8_15 - vpinsrb XWORD(%%LFSR8_15), [%%KEY + 8], 3 ; s8 - vpinsrb XWORD(%%LFSR8_15), [%%KEY + 9], 7 ; s9 - vpinsrb XWORD(%%LFSR8_15), [%%IV + 5], 11 ; s10 - vpinsrb XWORD(%%LFSR8_15), [%%KEY + 11], 15 ; s11 - - vpsrld XWORD(%%LFSR8_15), 1 - - vpinsrb XWORD(%%LFSR8_15), [%%IV + 3], 1 ; s8 - vpinsrb XWORD(%%LFSR8_15), [%%IV + 11], 0 ; s8 - - vpinsrb XWORD(%%LFSR8_15), [%%IV + 12], 5 ; s9 - vpinsrb XWORD(%%LFSR8_15), [%%IV + 4], 4 ; s9 - - vpinsrb XWORD(%%LFSR8_15), [%%KEY + 10], 9 ; s10 - vpinsrb 
XWORD(%%LFSR8_15), [%%KEY + 28], 8 ; s10 - - vpinsrb XWORD(%%LFSR8_15), [%%IV + 6], 13 ; s11 - vpinsrb XWORD(%%LFSR8_15), [%%IV + 13], 12 ; s11 - - vpor XWORD(%%LFSR8_15), [%%CONSTANTS + 32] ; s8 - s11 - - vmovd %%XTMP, [%%IV + 20] - vpshufb %%XTMP, [rel shuf_mask_iv_20_23] - vpand %%XTMP, [rel clear_iv_mask] - - vpor XWORD(%%LFSR8_15), %%XTMP - - vpxor %%XTMP, %%XTMP - vpinsrb %%XTMP, [%%KEY + 12], 3 ; s12 - vpinsrb %%XTMP, [%%KEY + 13], 7 ; s13 - vpinsrb %%XTMP, [%%KEY + 14], 11 ; s14 - vpinsrb %%XTMP, [%%KEY + 15], 15 ; s15 - - vpsrld %%XTMP, 1 - - vpinsrb %%XTMP, [%%IV + 7], 1 ; s12 - vpinsrb %%XTMP, [%%IV + 14], 0 ; s12 - - vpinsrb %%XTMP, [%%IV + 15], 5 ; s13 - vpinsrb %%XTMP, [%%IV + 8], 4 ; s13 - - vpinsrb %%XTMP, [%%IV + 16], 9 ; s14 - vpinsrb %%XTMP, [%%IV + 9], 8 ; s14 - - vpinsrb %%XTMP, [%%KEY + 30], 13 ; s15 - vpinsrb %%XTMP, [%%KEY + 29], 12 ; s15 - - vpor %%XTMP, [%%CONSTANTS + 48] ; s12 - s15 - - movzx DWORD(%%TMP), byte [%%IV + 24] - and DWORD(%%TMP), 0x0000003f - shl DWORD(%%TMP), 16 - vmovd %%XTMP2, DWORD(%%TMP) - - movzx DWORD(%%TMP), byte [%%KEY + 31] - shl DWORD(%%TMP), 12 - and DWORD(%%TMP), 0x000f0000 ; high nibble of K_31 - vpinsrd %%XTMP2, DWORD(%%TMP), 2 - - movzx DWORD(%%TMP), byte [%%KEY + 31] - shl DWORD(%%TMP), 16 - and DWORD(%%TMP), 0x000f0000 ; low nibble of K_31 - vpinsrd %%XTMP2, DWORD(%%TMP), 3 - - vpor %%XTMP, %%XTMP2 - vinserti128 %%LFSR8_15, %%XTMP, 1 -%endmacro - -%macro ZUC_INIT_8 1 -%define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) - -%ifdef LINUX - %define pKe rdi - %define pIv rsi - %define pState rdx - %define tag_sz rcx ; Only used in ZUC-256 -%else - %define pKe rcx - %define pIv rdx - %define pState r8 - %define tag_sz r9 ; Only used in ZUC-256 -%endif - - FUNC_SAVE - - ; Zero out R1/R2 (only lower half is used) - vpxor ymm0, ymm0 -%assign I 0 -%rep 2 - vmovdqa [pState + OFS_R1 + I*32], ymm0 -%assign I (I + 1) -%endrep - - ;;; Initialize all LFSR registers in two steps: - ;;; first, registers 0-7, then registers 8-15 - -%if %%KEY_SIZE == 128 -%assign off 0 -%rep 2 - ; Set read-only registers for shuffle masks for key, IV and Ek_d for 8 registers - vmovdqa ymm13, [rel shuf_mask_key + off] - vmovdqa ymm14, [rel shuf_mask_iv + off] - vmovdqa ymm15, [rel Ek_d + off] - - ; Set 8xLFSR registers for all packets -%assign idx 0 -%rep 8 - mov r9, [pKe + 8*idx] ; Load Key N pointer - lea r10, [pIv + 32*idx] ; Load IV N pointer - INIT_LFSR_128 r9, r10, ymm13, ymm14, ymm15, APPEND(ymm, idx), ymm12 -%assign idx (idx + 1) -%endrep - - ; Store 8xLFSR registers in memory (reordering first, - ; so all SX registers are together) - TRANSPOSE8_U32 ymm0, ymm1, ymm2, ymm3, ymm4, ymm5, ymm6, ymm7, ymm8, ymm9 - -%assign i 0 -%rep 8 - vmovdqa [pState + 8*off + 32*i], APPEND(ymm, i) -%assign i (i+1) -%endrep - -%assign off (off + 32) -%endrep -%else ;; %%KEY_SIZE == 256 - - ; Get pointer to constants (depending on tag size, this will point at - ; constants for encryption, authentication with 4-byte, 8-byte or 16-byte tags) - lea r13, [rel EK256_d64] - bsf DWORD(tag_sz), DWORD(tag_sz) - dec DWORD(tag_sz) - shl DWORD(tag_sz), 6 - add r13, tag_sz - - ;;; Initialize all LFSR registers -%assign off 0 -%rep 8 - ;; Load key and IV for each packet - mov r12, [pKe + off] - lea r10, [pIv + 4*off] ; Load IV N pointer - - ; Initialize S0-15 for each packet - INIT_LFSR_256 r12, r10, ymm0, ymm1, xmm2, xmm3, r11, r13 - -%assign i 0 -%rep 2 - vmovdqa [pState + 256*i + 4*off], APPEND(ymm, i) -%assign i (i+1) -%endrep - -%assign off (off + 8) -%endrep - - ; Read, transpose and 
store, so all S_X from the 8 packets are in the same register -%assign off 0 -%rep 2 - -%assign i 0 -%rep 8 - vmovdqa APPEND(ymm, i), [pState + 32*i + off] -%assign i (i+1) -%endrep - - TRANSPOSE8_U32 ymm0, ymm1, ymm2, ymm3, ymm4, ymm5, ymm6, ymm7, ymm8, ymm9 - -%assign i 0 -%rep 8 - vmovdqa [pState + 32*i + off], APPEND(ymm, i) -%assign i (i+1) -%endrep - -%assign off (off + 256) -%endrep -%endif ;; %%KEY_SIZE == 256 - - ; Load read-only registers - vmovdqa ymm12, [rel mask31] - - mov rax, pState - - ; Shift LFSR 32-times, update state variables -%assign N 0 -%rep 32 - bits_reorg8 rax, N - nonlin_fun8 rax, ymm0 - vpsrld ymm0,1 ; Shift out LSB of W - lfsr_updt8 rax, N, ymm0 ; W (ymm0) used in LFSR update - not set to zero -%assign N N+1 -%endrep - - ; And once more, initial round from keygen phase = 33 times - bits_reorg8 rax, 0 - nonlin_fun8 rax - - vpxor ymm0, ymm0 - lfsr_updt8 rax, 0, ymm0 - - FUNC_RESTORE - - ret -%endmacro - -MKGLOBAL(asm_ZucInitialization_8_avx2,function,internal) -asm_ZucInitialization_8_avx2: - endbranch64 - ZUC_INIT_8 128 - -MKGLOBAL(asm_Zuc256Initialization_8_avx2,function,internal) -asm_Zuc256Initialization_8_avx2: - endbranch64 - ZUC_INIT_8 256 - -; -; Generate N*4 bytes of keystream -; for 8 buffers (where N is number of rounds) -; -%macro KEYGEN_8_AVX2 1 -%define %%NUM_ROUNDS %1 ; [in] Number of 4-byte rounds - -%ifdef LINUX - %define pState rdi - %define pKS rsi -%else - %define pState rcx - %define pKS rdx -%endif - - FUNC_SAVE - - ; Store 8 keystream pointers on the stack - ; and reserve memory for storing keystreams for all 8 buffers - mov r10, rsp - sub rsp, (8*8 + %%NUM_ROUNDS * 32) - and rsp, -32 - -%assign i 0 -%rep 2 - vmovdqa ymm0, [pKS + 32*i] - vmovdqa [rsp + 32*i], ymm0 -%assign i (i+1) -%endrep - - ; Load state pointer in RAX - mov rax, pState - - ; Load read-only registers - vmovdqa ymm12, [rel mask31] - - ; Generate N*4B of keystream in N rounds -%assign N 1 -%rep %%NUM_ROUNDS - bits_reorg8 rax, N, ymm10 - nonlin_fun8 rax, ymm0 - ; OFS_X3 XOR W (ymm0) and store in stack - vpxor ymm10, ymm0 - vmovdqa [rsp + 64 + (N-1)*32], ymm10 - vpxor ymm0, ymm0 - lfsr_updt8 rax, N, ymm0 -%assign N N+1 -%endrep - -%if (%%NUM_ROUNDS == 8) - ;; Load all OFS_X3 - vmovdqa xmm0,[rsp + 64] - vmovdqa xmm1,[rsp + 64 + 32*1] - vmovdqa xmm2,[rsp + 64 + 32*2] - vmovdqa xmm3,[rsp + 64 + 32*3] - vmovdqa xmm4,[rsp + 64 + 16] - vmovdqa xmm5,[rsp + 64 + 32*1 + 16] - vmovdqa xmm6,[rsp + 64 + 32*2 + 16] - vmovdqa xmm7,[rsp + 64 + 32*3 + 16] - - vinserti128 ymm0, ymm0, [rsp + 64 + 32*4], 0x01 - vinserti128 ymm1, ymm1, [rsp + 64 + 32*5], 0x01 - vinserti128 ymm2, ymm2, [rsp + 64 + 32*6], 0x01 - vinserti128 ymm3, ymm3, [rsp + 64 + 32*7], 0x01 - vinserti128 ymm4, ymm4, [rsp + 64 + 32*4 + 16], 0x01 - vinserti128 ymm5, ymm5, [rsp + 64 + 32*5 + 16], 0x01 - vinserti128 ymm6, ymm6, [rsp + 64 + 32*6 + 16], 0x01 - vinserti128 ymm7, ymm7, [rsp + 64 + 32*7 + 16], 0x01 - - TRANSPOSE8_U32_PRELOADED ymm0, ymm1, ymm2, ymm3, ymm4, ymm5, ymm6, ymm7, ymm8, ymm9 - - store32B_kstr8 ymm0, ymm1, ymm2, ymm3, ymm4, ymm5, ymm6, ymm7 - -%else ;; NUM_ROUNDS == 8 -%assign idx 0 -%rep %%NUM_ROUNDS - vmovdqa APPEND(ymm, idx), [rsp + 64 + idx*32] - store4B_kstr8 APPEND(ymm, idx) -%assign idx (idx + 1) -%endrep -%endif ;; NUM_ROUNDS == 8 - - ;; Reorder LFSR registers, as not all 16 rounds have been completed - REORDER_LFSR rax, %%NUM_ROUNDS - - ;; Clear stack frame containing keystream information -%ifdef SAFE_DATA - vpxor ymm0, ymm0 -%assign i 0 -%rep (2+%%NUM_ROUNDS) - vmovdqa [rsp + i*32], ymm0 -%assign i 
(i+1) -%endrep -%endif - - ;; Restore rsp pointer - mov rsp, r10 - - FUNC_RESTORE - -%endmacro - -;; -;; void asm_ZucGenKeystream32B_8_avx2(state8_t *pSta, u32* pKeyStr[8]) -;; -;; WIN64 -;; RCX - pSta -;; RDX - pKeyStr -;; -;; LIN64 -;; RDI - pSta -;; RSI - pKeyStr -;; -MKGLOBAL(asm_ZucGenKeystream32B_8_avx2,function,internal) -asm_ZucGenKeystream32B_8_avx2: - endbranch64 - KEYGEN_8_AVX2 8 - vzeroupper - ret - -;; -;; void asm_ZucGenKeystream8B_8_avx2(state8_t *pSta, u32* pKeyStr[8]) -;; -;; WIN64 -;; RCX - pSta -;; RDX - pKeyStr -;; -;; LIN64 -;; RDI - pSta -;; RSI - pKeyStr -;; -MKGLOBAL(asm_ZucGenKeystream8B_8_avx2,function,internal) -asm_ZucGenKeystream8B_8_avx2: - endbranch64 - KEYGEN_8_AVX2 2 - vzeroupper - ret - -;; -;; void asm_ZucGenKeystream4B_8_avx2(state8_t *pSta, u32* pKeyStr[8]) -;; -;; WIN64 -;; RCX - pSta -;; RDX - pKeyStr -;; -;; LIN64 -;; RDI - pSta -;; RSI - pKeyStr -;; -MKGLOBAL(asm_ZucGenKeystream4B_8_avx2,function,internal) -asm_ZucGenKeystream4B_8_avx2: - endbranch64 - KEYGEN_8_AVX2 1 - vzeroupper - ret - -;; -;; Encrypt N*4B bytes on all 8 buffers -;; where N is number of rounds (up to 8) -;; In final call, an array of final bytes is read -;; from memory and only these final bytes are of -;; plaintext are read and XOR'ed. -%macro CIPHERNx4B_8 4 -%define %%NROUNDS %1 -%define %%INITIAL_ROUND %2 -%define %%OFFSET %3 -%define %%LAST_CALL %4 - -%ifdef LINUX -%define %%TMP1 r8 -%define %%TMP2 r9 -%else -%define %%TMP1 rdi -%define %%TMP2 rsi -%endif - ; Load read-only registers - vmovdqa ymm12, [rel mask31] - - ; Generate N*4B of keystream in N rounds -%assign N 1 -%assign round (%%INITIAL_ROUND + N) -%rep %%NROUNDS - bits_reorg8 rax, round, ymm10 - nonlin_fun8 rax, ymm0 - ; OFS_XR XOR W (ymm0) - vpxor ymm10, ymm0 - vmovdqa [rsp + (N-1)*32], ymm10 - vpxor ymm0, ymm0 - lfsr_updt8 rax, round, ymm0 -%assign N N+1 -%assign round (round + 1) -%endrep - -%assign N 0 -%assign idx 8 -%rep %%NROUNDS - vmovdqa APPEND(ymm, idx), [rsp + N*32] -%assign N N+1 -%assign idx (idx+1) -%endrep - - TRANSPOSE8_U32 ymm8, ymm9, ymm10, ymm11, ymm12, ymm13, ymm14, \ - ymm15, ymm0, ymm1 - ;; XOR Input buffer with keystream in rounds of 32B - - mov r12, [pIn] - mov r13, [pIn + 8] - mov r14, [pIn + 16] - mov r15, [pIn + 24] -%if (%%LAST_CALL == 1) - ;; Save GP registers - mov [rsp + 32*8 + 16 + 8], %%TMP1 - mov [rsp + 32*8 + 16 + 16], %%TMP2 - - ;; Read in r10 the word containing the number of final bytes to read for each lane - movzx r10d, word [rsp + 8*32] - simd_load_avx2 ymm0, r12 + %%OFFSET, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 2] - simd_load_avx2 ymm1, r13 + %%OFFSET, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 4] - simd_load_avx2 ymm2, r14 + %%OFFSET, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 6] - simd_load_avx2 ymm3, r15 + %%OFFSET, r10, %%TMP1, %%TMP2 -%else - vmovdqu ymm0, [r12 + %%OFFSET] - vmovdqu ymm1, [r13 + %%OFFSET] - vmovdqu ymm2, [r14 + %%OFFSET] - vmovdqu ymm3, [r15 + %%OFFSET] -%endif - - mov r12, [pIn + 32] - mov r13, [pIn + 40] - mov r14, [pIn + 48] - mov r15, [pIn + 56] -%if (%%LAST_CALL == 1) - movzx r10d, word [rsp + 8*32 + 8] - simd_load_avx2 ymm4, r12 + %%OFFSET, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 10] - simd_load_avx2 ymm5, r13 + %%OFFSET, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 12] - simd_load_avx2 ymm6, r14 + %%OFFSET, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 14] - simd_load_avx2 ymm7, r15 + %%OFFSET, r10, %%TMP1, %%TMP2 -%else - vmovdqu ymm4, [r12 + %%OFFSET] - vmovdqu ymm5, [r13 + 
%%OFFSET] - vmovdqu ymm6, [r14 + %%OFFSET] - vmovdqu ymm7, [r15 + %%OFFSET] -%endif - ; Shuffle all keystreams and XOR with plaintext -%assign %%I 0 -%assign %%J 8 -%rep 8 - vpshufb ymm %+ %%J, [rel swap_mask] - vpxor ymm %+ %%J, ymm %+ %%I -%assign %%I (%%I + 1) -%assign %%J (%%J + 1) -%endrep - - ;; Write output - mov r12, [pOut] - mov r13, [pOut + 8] - mov r14, [pOut + 16] - mov r15, [pOut + 24] - -%if (%%LAST_CALL == 1) - add r12, %%OFFSET - add r13, %%OFFSET - add r14, %%OFFSET - add r15, %%OFFSET - ;; Read in r10 the word containing the number of final bytes to write for each lane - movzx r10d, word [rsp + 8*32] - simd_store_avx2 r12, ymm8, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 2] - simd_store_avx2 r13, ymm9, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 4] - simd_store_avx2 r14, ymm10, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 6] - simd_store_avx2 r15, ymm11, r10, %%TMP1, %%TMP2 -%else - vmovdqu [r12 + %%OFFSET], ymm8 - vmovdqu [r13 + %%OFFSET], ymm9 - vmovdqu [r14 + %%OFFSET], ymm10 - vmovdqu [r15 + %%OFFSET], ymm11 -%endif - - mov r12, [pOut + 32] - mov r13, [pOut + 40] - mov r14, [pOut + 48] - mov r15, [pOut + 56] - -%if (%%LAST_CALL == 1) - add r12, %%OFFSET - add r13, %%OFFSET - add r14, %%OFFSET - add r15, %%OFFSET - movzx r10d, word [rsp + 8*32 + 8] - simd_store_avx2 r12, ymm12, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 10] - simd_store_avx2 r13, ymm13, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 12] - simd_store_avx2 r14, ymm14, r10, %%TMP1, %%TMP2 - movzx r10d, word [rsp + 8*32 + 14] - simd_store_avx2 r15, ymm15, r10, %%TMP1, %%TMP2 - - ; Restore registers - mov %%TMP1, [rsp + 32*8 + 16 + 8] - mov %%TMP2, [rsp + 32*8 + 16 + 16] -%else - vmovdqu [r12 + %%OFFSET], ymm12 - vmovdqu [r13 + %%OFFSET], ymm13 - vmovdqu [r14 + %%OFFSET], ymm14 - vmovdqu [r15 + %%OFFSET], ymm15 -%endif - -%endmacro - -;; -;; void asm_ZucCipher_8_avx2(state16_t *pSta, u64 *pIn[8], -;; u64 *pOut[8], u16 lengths, u64 min_length); -;; -;; WIN64 -;; RCX - pSta -;; RDX - pIn -;; R8 - pOut -;; R9 - lengths -;; rsp + 40 - min_length -;; -;; LIN64 -;; RDI - pSta -;; RSI - pIn -;; RDX - pOut -;; RCX - lengths -;; R8 - min_length -;; -MKGLOBAL(asm_ZucCipher_8_avx2,function,internal) -asm_ZucCipher_8_avx2: - endbranch64 -%ifdef LINUX - %define pState rdi - %define pIn rsi - %define pOut rdx - %define lengths rcx - %define arg5 r8 -%else - %define pState rcx - %define pIn rdx - %define pOut r8 - %define lengths r9 - %define arg5 [rsp + 40] -%endif - -%define min_length r10 -%define buf_idx r11 - - mov min_length, arg5 - - or min_length, min_length - jz exit_cipher32 - - FUNC_SAVE - - ;; Convert all lengths from UINT16_MAX (indicating that lane is not valid) to min length - vmovd xmm0, DWORD(min_length) - vpshufb xmm0, xmm0, [rel broadcast_word] - vmovdqa xmm1, [lengths] - vpcmpeqw xmm2, xmm2 ;; Get all ff's in XMM register - vpcmpeqw xmm3, xmm1, xmm2 ;; Mask with FFFF in NULL jobs - - vpand xmm4, xmm3, xmm0 ;; Length of valid job in all NULL jobs - vpxor xmm2, xmm3 ;; Mask with 0000 in NULL jobs - vpand xmm1, xmm2 ;; Zero out lengths of NULL jobs - vpor xmm1, xmm4 ;; XMM1 contain updated lengths - - ; Round up to nearest multiple of 4 bytes - vpaddw xmm0, [rel all_threes] - vpand xmm0, [rel all_fffcs] - - ; Calculate remaining bytes to encrypt after function call - vpsubw xmm2, xmm1, xmm0 - vpxor xmm3, xmm3 - vpcmpgtw xmm4, xmm2, xmm3 ;; Mask with FFFF in lengths > 0 - ; Set to zero the lengths of the lanes which are going to be completed (lengths < 0) - vpand 
xmm2, xmm4 - vmovdqa [lengths], xmm2 ; Update in memory the final updated lengths - - ; Calculate number of bytes to encrypt after round of 32 bytes (up to 31 bytes), - ; for each lane, and store it in stack to be used in the last round - vpsubw xmm1, xmm2 ; Bytes to encrypt in all lanes - vpand xmm1, [rel all_1fs] ; Number of final bytes (up to 31 bytes) for each lane - vpcmpeqw xmm2, xmm1, xmm3 ;; Mask with FFFF in lengths == 0 - vpand xmm2, [rel all_20s] ;; 32 in positions where lengths was 0 - vpor xmm1, xmm2 ;; Number of final bytes (up to 32 bytes) for each lane - - ; Allocate stack frame to store keystreams (32*8 bytes), number of final bytes (16 bytes), - ; space for rsp (8 bytes) and 2 GP registers (16 bytes) that will be clobbered later - mov rax, rsp - sub rsp, (32*8 + 16 + 16 + 8) - and rsp, -32 - xor buf_idx, buf_idx - vmovdqu [rsp + 32*8], xmm1 - mov [rsp + 32*8 + 16], rax - - ; Load state pointer in RAX - mov rax, pState - -loop_cipher64: - cmp min_length, 64 - jl exit_loop_cipher64 - - CIPHERNx4B_8 8, 0, buf_idx, 0 - - add buf_idx, 32 - sub min_length, 32 - - CIPHERNx4B_8 8, 8, buf_idx, 0 - - add buf_idx, 32 - sub min_length, 32 - - jmp loop_cipher64 -exit_loop_cipher64: - - ; Check if at least 32 bytes are left to encrypt - cmp min_length, 32 - jl less_than_32 - - CIPHERNx4B_8 8, 0, buf_idx, 0 - REORDER_LFSR rax, 8 - - add buf_idx, 32 - sub min_length, 32 - - ; Check if there are more bytes left to encrypt -less_than_32: - - mov r15, min_length - add r15, 3 - shr r15, 2 ;; number of rounds left (round up length to nearest multiple of 4B) - jz exit_final_rounds - -_final_rounds_is_1_8: - cmp r15, 4 - je _num_final_rounds_is_4 - jl _final_rounds_is_1_3 - - ; Final rounds 5-8 - cmp r15, 8 - je _num_final_rounds_is_8 - cmp r15, 7 - je _num_final_rounds_is_7 - cmp r15, 6 - je _num_final_rounds_is_6 - cmp r15, 5 - je _num_final_rounds_is_5 - -_final_rounds_is_1_3: - cmp r15, 3 - je _num_final_rounds_is_3 - cmp r15, 2 - je _num_final_rounds_is_2 - - jmp _num_final_rounds_is_1 - - ; Perform encryption of last bytes (<= 31 bytes) and reorder LFSR registers -%assign I 1 -%rep 8 -APPEND(_num_final_rounds_is_,I): - CIPHERNx4B_8 I, 0, buf_idx, 1 - REORDER_LFSR rax, I - add buf_idx, (I*4) - jmp exit_final_rounds -%assign I (I + 1) -%endrep - -exit_final_rounds: - ;; update in/out pointers - - ; Broadcast buf_idx in all qwords of ymm0 - vmovq xmm0, buf_idx - vpshufd xmm0, xmm0, 0x44 - vperm2f128 ymm0, ymm0, 0x0 - vpaddq ymm1, ymm0, [pIn] - vpaddq ymm2, ymm0, [pIn + 32] - vmovdqa [pIn], ymm1 - vmovdqa [pIn + 32], ymm2 - vpaddq ymm1, ymm0, [pOut] - vpaddq ymm2, ymm0, [pOut + 32] - vmovdqa [pOut], ymm1 - vmovdqa [pOut + 32], ymm2 - - ;; Clear stack frame containing keystream information -%ifdef SAFE_DATA - vpxor ymm0, ymm0 -%assign i 0 -%rep 8 - vmovdqa [rsp + i*32], ymm0 -%assign i (i+1) -%endrep -%endif - ; Restore rsp - mov rsp, [rsp + 32*8 + 16] - - FUNC_RESTORE - -exit_cipher32: - vzeroupper - ret - -;---------------------------------------------------------------------------------------- -;---------------------------------------------------------------------------------------- - -mksection stack-noexec diff --git a/lib/avx2_t1/README b/lib/avx2_t1/README new file mode 100644 index 0000000000000000000000000000000000000000..82bf0bcda889bcf7f6673d3f62ace482ab94ef2b --- /dev/null +++ b/lib/avx2_t1/README @@ -0,0 +1,3 @@ +AVX2 TYPE1: +- AVX +- AVX2, BMI2, AESNI, PCLMULQDQ diff --git a/lib/avx2/aes128_gcm_by8_avx2.asm b/lib/avx2_t1/aes128_gcm_by8_avx2.asm similarity index 97% rename from 
lib/avx2/aes128_gcm_by8_avx2.asm rename to lib/avx2_t1/aes128_gcm_by8_avx2.asm index 0d2633e280476110034818dd1213976c92396b1d..789453183c5ef0f4313a46baf2f71128e4f7b7b5 100644 --- a/lib/avx2/aes128_gcm_by8_avx2.asm +++ b/lib/avx2_t1/aes128_gcm_by8_avx2.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM128_MODE 1 -%include "avx2/gcm_avx_gen4.asm" +%include "avx2_t1/gcm_avx_gen4.asm" diff --git a/lib/avx2/aes192_gcm_by8_avx2.asm b/lib/avx2_t1/aes192_gcm_by8_avx2.asm similarity index 97% rename from lib/avx2/aes192_gcm_by8_avx2.asm rename to lib/avx2_t1/aes192_gcm_by8_avx2.asm index cd9a60b6ce8e07175fc7f09f0c5d92f132652947..6f3255a303f89b3c4f5a22d99c80defdab74c844 100644 --- a/lib/avx2/aes192_gcm_by8_avx2.asm +++ b/lib/avx2_t1/aes192_gcm_by8_avx2.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM192_MODE 1 -%include "avx2/gcm_avx_gen4.asm" +%include "avx2_t1/gcm_avx_gen4.asm" diff --git a/lib/avx2/aes256_gcm_by8_avx2.asm b/lib/avx2_t1/aes256_gcm_by8_avx2.asm similarity index 97% rename from lib/avx2/aes256_gcm_by8_avx2.asm rename to lib/avx2_t1/aes256_gcm_by8_avx2.asm index 71e90bf456feb57ca17f6a1da391abc1a5df7857..4e3bf23e64025931be13f8f2f30b30ca017d1b98 100644 --- a/lib/avx2/aes256_gcm_by8_avx2.asm +++ b/lib/avx2_t1/aes256_gcm_by8_avx2.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM256_MODE 1 -%include "avx2/gcm_avx_gen4.asm" +%include "avx2_t1/gcm_avx_gen4.asm" diff --git a/lib/avx2/chacha20_avx2.asm b/lib/avx2_t1/chacha20_avx2.asm similarity index 98% rename from lib/avx2/chacha20_avx2.asm rename to lib/avx2_t1/chacha20_avx2.asm index 2b3b46807885dd45934776a2b0e067abf472323f..25e116fd17c7d289a7d9301189941ff51c809a79 100644 --- a/lib/avx2/chacha20_avx2.asm +++ b/lib/avx2_t1/chacha20_avx2.asm @@ -81,6 +81,7 @@ db 2, 3, 0, 1, 6, 7, 4, 5, 10, 11, 8, 9, 14, 15, 12, 13 struc STACK _STATE: reso 32 ; Space to store first 8 states _YMM_SAVE: resy 2 ; Space to store up to 2 temporary YMM registers +_XMM_WIN_SAVE: reso 10 ; Space to store up to 10 XMM registers _GP_SAVE: resq 7 ; Space to store up to 7 GP registers _RSP_SAVE: resq 1 ; Space to store rsp pointer endstruc @@ -582,6 +583,15 @@ submit_job_chacha20_enc_dec_avx2: mov rax, rsp sub rsp, STACK_SIZE and rsp, -32 +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep +%endif mov [rsp + _RSP_SAVE], rax ; save RSP xor off, off @@ -917,7 +927,7 @@ partial_block: no_partial_block: endbranch64 %ifdef SAFE_DATA - vpxor ymm0, ymm0 + vpxor ymm0, ymm0 ; Clear stack frame %assign i 0 %rep 16 @@ -928,12 +938,26 @@ no_partial_block: vmovdqa [rsp + _YMM_SAVE + 32], ymm0 %endif +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep +%endif mov rsp, [rsp + _RSP_SAVE] +%ifdef SAFE_DATA + clear_scratch_ymms_asm +%else + vzeroupper +%endif + exit: mov rax, job or dword [rax + _status], IMB_STATUS_COMPLETED_CIPHER - clear_all_ymms_asm ret @@ -981,6 +1005,13 @@ chacha20_enc_dec_ks_avx2: mov [rsp + _GP_SAVE + 40], rbp %ifndef LINUX mov [rsp + _GP_SAVE + 48], rdi +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov [rsp + _RSP_SAVE], rax ; save RSP @@ -1353,7 +1384,7 @@ no_partial_block_ks: mov [ctx + 
LastBlkCount], blk_cnt %ifdef SAFE_DATA - vpxor ymm0, ymm0 + vpxor ymm0, ymm0 ; Clear stack frame %assign i 0 %rep 16 @@ -1372,11 +1403,23 @@ no_partial_block_ks: mov rbp, [rsp + _GP_SAVE + 40] %ifndef LINUX mov rdi, [rsp + _GP_SAVE + 48] +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov rsp, [rsp + _RSP_SAVE] +%ifdef SAFE_DATA + clear_scratch_ymms_asm +%else + vzeroupper +%endif + exit_ks: - clear_all_ymms_asm ret diff --git a/lib/avx2/gcm_avx_gen4.asm b/lib/avx2_t1/gcm_avx_gen4.asm similarity index 99% rename from lib/avx2/gcm_avx_gen4.asm rename to lib/avx2_t1/gcm_avx_gen4.asm index a4cb7be8616166fc145159fb74b2b818d93a4281..189280bd9fb5261925bea21db9e3c32da2e9582d 100644 --- a/lib/avx2/gcm_avx_gen4.asm +++ b/lib/avx2_t1/gcm_avx_gen4.asm @@ -491,7 +491,7 @@ default rel %ifidn %%ENC_DEC, DEC vmovdqa xmm3, xmm1 - vpxor xmm9, xmm1 ; Cyphertext XOR E(K, Yn) + vpxor xmm9, xmm1 ; Ciphertext XOR E(K, Yn) mov r15, %%PLAIN_CYPH_LEN add r15, r13 diff --git a/lib/avx2_t1/mb_mgr_avx2.c b/lib/avx2_t1/mb_mgr_avx2.c new file mode 100644 index 0000000000000000000000000000000000000000..731658a87784c6e43f97b894566bdc69e0cf1f97 --- /dev/null +++ b/lib/avx2_t1/mb_mgr_avx2.c @@ -0,0 +1,100 @@ +/******************************************************************************* + Copyright (c) 2012-2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/cpu_feature.h" +#include "include/aesni_emu.h" +#include "include/error.h" +#include "include/arch_x86_64.h" + +IMB_DLL_LOCAL void +init_mb_mgr_avx2_internal(IMB_MGR *state, const int reset_mgrs) +{ +#ifdef SAFE_PARAM + if (state == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); + return; + } +#endif + + if (!(state->features & IMB_FEATURE_AESNI)) { + fallback_no_aesni(state, 1); + return; + } + + /* reset error status */ + imb_set_errno(state, 0); + + state->features = cpu_feature_adjust(state->flags, + cpu_feature_detect()); + + if ((state->features & IMB_CPUFLAGS_AVX2_T2) == + IMB_CPUFLAGS_AVX2_T2) + init_mb_mgr_avx2_t2_internal(state, reset_mgrs); + else + init_mb_mgr_avx2_t1_internal(state, reset_mgrs); +} + +void +init_mb_mgr_avx2(IMB_MGR *state) +{ + init_mb_mgr_avx2_internal(state, 1); + + if (!self_test(state)) + imb_set_errno(state, IMB_ERR_SELFTEST); +} + +IMB_JOB *submit_job_avx2(IMB_MGR *state) +{ + return IMB_SUBMIT_JOB(state); +} + +IMB_JOB *flush_job_avx2(IMB_MGR *state) +{ + return IMB_FLUSH_JOB(state); +} + +uint32_t queue_size_avx2(IMB_MGR *state) +{ + return IMB_QUEUE_SIZE(state); +} + +IMB_JOB *submit_job_nocheck_avx2(IMB_MGR *state) +{ + return IMB_SUBMIT_JOB_NOCHECK(state); +} + +IMB_JOB *get_next_job_avx2(IMB_MGR *state) +{ + return IMB_GET_NEXT_JOB(state); +} + +IMB_JOB *get_completed_job_avx2(IMB_MGR *state) +{ + return IMB_GET_COMPLETED_JOB(state); +} diff --git a/lib/avx2_t1/mb_mgr_avx2_t1.c b/lib/avx2_t1/mb_mgr_avx2_t1.c new file mode 100644 index 0000000000000000000000000000000000000000..249c62d3f11cecaf80bbf9e398e6205390773f93 --- /dev/null +++ b/lib/avx2_t1/mb_mgr_avx2_t1.c @@ -0,0 +1,483 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include +#include +#include + +#define AVX2 + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/kasumi_interface.h" +#include "include/zuc_internal.h" +#include "include/snow3g.h" +#include "include/snow3g_submit.h" +#include "include/gcm.h" +#include "include/chacha20_poly1305.h" + +#include "include/save_xmms.h" +#include "include/des.h" +#include "include/cpu_feature.h" +#include "include/noaesni.h" +#include "include/aesni_emu.h" +#include "include/error.h" + +#include "include/arch_sse_type1.h" /* poly1305, snow3g */ +#include "include/arch_avx_type1.h" +#include "include/arch_avx2_type1.h" + +#include "include/ooo_mgr_reset.h" + +#define SAVE_XMMS save_xmms_avx +#define RESTORE_XMMS restore_xmms_avx + +/* JOB API */ +#define SUBMIT_JOB submit_job_avx2_t1 +#define FLUSH_JOB flush_job_avx2_t1 +#define QUEUE_SIZE queue_size_avx2_t1 +#define SUBMIT_JOB_NOCHECK submit_job_nocheck_avx2_t1 +#define GET_NEXT_JOB get_next_job_avx2_t1 +#define GET_COMPLETED_JOB get_completed_job_avx2_t1 +#define GET_NEXT_BURST get_next_burst_avx2_t1 +#define SUBMIT_BURST submit_burst_avx2_t1 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_avx2_t1 +#define FLUSH_BURST flush_burst_avx2_t1 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_avx2_t1 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_avx2_t1 +#define SUBMIT_HASH_BURST submit_hash_burst_avx2_t1 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_avx2_t1 + +/* Hash */ +#define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_AVX2 +#define FLUSH_JOB_HASH FLUSH_JOB_HASH_AVX2 + +/* Cipher encrypt / decrypt */ +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_AVX2 +#define FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_AVX2 +#define SUBMIT_JOB_CIPHER_DEC SUBMIT_JOB_CIPHER_DEC_AVX2 + +/* AES-GCM */ +#define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_avx_gen4 +#define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_avx_gen4 +#define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_avx_gen4 +#define AES_GCM_ENC_IV_192 aes_gcm_enc_var_iv_192_avx_gen4 +#define AES_GCM_DEC_IV_256 aes_gcm_dec_var_iv_256_avx_gen4 +#define AES_GCM_ENC_IV_256 aes_gcm_enc_var_iv_256_avx_gen4 + +#define SUBMIT_JOB_AES_GCM_DEC submit_job_gcm_dec_avx2 +#define SUBMIT_JOB_AES_GCM_ENC submit_job_gcm_enc_avx2 + +/* AES-CBC */ +#define SUBMIT_JOB_AES_CBC_128_ENC submit_job_aes128_enc_avx +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_avx +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_avx + +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_avx +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_avx +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_avx + +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_avx +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_avx +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_avx + +#define AES_CBC_DEC_128 aes_cbc_dec_128_avx +#define AES_CBC_DEC_192 aes_cbc_dec_192_avx +#define AES_CBC_DEC_256 aes_cbc_dec_256_avx + +/* AES-CBCS */ +#define SUBMIT_JOB_AES128_CBCS_1_9_ENC submit_job_aes128_cbcs_1_9_enc_avx +#define FLUSH_JOB_AES128_CBCS_1_9_ENC flush_job_aes128_cbcs_1_9_enc_avx +#define SUBMIT_JOB_AES128_CBCS_1_9_DEC submit_job_aes128_cbcs_1_9_dec_avx +#define AES_CBCS_1_9_DEC_128 aes_cbcs_1_9_dec_128_avx + +/* AES-ECB */ +#define SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_avx2 +#define SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_avx2 +#define SUBMIT_JOB_AES_ECB_192_ENC 
submit_job_aes_ecb_192_enc_avx2 +#define SUBMIT_JOB_AES_ECB_192_DEC submit_job_aes_ecb_192_dec_avx2 +#define SUBMIT_JOB_AES_ECB_256_ENC submit_job_aes_ecb_256_enc_avx2 +#define SUBMIT_JOB_AES_ECB_256_DEC submit_job_aes_ecb_256_dec_avx2 + +#define AES_ECB_ENC_128 aes_ecb_enc_128_avx +#define AES_ECB_ENC_192 aes_ecb_enc_192_avx +#define AES_ECB_ENC_256 aes_ecb_enc_256_avx +#define AES_ECB_DEC_128 aes_ecb_dec_128_avx +#define AES_ECB_DEC_192 aes_ecb_dec_192_avx +#define AES_ECB_DEC_256 aes_ecb_dec_256_avx + +/* AES-CTR */ +#define AES_CTR_128 aes_cntr_128_avx +#define AES_CTR_192 aes_cntr_192_avx +#define AES_CTR_256 aes_cntr_256_avx +#define AES_CTR_128_BIT aes_cntr_bit_128_avx +#define AES_CTR_192_BIT aes_cntr_bit_192_avx +#define AES_CTR_256_BIT aes_cntr_bit_256_avx + +/* AES-CCM */ +#define AES_CNTR_CCM_128 aes_cntr_ccm_128_avx +#define AES_CNTR_CCM_256 aes_cntr_ccm_256_avx + +#define FLUSH_JOB_AES128_CCM_AUTH flush_job_aes128_ccm_auth_avx +#define SUBMIT_JOB_AES128_CCM_AUTH submit_job_aes128_ccm_auth_avx + +#define FLUSH_JOB_AES256_CCM_AUTH flush_job_aes256_ccm_auth_avx +#define SUBMIT_JOB_AES256_CCM_AUTH submit_job_aes256_ccm_auth_avx + +/* AES-CMAC */ +#define FLUSH_JOB_AES128_CMAC_AUTH flush_job_aes128_cmac_auth_avx +#define SUBMIT_JOB_AES128_CMAC_AUTH submit_job_aes128_cmac_auth_avx + +#define FLUSH_JOB_AES256_CMAC_AUTH flush_job_aes256_cmac_auth_avx +#define SUBMIT_JOB_AES256_CMAC_AUTH submit_job_aes256_cmac_auth_avx + +/* AES-CFB */ +#define AES_CFB_128_ONE aes_cfb_128_one_avx2 +#define AES_CFB_256_ONE aes_cfb_256_one_avx2 + +/* AES-XCBC */ +#define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_avx +#define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_avx + +/* PON */ +#define SUBMIT_JOB_PON_ENC submit_job_pon_enc_avx +#define SUBMIT_JOB_PON_DEC submit_job_pon_dec_avx +#define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_avx +#define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_avx + +/* SHA1/224/256/384/512 */ +#define SUBMIT_JOB_SHA1 submit_job_sha1_avx2 +#define FLUSH_JOB_SHA1 flush_job_sha1_avx2 +#define SUBMIT_JOB_SHA224 submit_job_sha224_avx2 +#define FLUSH_JOB_SHA224 flush_job_sha224_avx2 +#define SUBMIT_JOB_SHA256 submit_job_sha256_avx2 +#define FLUSH_JOB_SHA256 flush_job_sha256_avx2 +#define SUBMIT_JOB_SHA384 submit_job_sha384_avx2 +#define FLUSH_JOB_SHA384 flush_job_sha384_avx2 +#define SUBMIT_JOB_SHA512 submit_job_sha512_avx2 +#define FLUSH_JOB_SHA512 flush_job_sha512_avx2 + +/* HMAC-SHA1/224/256/384/512 */ +#define SUBMIT_JOB_HMAC submit_job_hmac_avx2 +#define FLUSH_JOB_HMAC flush_job_hmac_avx2 +#define SUBMIT_JOB_HMAC_SHA_224 submit_job_hmac_sha_224_avx2 +#define FLUSH_JOB_HMAC_SHA_224 flush_job_hmac_sha_224_avx2 +#define SUBMIT_JOB_HMAC_SHA_256 submit_job_hmac_sha_256_avx2 +#define FLUSH_JOB_HMAC_SHA_256 flush_job_hmac_sha_256_avx2 +#define SUBMIT_JOB_HMAC_SHA_384 submit_job_hmac_sha_384_avx2 +#define FLUSH_JOB_HMAC_SHA_384 flush_job_hmac_sha_384_avx2 +#define SUBMIT_JOB_HMAC_SHA_512 submit_job_hmac_sha_512_avx2 +#define FLUSH_JOB_HMAC_SHA_512 flush_job_hmac_sha_512_avx2 +#define SUBMIT_JOB_HMAC_MD5 submit_job_hmac_md5_avx2 +#define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_avx2 + +/* CHACHA20 & POLY1305 */ +#define SUBMIT_JOB_CHACHA20_ENC_DEC submit_job_chacha20_enc_dec_avx2 +#define SUBMIT_JOB_CHACHA20_POLY1305 aead_chacha20_poly1305_avx2 +#define SUBMIT_JOB_CHACHA20_POLY1305_SGL aead_chacha20_poly1305_sgl_avx2 +#define POLY1305_MAC poly1305_mac_scalar + +/* ZUC EEA3 & EIA3 */ +#define SUBMIT_JOB_ZUC_EEA3 submit_job_zuc_eea3_avx2 +#define FLUSH_JOB_ZUC_EEA3 
flush_job_zuc_eea3_avx2 +#define SUBMIT_JOB_ZUC_EIA3 submit_job_zuc_eia3_avx2 +#define FLUSH_JOB_ZUC_EIA3 flush_job_zuc_eia3_avx2 +#define SUBMIT_JOB_ZUC256_EEA3 submit_job_zuc256_eea3_avx2 +#define FLUSH_JOB_ZUC256_EEA3 flush_job_zuc256_eea3_avx2 +#define SUBMIT_JOB_ZUC256_EIA3 submit_job_zuc256_eia3_avx2 +#define FLUSH_JOB_ZUC256_EIA3 flush_job_zuc256_eia3_avx2 + +/* SNOW-V */ +#define SUBMIT_JOB_SNOW_V snow_v_avx +#define SUBMIT_JOB_SNOW_V_AEAD snow_v_aead_init_avx + +/* SNOW3G UE2 & UIA2 */ +static IMB_JOB * +submit_snow3g_uea2_job_avx2_t1(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + if ((job->msg_len_to_cipher_in_bits & 7) || + (job->cipher_start_offset_in_bits & 7)) + return def_submit_snow3g_uea2_job(state, job); + + return submit_job_snow3g_uea2_sse(snow3g_uea2_ooo, job); +} + +static IMB_JOB * +flush_snow3g_uea2_job_avx2_t1(IMB_MGR *state) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + return flush_job_snow3g_uea2_sse(snow3g_uea2_ooo); +} + +#define SUBMIT_JOB_SNOW3G_UEA2 submit_snow3g_uea2_job_avx2_t1 +#define FLUSH_JOB_SNOW3G_UEA2 flush_snow3g_uea2_job_avx2_t1 + +#define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_sse +#define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_sse + +/* AES-DOCSIS */ +#define ETHERNET_FCS ethernet_fcs_avx_local + +static void reset_ooo_mgrs(IMB_MGR *state) +{ + /* Init AES out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_ooo, 8); + ooo_mgr_aes_reset(state->aes192_ooo, 8); + ooo_mgr_aes_reset(state->aes256_ooo, 8); + + /* DOCSIS SEC BPI (AES CBC + AES CFB for partial block) + * uses same settings as AES CBC. + */ + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 8); + + /* Init ZUC out-of-order fields */ + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 8); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 8); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 8); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 8); + + /* Init HMAC/SHA1 out-of-order fields */ + ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, AVX2_NUM_SHA1_LANES); + + /* Init HMAC/SHA224 out-of-order fields */ + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, + AVX2_NUM_SHA256_LANES); + + /* Init HMAC/SHA256 out-of-order fields */ + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, + AVX2_NUM_SHA256_LANES); + + /* Init HMAC/SHA384 out-of-order fields */ + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + AVX2_NUM_SHA512_LANES); + + /* Init HMAC/SHA512 out-of-order fields */ + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + AVX2_NUM_SHA512_LANES); + + /* Init HMAC/MD5 out-of-order fields */ + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, AVX2_NUM_MD5_LANES); + + /* Init AES/XCBC OOO fields */ + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 8); + + /* Init AES-CCM auth out-of-order fields */ + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 8); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 8); + + /* Init AES-CMAC auth out-of-order fields */ + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 8); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 8); + + /* Init AES CBC-S out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 8); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, AVX2_NUM_SHA1_LANES); + + /* Init SHA224 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_224_ooo, AVX2_NUM_SHA256_LANES); + + /* Init SHA256 
out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, AVX2_NUM_SHA256_LANES); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, AVX2_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, AVX2_NUM_SHA512_LANES); + + /* Init SNOW3G-UEA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 4); + + /* Init SNOW3G-UIA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 4); +} + +IMB_DLL_LOCAL void +init_mb_mgr_avx2_t1_internal(IMB_MGR *state, const int reset_mgrs) +{ + /* Check if CPU flags needed for AVX2 interface are present */ + if ((state->features & IMB_CPUFLAGS_AVX2) != IMB_CPUFLAGS_AVX2) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + + /* Set architecture for future checks */ + state->used_arch = (uint32_t) IMB_ARCH_AVX2; + + if (reset_mgrs) { + reset_ooo_mgrs(state); + + /* Init "in order" components */ + state->next_job = 0; + state->earliest_job = -1; + } + + /* set handlers */ + state->get_next_job = GET_NEXT_JOB; + state->submit_job = SUBMIT_JOB; + state->submit_job_nocheck = SUBMIT_JOB_NOCHECK; + state->get_completed_job = GET_COMPLETED_JOB; + state->flush_job = FLUSH_JOB; + state->queue_size = QUEUE_SIZE; + state->get_next_burst = GET_NEXT_BURST; + state->submit_burst = SUBMIT_BURST; + state->submit_burst_nocheck= SUBMIT_BURST_NOCHECK; + state->flush_burst = FLUSH_BURST; + state->submit_cipher_burst = SUBMIT_CIPHER_BURST; + state->submit_cipher_burst_nocheck = SUBMIT_CIPHER_BURST_NOCHECK; + state->submit_hash_burst = SUBMIT_HASH_BURST; + state->submit_hash_burst_nocheck = SUBMIT_HASH_BURST_NOCHECK; + + state->keyexp_128 = aes_keyexp_128_avx2; + state->keyexp_192 = aes_keyexp_192_avx2; + state->keyexp_256 = aes_keyexp_256_avx2; + + state->cmac_subkey_gen_128 = aes_cmac_subkey_gen_avx2; + state->cmac_subkey_gen_256 = aes_cmac_256_subkey_gen_avx2; + + state->xcbc_keyexp = aes_xcbc_expand_key_avx2; + state->des_key_sched = des_key_schedule; + + state->sha1_one_block = sha1_one_block_avx2; + state->sha1 = sha1_avx2; + state->sha224_one_block = sha224_one_block_avx2; + state->sha224 = sha224_avx2; + state->sha256_one_block = sha256_one_block_avx2; + state->sha256 = sha256_avx2; + state->sha384_one_block = sha384_one_block_avx2; + state->sha384 = sha384_avx2; + state->sha512_one_block = sha512_one_block_avx2; + state->sha512 = sha512_avx2; + state->md5_one_block = md5_one_block_avx2; + + state->aes128_cfb_one = aes_cfb_128_one_avx2; + + state->eea3_1_buffer = zuc_eea3_1_buffer_avx2; + state->eea3_4_buffer = zuc_eea3_4_buffer_avx; + state->eea3_n_buffer = zuc_eea3_n_buffer_avx2; + state->eia3_1_buffer = zuc_eia3_1_buffer_avx2; + state->eia3_n_buffer = zuc_eia3_n_buffer_avx2; + + state->f8_1_buffer = kasumi_f8_1_buffer_avx; + state->f8_1_buffer_bit = kasumi_f8_1_buffer_bit_avx; + state->f8_2_buffer = kasumi_f8_2_buffer_avx; + state->f8_3_buffer = kasumi_f8_3_buffer_avx; + state->f8_4_buffer = kasumi_f8_4_buffer_avx; + state->f8_n_buffer = kasumi_f8_n_buffer_avx; + state->f9_1_buffer = kasumi_f9_1_buffer_avx; + state->f9_1_buffer_user = kasumi_f9_1_buffer_user_avx; + state->kasumi_init_f8_key_sched = kasumi_init_f8_key_sched_avx; + state->kasumi_init_f9_key_sched = kasumi_init_f9_key_sched_avx; + state->kasumi_key_sched_size = kasumi_key_sched_size_avx; + + state->snow3g_f8_1_buffer_bit = snow3g_f8_1_buffer_bit_avx2; + state->snow3g_f8_1_buffer = snow3g_f8_1_buffer_avx2; + state->snow3g_f8_2_buffer = snow3g_f8_2_buffer_avx2; + 
state->snow3g_f8_4_buffer = snow3g_f8_4_buffer_avx2; + state->snow3g_f8_8_buffer = snow3g_f8_8_buffer_avx2; + state->snow3g_f8_n_buffer = snow3g_f8_n_buffer_avx2; + state->snow3g_f8_8_buffer_multikey = snow3g_f8_8_buffer_multikey_avx2; + state->snow3g_f8_n_buffer_multikey = snow3g_f8_n_buffer_multikey_avx2; + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_avx2; + state->snow3g_init_key_sched = snow3g_init_key_sched_avx2; + state->snow3g_key_sched_size = snow3g_key_sched_size_avx2; + + state->hec_32 = hec_32_avx; + state->hec_64 = hec_64_avx; + + state->crc32_ethernet_fcs = ethernet_fcs_avx; + state->crc16_x25 = crc16_x25_avx; + state->crc32_sctp = crc32_sctp_avx; + state->crc24_lte_a = crc24_lte_a_avx; + state->crc24_lte_b = crc24_lte_b_avx; + state->crc16_fp_data = crc16_fp_data_avx; + state->crc11_fp_header = crc11_fp_header_avx; + state->crc7_fp_header = crc7_fp_header_avx; + state->crc10_iuup_data = crc10_iuup_data_avx; + state->crc6_iuup_header = crc6_iuup_header_avx; + state->crc32_wimax_ofdma_data = crc32_wimax_ofdma_data_avx; + state->crc8_wimax_ofdma_hcs = crc8_wimax_ofdma_hcs_avx; + + state->chacha20_poly1305_init = init_chacha20_poly1305_avx; + state->chacha20_poly1305_enc_update = update_enc_chacha20_poly1305_avx2; + state->chacha20_poly1305_dec_update = update_dec_chacha20_poly1305_avx2; + state->chacha20_poly1305_finalize = finalize_chacha20_poly1305_avx; + + state->gcm128_enc = aes_gcm_enc_128_avx_gen4; + state->gcm192_enc = aes_gcm_enc_192_avx_gen4; + state->gcm256_enc = aes_gcm_enc_256_avx_gen4; + state->gcm128_dec = aes_gcm_dec_128_avx_gen4; + state->gcm192_dec = aes_gcm_dec_192_avx_gen4; + state->gcm256_dec = aes_gcm_dec_256_avx_gen4; + state->gcm128_init = aes_gcm_init_128_avx_gen4; + state->gcm192_init = aes_gcm_init_192_avx_gen4; + state->gcm256_init = aes_gcm_init_256_avx_gen4; + state->gcm128_init_var_iv = aes_gcm_init_var_iv_128_avx_gen4; + state->gcm192_init_var_iv = aes_gcm_init_var_iv_192_avx_gen4; + state->gcm256_init_var_iv = aes_gcm_init_var_iv_256_avx_gen4; + state->gcm128_enc_update = aes_gcm_enc_128_update_avx_gen4; + state->gcm192_enc_update = aes_gcm_enc_192_update_avx_gen4; + state->gcm256_enc_update = aes_gcm_enc_256_update_avx_gen4; + state->gcm128_dec_update = aes_gcm_dec_128_update_avx_gen4; + state->gcm192_dec_update = aes_gcm_dec_192_update_avx_gen4; + state->gcm256_dec_update = aes_gcm_dec_256_update_avx_gen4; + state->gcm128_enc_finalize = aes_gcm_enc_128_finalize_avx_gen4; + state->gcm192_enc_finalize = aes_gcm_enc_192_finalize_avx_gen4; + state->gcm256_enc_finalize = aes_gcm_enc_256_finalize_avx_gen4; + state->gcm128_dec_finalize = aes_gcm_dec_128_finalize_avx_gen4; + state->gcm192_dec_finalize = aes_gcm_dec_192_finalize_avx_gen4; + state->gcm256_dec_finalize = aes_gcm_dec_256_finalize_avx_gen4; + state->gcm128_precomp = aes_gcm_precomp_128_avx_gen4; + state->gcm192_precomp = aes_gcm_precomp_192_avx_gen4; + state->gcm256_precomp = aes_gcm_precomp_256_avx_gen4; + state->gcm128_pre = aes_gcm_pre_128_avx_gen4; + state->gcm192_pre = aes_gcm_pre_192_avx_gen4; + state->gcm256_pre = aes_gcm_pre_256_avx_gen4; + + state->ghash = ghash_avx_gen4; + state->ghash_pre = ghash_pre_avx_gen2; + + state->gmac128_init = imb_aes_gmac_init_128_avx_gen4; + state->gmac192_init = imb_aes_gmac_init_192_avx_gen4; + state->gmac256_init = imb_aes_gmac_init_256_avx_gen4; + state->gmac128_update = imb_aes_gmac_update_128_avx_gen4; + state->gmac192_update = imb_aes_gmac_update_192_avx_gen4; + state->gmac256_update = imb_aes_gmac_update_256_avx_gen4; + 
state->gmac128_finalize = imb_aes_gmac_finalize_128_avx_gen4; + state->gmac192_finalize = imb_aes_gmac_finalize_192_avx_gen4; + state->gmac256_finalize = imb_aes_gmac_finalize_256_avx_gen4; +} + +#include "mb_mgr_code.h" diff --git a/lib/avx2/mb_mgr_hmac_md5_flush_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_md5_flush_avx2.asm similarity index 99% rename from lib/avx2/mb_mgr_hmac_md5_flush_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_md5_flush_avx2.asm index 39a12466b37cee0e831991ad9e9f7bff6034899f..9583367c69a6aec36b9a0abe9e26917c31b74f70 100644 --- a/lib/avx2/mb_mgr_hmac_md5_flush_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_md5_flush_avx2.asm @@ -29,7 +29,6 @@ %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" -%include "include/cet.inc" ;%define DO_DBGPRINT %include "include/dbgprint.asm" extern md5_x8x2_avx2 @@ -129,7 +128,6 @@ endstruc ; arg 1 : rcx : state MKGLOBAL(flush_job_hmac_md5_avx2,function,internal) flush_job_hmac_md5_avx2: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -32 @@ -161,7 +159,6 @@ flush_job_hmac_md5_avx2: %endrep copy_lane_data: - endbranch64 ; copy good lane (idx) to empty lanes mov tmp, [state + _args_data_ptr_md5 + PTR_SZ*idx] ;; tackle lower 8 lanes @@ -190,7 +187,6 @@ APPEND(upper_skip_,I): align 32 start_loop0: - endbranch64 ; Find min length vphminposuw xmm2, xmm0 vpextrw DWORD(len2), xmm2, 0 ; min value @@ -341,7 +337,6 @@ APPEND(skip_clear_,I): %endif ;; SAFE_DATA return: - endbranch64 DBGPRINTL "---------- exit md5 flush -----------" vzeroupper diff --git a/lib/avx2/mb_mgr_hmac_md5_submit_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_md5_submit_avx2.asm similarity index 99% rename from lib/avx2/mb_mgr_hmac_md5_submit_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_md5_submit_avx2.asm index 456ba951a147b97c81d3ed275446c6a0533bebf4..9ddc9305f22940f2ff28f50fb0096f99487a2cce 100644 --- a/lib/avx2/mb_mgr_hmac_md5_submit_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_md5_submit_avx2.asm @@ -31,7 +31,6 @@ %include "include/memcpy.asm" %include "include/reg_sizes.asm" %include "include/const.inc" -%include "include/cet.inc" ;%define DO_DBGPRINT %include "include/dbgprint.asm" extern md5_x8x2_avx2 @@ -97,7 +96,6 @@ mksection .text ; arg 2 : rdx : job MKGLOBAL(submit_job_hmac_md5_avx2,function,internal) submit_job_hmac_md5_avx2: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -32 @@ -156,7 +154,6 @@ fast_copy: vmovdqu [lane_data + _extra_block + 0*32], ymm0 vmovdqu [lane_data + _extra_block + 1*32], ymm1 end_fast_copy: - endbranch64 mov size_offset, extra_blocks shl size_offset, 6 sub size_offset, last_len @@ -195,7 +192,6 @@ ge64_bytes: align 16 start_loop: - endbranch64 ; Find min length vmovdqa xmm0, [state + _lens_md5] vphminposuw xmm1, xmm0 @@ -351,7 +347,6 @@ clear_ret: %endif return: - endbranch64 DBGPRINTL "---------- exit md5 submit -----------" vzeroupper diff --git a/lib/avx2/mb_mgr_hmac_sha1_flush_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha1_flush_avx2.asm similarity index 99% rename from lib/avx2/mb_mgr_hmac_sha1_flush_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha1_flush_avx2.asm index f95727633cede104e0a52ba775a8f0caf1152c1a..1dda74d9d14c03ec602e15f631350cc46adb0156 100644 --- a/lib/avx2/mb_mgr_hmac_sha1_flush_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_sha1_flush_avx2.asm @@ -29,7 +29,6 @@ %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" -%include "include/cet.inc" ;%define DO_DBGPRINT %include "include/dbgprint.asm" extern sha1_x8_avx2 @@ -118,7 +117,6 @@ endstruc ; arg 1 
: rcx : state MKGLOBAL(flush_job_hmac_avx2,function,internal) flush_job_hmac_avx2: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -32 ; align stack to 32 byte boundary @@ -143,7 +141,6 @@ flush_job_hmac_avx2: %endrep copy_lane_data: - endbranch64 ; copy valid lane (idx) to empty lanes vmovdqa xmm0, [state + _lens] mov tmp, [state + _args_data_ptr + PTR_SZ*idx] @@ -302,7 +299,6 @@ APPEND(skip_clear_,I): %endif ;; SAFE_DATA return: - endbranch64 vzeroupper mov rbp, [rsp + _gpr_save + 8*0] mov r12, [rsp + _gpr_save + 8*1] diff --git a/lib/avx2/mb_mgr_hmac_sha1_submit_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha1_submit_avx2.asm similarity index 99% rename from lib/avx2/mb_mgr_hmac_sha1_submit_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha1_submit_avx2.asm index 91eaaf317242408d44d4b5cd57848a8e71ae3ff5..d7dbea7377377f7c062314243924d0548d552beb 100644 --- a/lib/avx2/mb_mgr_hmac_sha1_submit_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_sha1_submit_avx2.asm @@ -33,7 +33,6 @@ ;%define DO_DBGPRINT %include "include/dbgprint.asm" %include "include/const.inc" -%include "include/cet.inc" extern sha1_x8_avx2 mksection .rodata @@ -100,7 +99,6 @@ endstruc ; arg 2 : rdx : job MKGLOBAL(submit_job_hmac_avx2,function,internal) submit_job_hmac_avx2: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -32 ; align to 32 byte boundary @@ -154,7 +152,6 @@ fast_copy: vmovdqu [lane_data + _extra_block + 0*32], ymm0 vmovdqu [lane_data + _extra_block + 1*32], ymm1 end_fast_copy: - endbranch64 mov size_offset, extra_blocks shl size_offset, 6 sub size_offset, last_len @@ -197,7 +194,6 @@ ge64_bytes: align 16 start_loop: ; Find min length - endbranch64 vmovdqa xmm0, [state + _lens] vphminposuw xmm1, xmm0 vpextrw DWORD(len2), xmm1, 0 ; min value @@ -348,7 +344,6 @@ clear_ret: %endif return: - endbranch64 vzeroupper DBGPRINTL "---------- exit sha1 submit -----------" mov rbp, [rsp + _gpr_save + 8*0] diff --git a/lib/avx2/mb_mgr_hmac_sha224_flush_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha224_flush_avx2.asm similarity index 96% rename from lib/avx2/mb_mgr_hmac_sha224_flush_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha224_flush_avx2.asm index cc360c760e8c8d248444b1e4b0b7940038ff6700..bae687b7111b0f5120a7d004847c19f2413d0fff 100644 --- a/lib/avx2/mb_mgr_hmac_sha224_flush_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_sha224_flush_avx2.asm @@ -28,4 +28,4 @@ %define FUNC flush_job_hmac_sha_224_avx2 %define SHA224 -%include "avx2/mb_mgr_hmac_sha256_flush_avx2.asm" +%include "avx2_t1/mb_mgr_hmac_sha256_flush_avx2.asm" diff --git a/lib/avx2/mb_mgr_hmac_sha224_submit_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha224_submit_avx2.asm similarity index 96% rename from lib/avx2/mb_mgr_hmac_sha224_submit_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha224_submit_avx2.asm index 0c32dfc39a92e31375395d397640e8a6647ae99a..0b6c7fdeee7825a9346f35e73a77e3667dd22303 100644 --- a/lib/avx2/mb_mgr_hmac_sha224_submit_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_sha224_submit_avx2.asm @@ -28,4 +28,4 @@ %define FUNC submit_job_hmac_sha_224_avx2 %define SHA224 -%include "avx2/mb_mgr_hmac_sha256_submit_avx2.asm" +%include "avx2_t1/mb_mgr_hmac_sha256_submit_avx2.asm" diff --git a/lib/avx2/mb_mgr_hmac_sha256_flush_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha256_flush_avx2.asm similarity index 99% rename from lib/avx2/mb_mgr_hmac_sha256_flush_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha256_flush_avx2.asm index 2a7e76b1d24b29f017bdfe676f8ade80e9cdc231..7b6d3746e03c18746f07906b6166e1351ef4604a 100644 --- a/lib/avx2/mb_mgr_hmac_sha256_flush_avx2.asm +++ 
b/lib/avx2_t1/mb_mgr_hmac_sha256_flush_avx2.asm @@ -29,7 +29,7 @@ %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" -%include "include/cet.inc" + extern sha256_oct_avx2 mksection .rodata @@ -118,7 +118,6 @@ endstruc ; arg 1 : state MKGLOBAL(FUNC,function,internal) FUNC: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -32 @@ -149,7 +148,6 @@ FUNC: %endrep copy_lane_data: - endbranch64 ; copy idx to empty lanes vmovdqa xmm0, [state + _lens_sha256] mov tmp, [state + _args_data_ptr_sha256 + 8*idx] @@ -317,7 +315,6 @@ copy_full_digest: %endif clear_ret: - endbranch64 %ifdef SAFE_DATA vpxor ymm0, ymm0 @@ -360,7 +357,6 @@ APPEND(skip_clear_,I): %endif ;; SAFE_DATA return: - endbranch64 vzeroupper mov rbx, [rsp + _gpr_save + 8*0] diff --git a/lib/avx2/mb_mgr_hmac_sha256_submit_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha256_submit_avx2.asm similarity index 99% rename from lib/avx2/mb_mgr_hmac_sha256_submit_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha256_submit_avx2.asm index 6fcca91ebc852272ad9c867c04eb4bc979755d72..30575b35aa4f1a278766e073fd1599d85a9a0354 100644 --- a/lib/avx2/mb_mgr_hmac_sha256_submit_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_sha256_submit_avx2.asm @@ -31,7 +31,7 @@ %include "include/reg_sizes.asm" %include "include/memcpy.asm" %include "include/const.inc" -%include "include/cet.inc" + extern sha256_oct_avx2 mksection .rodata @@ -102,7 +102,6 @@ endstruc ; arg 2 : rdx : job MKGLOBAL(FUNC,function,internal) FUNC: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -32 @@ -157,7 +156,6 @@ fast_copy: vmovdqu [lane_data + _extra_block + 1*32], ymm1 end_fast_copy: - endbranch64 mov size_offset, extra_blocks shl size_offset, 6 sub size_offset, last_len @@ -202,7 +200,6 @@ ge64_bytes: align 16 start_loop: - endbranch64 ; Find min length vmovdqa xmm0, [state + _lens_sha256] vphminposuw xmm1, xmm0 @@ -376,7 +373,6 @@ copy_full_digest: %endif clear_ret: - endbranch64 %ifdef SAFE_DATA ;; Clear digest (28B/32B), outer_block (28B/32B) and extra_block (64B) of returned job @@ -407,7 +403,6 @@ clear_ret: %endif ;; SAFE_DATA return: - endbranch64 vzeroupper mov rbx, [rsp + _gpr_save + 8*0] diff --git a/lib/avx2/mb_mgr_hmac_sha384_flush_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha384_flush_avx2.asm similarity index 96% rename from lib/avx2/mb_mgr_hmac_sha384_flush_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha384_flush_avx2.asm index 10787b4014bd36013073f1aa85b9d5720e5e41ea..9f4c13dd4e18f0ac0152f4a0fe27f26da05b1aa3 100644 --- a/lib/avx2/mb_mgr_hmac_sha384_flush_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_sha384_flush_avx2.asm @@ -28,4 +28,4 @@ %define FUNC flush_job_hmac_sha_384_avx2 %define SHA_X_DIGEST_SIZE 384 -%include "avx2/mb_mgr_hmac_sha512_flush_avx2.asm" +%include "avx2_t1/mb_mgr_hmac_sha512_flush_avx2.asm" diff --git a/lib/avx2/mb_mgr_hmac_sha384_submit_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha384_submit_avx2.asm similarity index 96% rename from lib/avx2/mb_mgr_hmac_sha384_submit_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha384_submit_avx2.asm index 8aec38c59a9231d8e96a030db5372502c223c63e..a7687b52570700115207b78c660299d83a2c6f24 100644 --- a/lib/avx2/mb_mgr_hmac_sha384_submit_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_sha384_submit_avx2.asm @@ -28,4 +28,4 @@ %define FUNC submit_job_hmac_sha_384_avx2 %define SHA_X_DIGEST_SIZE 384 -%include "avx2/mb_mgr_hmac_sha512_submit_avx2.asm" +%include "avx2_t1/mb_mgr_hmac_sha512_submit_avx2.asm" diff --git a/lib/avx2/mb_mgr_hmac_sha512_flush_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha512_flush_avx2.asm 
similarity index 99% rename from lib/avx2/mb_mgr_hmac_sha512_flush_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha512_flush_avx2.asm index a54a382cd5e2e5294092e824db1110ee1f47b03b..1c13d9dce3e91ccaee92b423363e5b80af8e1b12 100644 --- a/lib/avx2/mb_mgr_hmac_sha512_flush_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_sha512_flush_avx2.asm @@ -29,7 +29,7 @@ %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" -%include "include/cet.inc" + extern sha512_x4_avx2 mksection .rodata @@ -109,7 +109,6 @@ endstruc ; arg 1 : rcx : state MKGLOBAL(FUNC,function,internal) FUNC: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -32 @@ -132,7 +131,6 @@ FUNC: %endrep copy_lane_data: - endbranch64 ; copy good lane (idx) to empty lanes vmovdqa xmm0, [state + _lens_sha512] mov tmp, [state + _args_sha512 + _data_ptr_sha512 + PTR_SZ*idx] @@ -295,7 +293,6 @@ copy_full_digest: %endif clear_ret: - endbranch64 %ifdef SAFE_DATA vpxor ymm0, ymm0 @@ -339,7 +336,6 @@ APPEND(skip_clear_,I): %endif ;; SAFE_DATA return: - endbranch64 vzeroupper mov rbx, [rsp + _gpr_save + 8*0] diff --git a/lib/avx2/mb_mgr_hmac_sha512_submit_avx2.asm b/lib/avx2_t1/mb_mgr_hmac_sha512_submit_avx2.asm similarity index 99% rename from lib/avx2/mb_mgr_hmac_sha512_submit_avx2.asm rename to lib/avx2_t1/mb_mgr_hmac_sha512_submit_avx2.asm index 815931985659a63118467d383be626be7b7470a9..7b87b76343e95132002c5eec974b9293f48bed33 100644 --- a/lib/avx2/mb_mgr_hmac_sha512_submit_avx2.asm +++ b/lib/avx2_t1/mb_mgr_hmac_sha512_submit_avx2.asm @@ -31,7 +31,6 @@ %include "include/reg_sizes.asm" %include "include/memcpy.asm" %include "include/const.inc" -%include "include/cet.inc" extern sha512_x4_avx2 mksection .rodata @@ -105,7 +104,6 @@ endstruc ; arg 2 : rdx : job MKGLOBAL(FUNC,function,internal) FUNC: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -32 @@ -159,7 +157,6 @@ fast_copy: vmovdqu [lane_data + _extra_block_sha512 + 2*32], ymm2 vmovdqu [lane_data + _extra_block_sha512 + 3*32], ymm3 end_fast_copy: - endbranch64 mov size_offset, extra_blocks shl size_offset, 7 sub size_offset, last_len @@ -203,7 +200,6 @@ ge128_bytes: align 16 start_loop: ; Find min length - endbranch64 vmovdqa xmm0, [state + _lens_sha512] vphminposuw xmm1, xmm0 vpextrw DWORD(len2), xmm1, 0 ; min value @@ -365,7 +361,6 @@ copy_full_digest: %endif clear_ret: - endbranch64 %ifdef SAFE_DATA ;; Clear digest (48B/64B), outer_block (48B/64B) and extra_block (128B) of returned job %assign J 0 @@ -398,7 +393,6 @@ clear_ret: %endif ;; SAFE_DATA return: - endbranch64 vzeroupper mov rbx, [rsp + _gpr_save + 8*0] diff --git a/lib/avx2/mb_mgr_zuc_submit_flush_avx2.asm b/lib/avx2_t1/mb_mgr_zuc_submit_flush_avx2.asm similarity index 91% rename from lib/avx2/mb_mgr_zuc_submit_flush_avx2.asm rename to lib/avx2_t1/mb_mgr_zuc_submit_flush_avx2.asm index 96ca6ebcaf0076ad0c981a2d75f7ad2bca6256ca..b7f52fc3da4352d8f99cb5f893c18d7cde24b1e6 100644 --- a/lib/avx2/mb_mgr_zuc_submit_flush_avx2.asm +++ b/lib/avx2_t1/mb_mgr_zuc_submit_flush_avx2.asm @@ -31,6 +31,7 @@ %include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" +%include "include/clear_regs.asm" %define SUBMIT_JOB_ZUC128_EEA3 submit_job_zuc_eea3_avx2 %define FLUSH_JOB_ZUC128_EEA3 flush_job_zuc_eea3_avx2 @@ -92,13 +93,15 @@ extern asm_ZucCipher_8_avx2 %define arg4 rcx %define arg5 r8 %define arg6 r9 +%define arg7 qword [rsp] %else %define arg1 rcx %define arg2 rdx %define arg3 r8 %define arg4 r9 -%define arg5 [rsp + 32] -%define arg6 [rsp + 40] +%define arg5 qword 
[rsp + 32] +%define arg6 qword [rsp + 40] +%define arg7 qword [rsp + 48] %endif %define state arg1 @@ -232,14 +235,15 @@ mksection .text ; Read and write next byte mov al, [tmp + 16] mov [state + _zuc_args_IV + lane + 16], al - ; Read next 6 bytes - movzx DWORD(tmp2), word [tmp + 17] - mov DWORD(tmp3), [tmp + 19] - shl tmp2, 32 - or tmp2, tmp3 - ; Expand to 8 bytes and write + ; Read last 8 bytes and keep only the last 6 bytes + mov tmp2, [tmp + 15] + mov tmp3, 0x0000ffffffffffff + bswap tmp2 + and tmp2, tmp3 ; last 6 bytes of IV + ; Expand 6 bytes to 8 bytes and write out mov tmp3, 0x3f3f3f3f3f3f3f3f pdep tmp2, tmp2, tmp3 + bswap tmp2 mov [state + _zuc_args_IV + lane + 17], tmp2 jmp %%_iv_read @@ -302,29 +306,19 @@ mksection .text %assign I (I + 1) %endrep - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 32 bytes for 4 parameters - sub rsp, 32 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_args_keys] lea arg2, [r12 + _zuc_args_IV] lea arg3, [r12 + _zuc_state] -%if %%KEY_SIZE == 256 - ;; Setting "tag size" to 2 in case of ciphering - ;; (dummy size, just for constant selecion at Initialization) - mov arg4, 2 -%endif - %if %%KEY_SIZE == 128 call ZUC128_INIT_8 %else + mov arg5, 0 ; Tag size = 0, arg4 not used call ZUC256_INIT_8 %endif -%ifndef LINUX - add rsp, 32 -%endif + RESTORE_STACK_SPACE 5 cmp word [r12 + _zuc_init_not_done], 0xff ; Init done for all lanes je %%skip_submit_restoring_state @@ -380,11 +374,8 @@ mksection .text mov word [r12 + _zuc_init_not_done], 0 ; Init done for all lanes - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 40 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_state] lea arg2, [r12 + _zuc_args_in] lea arg3, [r12 + _zuc_args_out] @@ -393,9 +384,8 @@ mksection .text call asm_ZucCipher_8_avx2 -%ifndef LINUX - add rsp, 40 -%endif + RESTORE_STACK_SPACE 5 + mov state, [rsp + _gpr_save + 8*8] mov job, [rsp + _gpr_save + 8*9] @@ -417,6 +407,11 @@ mksection .text %endif %%return_submit_eea3: +%ifdef SAFE_DATA + clear_scratch_ymms_asm +%else + vzeroupper +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] @@ -531,29 +526,21 @@ APPEND(%%skip_eea3_,I): %assign I (I + 1) %endrep - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 32 bytes for 4 parameters - sub rsp, 32 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_args_keys] lea arg2, [r12 + _zuc_args_IV] lea arg3, [r12 + _zuc_state] -%if %%KEY_SIZE == 256 - ;; Setting "tag size" to 2 in case of ciphering - ;; (dummy size, just for constant selecion at Initialization) - mov arg4, 2 -%endif %if %%KEY_SIZE == 128 call ZUC128_INIT_8 %else + mov arg5, 0 ; Tag size = 0, arg4 not used call ZUC256_INIT_8 %endif -%ifndef LINUX - add rsp, 32 -%endif + RESTORE_STACK_SPACE 5 + cmp word [r12 + _zuc_init_not_done], 0xff ; Init done for all lanes je %%skip_flush_restoring_state @@ -656,11 +643,8 @@ APPEND3(%%skip_eea3_copy_,I,J): %assign I (I+1) %endrep - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 40 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_state] lea arg2, [r12 + _zuc_args_in] lea arg3, [r12 + _zuc_args_out] @@ -669,9 +653,8 @@ APPEND3(%%skip_eea3_copy_,I,J): call asm_ZucCipher_8_avx2 -%ifndef LINUX - add rsp, 40 -%endif + RESTORE_STACK_SPACE 5 + mov state, [rsp + _gpr_save + 8*8] ; Clear ZUC state of the lane that is returned and NULL lanes @@ 
-704,6 +687,11 @@ APPEND3(%%skip_eea3_copy_,I,J): SHIFT_GP 1, idx, tmp3, tmp4, left or [state + _zuc_unused_lane_bitmask], BYTE(tmp3) %%return_flush_eea3: +%ifdef SAFE_DATA + clear_scratch_ymms_asm +%else + vzeroupper +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] @@ -729,7 +717,6 @@ APPEND3(%%skip_eea3_copy_,I,J): ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC128_EEA3,function,internal) SUBMIT_JOB_ZUC128_EEA3: - endbranch64 SUBMIT_JOB_ZUC_EEA3 128 ; JOB* SUBMIT_JOB_ZUC256_EEA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) @@ -737,7 +724,6 @@ SUBMIT_JOB_ZUC128_EEA3: ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC256_EEA3,function,internal) SUBMIT_JOB_ZUC256_EEA3: - endbranch64 SUBMIT_JOB_ZUC_EEA3 256 ; JOB* FLUSH_JOB_ZUC128_EEA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) @@ -745,7 +731,6 @@ SUBMIT_JOB_ZUC256_EEA3: ; arg 2 : job MKGLOBAL(FLUSH_JOB_ZUC128_EEA3,function,internal) FLUSH_JOB_ZUC128_EEA3: - endbranch64 FLUSH_JOB_ZUC_EEA3 128 ; JOB* FLUSH_JOB_ZUC256_EEA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) @@ -753,11 +738,11 @@ FLUSH_JOB_ZUC128_EEA3: ; arg 2 : job MKGLOBAL(FLUSH_JOB_ZUC256_EEA3,function,internal) FLUSH_JOB_ZUC256_EEA3: - endbranch64 FLUSH_JOB_ZUC_EEA3 256 -%macro SUBMIT_JOB_ZUC_EIA3 1 +%macro SUBMIT_JOB_ZUC_EIA3 2 %define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) +%define %%TAG_SIZE %2 ; [constant] Tag size (4, 8 or 16) ; idx needs to be in rbp %define len rbp @@ -812,14 +797,17 @@ FLUSH_JOB_ZUC256_EEA3: ; Read and write next byte mov al, [tmp + 16] mov [state + _zuc_args_IV + lane + 16], al - ; Read next 6 bytes - movzx DWORD(tmp2), word [tmp + 17] - mov DWORD(tmp3), [tmp + 19] - shl tmp2, 32 - or tmp2, tmp3 + ; Read next 6 bytes and write as 8 bytes + + ; Read last 8 bytes and keep only 6 bytes + mov tmp2, [tmp + 15] + mov tmp3, 0x0000ffffffffffff + bswap tmp2 + and tmp2, tmp3 ; Expand to 8 bytes and write mov tmp3, 0x3f3f3f3f3f3f3f3f pdep tmp2, tmp2, tmp3 + bswap tmp2 mov [state + _zuc_args_IV + lane + 17], tmp2 jmp %%_iv_read @@ -866,11 +854,12 @@ FLUSH_JOB_ZUC256_EEA3: ; to pass parameter to next function mov r11, state - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 48 bytes for 6 parameters (already aligned to 16 bytes) - sub rsp, 48 +%if %%KEY_SIZE == 128 + RESERVE_STACK_SPACE 6 +%else ; %%KEY_SIZE == 256 + RESERVE_STACK_SPACE 7 %endif + lea arg1, [r11 + _zuc_args_keys] lea arg2, [r11 + _zuc_args_IV] lea arg3, [r11 + _zuc_args_in] @@ -884,6 +873,9 @@ FLUSH_JOB_ZUC256_EEA3: lea r12, [r11 + _zuc_job_in_lane] mov arg6, r12 %endif +%if %%KEY_SIZE == 256 + mov arg7, %%TAG_SIZE +%endif %if %%KEY_SIZE == 128 call zuc_eia3_8_buffer_job_avx2 @@ -891,9 +883,12 @@ FLUSH_JOB_ZUC256_EEA3: call zuc256_eia3_8_buffer_job_avx2 %endif -%ifndef LINUX - add rsp, 48 +%if %%KEY_SIZE == 128 + RESTORE_STACK_SPACE 6 +%else ; %%KEY_SIZE == 256 + RESTORE_STACK_SPACE 7 %endif + mov state, [rsp + _gpr_save + 8*8] mov job, [rsp + _gpr_save + 8*9] @@ -914,6 +909,11 @@ FLUSH_JOB_ZUC256_EEA3: mov [state + _zuc_unused_lanes], unused_lanes %%return_submit_eia3: +%ifdef SAFE_DATA + clear_scratch_ymms_asm +%else + vzeroupper +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] @@ -927,15 +927,18 @@ FLUSH_JOB_ZUC256_EEA3: %endif mov rsp, [rsp + _rsp_save] ; original SP - ret + jmp %%exit_submit_eia3 %%return_null_submit_eia3: xor job_rax, job_rax jmp %%return_submit_eia3 + +%%exit_submit_eia3: %endmacro -%macro FLUSH_JOB_ZUC_EIA3 1 +%macro FLUSH_JOB_ZUC_EIA3 2 %define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) +%define %%TAG_SIZE %2 ; 
[constant] Tag size (4, 8 or 16) %define unused_lanes rbx %define tmp1 rbx @@ -1019,10 +1022,12 @@ APPEND(%%skip_eia3_,I): ; to pass parameter to next function mov r11, state -%ifndef LINUX - ;; 48 bytes for 6 parameters (already aligned to 16 bytes) - sub rsp, 48 +%if %%KEY_SIZE == 128 + RESERVE_STACK_SPACE 6 +%else ; %%KEY_SIZE == 256 + RESERVE_STACK_SPACE 7 %endif + lea arg1, [r11 + _zuc_args_keys] lea arg2, [r11 + _zuc_args_IV] lea arg3, [r11 + _zuc_args_in] @@ -1036,6 +1041,9 @@ APPEND(%%skip_eia3_,I): lea r12, [r11 + _zuc_job_in_lane] mov arg6, r12 %endif +%if %%KEY_SIZE == 256 + mov arg7, %%TAG_SIZE +%endif %if %%KEY_SIZE == 128 call zuc_eia3_8_buffer_job_avx2 @@ -1043,9 +1051,12 @@ APPEND(%%skip_eia3_,I): call zuc256_eia3_8_buffer_job_avx2 %endif -%ifndef LINUX - add rsp, 48 +%if %%KEY_SIZE == 128 + RESTORE_STACK_SPACE 6 +%else ; %%KEY_SIZE == 256 + RESTORE_STACK_SPACE 7 %endif + vmovdqa xmm2, [rsp + _null_len_save] mov state, [rsp + _gpr_save + 8*8] @@ -1066,6 +1077,11 @@ APPEND(%%skip_eia3_,I): mov [state + _zuc_unused_lanes], unused_lanes %%return_flush_eia3: +%ifdef SAFE_DATA + clear_scratch_ymms_asm +%else + vzeroupper +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] @@ -1079,11 +1095,13 @@ APPEND(%%skip_eia3_,I): %endif mov rsp, [rsp + _rsp_save] ; original SP - ret + jmp %%exit_flush_eia3 %%return_null_flush_eia3: xor job_rax, job_rax jmp %%return_flush_eia3 + +%%exit_flush_eia3: %endmacro ; JOB* SUBMIT_JOB_ZUC128_EIA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) @@ -1091,29 +1109,59 @@ APPEND(%%skip_eia3_,I): ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC128_EIA3,function,internal) SUBMIT_JOB_ZUC128_EIA3: - endbranch64 - SUBMIT_JOB_ZUC_EIA3 128 + SUBMIT_JOB_ZUC_EIA3 128, 4 + ret -; JOB* SUBMIT_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) +; JOB* SUBMIT_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job, +; const uint64_t tag_sz) ; arg 1 : state ; arg 2 : job +; arg 3 : tag size (4, 8 or 16 bytes) MKGLOBAL(SUBMIT_JOB_ZUC256_EIA3,function,internal) SUBMIT_JOB_ZUC256_EIA3: - endbranch64 - SUBMIT_JOB_ZUC_EIA3 256 + cmp arg3, 8 + je submit_tag_8B + jb submit_tag_4B + + ; Fall-through for 16-byte tag +submit_tag_16B: + SUBMIT_JOB_ZUC_EIA3 256, 16 + ret +submit_tag_8B: + SUBMIT_JOB_ZUC_EIA3 256, 8 + ret +submit_tag_4B: + SUBMIT_JOB_ZUC_EIA3 256, 4 + ret ; JOB* FLUSH_JOB_ZUC128_EIA3(MB_MGR_ZUC_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_ZUC128_EIA3,function,internal) FLUSH_JOB_ZUC128_EIA3: - endbranch64 - FLUSH_JOB_ZUC_EIA3 128 + FLUSH_JOB_ZUC_EIA3 128, 4 + ret -; JOB* FLUSH_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state) +; JOB* FLUSH_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state, +; const uint64_t tag_sz) ; arg 1 : state +; arg 2 : tag size (4, 8 or 16 bytes) MKGLOBAL(FLUSH_JOB_ZUC256_EIA3,function,internal) FLUSH_JOB_ZUC256_EIA3: - endbranch64 - FLUSH_JOB_ZUC_EIA3 256 + cmp arg2, 8 + je flush_tag_8B + jb flush_tag_4B + + ; Fall-through for 16-byte tag +flush_tag_16B: + FLUSH_JOB_ZUC_EIA3 256, 16 + ret + +flush_tag_8B: + FLUSH_JOB_ZUC_EIA3 256, 8 + ret + +flush_tag_4B: + FLUSH_JOB_ZUC_EIA3 256, 4 + ret mksection stack-noexec diff --git a/lib/avx2/md5_x8x2_avx2.asm b/lib/avx2_t1/md5_x8x2_avx2.asm similarity index 99% rename from lib/avx2/md5_x8x2_avx2.asm rename to lib/avx2_t1/md5_x8x2_avx2.asm index 53f99570fe3c1c1d21a16ffb980dd53bd6ea1150..19b90acb85d25b093eabcb20239b0e22910f5957 100644 --- a/lib/avx2/md5_x8x2_avx2.asm +++ b/lib/avx2_t1/md5_x8x2_avx2.asm @@ -40,7 +40,7 @@ %include "include/mb_mgr_datastruct.asm" %include "include/transpose_avx2.asm" %include 
"include/clear_regs.asm" -%include "include/cet.inc" + mksection .rodata default rel align 64 @@ -387,7 +387,6 @@ align 32 MKGLOBAL(md5_x8x2_avx2,function,internal) md5_x8x2_avx2: - endbranch64 sub rsp, STACK_size mov DPTR1, rsp @@ -467,7 +466,6 @@ md5_x8x2_avx2: vmovdqu Y_D2,[state + 3 * MD5_DIGEST_ROW_SIZE + 32] lloop: - endbranch64 ; save old digests to stack vmovdqa [Y_AA], Y_A vmovdqa [Y_BB], Y_B diff --git a/lib/avx2/sha1_x8_avx2.asm b/lib/avx2_t1/sha1_x8_avx2.asm similarity index 78% rename from lib/avx2/sha1_x8_avx2.asm rename to lib/avx2_t1/sha1_x8_avx2.asm index f25dc6a03a5774c27dc9dc12f75575143e0eade9..5b0de3e54c0edc60586cfdbe5d52931c73233d9a 100644 --- a/lib/avx2/sha1_x8_avx2.asm +++ b/lib/avx2_t1/sha1_x8_avx2.asm @@ -40,7 +40,7 @@ %include "include/mb_mgr_datastruct.asm" %include "include/transpose_avx2.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" + mksection .rodata default rel align 32 @@ -67,6 +67,12 @@ K60_79: ;ddq 0xCA62C1D6CA62C1D6CA62C1D6CA62C1D6 mksection .text +%define XMM_STORAGE 16*10 +%define GP_STORAGE 8*5 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + %ifdef LINUX %define arg1 rdi %define arg2 rsi @@ -140,7 +146,7 @@ mksection .text ;; Assume stack aligned to 32 bytes before call ;; Therefore FRAMESIZE mod 32 must be 32-8 = 24 -%define FRAMESZ 32*16 + 24 +%define FRAMESZ 32*16 + 16*10 + 24 %define VMOVPS vmovups @@ -296,6 +302,63 @@ mksection .text %xdefine W14 TMP_ %endm +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~31 ; align rsp to 32 bytes +%ifndef LINUX + vmovdqa [rsp + 0*16], xmm6 + vmovdqa [rsp + 1*16], xmm7 + vmovdqa [rsp + 2*16], xmm8 + vmovdqa [rsp + 3*16], xmm9 + vmovdqa [rsp + 4*16], xmm10 + vmovdqa [rsp + 5*16], xmm11 + vmovdqa [rsp + 6*16], xmm12 + vmovdqa [rsp + 7*16], xmm13 + vmovdqa [rsp + 8*16], xmm14 + vmovdqa [rsp + 9*16], xmm15 +%endif + mov [rsp + GP_OFFSET], r12 + mov [rsp + GP_OFFSET + 8], r13 + mov [rsp + GP_OFFSET + 2*8], r14 + mov [rsp + GP_OFFSET + 3*8], r15 + mov [rsp + GP_OFFSET + 4*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 +%ifndef LINUX + vmovdqa xmm6, [rsp + 0*16] + vmovdqa xmm7, [rsp + 1*16] + vmovdqa xmm8, [rsp + 2*16] + vmovdqa xmm9, [rsp + 3*16] + vmovdqa xmm10, [rsp + 4*16] + vmovdqa xmm11, [rsp + 5*16] + vmovdqa xmm12, [rsp + 6*16] + vmovdqa xmm13, [rsp + 7*16] + vmovdqa xmm14, [rsp + 8*16] + vmovdqa xmm15, [rsp + 9*16] + +%ifdef SAFE_DATA + vpxor xmm5, xmm5, xmm5 + vmovdqa [rsp + 0*16], xmm5 + vmovdqa [rsp + 1*16], xmm5 + vmovdqa [rsp + 2*16], xmm5 + vmovdqa [rsp + 3*16], xmm5 + vmovdqa [rsp + 4*16], xmm5 + vmovdqa [rsp + 5*16], xmm5 + vmovdqa [rsp + 6*16], xmm5 + vmovdqa [rsp + 7*16], xmm5 + vmovdqa [rsp + 8*16], xmm5 + vmovdqa [rsp + 9*16], xmm5 +%endif +%endif + mov r12, [rsp + GP_OFFSET] + mov r13, [rsp + GP_OFFSET + 8] + mov r14, [rsp + GP_OFFSET + 2*8] + mov r15, [rsp + GP_OFFSET + 3*8] + mov rsp, [rsp + GP_OFFSET + 4*8] ;; rsp pointer +%endmacro + align 32 ; void sha1_x8_avx2(void *state, int num_blks) @@ -303,9 +366,21 @@ align 32 ; arg 2 : rdx : size (in blocks) ;; assumed to be >= 1 MKGLOBAL(sha1_x8_avx2,function,internal) sha1_x8_avx2: - endbranch64 sub rsp, FRAMESZ +%ifndef LINUX + vmovdqa [rsp + 32*16 + 0*16], xmm6 + vmovdqa [rsp + 32*16 + 1*16], xmm7 + vmovdqa [rsp + 32*16 + 2*16], xmm8 + vmovdqa [rsp + 32*16 + 3*16], xmm9 + vmovdqa [rsp + 32*16 + 4*16], xmm10 + vmovdqa [rsp + 32*16 + 5*16], xmm11 + vmovdqa [rsp + 32*16 + 6*16], xmm12 + vmovdqa [rsp + 32*16 + 7*16], xmm13 + vmovdqa [rsp + 32*16 + 8*16], 
xmm14 + vmovdqa [rsp + 32*16 + 9*16], xmm15 +%endif + ;; Initialize digests vmovdqu A, [state + 0*SHA1_DIGEST_ROW_SIZE] vmovdqu B, [state + 1*SHA1_DIGEST_ROW_SIZE] @@ -454,8 +529,37 @@ lloop: %endrep %endif - add rsp, FRAMESZ +%ifndef LINUX + vmovdqa xmm6, [rsp + 32*16 + 0*16] + vmovdqa xmm7, [rsp + 32*16 + 1*16] + vmovdqa xmm8, [rsp + 32*16 + 2*16] + vmovdqa xmm9, [rsp + 32*16 + 3*16] + vmovdqa xmm10, [rsp + 32*16 + 4*16] + vmovdqa xmm11, [rsp + 32*16 + 5*16] + vmovdqa xmm12, [rsp + 32*16 + 6*16] + vmovdqa xmm13, [rsp + 32*16 + 7*16] + vmovdqa xmm14, [rsp + 32*16 + 8*16] + vmovdqa xmm15, [rsp + 32*16 + 9*16] + +%ifdef SAFE_DATA + ; xmm0 already 0 +%assign i 0 +%rep 10 + vmovdqa [rsp + 32*16 + i*16], xmm0 +%assign i (i+1) +%endrep +%endif +%endif + add rsp, FRAMESZ + + ret +; void call_sha1_x8_avx2_from_c(SHA1_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha1_x8_avx2_from_c,function,internal) +call_sha1_x8_avx2_from_c: + FUNC_SAVE + call sha1_x8_avx2 + FUNC_RESTORE ret mksection stack-noexec diff --git a/lib/avx2/sha256_oct_avx2.asm b/lib/avx2_t1/sha256_oct_avx2.asm similarity index 88% rename from lib/avx2/sha256_oct_avx2.asm rename to lib/avx2_t1/sha256_oct_avx2.asm index 0e768f9b712a46b8b0a0fb76d39d6a84b3c76426..f56b7256e7fa3fd08592a7e9e74816b314783d8a 100644 --- a/lib/avx2/sha256_oct_avx2.asm +++ b/lib/avx2_t1/sha256_oct_avx2.asm @@ -42,7 +42,6 @@ %include "include/os.asm" ;%define DO_DBGPRINT %include "include/dbgprint.asm" -%include "include/cet.inc" %include "include/mb_mgr_datastruct.asm" %include "include/transpose_avx2.asm" %include "include/clear_regs.asm" @@ -186,28 +185,81 @@ PSHUFFLE_BYTE_FLIP_MASK: ;ddq 0x0c0d0e0f08090a0b0405060700010203 dq 0x0405060700010203, 0x0c0d0e0f08090a0b dq 0x0405060700010203, 0x0c0d0e0f08090a0b -align 64 -MKGLOBAL(K256,data,internal) -K256: - dd 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 - dd 0x3956c25b,0x59f111f1,0x923f82a4,0xab1c5ed5 - dd 0xd807aa98,0x12835b01,0x243185be,0x550c7dc3 - dd 0x72be5d74,0x80deb1fe,0x9bdc06a7,0xc19bf174 - dd 0xe49b69c1,0xefbe4786,0x0fc19dc6,0x240ca1cc - dd 0x2de92c6f,0x4a7484aa,0x5cb0a9dc,0x76f988da - dd 0x983e5152,0xa831c66d,0xb00327c8,0xbf597fc7 - dd 0xc6e00bf3,0xd5a79147,0x06ca6351,0x14292967 - dd 0x27b70a85,0x2e1b2138,0x4d2c6dfc,0x53380d13 - dd 0x650a7354,0x766a0abb,0x81c2c92e,0x92722c85 - dd 0xa2bfe8a1,0xa81a664b,0xc24b8b70,0xc76c51a3 - dd 0xd192e819,0xd6990624,0xf40e3585,0x106aa070 - dd 0x19a4c116,0x1e376c08,0x2748774c,0x34b0bcb5 - dd 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 - dd 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 - dd 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 - +extern K256 mksection .text +%define XMM_STORAGE 10*16 +%define GP_STORAGE 9*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~31 ; align rsp to 32 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 + mov [rsp + 3*8], r13 + mov [rsp + 4*8], r14 + mov [rsp + 5*8], r15 +%ifndef LINUX + mov [rsp + 6*8], rsi + mov [rsp + 7*8], rdi + vmovdqa [rsp + 4*16], xmm6 + vmovdqa [rsp + 5*16], xmm7 + vmovdqa [rsp + 6*16], xmm8 + vmovdqa [rsp + 7*16], xmm9 + vmovdqa [rsp + 8*16], xmm10 + vmovdqa [rsp + 9*16], xmm11 + vmovdqa [rsp + 10*16], xmm12 + vmovdqa [rsp + 11*16], xmm13 + vmovdqa [rsp + 12*16], xmm14 + vmovdqa [rsp + 13*16], xmm15 +%endif ; LINUX + mov [rsp + 14*16], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] + mov r13, [rsp + 3*8] 
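+        ;; Restores mirror the FUNC_SAVE layout: the callee-saved GP registers
+        ;; occupy the bottom quadwords of the aligned frame, xmm6-xmm15
+        ;; (Windows only) follow from [rsp + 4*16] and the original stack
+        ;; pointer is kept at [rsp + 14*16].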
+ mov r14, [rsp + 4*8] + mov r15, [rsp + 5*8] +%ifndef LINUX + mov rsi, [rsp + 6*8] + mov rdi, [rsp + 7*8] + vmovdqa xmm6, [rsp + 4*16] + vmovdqa xmm7, [rsp + 5*16] + vmovdqa xmm8, [rsp + 6*16] + vmovdqa xmm9, [rsp + 7*16] + vmovdqa xmm10, [rsp + 8*16] + vmovdqa xmm11, [rsp + 9*16] + vmovdqa xmm12, [rsp + 10*16] + vmovdqa xmm13, [rsp + 11*16] + vmovdqa xmm14, [rsp + 12*16] + vmovdqa xmm15, [rsp + 13*16] + +%ifdef SAFE_DATA + vpxor xmm5, xmm5, xmm5 + vmovdqa xmm5, [rsp + 4*16] + vmovdqa xmm5, [rsp + 5*16] + vmovdqa xmm5, [rsp + 6*16] + vmovdqa xmm5, [rsp + 7*16] + vmovdqa xmm5, [rsp + 8*16] + vmovdqa xmm5, [rsp + 9*16] + vmovdqa xmm5, [rsp + 10*16] + vmovdqa xmm5, [rsp + 11*16] + vmovdqa xmm5, [rsp + 12*16] + vmovdqa xmm5, [rsp + 13*16] +%endif +%endif ; LINUX + mov rsp, [rsp + 14*16] ;; rsp pointer +%endmacro + %ifdef LINUX %define arg1 rdi %define arg2 rsi @@ -427,7 +479,6 @@ endstruc MKGLOBAL(sha256_oct_avx2,function,internal) align 16 sha256_oct_avx2: - endbranch64 ; general registers preserved in outer calling routine ; outer calling routine saves all the XMM registers sub rsp, FRAMESZ @@ -515,7 +566,6 @@ lloop: jmp Lrounds_16_xx align 16 Lrounds_16_xx: - endbranch64 %rep 16 ROUND_16_XX T1, i %assign i (i+1) @@ -582,4 +632,12 @@ Lrounds_16_xx: add rsp, FRAMESZ ret +; void call_sha256_oct_avx2_from_c(SHA256_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha256_oct_avx2_from_c,function,internal) +call_sha256_oct_avx2_from_c: + FUNC_SAVE + call sha256_oct_avx2 + FUNC_RESTORE + ret + mksection stack-noexec diff --git a/lib/avx2/sha512_x4_avx2.asm b/lib/avx2_t1/sha512_x4_avx2.asm similarity index 89% rename from lib/avx2/sha512_x4_avx2.asm rename to lib/avx2_t1/sha512_x4_avx2.asm index ce98b76d2d40d6dd86f1ff072f9bc640e0e27750..f3c042e9422c3631a736e5344bf060ea0b795b97 100644 --- a/lib/avx2/sha512_x4_avx2.asm +++ b/lib/avx2_t1/sha512_x4_avx2.asm @@ -47,7 +47,6 @@ %include "include/dbgprint.asm" %include "include/mb_mgr_datastruct.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" mksection .rodata default rel align 64 @@ -310,13 +309,78 @@ endstruc %endm +%define XMM_STORAGE 10*16 +%define GP_STORAGE 6*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~31 ; align rsp to 32 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 +%ifndef LINUX + mov [rsp + 3*8], rsi + mov [rsp + 4*8], rdi + vmovdqa [rsp + 3*16], xmm6 + vmovdqa [rsp + 4*16], xmm7 + vmovdqa [rsp + 5*16], xmm8 + vmovdqa [rsp + 6*16], xmm9 + vmovdqa [rsp + 7*16], xmm10 + vmovdqa [rsp + 8*16], xmm11 + vmovdqa [rsp + 9*16], xmm12 + vmovdqa [rsp + 10*16], xmm13 + vmovdqa [rsp + 11*16], xmm14 + vmovdqa [rsp + 12*16], xmm15 +%endif ; LINUX + mov [rsp + 5*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] +%ifndef LINUX + mov rsi, [rsp + 3*8] + mov rdi, [rsp + 4*8] + vmovdqa xmm6, [rsp + 3*16] + vmovdqa xmm7, [rsp + 4*16] + vmovdqa xmm8, [rsp + 5*16] + vmovdqa xmm9, [rsp + 6*16] + vmovdqa xmm10, [rsp + 7*16] + vmovdqa xmm11, [rsp + 8*16] + vmovdqa xmm12, [rsp + 9*16] + vmovdqa xmm13, [rsp + 10*16] + vmovdqa xmm14, [rsp + 11*16] + vmovdqa xmm15, [rsp + 12*16] + +%ifdef SAFE_DATA + vpxor xmm5, xmm5, xmm5 + vmovdqa xmm5, [rsp + 3*16] + vmovdqa xmm5, [rsp + 4*16] + vmovdqa xmm5, [rsp + 5*16] + vmovdqa xmm5, [rsp + 6*16] + vmovdqa xmm5, [rsp + 7*16] + vmovdqa xmm5, [rsp + 8*16] + vmovdqa xmm5, [rsp + 9*16] + vmovdqa 
xmm5, [rsp + 10*16] + vmovdqa xmm5, [rsp + 11*16] + vmovdqa xmm5, [rsp + 12*16] +%endif +%endif ; LINUX + mov rsp, [rsp + 5*8] ;; rsp pointer +%endmacro + ;; void sha512_x4_avx2(void *STATE, const int INP_SIZE) ;; arg 1 : STATE : pointer to input data ;; arg 2 : INP_SIZE : size of data in blocks (assumed >= 1) MKGLOBAL(sha512_x4_avx2,function,internal) align 32 sha512_x4_avx2: - endbranch64 ; general registers preserved in outer calling routine ; outer calling routine saves all the XMM registers @@ -383,7 +447,6 @@ lloop: jmp Lrounds_16_xx align 16 Lrounds_16_xx: - endbranch64 %rep 16 ROUND_16_XX T1, i %assign i (i+1) @@ -444,4 +507,12 @@ Lrounds_16_xx: ; outer calling routine restores XMM and other GP registers ret +; void call_sha512_x4_avx2_from_c(SHA512_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha512_x4_avx2_from_c,function,internal) +call_sha512_x4_avx2_from_c: + FUNC_SAVE + call sha512_x4_avx2 + FUNC_RESTORE + ret + mksection stack-noexec diff --git a/lib/avx2_t1/sha_mb_avx2.c b/lib/avx2_t1/sha_mb_avx2.c new file mode 100644 index 0000000000000000000000000000000000000000..fcca302151f95342255d81affacf83ddcab6248a --- /dev/null +++ b/lib/avx2_t1/sha_mb_avx2.c @@ -0,0 +1,140 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include "include/sha_mb_mgr.h" +#include "include/arch_avx2_type1.h" + +IMB_JOB *submit_job_sha384_avx2(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); +IMB_JOB *flush_job_sha384_avx2(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); + +IMB_JOB *submit_job_sha512_avx2(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); +IMB_JOB *flush_job_sha512_avx2(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); + +/* ========================================================================== */ +/* + * SHA1 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha1_avx2(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 8, 1, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_x8_avx2_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha1_avx2(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 8, 0, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_x8_avx2_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA224 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha224_avx2(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 8, 1, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha256_oct_avx2_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha224_avx2(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 8, 0, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha256_oct_avx2_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA256 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha256_avx2(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 8, 1, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + call_sha256_oct_avx2_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha256_avx2(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 8, 0, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + call_sha256_oct_avx2_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA384 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha384_avx2(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 4, 1, 384, + IMB_SHA_512_BLOCK_SIZE, SHA384_PAD_SIZE, + call_sha512_x4_avx2_from_c); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha384_avx2(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 4, 0, 384, + IMB_SHA_512_BLOCK_SIZE, SHA384_PAD_SIZE, + call_sha512_x4_avx2_from_c); +} + +/* ========================================================================== */ +/* + * SHA512 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha512_avx2(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 4, 1, 512, + IMB_SHA_512_BLOCK_SIZE, SHA512_PAD_SIZE, + call_sha512_x4_avx2_from_c); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha512_avx2(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 4, 0, 512, + IMB_SHA_512_BLOCK_SIZE, SHA512_PAD_SIZE, + call_sha512_x4_avx2_from_c); +} diff --git a/lib/avx2/snow3g_avx2.c b/lib/avx2_t1/snow3g_avx2.c similarity index 100% rename from lib/avx2/snow3g_avx2.c rename to lib/avx2_t1/snow3g_avx2.c diff --git a/lib/avx2/zuc_top_avx2.c b/lib/avx2_t1/zuc_top_avx2.c old mode 100755 new mode 100644 similarity index 94% 
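The wrappers in sha_mb_avx2.c are not called directly by applications; they are reached through the library's multi-buffer job API. Below is a minimal sketch of hashing one buffer with plain SHA-1 through that path (assuming the public intel-ipsec-mb.h API; the helper name sha1_one_buffer and the single-job submit/flush pattern are illustrative only and not part of this patch):

#include <string.h>
#include <intel-ipsec-mb.h>

/* Illustrative helper (not part of the library): hashes one buffer with
 * plain SHA-1 through the job API, which dispatches to
 * submit_job_sha1_avx2()/flush_job_sha1_avx2() on AVX2 CPUs. */
static int
sha1_one_buffer(const void *msg, const uint64_t len, void *digest)
{
        IMB_MGR *mgr = alloc_mb_mgr(0);
        IMB_JOB *job;

        if (mgr == NULL)
                return -1;
        init_mb_mgr_avx2(mgr);

        job = IMB_GET_NEXT_JOB(mgr);
        memset(job, 0, sizeof(*job));
        job->hash_alg = IMB_AUTH_SHA_1;
        job->cipher_mode = IMB_CIPHER_NULL;
        job->cipher_direction = IMB_DIR_ENCRYPT;
        job->chain_order = IMB_ORDER_HASH_CIPHER;
        job->src = msg;
        job->hash_start_src_offset_in_bytes = 0;
        job->msg_len_to_hash_in_bytes = len;
        job->auth_tag_output = digest;
        job->auth_tag_output_len_in_bytes = 20; /* SHA-1 digest size */

        job = IMB_SUBMIT_JOB(mgr);
        if (job == NULL)
                job = IMB_FLUSH_JOB(mgr); /* flush the partially filled lane */

        const int ret = (job != NULL &&
                         job->status == IMB_STATUS_COMPLETED) ? 0 : -1;

        free_mb_mgr(mgr);
        return ret;
}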
rename from lib/avx2/zuc_top_avx2.c rename to lib/avx2_t1/zuc_top_avx2.c index 547f33ce909376ba4ade4c4ff78dce66bc8a5df6..91cdfedd6f4608cbd70300fc42878fe3bf62bf15 --- a/lib/avx2/zuc_top_avx2.c +++ b/lib/avx2_t1/zuc_top_avx2.c @@ -488,16 +488,6 @@ void zuc_eea3_n_buffer_avx2(const void * const pKey[], const void * const pIv[], #endif } -static inline uint64_t rotate_left(uint64_t u, size_t r) -{ - return (((u) << (r)) | ((u) >> (64 - (r)))); -} - -static inline uint64_t load_uint64(const void *ptr) -{ - return *((const uint64_t *)ptr); -} - static inline void _zuc_eia3_1_buffer_avx2(const void *pKey, const void *pIv, @@ -525,7 +515,7 @@ void _zuc_eia3_1_buffer_avx2(const void *pKey, asm_ZucGenKeystream8B_avx(&keyStream[8], &zucState); else asm_ZucGenKeystream32B_avx(&keyStream[8], &zucState); - asm_Eia3Round32B_avx(&T, &keyStream[0], pIn8); + asm_Eia3Round32B_avx(&T, &keyStream[0], pIn8, 4); pIn8 = &pIn8[KEYSTR_ROUND_LEN]; } @@ -535,7 +525,7 @@ void _zuc_eia3_1_buffer_avx2(const void *pKey, */ if (remainingBits > (6 * 32)) asm_ZucGenKeystream8B_avx(&keyStream[8], &zucState); - asm_Eia3Remainder_avx(&T, &keyStream[0], pIn8, remainingBits); + asm_Eia3Remainder_avx(&T, &keyStream[0], pIn8, remainingBits, 128, 4); *pMacI = T; #ifdef SAFE_DATA @@ -597,16 +587,13 @@ void _zuc_eia3_8_buffer_avx2(const void * const pKey[NUM_AVX2_BUFS], asm_ZucGenKeystream32B_8_avx2(&state, (uint32_t **)pKeyStrArr); for (i = 0; i < NUM_AVX2_BUFS; i++) { - asm_Eia3Round32B_avx(&T[i], &keyStr[i][0], pIn8[i]); + asm_Eia3Round32B_avx(&T[i], &keyStr[i][0], pIn8[i], 4); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } } /* Process each packet separately for the remaining bits */ for (i = 0; i < NUM_AVX2_BUFS; i++) { - const uint32_t N = lengthInBits[i] + (2 * ZUC_WORD_BITS); - uint32_t L = ((N + 31) / ZUC_WORD_BITS) - - numKeyStr*(keyStreamLengthInBits / 32); uint32_t remainBits = lengthInBits[i] - numKeyStr*keyStreamLengthInBits; uint32_t *keyStr32 = (uint32_t *) keyStr[i]; @@ -638,7 +625,6 @@ void _zuc_eia3_8_buffer_avx2(const void * const pKey[NUM_AVX2_BUFS], while (remainBits >= keyStreamLengthInBits) { remainBits -= keyStreamLengthInBits; - L -= (keyStreamLengthInBits / 32); /* Generate the next key stream 8 bytes or 32 bytes */ if (!remainBits) @@ -647,7 +633,7 @@ void _zuc_eia3_8_buffer_avx2(const void * const pKey[NUM_AVX2_BUFS], else asm_ZucGenKeystream32B_avx(&keyStr32[8], &singlePktState); - asm_Eia3Round32B_avx(&T[i], &keyStr32[0], pIn8[i]); + asm_Eia3Round32B_avx(&T[i], &keyStr32[0], pIn8[i], 4); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } @@ -660,7 +646,8 @@ void _zuc_eia3_8_buffer_avx2(const void * const pKey[NUM_AVX2_BUFS], asm_ZucGenKeystream8B_avx(&keyStr32[8], &singlePktState); - asm_Eia3Remainder_avx(&T[i], keyStr32, pIn8[i], remainBits); + asm_Eia3Remainder_avx(&T[i], keyStr32, pIn8[i], remainBits, + 128, 4); /* save the final MAC-I result */ *(pMacI[i]) = T[i]; } @@ -778,7 +765,7 @@ void zuc_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], if (job_in_lane[i] == NULL) continue; - asm_Eia3Round32B_avx(&T[i], &keyStr[i][0], pIn8[i]); + asm_Eia3Round32B_avx(&T[i], &keyStr[i][0], pIn8[i], 4); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } } @@ -788,9 +775,6 @@ void zuc_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], if (job_in_lane[i] == NULL) continue; - const uint32_t N = lengthInBits[i] + (2 * ZUC_WORD_BITS); - uint32_t L = ((N + 31) / ZUC_WORD_BITS) - - numKeyStr*(keyStreamLengthInBits / 32); uint32_t remainBits = lengthInBits[i] - numKeyStr*keyStreamLengthInBits; uint32_t *keyStr32 = 
(uint32_t *) keyStr[i]; @@ -822,7 +806,6 @@ void zuc_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], while (remainBits >= keyStreamLengthInBits) { remainBits -= keyStreamLengthInBits; - L -= (keyStreamLengthInBits / 32); /* Generate the next key stream 8 bytes or 32 bytes */ if (!remainBits) @@ -831,7 +814,7 @@ void zuc_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], else asm_ZucGenKeystream32B_avx(&keyStr32[8], &singlePktState); - asm_Eia3Round32B_avx(&T[i], &keyStr32[0], pIn8[i]); + asm_Eia3Round32B_avx(&T[i], &keyStr32[0], pIn8[i], 4); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } @@ -844,7 +827,8 @@ void zuc_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], asm_ZucGenKeystream8B_avx(&keyStr32[8], &singlePktState); - asm_Eia3Remainder_avx(&T[i], keyStr32, pIn8[i], remainBits); + asm_Eia3Remainder_avx(&T[i], keyStr32, pIn8[i], remainBits, + 128, 4); /* save the final MAC-I result */ *(pMacI[i]) = T[i]; @@ -862,9 +846,10 @@ void zuc_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], void zuc256_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], const uint8_t *ivs, const void * const pBufferIn[NUM_AVX2_BUFS], - uint32_t *pMacI[NUM_AVX2_BUFS], + void *pMacI[NUM_AVX2_BUFS], const uint16_t lengthInBits[NUM_AVX2_BUFS], - const void * const job_in_lane[NUM_AVX2_BUFS]) + const void * const job_in_lane[NUM_AVX2_BUFS], + const uint64_t tag_size) { unsigned int i = 0; DECLARE_ALIGNED(ZucState8_t state, 64); @@ -874,7 +859,7 @@ void zuc256_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], DECLARE_ALIGNED(ZucKey8_t keys, 64); const uint8_t *pIn8[NUM_AVX2_BUFS] = {NULL}; uint32_t numKeyStr = 0; - uint8_t T[NUM_AVX2_BUFS*4]; + DECLARE_ALIGNED(uint8_t T[NUM_AVX2_BUFS*16], 32); const uint32_t keyStreamLengthInBits = KEYSTR_ROUND_LEN * 8; DECLARE_ALIGNED(uint32_t *pKeyStrArr[NUM_AVX2_BUFS], 32) = {NULL}; unsigned int allCommonBits; @@ -888,14 +873,7 @@ void zuc256_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], keys.pKeys[i] = pKey[i]; } - /* TODO: Handle 8 and 16-byte digest cases */ - asm_Zuc256Initialization_8_avx2(&keys, ivs, &state, 4); - - asm_ZucGenKeystream4B_8_avx2(&state, pKeyStrArr); - - /* Initialize the tag with the first 4 bytes of the keystream */ - for (i = 0; i < NUM_AVX2_BUFS; i++) - memcpy(&T[i], pKeyStrArr[i], 4); + asm_Zuc256Initialization_8_avx2(&keys, ivs, &state, T, tag_size); /* Generate 32 bytes at a time */ asm_ZucGenKeystream32B_8_avx2(&state, (uint32_t **)pKeyStrArr); @@ -907,27 +885,35 @@ void zuc256_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], while (remainCommonBits >= keyStreamLengthInBits) { remainCommonBits -= keyStreamLengthInBits; numKeyStr++; - /* Generate the next key stream 4 bytes or 32 bytes */ - if (!remainCommonBits && allCommonBits) - asm_ZucGenKeystream4B_8_avx2(&state, - (uint32_t **)pKeyStrArr); - else + /* Generate the next key stream 4/8/16 bytes or 32 bytes */ + if (!remainCommonBits && allCommonBits) { + if (tag_size == 4) + asm_ZucGenKeystream4B_8_avx2(&state, + pKeyStrArr); + else if (tag_size == 8) + asm_ZucGenKeystream8B_8_avx2(&state, + pKeyStrArr); + else + asm_ZucGenKeystream16B_8_avx2(&state, + pKeyStrArr); + } else asm_ZucGenKeystream32B_8_avx2(&state, (uint32_t **)pKeyStrArr); for (i = 0; i < NUM_AVX2_BUFS; i++) { - uint32_t *tag = (uint32_t *) &T[i*4]; + void *tag = (void *) &T[i*tag_size]; if (job_in_lane[i] == NULL) continue; - asm_Eia3Round32B_avx(tag, &keyStr[i][0], pIn8[i]); + asm_Eia3Round32B_avx(tag, &keyStr[i][0], pIn8[i], + 
tag_size); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } } /* Process each packet separately for the remaining bits */ for (i = 0; i < NUM_AVX2_BUFS; i++) { - uint32_t *tag = (uint32_t *) &T[i*4]; + void *tag = (void *) &T[i*tag_size]; if (job_in_lane[i] == NULL) continue; @@ -936,10 +922,15 @@ void zuc256_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], numKeyStr*keyStreamLengthInBits; uint32_t *keyStr32 = (uint32_t *) keyStr[i]; - /* If remaining bits are more than 160 bytes, we need to - * generate at least 4B more of keystream, so we need to copy - * the zuc state to single packet state first */ - if (remainBits > (5*32)) { + const uint32_t N = remainBits + ((uint32_t) tag_size << 3); + uint32_t L = ((N + 31) / ZUC_WORD_BITS); + + /* 8 KS words are generated already */ + L = (L > 8) ? (L - 8) : 0; + + /* Copy the ZUC state to single packet state, + * if more KS is needed */ + if (L > 0) { singlePktState.lfsrState[0] = state.lfsrState[0][i]; singlePktState.lfsrState[1] = state.lfsrState[1][i]; singlePktState.lfsrState[2] = state.lfsrState[2][i]; @@ -964,30 +955,31 @@ void zuc256_eia3_8_buffer_job_avx2(const void * const pKey[NUM_AVX2_BUFS], while (remainBits >= keyStreamLengthInBits) { remainBits -= keyStreamLengthInBits; - /* Generate the next key stream 4 bytes or 32 bytes */ - if (!remainBits) - asm_ZucGenKeystream_avx(&keyStr32[8], - &singlePktState, 1); - else + /* Generate the next key stream (32 bytes max) */ + if (L > 7) { asm_ZucGenKeystream32B_avx(&keyStr32[8], &singlePktState); - asm_Eia3Round32B_avx(tag, &keyStr32[0], pIn8[i]); + L -= 8; + } else { + asm_ZucGenKeystream_avx(&keyStr32[8], + &singlePktState, L); + L = 0; + } + asm_Eia3Round32B_avx(tag, &keyStr32[0], pIn8[i], + tag_size); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } - /* - * If remaining bits has more than 5 ZUC WORDS (double words), - * keystream needs to have another ZUC WORD (4B) - */ - - if (remainBits > (5 * 32)) + /* Generate final keystream if needed */ + if (L > 0) asm_ZucGenKeystream_avx(&keyStr32[8], - &singlePktState, 1); + &singlePktState, L); - asm_Eia3Remainder_avx(tag, keyStr32, pIn8[i], remainBits); + asm_Eia3Remainder_avx(tag, keyStr32, pIn8[i], remainBits, + 256, tag_size); /* save the final MAC-I result */ - memcpy(pMacI[i], tag, 4); + memcpy(pMacI[i], tag, tag_size); } #ifdef SAFE_DATA diff --git a/lib/avx2_t1/zuc_x8_avx2.asm b/lib/avx2_t1/zuc_x8_avx2.asm new file mode 100644 index 0000000000000000000000000000000000000000..20877dd04ca5d366a92a22316e72261234dc28c5 --- /dev/null +++ b/lib/avx2_t1/zuc_x8_avx2.asm @@ -0,0 +1,1575 @@ +;; +;; Copyright (c) 2020-2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. 
+;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +%include "include/os.asm" +%include "include/reg_sizes.asm" +%include "include/zuc_sbox.inc" +%include "include/transpose_avx2.asm" +%include "include/memcpy.asm" +%include "include/mb_mgr_datastruct.asm" +%include "include/cet.inc" + +%ifdef LINUX +%define arg1 rdi +%define arg2 rsi +%define arg3 rdx +%define arg4 rcx +%define arg5 r8 +%else +%define arg1 rcx +%define arg2 rdx +%define arg3 r8 +%define arg4 r9 +%define arg5 qword [rsp + 40] +%endif + +%define APPEND(a,b) a %+ b + +mksection .rodata +default rel + +align 32 +Ek_d: +dd 0x0044D700, 0x0026BC00, 0x00626B00, 0x00135E00, 0x00578900, 0x0035E200, 0x00713500, 0x0009AF00 +dd 0x004D7800, 0x002F1300, 0x006BC400, 0x001AF100, 0x005E2600, 0x003C4D00, 0x00789A00, 0x0047AC00 + +; Constants to be used to initialize the LFSR registers +; The tables contain four different sets of constants: +; 0-63 bytes: Encryption +; 64-127 bytes: Authentication with tag size = 4 +; 128-191 bytes: Authentication with tag size = 8 +; 192-255 bytes: Authentication with tag size = 16 +align 16 +EK256_d64: +dd 0x00220000, 0x002F0000, 0x00240000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 16 +EK256_EIA3_4: +dd 0x00220000, 0x002F0000, 0x00250000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 16 +EK256_EIA3_8: +dd 0x00230000, 0x002F0000, 0x00240000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 16 +EK256_EIA3_16: +dd 0x00230000, 0x002F0000, 0x00250000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 32 +shuf_mask_key: +dd 0x00FFFFFF, 0x01FFFFFF, 0x02FFFFFF, 0x03FFFFFF, 0x04FFFFFF, 0x05FFFFFF, 0x06FFFFFF, 0x07FFFFFF, +dd 0x08FFFFFF, 0x09FFFFFF, 0x0AFFFFFF, 0x0BFFFFFF, 0x0CFFFFFF, 0x0DFFFFFF, 0x0EFFFFFF, 0x0FFFFFFF, + +align 32 +shuf_mask_iv: +dd 0xFFFFFF00, 0xFFFFFF01, 0xFFFFFF02, 0xFFFFFF03, 0xFFFFFF04, 0xFFFFFF05, 0xFFFFFF06, 0xFFFFFF07, +dd 0xFFFFFF08, 0xFFFFFF09, 0xFFFFFF0A, 0xFFFFFF0B, 0xFFFFFF0C, 0xFFFFFF0D, 0xFFFFFF0E, 0xFFFFFF0F, + +align 16 +shuf_mask_iv_17_19: +db 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0x02, 0xFF + +align 16 +clear_iv_mask: +db 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00 + +align 16 +shuf_mask_iv_20_23: +db 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 
0x01, 0xFF, 0xFF, 0xFF, 0x02, 0xFF, 0xFF, 0xFF, 0x03, 0xFF + +align 32 +mask31: +dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, +dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, + +align 32 +swap_mask: +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c + +align 32 +S0_S1_shuf: +db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, +db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, + +align 32 +rev_S0_S1_shuf: +db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 +db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 + +align 32 +rot8_mod32: +db 0x03, 0x00, 0x01, 0x02, 0x07, 0x04, 0x05, 0x06, +db 0x0B, 0x08, 0x09, 0x0A, 0x0F, 0x0C, 0x0D, 0x0E +db 0x03, 0x00, 0x01, 0x02, 0x07, 0x04, 0x05, 0x06, +db 0x0B, 0x08, 0x09, 0x0A, 0x0F, 0x0C, 0x0D, 0x0E + +align 32 +rot16_mod32: +db 0x02, 0x03, 0x00, 0x01, 0x06, 0x07, 0x04, 0x05, +db 0x0A, 0x0B, 0x08, 0x09, 0x0E, 0x0F, 0x0C, 0x0D +db 0x02, 0x03, 0x00, 0x01, 0x06, 0x07, 0x04, 0x05, +db 0x0A, 0x0B, 0x08, 0x09, 0x0E, 0x0F, 0x0C, 0x0D + +align 32 +rot24_mod32: +db 0x01, 0x02, 0x03, 0x00, 0x05, 0x06, 0x07, 0x04, +db 0x09, 0x0A, 0x0B, 0x08, 0x0D, 0x0E, 0x0F, 0x0C +db 0x01, 0x02, 0x03, 0x00, 0x05, 0x06, 0x07, 0x04, +db 0x09, 0x0A, 0x0B, 0x08, 0x0D, 0x0E, 0x0F, 0x0C + +align 16 +broadcast_word: +db 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01 +db 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01 + +align 16 +all_threes: +dw 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003 + +align 16 +all_fffcs: +dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc + +align 16 +all_1fs: +dw 0x001f, 0x001f, 0x001f, 0x001f, 0x001f, 0x001f, 0x001f, 0x001f + +align 16 +all_20s: +dw 0x0020, 0x0020, 0x0020, 0x0020, 0x0020, 0x0020, 0x0020, 0x0020 + +mksection .text +align 64 + +%define OFS_R1 (16*(2*16)) +%define OFS_R2 (OFS_R1 + (2*16)) +%define OFS_X0 (OFS_R2 + (2*16)) +%define OFS_X1 (OFS_X0 + (2*16)) +%define OFS_X2 (OFS_X1 + (2*16)) + +%ifidn __OUTPUT_FORMAT__, win64 + %define XMM_STORAGE 16*10 + %define GP_STORAGE 8*8 +%else + %define XMM_STORAGE 0 + %define GP_STORAGE 6*8 +%endif + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 + +%ifidn __OUTPUT_FORMAT__, win64 + ; xmm6:xmm15 need to be maintained for Windows + vmovdqa [rsp + 0*16], xmm6 + vmovdqa [rsp + 1*16], xmm7 + vmovdqa [rsp + 2*16], xmm8 + vmovdqa [rsp + 3*16], xmm9 + vmovdqa [rsp + 4*16], xmm10 + vmovdqa [rsp + 5*16], xmm11 + vmovdqa [rsp + 6*16], xmm12 + vmovdqa [rsp + 7*16], xmm13 + vmovdqa [rsp + 8*16], xmm14 + vmovdqa [rsp + 9*16], xmm15 + mov [rsp + GP_OFFSET + 48], rdi + mov [rsp + GP_OFFSET + 56], rsi +%endif + mov [rsp + GP_OFFSET], r12 + mov [rsp + GP_OFFSET + 8], r13 + mov [rsp + GP_OFFSET + 16], r14 + mov [rsp + GP_OFFSET + 24], r15 + mov [rsp + GP_OFFSET + 32], rbx + mov [rsp + GP_OFFSET + 40], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + +%ifidn __OUTPUT_FORMAT__, win64 + vmovdqa xmm6, [rsp + 0*16] + vmovdqa xmm7, [rsp + 1*16] + vmovdqa xmm8, [rsp + 2*16] + vmovdqa xmm9, [rsp + 3*16] + vmovdqa xmm10, [rsp + 4*16] + vmovdqa xmm11, [rsp + 5*16] + vmovdqa xmm12, [rsp + 6*16] + vmovdqa xmm13, [rsp + 7*16] + vmovdqa xmm14, 
[rsp + 8*16] + vmovdqa xmm15, [rsp + 9*16] + mov rdi, [rsp + GP_OFFSET + 48] + mov rsi, [rsp + GP_OFFSET + 56] +%endif + mov r12, [rsp + GP_OFFSET] + mov r13, [rsp + GP_OFFSET + 8] + mov r14, [rsp + GP_OFFSET + 16] + mov r15, [rsp + GP_OFFSET + 24] + mov rbx, [rsp + GP_OFFSET + 32] + mov rsp, [rsp + GP_OFFSET + 40] +%endmacro + +; +; Transpose 4 YMM registers, double word granularity +; +%macro TRANSPOSE4_U32 8 +%define %%R0 %1 ; [in/out] Input / Output row 0 +%define %%R1 %2 ; [in/out] Input / Output row 1 +%define %%R2 %3 ; [in/out] Input / Output row 2 +%define %%R3 %4 ; [in/out] Input / Output row 3 +%define %%T0 %5 ; [clobbered] Temporary YMM register +%define %%T1 %6 ; [clobbered] Temporary YMM register +%define %%T2 %7 ; [clobbered] Temporary YMM register +%define %%T3 %8 ; [clobbered] Temporary YMM register + + vshufps %%T0, %%R0, %%R1, 0x44 ; T0 = {b5 b4 a5 a4 b1 b0 a1 a0} + vshufps %%R0, %%R0, %%R1, 0xEE ; R0 = {b7 b6 a7 a6 b3 b2 a3 a2} + vshufps %%T1, %%R2, %%R3, 0x44 ; T1 = {d5 d4 c5 c4 d1 d0 c1 c0} + vshufps %%R2, %%R2, %%R3, 0xEE ; R2 = {d7 d6 c7 c6 d3 d2 c3 c2} + + vshufps %%T3, %%T0, %%T1, 0xDD ; T3 = {d5 c5 b5 a5 d1 c1 b1 a1} + vshufps %%T2, %%R0, %%R2, 0x88 ; T2 = {d6 c6 b6 a6 d2 c2 b2 a2} + vshufps %%R0, %%R0, %%R2, 0xDD ; R0 = {d7 c7 b7 a7 d3 c3 b3 a3} + vshufps %%T0, %%T0, %%T1, 0x88 ; T0 = {d4 c4 b4 a4 d0 c0 b0 a0} + + vperm2i128 %%R2, %%T0, %%T3, 0x31 ; {d5 c5 b5 a5 d4 c4 b4 a4} + vperm2i128 %%R1, %%T2, %%R0, 0x20 ; {d3 c3 b3 a3 d2 c2 b2 a2} + vperm2i128 %%R3, %%T2, %%R0, 0x31 ; {d7 c7 b7 a7 d6 c6 b6 a6} + vperm2i128 %%R0, %%T0, %%T3, 0x20 ; {d1 c1 b1 a1 d0 c0 b0 a0} +%endmacro + +; This macro reorder the LFSR registers +; after N rounds (1 <= N <= 15), since the registers +; are shifted every round +; +; The macro clobbers YMM0-15 +; +%macro REORDER_LFSR 2 +%define %%STATE %1 ; [in] Pointer to LFSR state +%define %%NUM_ROUNDS %2 ; [immediate] Number of key generation rounds + +%if %%NUM_ROUNDS != 16 +%assign i 0 +%rep 16 + vmovdqa APPEND(ymm,i), [%%STATE + 32*i] +%assign i (i+1) +%endrep + +%assign i 0 +%assign j %%NUM_ROUNDS +%rep 16 + vmovdqa [%%STATE + 32*i], APPEND(ymm,j) +%assign i (i+1) +%assign j ((j+1) % 16) +%endrep +%endif ;; %%NUM_ROUNDS != 16 + +%endmacro + +; +; Calculates X0-X3 from LFSR registers +; +%macro BITS_REORG8 12-13 +%define %%STATE %1 ; [in] ZUC state +%define %%ROUND_NUM %2 ; [in] Round number +%define %%LFSR_0 %3 ; [clobbered] LFSR_0 +%define %%LFSR_2 %4 ; [clobbered] LFSR_2 +%define %%LFSR_5 %5 ; [clobbered] LFSR_5 +%define %%LFSR_7 %6 ; [clobbered] LFSR_7 +%define %%LFSR_9 %7 ; [clobbered] LFSR_9 +%define %%LFSR_11 %8 ; [clobbered] LFSR_11 +%define %%LFSR_14 %9 ; [clobbered] LFSR_14 +%define %%LFSR_15 %10 ; [clobbered] LFSR_15 +%define %%YTMP1 %11 ; [clobbered] Temporary YMM register +%define %%YTMP2 %12 ; [clobbered] Temporary YMM register +%define %%X3 %13 ; [out] YMM register containing X3 of all lanes (only for work mode) + vmovdqa %%LFSR_15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_14, [%%STATE + ((14 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_11, [%%STATE + ((11 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_9, [%%STATE + (( 9 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_7, [%%STATE + (( 7 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_5, [%%STATE + (( 5 + %%ROUND_NUM) % 16)*32] +%if (%0 == 13) ;Only needed when generating X3 (for "working" mode) + vmovdqa %%LFSR_2, [%%STATE + (( 2 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*32] +%endif + + vpxor %%YTMP1, %%YTMP1 + vpslld %%LFSR_15, 1 + 
vpblendw %%YTMP2, %%LFSR_14, %%YTMP1, 0xAA + vpblendw %%LFSR_15, %%LFSR_15, %%YTMP2, 0x55 + + vmovdqa [%%STATE + OFS_X0], %%LFSR_15 ; BRC_X0 + vpslld %%LFSR_11, 16 + vpsrld %%LFSR_9, 15 + vpor %%LFSR_11, %%LFSR_9 + vmovdqa [%%STATE + OFS_X1], %%LFSR_11 ; BRC_X1 + vpslld %%LFSR_7, 16 + vpsrld %%LFSR_5, 15 + vpor %%LFSR_7, %%LFSR_5 + vmovdqa [%%STATE + OFS_X2], %%LFSR_7 ; BRC_X2 +%if (%0 == 13) + vpslld %%LFSR_2, 16 + vpsrld %%LFSR_0, 15 + vpor %%X3, %%LFSR_2, %%LFSR_0 +%endif +%endmacro + +; +; Rotate dwords by N_BITS +; +%macro ROT_MOD32 4 +%define %%OUT %1 ; [out] YMM register +%define %%IN %2 ; [in] YMM register +%define %%YTMP %3 ; [clobbered] YMM register +%define %%N_BITS %4 ; [constant] Number of bits + +%if (%%N_BITS == 8) + vpshufb %%OUT, %%IN, [rel rot8_mod32] +%elif (%%N_BITS == 16) + vpshufb %%OUT, %%IN, [rel rot16_mod32] +%elif (%%N_BITS == 24) + vpshufb %%OUT, %%IN, [rel rot24_mod32] +%else + vpslld %%OUT, %%IN, %%N_BITS + vpsrld %%YTMP, %%IN, (32 - %%N_BITS) + vpor %%OUT, %%YTMP +%endif +%endmacro + +; +; Updates R1-R2, using X0-X3 and generates W (if needed) +; +%macro NONLIN_FUN8 8-9 +%define %%STATE %1 ; [in] ZUC state +%define %%YTMP1 %2 ; [clobbered] Temporary YMM register +%define %%YTMP2 %3 ; [clobbered] Temporary YMM register +%define %%YTMP3 %4 ; [clobbered] Temporary YMM register +%define %%YTMP4 %5 ; [clobbered] Temporary YMM register +%define %%YTMP5 %6 ; [clobbered] Temporary YMM register +%define %%YTMP6 %7 ; [clobbered] Temporary YMM register +%define %%YTMP7 %8 ; [clobbered] Temporary YMM register +%define %%W %9 ; [out] ZMM register to contain W for all lanes + +%if (%0 == 9) + vmovdqa %%W, [%%STATE + OFS_X0] + vpxor %%W, [%%STATE + OFS_R1] + vpaddd %%W, [%%STATE + OFS_R2] ; W = (BRC_X0 ^ F_R1) + F_R2 +%endif + + vmovdqa %%YTMP1, [%%STATE + OFS_R1] + vmovdqa %%YTMP2, [%%STATE + OFS_R2] + vpaddd %%YTMP1, [%%STATE + OFS_X1] ; W1 = F_R1 + BRC_X1 + vpxor %%YTMP2, [%%STATE + OFS_X2] ; W2 = F_R2 ^ BRC_X2 + + vpslld %%YTMP3, %%YTMP1, 16 + vpsrld %%YTMP4, %%YTMP1, 16 + vpslld %%YTMP5, %%YTMP2, 16 + vpsrld %%YTMP6, %%YTMP2, 16 + vpor %%YTMP1, %%YTMP3, %%YTMP6 + vpor %%YTMP2, %%YTMP4, %%YTMP5 + + ROT_MOD32 %%YTMP3, %%YTMP1, %%YTMP7, 2 + ROT_MOD32 %%YTMP4, %%YTMP1, %%YTMP7, 10 + ROT_MOD32 %%YTMP5, %%YTMP1, %%YTMP7, 18 + ROT_MOD32 %%YTMP6, %%YTMP1, %%YTMP7, 24 + vpxor %%YTMP1, %%YTMP3 + vpxor %%YTMP1, %%YTMP4 + vpxor %%YTMP1, %%YTMP5 + vpxor %%YTMP1, %%YTMP6 ; XMM1 = U = L1(P) + + ROT_MOD32 %%YTMP3, %%YTMP2, %%YTMP7, 8 + ROT_MOD32 %%YTMP4, %%YTMP2, %%YTMP7, 14 + ROT_MOD32 %%YTMP5, %%YTMP2, %%YTMP7, 22 + ROT_MOD32 %%YTMP6, %%YTMP2, %%YTMP7, 30 + vpxor %%YTMP2, %%YTMP3 + vpxor %%YTMP2, %%YTMP4 + vpxor %%YTMP2, %%YTMP5 + vpxor %%YTMP2, %%YTMP6 ; XMM2 = V = L2(Q) + + ; Shuffle U and V to have all S0 lookups in %%YTMP1 and all S1 lookups in %%YTMP2 + + ; Compress all S0 and S1 input values in each register + vpshufb %%YTMP1, [rel S0_S1_shuf] ; S0: Bytes 0-7, S1: Bytes 8-15 + vpshufb %%YTMP2, [rel S0_S1_shuf] ; S0: Bytes 0-7, S1: Bytes 8-15 + + vshufpd %%YTMP3, %%YTMP1, %%YTMP2, 0x00 ; All S0 input values + vshufpd %%YTMP4, %%YTMP2, %%YTMP1, 0xFF ; All S1 input values + + ; Compute S0 and S1 values + S0_comput_AVX2 %%YTMP3, %%YTMP1, %%YTMP2 + S1_comput_AVX2 %%YTMP4, %%YTMP1, %%YTMP2, %%YTMP5 + + ; Need to shuffle back %%YTMP1 & %%YTMP2 before storing output + ; (revert what was done before S0 and S1 computations) + vshufpd %%YTMP1, %%YTMP3, %%YTMP4, 0xAA + vshufpd %%YTMP2, %%YTMP3, %%YTMP4, 0x55 + + vpshufb %%YTMP1, [rel rev_S0_S1_shuf] + vpshufb %%YTMP2, [rel rev_S0_S1_shuf] 
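+        ;; %%YTMP1 = S(L1(W1_L || W2_H)) and %%YTMP2 = S(L2(W2_L || W1_H)) at
+        ;; this point, i.e. the updated F_R1/F_R2 values for all 8 lanes;
+        ;; they are stored back into the state below for the next round.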
+ + vmovdqa [%%STATE + OFS_R1], %%YTMP1 + vmovdqa [%%STATE + OFS_R2], %%YTMP2 +%endmacro + +; +; Stores 32 bytes of keystream for 8 lanes +; +%macro STORE32B_KSTR8 13 +%define %%DATA32B_L0 %1 ; [in] 32 bytes of keystream for lane 0 +%define %%DATA32B_L1 %2 ; [in] 32 bytes of keystream for lane 1 +%define %%DATA32B_L2 %3 ; [in] 32 bytes of keystream for lane 2 +%define %%DATA32B_L3 %4 ; [in] 32 bytes of keystream for lane 3 +%define %%DATA32B_L4 %5 ; [in] 32 bytes of keystream for lane 4 +%define %%DATA32B_L5 %6 ; [in] 32 bytes of keystream for lane 5 +%define %%DATA32B_L6 %7 ; [in] 32 bytes of keystream for lane 6 +%define %%DATA32B_L7 %8 ; [in] 32 bytes of keystream for lane 7 +%define %%OUT_PTRS %9 ; [in] Keystream pointers for all 8 lanes +%define %%TMP1 %10 ; [clobbered] Temporary GP register +%define %%TMP2 %11 ; [clobbered] Temporary GP register +%define %%TMP3 %12 ; [clobbered] Temporary GP register +%define %%TMP4 %13 ; [clobbered] Temporary GP register + + mov %%TMP1, [%%OUT_PTRS] + mov %%TMP2, [%%OUT_PTRS + 8] + mov %%TMP3, [%%OUT_PTRS + 16] + mov %%TMP4, [%%OUT_PTRS + 24] + vmovdqu [%%TMP1], %%DATA32B_L0 + vmovdqu [%%TMP2], %%DATA32B_L1 + vmovdqu [%%TMP3], %%DATA32B_L2 + vmovdqu [%%TMP4], %%DATA32B_L3 + + mov %%TMP1, [%%OUT_PTRS + 32] + mov %%TMP2, [%%OUT_PTRS + 40] + mov %%TMP3, [%%OUT_PTRS + 48] + mov %%TMP4, [%%OUT_PTRS + 56] + vmovdqu [%%TMP1], %%DATA32B_L4 + vmovdqu [%%TMP2], %%DATA32B_L5 + vmovdqu [%%TMP3], %%DATA32B_L6 + vmovdqu [%%TMP4], %%DATA32B_L7 + +%endmacro + +; +; Stores 4 bytes of keystream for 8 lanes +; +%macro STORE4B_KSTR8 6 +%define %%DATA4B_L07 %1 ; [in] 4 bytes of keystream for lanes 0-7 +%define %%OUT_PTRS %2 ; [in] Keystream pointers for all 8 lanes +%define %%TMP1 %3 ; [clobbered] Temporary GP register +%define %%TMP2 %4 ; [clobbered] Temporary GP register +%define %%TMP3 %5 ; [clobbered] Temporary GP register +%define %%TMP4 %6 ; [clobbered] Temporary GP register + + mov %%TMP1, [%%OUT_PTRS] + mov %%TMP2, [%%OUT_PTRS + 8] + mov %%TMP3, [%%OUT_PTRS + 16] + mov %%TMP4, [%%OUT_PTRS + 24] + vpextrd [%%TMP4], XWORD(%%DATA4B_L07), 3 + vpextrd [%%TMP3], XWORD(%%DATA4B_L07), 2 + vpextrd [%%TMP2], XWORD(%%DATA4B_L07), 1 + vmovd [%%TMP1], XWORD(%%DATA4B_L07) + mov DWORD(%%TMP1), 4 + add [%%OUT_PTRS], %%TMP1 + add [%%OUT_PTRS + 8], %%TMP1 + add [%%OUT_PTRS + 16], %%TMP1 + add [%%OUT_PTRS + 24], %%TMP1 + + vextracti128 XWORD(%1), %1, 1 + mov %%TMP1, [%%OUT_PTRS + 32] + mov %%TMP2, [%%OUT_PTRS + 40] + mov %%TMP3, [%%OUT_PTRS + 48] + mov %%TMP4, [%%OUT_PTRS + 56] + vpextrd [%%TMP4], XWORD(%%DATA4B_L07), 3 + vpextrd [%%TMP3], XWORD(%%DATA4B_L07), 2 + vpextrd [%%TMP2], XWORD(%%DATA4B_L07), 1 + vmovd [%%TMP1], XWORD(%%DATA4B_L07) + mov DWORD(%%TMP1), 4 + add [%%OUT_PTRS + 32], %%TMP1 + add [%%OUT_PTRS + 40], %%TMP1 + add [%%OUT_PTRS + 48], %%TMP1 + add [%%OUT_PTRS + 56], %%TMP1 + +%endmacro + +; +; Add two 32-bit args and reduce mod (2^31-1) +; +%macro ADD_MOD31 4 +%define %%IN_OUT %1 ; [in/out] YMM register with first input and output +%define %%IN2 %2 ; [in] YMM register with second input +%define %%YTMP %3 ; [clobbered] Temporary YMM register +%define %%MASK31 %4 ; [in] YMM register containing 0x7FFFFFFF's in all dwords + vpaddd %%IN_OUT, %%IN2 + vpsrld %%YTMP, %%IN_OUT, 31 + vpand %%IN_OUT, %%MASK31 + vpaddd %%IN_OUT, %%YTMP +%endmacro + +; +; Rotate (mult by pow of 2) 32-bit arg and reduce mod (2^31-1) +; +%macro ROT_MOD31 4 +%define %%IN_OUT %1 ; [in/out] YMM register with input and output +%define %%YTMP %2 ; [clobbered] Temporary YMM register +%define %%MASK31 
%3 ; [in] YMM register containing 0x7FFFFFFF's in all dwords +%define %%N_BITS %4 ; [immediate] Number of bits to rotate for each dword + + vpslld %%YTMP, %%IN_OUT, %%N_BITS + vpsrld %%IN_OUT, (31 - %%N_BITS) + + vpor %%IN_OUT, %%YTMP + vpand %%IN_OUT, %%MASK31 +%endmacro + +; +; Update LFSR registers, calculating S_16 +; +; S_16 = [ 2^15*S_15 + 2^17*S_13 + 2^21*S_10 + 2^20*S_4 + (1 + 2^8)*S_0 ] mod (2^31 - 1) +; If init mode, add W to the calculation above. +; S_16 -> S_15 for next round +; +%macro LFSR_UPDT8 11 +%define %%STATE %1 ; [in] ZUC state +%define %%ROUND_NUM %2 ; [in] Round number +%define %%LFSR_0 %3 ; [clobbered] LFSR_0 (YMM) +%define %%LFSR_4 %4 ; [clobbered] LFSR_4 (YMM) +%define %%LFSR_10 %5 ; [clobbered] LFSR_10 (YMM) +%define %%LFSR_13 %6 ; [clobbered] LFSR_13 (YMM) +%define %%LFSR_15 %7 ; [clobbered] LFSR_15 (YMM) +%define %%YTMP %8 ; [clobbered] Temporary YMM register +%define %%MASK_31 %9 ; [in] Mask_31 +%define %%W %10 ; [in/clobbered] In init mode, contains W for all 4 lanes +%define %%MODE %11 ; [constant] "init" / "work" mode + + vmovdqa %%LFSR_0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_4, [%%STATE + (( 4 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_10, [%%STATE + ((10 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_13, [%%STATE + ((13 + %%ROUND_NUM) % 16)*32] + vmovdqa %%LFSR_15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*32] + + ; Calculate LFSR feedback (S_16) + + ; In Init mode, W is added to the S_16 calculation +%ifidn %%MODE, init + ADD_MOD31 %%W, %%LFSR_0, %%YTMP, %%MASK_31 +%else + vmovdqa %%W, %%LFSR_0 +%endif + ROT_MOD31 %%LFSR_0, %%YTMP, %%MASK_31, 8 + ADD_MOD31 %%W, %%LFSR_0, %%YTMP, %%MASK_31 + ROT_MOD31 %%LFSR_4, %%YTMP, %%MASK_31, 20 + ADD_MOD31 %%W, %%LFSR_4, %%YTMP, %%MASK_31 + ROT_MOD31 %%LFSR_10, %%YTMP, %%MASK_31, 21 + ADD_MOD31 %%W, %%LFSR_10, %%YTMP, %%MASK_31 + ROT_MOD31 %%LFSR_13, %%YTMP, %%MASK_31, 17 + ADD_MOD31 %%W, %%LFSR_13, %%YTMP, %%MASK_31 + ROT_MOD31 %%LFSR_15, %%YTMP, %%MASK_31, 15 + ADD_MOD31 %%W, %%LFSR_15, %%YTMP, %%MASK_31 + + ; Store LFSR_S16 + vmovdqa [%%STATE + (( 0 + %%ROUND_NUM) % 16)*32], %%W +%endmacro + +; +; Initialize LFSR registers for a single lane, for ZUC-128 +; +; This macro initializes 8 LFSR registers at time. +; so it needs to be called twice. +; +; From spec, s_i (LFSR) registers need to be loaded as follows: +; +; For 0 <= i <= 15, let s_i= k_i || d_i || iv_i. +; Where k_i is each byte of the key, d_i is a 15-bit constant +; and iv_i is each byte of the IV. 
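+; In the 31-bit LFSR word this means k_i occupies bits 30:23, d_i occupies
+; bits 22:8 and iv_i occupies bits 7:0. The key/IV shuffle masks place k_i in
+; the top byte of each dword (dropped into bits 30:23 by the vpsrld by 1) and
+; iv_i in the bottom byte, while the Ek_d constants supply d_i pre-shifted by 8.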
+; +%macro INIT_LFSR_128 7 +%define %%KEY %1 ;; [in] Key pointer +%define %%IV %2 ;; [in] IV pointer +%define %%SHUF_KEY %3 ;; [in] Shuffle key mask +%define %%SHUF_IV %4 ;; [in] Shuffle key mask +%define %%EKD_MASK %5 ;; [in] Shuffle key mask +%define %%LFSR %6 ;; [out] YMM register to contain initialized LFSR regs +%define %%YTMP %7 ;; [clobbered] YMM temporary register + + vbroadcastf128 %%LFSR, [%%KEY] + vbroadcastf128 %%YTMP, [%%IV] + vpshufb %%LFSR, %%SHUF_KEY + vpsrld %%LFSR, 1 + vpshufb %%YTMP, %%SHUF_IV + vpor %%LFSR, %%YTMP + vpor %%LFSR, %%EKD_MASK + +%endmacro + +; +; Initialize LFSR registers for a single lane, for ZUC-256 +; +%macro INIT_LFSR_256 8 +%define %%KEY %1 ;; [in] Key pointer +%define %%IV %2 ;; [in] IV pointer +%define %%LFSR0_7 %3 ;; [out] YMM register to contain initialized LFSR regs 0-7 +%define %%LFSR8_15 %4 ;; [out] YMM register to contain initialized LFSR regs 8-15 +%define %%XTMP %5 ;; [clobbered] XMM temporary register +%define %%XTMP2 %6 ;; [clobbered] XMM temporary register +%define %%TMP %7 ;; [clobbered] GP temporary register +%define %%TAG_SIZE %8 ;; [in] Tag size (0, 4, 8 or 16 bytes) + +%if %%TAG_SIZE == 0 +%define %%CONSTANTS rel EK256_d64 +%elif %%TAG_SIZE == 4 +%define %%CONSTANTS rel EK256_EIA3_4 +%elif %%TAG_SIZE == 8 +%define %%CONSTANTS rel EK256_EIA3_8 +%elif %%TAG_SIZE == 16 +%define %%CONSTANTS rel EK256_EIA3_16 +%endif + + ; s0 - s7 + vpxor %%LFSR0_7, %%LFSR0_7 + vpinsrb XWORD(%%LFSR0_7), [%%KEY], 3 ; s0 + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 1], 7 ; s1 + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 2], 11 ; s2 + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 3], 15 ; s3 + + vpsrld XWORD(%%LFSR0_7), 1 + + vpor XWORD(%%LFSR0_7), [%%CONSTANTS] ; s0 - s3 + + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 21], 1 ; s0 + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 16], 0 ; s0 + + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 22], 5 ; s1 + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 17], 4 ; s1 + + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 23], 9 ; s2 + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 18], 8 ; s2 + + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 24], 13 ; s3 + vpinsrb XWORD(%%LFSR0_7), [%%KEY + 19], 12 ; s3 + + vpxor %%XTMP, %%XTMP + vpinsrb %%XTMP, [%%KEY + 4], 3 ; s4 + vpinsrb %%XTMP, [%%IV], 7 ; s5 + vpinsrb %%XTMP, [%%IV + 1], 11 ; s6 + vpinsrb %%XTMP, [%%IV + 10], 15 ; s7 + + vpsrld %%XTMP, 1 + + vpinsrb %%XTMP, [%%KEY + 25], 1 ; s4 + vpinsrb %%XTMP, [%%KEY + 20], 0 ; s4 + + vpinsrb %%XTMP, [%%KEY + 5], 5 ; s5 + vpinsrb %%XTMP, [%%KEY + 26], 4 ; s5 + + vpinsrb %%XTMP, [%%KEY + 6], 9 ; s6 + vpinsrb %%XTMP, [%%KEY + 27], 8 ; s6 + + vpinsrb %%XTMP, [%%KEY + 7], 13 ; s7 + vpinsrb %%XTMP, [%%IV + 2], 12 ; s7 + + vpor %%XTMP, [%%CONSTANTS + 16] ; s4 - s7 + + vmovd %%XTMP2, [%%IV + 17] + vpshufb %%XTMP2, [rel shuf_mask_iv_17_19] + vpand %%XTMP2, [rel clear_iv_mask] + + vpor %%XTMP, %%XTMP2 + + vinserti128 %%LFSR0_7, %%XTMP, 1 + + ; s8 - s15 + vpxor %%LFSR8_15, %%LFSR8_15 + vpinsrb XWORD(%%LFSR8_15), [%%KEY + 8], 3 ; s8 + vpinsrb XWORD(%%LFSR8_15), [%%KEY + 9], 7 ; s9 + vpinsrb XWORD(%%LFSR8_15), [%%IV + 5], 11 ; s10 + vpinsrb XWORD(%%LFSR8_15), [%%KEY + 11], 15 ; s11 + + vpsrld XWORD(%%LFSR8_15), 1 + + vpinsrb XWORD(%%LFSR8_15), [%%IV + 3], 1 ; s8 + vpinsrb XWORD(%%LFSR8_15), [%%IV + 11], 0 ; s8 + + vpinsrb XWORD(%%LFSR8_15), [%%IV + 12], 5 ; s9 + vpinsrb XWORD(%%LFSR8_15), [%%IV + 4], 4 ; s9 + + vpinsrb XWORD(%%LFSR8_15), [%%KEY + 10], 9 ; s10 + vpinsrb XWORD(%%LFSR8_15), [%%KEY + 28], 8 ; s10 + + vpinsrb XWORD(%%LFSR8_15), [%%IV + 6], 13 ; s11 + vpinsrb XWORD(%%LFSR8_15), [%%IV + 13], 12 ; s11 + + vpor XWORD(%%LFSR8_15), 
[%%CONSTANTS + 32] ; s8 - s11 + + vmovd %%XTMP, [%%IV + 20] + vpshufb %%XTMP, [rel shuf_mask_iv_20_23] + vpand %%XTMP, [rel clear_iv_mask] + + vpor XWORD(%%LFSR8_15), %%XTMP + + vpxor %%XTMP, %%XTMP + vpinsrb %%XTMP, [%%KEY + 12], 3 ; s12 + vpinsrb %%XTMP, [%%KEY + 13], 7 ; s13 + vpinsrb %%XTMP, [%%KEY + 14], 11 ; s14 + vpinsrb %%XTMP, [%%KEY + 15], 15 ; s15 + + vpsrld %%XTMP, 1 + + vpinsrb %%XTMP, [%%IV + 7], 1 ; s12 + vpinsrb %%XTMP, [%%IV + 14], 0 ; s12 + + vpinsrb %%XTMP, [%%IV + 15], 5 ; s13 + vpinsrb %%XTMP, [%%IV + 8], 4 ; s13 + + vpinsrb %%XTMP, [%%IV + 16], 9 ; s14 + vpinsrb %%XTMP, [%%IV + 9], 8 ; s14 + + vpinsrb %%XTMP, [%%KEY + 30], 13 ; s15 + vpinsrb %%XTMP, [%%KEY + 29], 12 ; s15 + + vpor %%XTMP, [%%CONSTANTS + 48] ; s12 - s15 + + movzx DWORD(%%TMP), byte [%%IV + 24] + and DWORD(%%TMP), 0x0000003f + shl DWORD(%%TMP), 16 + vmovd %%XTMP2, DWORD(%%TMP) + + movzx DWORD(%%TMP), byte [%%KEY + 31] + shl DWORD(%%TMP), 12 + and DWORD(%%TMP), 0x000f0000 ; high nibble of K_31 + vpinsrd %%XTMP2, DWORD(%%TMP), 2 + + movzx DWORD(%%TMP), byte [%%KEY + 31] + shl DWORD(%%TMP), 16 + and DWORD(%%TMP), 0x000f0000 ; low nibble of K_31 + vpinsrd %%XTMP2, DWORD(%%TMP), 3 + + vpor %%XTMP, %%XTMP2 + vinserti128 %%LFSR8_15, %%XTMP, 1 +%endmacro + +%macro ZUC_INIT_8 2-3 +%define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) +%define %%TAG_SIZE %2 ; [in] Tag size (0 (for cipher), 4, 8 or 16) +%define %%TAGS %3 ; [in] Array of temporary tags + +%define pKe arg1 +%define pIv arg2 +%define pState arg3 + +%define %%YTMP1 ymm0 +%define %%YTMP2 ymm1 +%define %%YTMP3 ymm2 +%define %%YTMP4 ymm3 +%define %%YTMP5 ymm4 +%define %%YTMP6 ymm5 +%define %%YTMP7 ymm6 +%define %%YTMP8 ymm7 +%define %%YTMP9 ymm8 +%define %%YTMP10 ymm9 +%define %%YTMP11 ymm10 +%define %%YTMP12 ymm11 +%define %%YTMP13 ymm12 +%define %%YTMP14 ymm13 +%define %%YTMP15 ymm14 +%define %%YTMP16 ymm15 + +%define %%W %%YTMP10 +%define %%X3 %%YTMP11 +%define %%KSTR1 %%YTMP12 +%define %%KSTR2 %%YTMP13 +%define %%KSTR3 %%YTMP14 +%define %%KSTR4 %%YTMP15 +%define %%MASK_31 %%YTMP16 + + FUNC_SAVE + + ; Zero out R1/R2 + vpxor %%YTMP1, %%YTMP1 + vmovdqa [pState + OFS_R1], %%YTMP1 + vmovdqa [pState + OFS_R2], %%YTMP1 + + ;;; Initialize all LFSR registers in two steps: + ;;; first, registers 0-7, then registers 8-15 + +%if %%KEY_SIZE == 128 +%assign %%OFF 0 +%rep 2 + ; Set read-only registers for shuffle masks for key, IV and Ek_d for 8 registers + vmovdqa %%YTMP13, [rel shuf_mask_key + %%OFF] + vmovdqa %%YTMP14, [rel shuf_mask_iv + %%OFF] + vmovdqa %%YTMP15, [rel Ek_d + %%OFF] + + ; Set 8xLFSR registers for all packets +%assign %%I 1 +%assign %%OFF_PTR 0 +%rep 8 + mov r9, [pKe + %%OFF_PTR] ; Load Key N pointer + lea r10, [pIv + 4*%%OFF_PTR] ; Load IV N pointer + INIT_LFSR_128 r9, r10, %%YTMP13, %%YTMP14, %%YTMP15, APPEND(%%YTMP, %%I), %%YTMP12 +%assign %%I (%%I + 1) +%assign %%OFF_PTR (%%OFF_PTR + 8) +%endrep + + ; Store 8xLFSR registers in memory (reordering first, + ; so all SX registers are together) + TRANSPOSE8_U32 %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, %%YTMP7, %%YTMP8, %%YTMP9, %%YTMP10 + +%assign %%I 1 +%rep 8 + vmovdqa [pState + 8*%%OFF + 32*(%%I-1)], APPEND(%%YTMP, %%I) +%assign %%I (%%I+1) +%endrep + +%assign %%OFF (%%OFF + 32) +%endrep +%else ;; %%KEY_SIZE == 256 + + ;;; Initialize all LFSR registers +%assign %%OFF 0 +%rep 8 + ;; Load key and IV for each packet + mov r15, [pKe + %%OFF] + lea r10, [pIv + 4*%%OFF] ; Load IV N pointer + + ; Initialize S0-15 for each packet + INIT_LFSR_256 r15, r10, %%YTMP1, %%YTMP2, 
XWORD(%%YTMP3), XWORD(%%YTMP4), r11, %%TAG_SIZE + + vmovdqa [pState + 4*%%OFF], %%YTMP1 + vmovdqa [pState + 256 + 4*%%OFF], %%YTMP2 + +%assign %%OFF (%%OFF + 8) +%endrep + + ; Read, transpose and store, so all S_X from the 8 packets are in the same register +%assign %%OFF 0 +%rep 2 + +%assign %%I 1 +%rep 8 + vmovdqa APPEND(%%YTMP, %%I), [pState + 32*(%%I-1) + %%OFF] +%assign %%I (%%I+1) +%endrep + + TRANSPOSE8_U32 %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, %%YTMP7, %%YTMP8, %%YTMP9, %%YTMP10 + +%assign %%I 1 +%rep 8 + vmovdqa [pState + 32*(%%I-1) + %%OFF], APPEND(%%YTMP, %%I) +%assign %%I (%%I+1) +%endrep + +%assign %%OFF (%%OFF + 256) +%endrep +%endif ;; %%KEY_SIZE == 256 + + ; Load read-only registers + vmovdqa %%MASK_31, [rel mask31] + + ; Shift LFSR 32-times, update state variables +%assign %%N 0 +%rep 32 + BITS_REORG8 pState, %%N, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, \ + %%YTMP6, %%YTMP7, %%YTMP8, %%YTMP9, %%YTMP10 + NONLIN_FUN8 pState, %%YTMP1, %%YTMP2, %%YTMP3, \ + %%YTMP4, %%YTMP5, %%YTMP6, %%YTMP7, %%W + vpsrld %%W, 1 ; Shift out LSB of W + LFSR_UPDT8 pState, %%N, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, \ + %%MASK_31, %%W, init ; W used in LFSR update +%assign %%N %%N+1 +%endrep + + ; And once more, initial round from keygen phase = 33 times + BITS_REORG8 pState, 0, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, \ + %%YTMP6, %%YTMP7, %%YTMP8, %%YTMP9, %%YTMP10 + NONLIN_FUN8 pState, %%YTMP1, %%YTMP2, %%YTMP3, \ + %%YTMP4, %%YTMP5, %%YTMP6, %%YTMP7, %%W + LFSR_UPDT8 pState, 0, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, \ + %%MASK_31, %%YTMP8, work + + ; Generate extra 4, 8 or 16 bytes of KS for initial tags +%if %%TAG_SIZE == 4 +%define %%NUM_ROUNDS 1 +%elif %%TAG_SIZE == 8 +%define %%NUM_ROUNDS 2 +%elif %%TAG_SIZE == 16 +%define %%NUM_ROUNDS 4 +%else +%define %%NUM_ROUNDS 0 +%endif + +%assign %%N 1 +%rep %%NUM_ROUNDS + BITS_REORG8 pState, %%N, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, \ + %%YTMP6, %%YTMP7, %%YTMP8, %%YTMP9, %%YTMP10, APPEND(%%KSTR,%%N) + NONLIN_FUN8 pState, %%YTMP1, %%YTMP2, %%YTMP3, \ + %%YTMP4, %%YTMP5, %%YTMP6, %%YTMP7, %%W + ; OFS_X3 XOR W and store in stack + vpxor APPEND(%%KSTR, %%N), %%W + LFSR_UPDT8 pState, %%N, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, \ + %%MASK_31, %%YTMP8, work +%assign %%N %%N+1 +%endrep + +%if %%TAG_SIZE == 4 + vmovdqa [%%TAGS], %%KSTR1 + REORDER_LFSR pState, 1 +%elif %%TAG_SIZE == 8 + ; Transpose the keystream and store the 8 bytes per buffer consecutively, + ; being the initial tag for each buffer + vpunpckldq %%YTMP1, %%KSTR1, %%KSTR2 + vpunpckhdq %%YTMP2, %%KSTR1, %%KSTR2 + vperm2i128 %%KSTR1, %%YTMP1, %%YTMP2, 0x20 + vperm2i128 %%KSTR2, %%YTMP1, %%YTMP2, 0x31 + + vmovdqa [%%TAGS], %%KSTR1 + vmovdqa [%%TAGS + 32], %%KSTR2 + REORDER_LFSR pState, 2 +%elif %%TAG_SIZE == 16 + TRANSPOSE4_U32 %%KSTR1, %%KSTR2, %%KSTR3, %%KSTR4, \ + %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4 + + vmovdqa [%%TAGS], %%KSTR1 + vmovdqa [%%TAGS + 32], %%KSTR2 + vmovdqa [%%TAGS + 32*2], %%KSTR3 + vmovdqa [%%TAGS + 32*3], %%KSTR4 + REORDER_LFSR pState, 4 +%endif + FUNC_RESTORE +%endmacro + +MKGLOBAL(asm_ZucInitialization_8_avx2,function,internal) +asm_ZucInitialization_8_avx2: + endbranch64 + ZUC_INIT_8 128, 0 + + ret + +MKGLOBAL(asm_Zuc256Initialization_8_avx2,function,internal) +asm_Zuc256Initialization_8_avx2: +%define tags arg4 +%define tag_sz arg5 + + endbranch64 + + cmp tag_sz, 0 + je init_for_cipher + + cmp tag_sz, 8 + je init_for_auth_tag_8B + jb init_for_auth_tag_4B + + ; Fall-through for tag size = 16 
bytes +init_for_auth_tag_16B: + ZUC_INIT_8 256, 16, tags + ret + +init_for_auth_tag_8B: + ZUC_INIT_8 256, 8, tags + ret + +init_for_auth_tag_4B: + ZUC_INIT_8 256, 4, tags + ret + +init_for_cipher: + ZUC_INIT_8 256, 0 + ret + +; +; Generate N*4 bytes of keystream +; for 8 buffers (where N is number of rounds) +; +%macro KEYGEN_8_AVX2 1 +%define %%NUM_ROUNDS %1 ; [in] Number of 4-byte rounds + +%define pState arg1 +%define pKS arg2 + +%define %%YTMP1 ymm0 +%define %%YTMP2 ymm1 +%define %%YTMP3 ymm2 +%define %%YTMP4 ymm3 +%define %%YTMP5 ymm4 +%define %%YTMP6 ymm5 +%define %%YTMP7 ymm6 +%define %%YTMP8 ymm7 +%define %%YTMP9 ymm8 +%define %%YTMP10 ymm9 +%define %%YTMP11 ymm10 +%define %%YTMP12 ymm11 +%define %%YTMP13 ymm12 +%define %%YTMP14 ymm13 +%define %%YTMP15 ymm14 +%define %%YTMP16 ymm15 + +%define %%W %%YTMP10 +%define %%X3 %%YTMP11 +%define %%MASK_31 %%YTMP16 + + FUNC_SAVE + + ; Store 8 keystream pointers on the stack + ; and reserve memory for storing keystreams for all 8 buffers + mov r10, rsp + sub rsp, (8*8 + %%NUM_ROUNDS * 32) + and rsp, -32 + + vmovdqa ymm0, [pKS] + vmovdqa [rsp], ymm0 + vmovdqa ymm0, [pKS + 32] + vmovdqa [rsp + 32], ymm0 + + ; Load read-only registers + vmovdqa %%MASK_31, [rel mask31] + + ; Generate N*4B of keystream in N rounds +%assign %%N 1 +%rep %%NUM_ROUNDS + BITS_REORG8 pState, %%N, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, \ + %%YTMP6, %%YTMP7, %%YTMP8, %%YTMP9, %%YTMP10, %%X3 + NONLIN_FUN8 pState, %%YTMP1, %%YTMP2, %%YTMP3, \ + %%YTMP4, %%YTMP5, %%YTMP6, %%YTMP7, %%W + ; OFS_X3 XOR W and store in stack + vpxor %%X3, %%W + vmovdqa [rsp + 8*8 + (%%N-1)*32], %%X3 + LFSR_UPDT8 pState, %%N, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, \ + %%MASK_31, %%YTMP8, work +%assign %%N %%N+1 +%endrep + +%if (%%NUM_ROUNDS == 8) + ;; Load all OFS_X3 + vmovdqa XWORD(%%YTMP1), [rsp + 8*8] + vmovdqa XWORD(%%YTMP2), [rsp + 8*8 + 32*1] + vmovdqa XWORD(%%YTMP3), [rsp + 8*8 + 32*2] + vmovdqa XWORD(%%YTMP4), [rsp + 8*8 + 32*3] + vmovdqa XWORD(%%YTMP5), [rsp + 8*8 + 16] + vmovdqa XWORD(%%YTMP6), [rsp + 8*8 + 32*1 + 16] + vmovdqa XWORD(%%YTMP7), [rsp + 8*8 + 32*2 + 16] + vmovdqa XWORD(%%YTMP8), [rsp + 8*8 + 32*3 + 16] + + vinserti128 %%YTMP1, %%YTMP1, [rsp + 8*8 + 32*4], 0x01 + vinserti128 %%YTMP2, %%YTMP2, [rsp + 8*8 + 32*5], 0x01 + vinserti128 %%YTMP3, %%YTMP3, [rsp + 8*8 + 32*6], 0x01 + vinserti128 %%YTMP4, %%YTMP4, [rsp + 8*8 + 32*7], 0x01 + vinserti128 %%YTMP5, %%YTMP5, [rsp + 8*8 + 32*4 + 16], 0x01 + vinserti128 %%YTMP6, %%YTMP6, [rsp + 8*8 + 32*5 + 16], 0x01 + vinserti128 %%YTMP7, %%YTMP7, [rsp + 8*8 + 32*6 + 16], 0x01 + vinserti128 %%YTMP8, %%YTMP8, [rsp + 8*8 + 32*7 + 16], 0x01 + + TRANSPOSE8_U32_PRELOADED %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, %%YTMP7, %%YTMP8, %%YTMP9, %%YTMP10 + + STORE32B_KSTR8 %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, %%YTMP7, %%YTMP8, rsp, r12, r13, r14, r15 + +%else ;; NUM_ROUNDS == 8 +%assign %%I 1 +%rep %%NUM_ROUNDS + vmovdqa APPEND(%%YTMP, %%I), [rsp + 8*8 + (%%I-1)*32] + STORE4B_KSTR8 APPEND(%%YTMP, %%I), rsp, r12, r13, r14, r15 +%assign %%I (%%I + 1) +%endrep +%endif ;; NUM_ROUNDS == 8 + + ;; Reorder LFSR registers, as not all 16 rounds have been completed + REORDER_LFSR pState, %%NUM_ROUNDS + + ;; Clear stack frame containing keystream information +%ifdef SAFE_DATA + vpxor %%YTMP1, %%YTMP1 +%assign %%I 0 +%rep (2+%%NUM_ROUNDS) + vmovdqa [rsp + %%I*32], %%YTMP1 +%assign %%I (%%I+1) +%endrep +%endif + + ;; Restore rsp pointer + mov rsp, r10 + + FUNC_RESTORE + +%endmacro + +;; +;; void 
asm_ZucGenKeystream32B_8_avx2(state8_t *pSta, u32* pKeyStr[8]) +;; +;; WIN64 +;; RCX - pSta +;; RDX - pKeyStr +;; +;; LIN64 +;; RDI - pSta +;; RSI - pKeyStr +;; +MKGLOBAL(asm_ZucGenKeystream32B_8_avx2,function,internal) +asm_ZucGenKeystream32B_8_avx2: + endbranch64 + KEYGEN_8_AVX2 8 + vzeroupper + ret + +;; +;; void asm_ZucGenKeystream16B_8_avx2(state8_t *pSta, u32* pKeyStr[8]) +;; +;; WIN64 +;; RCX - pSta +;; RDX - pKeyStr +;; +;; LIN64 +;; RDI - pSta +;; RSI - pKeyStr +;; +MKGLOBAL(asm_ZucGenKeystream16B_8_avx2,function,internal) +asm_ZucGenKeystream16B_8_avx2: + endbranch64 + KEYGEN_8_AVX2 4 + vzeroupper + ret + +;; +;; void asm_ZucGenKeystream8B_8_avx2(state8_t *pSta, u32* pKeyStr[8]) +;; +;; WIN64 +;; RCX - pSta +;; RDX - pKeyStr +;; +;; LIN64 +;; RDI - pSta +;; RSI - pKeyStr +;; +MKGLOBAL(asm_ZucGenKeystream8B_8_avx2,function,internal) +asm_ZucGenKeystream8B_8_avx2: + endbranch64 + KEYGEN_8_AVX2 2 + vzeroupper + ret + +;; +;; void asm_ZucGenKeystream4B_8_avx2(state8_t *pSta, u32* pKeyStr[8]) +;; +;; WIN64 +;; RCX - pSta +;; RDX - pKeyStr +;; +;; LIN64 +;; RDI - pSta +;; RSI - pKeyStr +;; +MKGLOBAL(asm_ZucGenKeystream4B_8_avx2,function,internal) +asm_ZucGenKeystream4B_8_avx2: + endbranch64 + KEYGEN_8_AVX2 1 + vzeroupper + ret + +;; +;; Encrypt N*4B bytes on all 8 buffers +;; where N is number of rounds (up to 8) +;; In final call, an array of final bytes is read +;; from memory and only these final bytes are of +;; plaintext are read and XOR'ed. +%macro CIPHERNx4B_8 4 +%define %%NROUNDS %1 +%define %%INITIAL_ROUND %2 +%define %%OFFSET %3 +%define %%LAST_CALL %4 + +%ifdef LINUX +%define %%TMP1 r8 +%define %%TMP2 r9 +%else +%define %%TMP1 rdi +%define %%TMP2 rsi +%endif + +%define %%YTMP1 ymm0 +%define %%YTMP2 ymm1 +%define %%YTMP3 ymm2 +%define %%YTMP4 ymm3 +%define %%YTMP5 ymm4 +%define %%YTMP6 ymm5 +%define %%YTMP7 ymm6 +%define %%YTMP8 ymm7 +%define %%YTMP9 ymm8 +%define %%YTMP10 ymm9 +%define %%YTMP11 ymm10 +%define %%YTMP12 ymm11 +%define %%YTMP13 ymm12 +%define %%YTMP14 ymm13 +%define %%YTMP15 ymm14 +%define %%YTMP16 ymm15 + +%define %%W %%YTMP10 +%define %%X3 %%YTMP11 +%define %%MASK_31 %%YTMP16 + + ; Load read-only registers + vmovdqa %%MASK_31, [rel mask31] + + ; Generate N*4B of keystream in N rounds +%assign %%N 1 +%assign %%round (%%INITIAL_ROUND + %%N) +%rep %%NROUNDS + BITS_REORG8 pState, %%round, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, \ + %%YTMP6, %%YTMP7, %%YTMP8, %%YTMP9, %%YTMP10, %%X3 + NONLIN_FUN8 pState, %%YTMP1, %%YTMP2, %%YTMP3, \ + %%YTMP4, %%YTMP5, %%YTMP6, %%YTMP7, %%W + ; OFS_X3 XOR W and store in stack + vpxor %%X3, %%W + vmovdqa [rsp + (%%N-1)*32], %%X3 + LFSR_UPDT8 pState, %%round, %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, \ + %%MASK_31, %%YTMP8, work +%assign %%N (%%N + 1) +%assign %%round (%%round + 1) +%endrep + +%assign %%N 1 +%rep %%NROUNDS + vmovdqa APPEND(%%YTMP, %%N), [rsp + (%%N-1)*32] +%assign %%N (%%N + 1) +%endrep + + TRANSPOSE8_U32 %%YTMP1, %%YTMP2, %%YTMP3, %%YTMP4, %%YTMP5, %%YTMP6, \ + %%YTMP7, %%YTMP8, %%YTMP9, %%YTMP10 + ;; XOR Input buffer with keystream in rounds of 32B + + mov r12, [pIn] + mov r13, [pIn + 8] + mov r14, [pIn + 16] + mov r15, [pIn + 24] +%if (%%LAST_CALL == 1) + ;; Save GP registers + mov [rsp + 32*8 + 16 + 8], %%TMP1 + mov [rsp + 32*8 + 16 + 16], %%TMP2 + + ;; Read in r10 the word containing the number of final bytes to read for each lane + movzx r10d, word [rsp + 8*32] + simd_load_avx2 %%YTMP9, r12 + %%OFFSET, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 2] + simd_load_avx2 %%YTMP10, r13 + 
%%OFFSET, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 4] + simd_load_avx2 %%YTMP11, r14 + %%OFFSET, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 6] + simd_load_avx2 %%YTMP12, r15 + %%OFFSET, r10, %%TMP1, %%TMP2 +%else + vmovdqu %%YTMP9, [r12 + %%OFFSET] + vmovdqu %%YTMP10, [r13 + %%OFFSET] + vmovdqu %%YTMP11, [r14 + %%OFFSET] + vmovdqu %%YTMP12, [r15 + %%OFFSET] +%endif + + mov r12, [pIn + 32] + mov r13, [pIn + 40] + mov r14, [pIn + 48] + mov r15, [pIn + 56] +%if (%%LAST_CALL == 1) + movzx r10d, word [rsp + 8*32 + 8] + simd_load_avx2 %%YTMP13, r12 + %%OFFSET, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 10] + simd_load_avx2 %%YTMP14, r13 + %%OFFSET, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 12] + simd_load_avx2 %%YTMP15, r14 + %%OFFSET, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 14] + simd_load_avx2 %%YTMP16, r15 + %%OFFSET, r10, %%TMP1, %%TMP2 +%else + vmovdqu %%YTMP13, [r12 + %%OFFSET] + vmovdqu %%YTMP14, [r13 + %%OFFSET] + vmovdqu %%YTMP15, [r14 + %%OFFSET] + vmovdqu %%YTMP16, [r15 + %%OFFSET] +%endif + ; Shuffle all keystreams and XOR with plaintext +%assign %%I 1 +%assign %%J 9 +%rep 8 + vpshufb APPEND(%%YTMP, %%I), [rel swap_mask] + vpxor APPEND(%%YTMP, %%I), APPEND(%%YTMP, %%J) +%assign %%I (%%I + 1) +%assign %%J (%%J + 1) +%endrep + + ;; Write output + mov r12, [pOut] + mov r13, [pOut + 8] + mov r14, [pOut + 16] + mov r15, [pOut + 24] + +%if (%%LAST_CALL == 1) + add r12, %%OFFSET + add r13, %%OFFSET + add r14, %%OFFSET + add r15, %%OFFSET + ;; Read in r10 the word containing the number of final bytes to write for each lane + movzx r10d, word [rsp + 8*32] + simd_store_avx2 r12, %%YTMP1, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 2] + simd_store_avx2 r13, %%YTMP2, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 4] + simd_store_avx2 r14, %%YTMP3, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 6] + simd_store_avx2 r15, %%YTMP4, r10, %%TMP1, %%TMP2 +%else + vmovdqu [r12 + %%OFFSET], %%YTMP1 + vmovdqu [r13 + %%OFFSET], %%YTMP2 + vmovdqu [r14 + %%OFFSET], %%YTMP3 + vmovdqu [r15 + %%OFFSET], %%YTMP4 +%endif + + mov r12, [pOut + 32] + mov r13, [pOut + 40] + mov r14, [pOut + 48] + mov r15, [pOut + 56] + +%if (%%LAST_CALL == 1) + add r12, %%OFFSET + add r13, %%OFFSET + add r14, %%OFFSET + add r15, %%OFFSET + movzx r10d, word [rsp + 8*32 + 8] + simd_store_avx2 r12, %%YTMP5, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 10] + simd_store_avx2 r13, %%YTMP6, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 12] + simd_store_avx2 r14, %%YTMP7, r10, %%TMP1, %%TMP2 + movzx r10d, word [rsp + 8*32 + 14] + simd_store_avx2 r15, %%YTMP8, r10, %%TMP1, %%TMP2 + + ; Restore registers + mov %%TMP1, [rsp + 32*8 + 16 + 8] + mov %%TMP2, [rsp + 32*8 + 16 + 16] +%else + vmovdqu [r12 + %%OFFSET], %%YTMP5 + vmovdqu [r13 + %%OFFSET], %%YTMP6 + vmovdqu [r14 + %%OFFSET], %%YTMP7 + vmovdqu [r15 + %%OFFSET], %%YTMP8 +%endif + +%endmacro + +;; +;; void asm_ZucCipher_8_avx2(state16_t *pSta, u64 *pIn[8], +;; u64 *pOut[8], u16 lengths, u64 min_length); +;; +;; WIN64 +;; RCX - pSta +;; RDX - pIn +;; R8 - pOut +;; R9 - lengths +;; rsp + 40 - min_length +;; +;; LIN64 +;; RDI - pSta +;; RSI - pIn +;; RDX - pOut +;; RCX - lengths +;; R8 - min_length +;; +MKGLOBAL(asm_ZucCipher_8_avx2,function,internal) +asm_ZucCipher_8_avx2: +%define pState arg1 +%define pIn arg2 +%define pOut arg3 +%define lengths arg4 + +%define min_length r10 +%define buf_idx r11 + + endbranch64 + mov min_length, arg5 + + or min_length, min_length + jz exit_cipher32 + + FUNC_SAVE + + 
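+        ;; Worked example of the length handling below (illustrative only,
+        ;; assuming a single smallest lane of 50 bytes, so min_length = 50):
+        ;;  - 50 rounded up to a multiple of 4 is 52, so 50 - 52 < 0 and the
+        ;;    remaining length written back for this lane is clamped to 0
+        ;;    (the lane finishes within this call)
+        ;;  - bytes to process now = 50 - 0 = 50, so the number of final bytes
+        ;;    saved on the stack is 50 mod 32 = 18 (a full multiple of 32
+        ;;    would be recorded as 32 instead of 0)
+        ;;  - the 32-byte rounds further down consume 32 bytes (50 >= 32),
+        ;;    leaving min_length = 18, i.e. ceil(18/4) = 5 final 4-byte rounds
+        ;;    with an 18-byte partial load/store in the LAST_CALL path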
;; Convert all lengths from UINT16_MAX (indicating that lane is not valid) to min length + vmovd xmm0, DWORD(min_length) + vpshufb xmm0, xmm0, [rel broadcast_word] + vmovdqa xmm1, [lengths] + vpcmpeqw xmm2, xmm2 ;; Get all ff's in XMM register + vpcmpeqw xmm3, xmm1, xmm2 ;; Mask with FFFF in NULL jobs + + vpand xmm4, xmm3, xmm0 ;; Length of valid job in all NULL jobs + vpxor xmm2, xmm3 ;; Mask with 0000 in NULL jobs + vpand xmm1, xmm2 ;; Zero out lengths of NULL jobs + vpor xmm1, xmm4 ;; XMM1 contain updated lengths + + ; Round up to nearest multiple of 4 bytes + vpaddw xmm0, [rel all_threes] + vpand xmm0, [rel all_fffcs] + + ; Calculate remaining bytes to encrypt after function call + vpsubw xmm2, xmm1, xmm0 + vpxor xmm3, xmm3 + vpcmpgtw xmm4, xmm2, xmm3 ;; Mask with FFFF in lengths > 0 + ; Set to zero the lengths of the lanes which are going to be completed (lengths < 0) + vpand xmm2, xmm4 + vmovdqa [lengths], xmm2 ; Update in memory the final updated lengths + + ; Calculate number of bytes to encrypt after round of 32 bytes (up to 31 bytes), + ; for each lane, and store it in stack to be used in the last round + vpsubw xmm1, xmm2 ; Bytes to encrypt in all lanes + vpand xmm1, [rel all_1fs] ; Number of final bytes (up to 31 bytes) for each lane + vpcmpeqw xmm2, xmm1, xmm3 ;; Mask with FFFF in lengths == 0 + vpand xmm2, [rel all_20s] ;; 32 in positions where lengths was 0 + vpor xmm1, xmm2 ;; Number of final bytes (up to 32 bytes) for each lane + + ; Allocate stack frame to store keystreams (32*8 bytes), number of final bytes (16 bytes), + ; space for rsp (8 bytes) and 2 GP registers (16 bytes) that will be clobbered later + mov rax, rsp + sub rsp, (32*8 + 16 + 16 + 8) + and rsp, -32 + xor buf_idx, buf_idx + vmovdqu [rsp + 32*8], xmm1 + mov [rsp + 32*8 + 16], rax + + ; Load state pointer in RAX + mov rax, pState + +loop_cipher64: + cmp min_length, 64 + jl exit_loop_cipher64 + + CIPHERNx4B_8 8, 0, buf_idx, 0 + + add buf_idx, 32 + sub min_length, 32 + + CIPHERNx4B_8 8, 8, buf_idx, 0 + + add buf_idx, 32 + sub min_length, 32 + + jmp loop_cipher64 +exit_loop_cipher64: + + ; Check if at least 32 bytes are left to encrypt + cmp min_length, 32 + jl less_than_32 + + CIPHERNx4B_8 8, 0, buf_idx, 0 + REORDER_LFSR rax, 8 + + add buf_idx, 32 + sub min_length, 32 + + ; Check if there are more bytes left to encrypt +less_than_32: + + mov r15, min_length + add r15, 3 + shr r15, 2 ;; number of rounds left (round up length to nearest multiple of 4B) + jz exit_final_rounds + +_final_rounds_is_1_8: + cmp r15, 4 + je _num_final_rounds_is_4 + jl _final_rounds_is_1_3 + + ; Final rounds 5-8 + cmp r15, 8 + je _num_final_rounds_is_8 + cmp r15, 7 + je _num_final_rounds_is_7 + cmp r15, 6 + je _num_final_rounds_is_6 + cmp r15, 5 + je _num_final_rounds_is_5 + +_final_rounds_is_1_3: + cmp r15, 3 + je _num_final_rounds_is_3 + cmp r15, 2 + je _num_final_rounds_is_2 + + jmp _num_final_rounds_is_1 + + ; Perform encryption of last bytes (<= 31 bytes) and reorder LFSR registers +%assign I 1 +%rep 8 +APPEND(_num_final_rounds_is_,I): + CIPHERNx4B_8 I, 0, buf_idx, 1 + REORDER_LFSR rax, I + add buf_idx, (I*4) + jmp exit_final_rounds +%assign I (I + 1) +%endrep + +exit_final_rounds: + ;; update in/out pointers + + ; Broadcast buf_idx in all qwords of ymm0 + vmovq xmm0, buf_idx + vpshufd xmm0, xmm0, 0x44 + vperm2f128 ymm0, ymm0, 0x0 + vpaddq ymm1, ymm0, [pIn] + vpaddq ymm2, ymm0, [pIn + 32] + vmovdqa [pIn], ymm1 + vmovdqa [pIn + 32], ymm2 + vpaddq ymm1, ymm0, [pOut] + vpaddq ymm2, ymm0, [pOut + 32] + vmovdqa [pOut], ymm1 + vmovdqa 
[pOut + 32], ymm2 + + ;; Clear stack frame containing keystream information +%ifdef SAFE_DATA + vpxor ymm0, ymm0 +%assign i 0 +%rep 8 + vmovdqa [rsp + i*32], ymm0 +%assign i (i+1) +%endrep +%endif + ; Restore rsp + mov rsp, [rsp + 32*8 + 16] + + FUNC_RESTORE + +exit_cipher32: + vzeroupper + ret + +;---------------------------------------------------------------------------------------- +;---------------------------------------------------------------------------------------- + +mksection stack-noexec diff --git a/lib/avx2_t2/README b/lib/avx2_t2/README new file mode 100644 index 0000000000000000000000000000000000000000..e3fa7d2bb3c499ea9ef2954bf9767c6fce8d9070 --- /dev/null +++ b/lib/avx2_t2/README @@ -0,0 +1,3 @@ +AVX2 TYPE2: +- AVX2 TYPE1: AVX2, BMI2, AESNI, PCLMULQDQ, CMOV +- VAES, VPCLMULQDQ, SHANI, GFNI diff --git a/lib/avx2_t2/aes128_ecb_vaes_avx2.asm b/lib/avx2_t2/aes128_ecb_vaes_avx2.asm new file mode 100644 index 0000000000000000000000000000000000000000..e0c8a9deb35f2a85990047535bed546c755305b6 --- /dev/null +++ b/lib/avx2_t2/aes128_ecb_vaes_avx2.asm @@ -0,0 +1,218 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +; routine to do AES ECB encrypt/decrypt on 16n bytes doing AES by 16 + +; YMM registers are clobbered. 
Saving/restoring must be done at a higher level + +; void aes_ecb_x_y_vaes_avx2(void *in, +; UINT128 keys[], +; void *out, +; UINT64 len_bytes); +; +; x = direction (enc/dec) +; y = key size (128/192/256) +; arg 1: IN: pointer to input (cipher text) +; arg 2: KEYS: pointer to keys +; arg 3: OUT: pointer to output (plain text) +; arg 4: LEN: length in bytes (multiple of 16) +; + +%include "include/os.asm" +%include "include/clear_regs.asm" +%include "include/aes_common.asm" + +%ifdef LINUX +%define IN rdi +%define KEYS rsi +%define OUT rdx +%define LEN rcx +%else +%define IN rcx +%define KEYS rdx +%define OUT r8 +%define LEN r9 +%endif +%define IDX rax +%define TMP r11 + +%define YKEY1 ymm1 +%define YDATA0 ymm2 +%define YDATA1 ymm3 +%define YDATA2 ymm4 +%define YDATA3 ymm5 +%define YDATA4 ymm6 +%define YDATA5 ymm7 +%define YDATA6 ymm8 +%define YDATA7 ymm9 + +%ifndef AES_ECB_NROUNDS +%define AES_ECB_NROUNDS 10 +%endif + +%if AES_ECB_NROUNDS == 10 +%define KEYSIZE 128 +%elif AES_ECB_NROUNDS == 12 +%define KEYSIZE 192 +%else +%define KEYSIZE 256 +%endif + +%define AES_ECB_ENC aes_ecb_enc_ %+ KEYSIZE %+ _vaes_avx2 +%define AES_ECB_DEC aes_ecb_dec_ %+ KEYSIZE %+ _vaes_avx2 + +%macro AES_ECB 1 +%define %%DIR %1 ; [in] Direction (ENC/DIR) +%ifidn %%DIR, ENC +%define AES YMM_AESENC_ROUND_BLOCKS_AVX2_0_16 +%else ; DIR = DEC +%define AES YMM_AESDEC_ROUND_BLOCKS_AVX2_0_16 +%endif + + or LEN, LEN + jz %%done + + xor IDX, IDX + mov TMP, LEN + and TMP, 255 ; number of initial bytes (0 to 15 AES blocks) + jz %%main_loop + + ; branch to different code block based on remainder + cmp TMP, 8*16 + je %%initial_num_blocks_is_8 + jb %%initial_num_blocks_is_7_1 + cmp TMP, 12*16 + je %%initial_num_blocks_is_12 + jb %%initial_num_blocks_is_11_9 + ;; 15, 14 or 13 + cmp TMP, 14*16 + ja %%initial_num_blocks_is_15 + je %%initial_num_blocks_is_14 + jmp %%initial_num_blocks_is_13 +%%initial_num_blocks_is_11_9: + ;; 11, 10 or 9 + cmp TMP, 10*16 + ja %%initial_num_blocks_is_11 + je %%initial_num_blocks_is_10 + jmp %%initial_num_blocks_is_9 +%%initial_num_blocks_is_7_1: + cmp TMP, 4*16 + je %%initial_num_blocks_is_4 + jb %%initial_num_blocks_is_3_1 + ;; 7, 6 or 5 + cmp TMP, 6*16 + ja %%initial_num_blocks_is_7 + je %%initial_num_blocks_is_6 + jmp %%initial_num_blocks_is_5 +%%initial_num_blocks_is_3_1: + ;; 3, 2 or 1 + cmp TMP, 2*16 + ja %%initial_num_blocks_is_3 + je %%initial_num_blocks_is_2 + ;; fall through for `jmp %%initial_num_blocks_is_1` + +%assign num_blocks 1 +%rep 15 + + %%initial_num_blocks_is_ %+ num_blocks : +%assign %%I 0 + ; load initial blocks + YMM_LOAD_BLOCKS_AVX2_0_16 num_blocks, IN, 0, YDATA0,\ + YDATA1, YDATA2, YDATA3, YDATA4, YDATA5,\ + YDATA6, YDATA7 + +; Perform AES encryption/decryption on initial blocks +%rep (AES_ECB_NROUNDS + 1) ; 10/12/14 + vbroadcasti128 YKEY1, [KEYS + %%I*16] + AES YDATA0, YDATA1, YDATA2, YDATA3, YDATA4,\ + YDATA5, YDATA6, YDATA7, YKEY1, %%I, no_data,\ + no_data, no_data, no_data, no_data, no_data,\ + no_data, no_data, num_blocks, (AES_ECB_NROUNDS - 1) +%assign %%I (%%I + 1) +%endrep + + ; store initial blocks + YMM_STORE_BLOCKS_AVX2_0_16 num_blocks, OUT, 0, YDATA0, YDATA1,\ + YDATA2, YDATA3, YDATA4, YDATA5, YDATA6, YDATA7 + + add IDX, num_blocks*16 + cmp IDX, LEN + je %%done + +%assign num_blocks (num_blocks + 1) + jmp %%main_loop +%endrep + +align 16 +%%main_loop: + ; load the next 16 blocks into ymm registers + YMM_LOAD_BLOCKS_AVX2_0_16 16, {IN + IDX}, 0, YDATA0, YDATA1,\ + YDATA2, YDATA3, YDATA4, YDATA5, YDATA6, YDATA7 + + ; Perform AES encryption/decryption on 16 blocks 
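+; Illustrative note on the key schedule walk below (assuming the AES round
+; helper macro from aes_common.asm dispatches on the round number passed in):
+; with AES_ECB_NROUNDS = 10 (AES-128) the %rep emits 11 broadcast round keys -
+; key 0 is XORed in as the initial whitening step, the middle keys feed the
+; full AESENC/AESDEC rounds and the last key is consumed by
+; AESENCLAST/AESDECLAST. For 192/256-bit keys the same pattern covers
+; 13 and 15 round keys respectively.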
+%assign %%ROUNDNO 0 ; current key number +%rep (AES_ECB_NROUNDS + 1) ; 10/12/14 + vbroadcasti128 YKEY1, [KEYS + %%ROUNDNO*16] + + AES YDATA0, YDATA1, YDATA2, YDATA3, YDATA4, YDATA5,\ + YDATA6, YDATA7, YKEY1, %%ROUNDNO, no_data, no_data,\ + no_data, no_data, no_data, no_data, no_data, no_data,\ + 16, (AES_ECB_NROUNDS - 1) + +%assign %%ROUNDNO (%%ROUNDNO + 1) +%endrep + + ; store 16 blocks + YMM_STORE_BLOCKS_AVX2_0_16 16, {OUT + IDX}, 0, YDATA0, YDATA1,\ + YDATA2, YDATA3, YDATA4, YDATA5, YDATA6, YDATA7 + + add IDX, 16*16 + cmp IDX, LEN + jne %%main_loop + +%%done: + +%ifdef SAFE_DATA + clear_all_ymms_asm +%else + vzeroupper +%endif +%endmacro + +mksection .text +align 16 +MKGLOBAL(AES_ECB_ENC,function,internal) +AES_ECB_ENC: + AES_ECB ENC + ret +align 16 +MKGLOBAL(AES_ECB_DEC,function,internal) +AES_ECB_DEC: + AES_ECB DEC + ret + +mksection stack-noexec diff --git a/lib/avx/aes192_ecb_by4_avx.asm b/lib/avx2_t2/aes192_ecb_vaes_avx2.asm similarity index 89% rename from lib/avx/aes192_ecb_by4_avx.asm rename to lib/avx2_t2/aes192_ecb_vaes_avx2.asm index d8272830bc20dc9949dd2ca27e49b53ba68edb7f..d400c3b3af619595a6567dba328bdd65b9979153 100644 --- a/lib/avx/aes192_ecb_by4_avx.asm +++ b/lib/avx2_t2/aes192_ecb_vaes_avx2.asm @@ -1,5 +1,5 @@ ;; -;; Copyright (c) 2021-2022, Intel Corporation +;; Copyright (c) 2022, Intel Corporation ;; ;; Redistribution and use in source and binary forms, with or without ;; modification, are permitted provided that the following conditions are met: @@ -25,9 +25,9 @@ ;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;; -; routine to do AES ECB 192 encrypt/decrypt on 16n bytes doing AES by 4 +; routine to do AES ECB 192 encrypt/decrypt on 16n bytes doing AES by 8 -%define AES_ECB_ENC_192 aes_ecb_enc_192_avx -%define AES_ECB_DEC_192 aes_ecb_dec_192_avx +%define AES_ECB_NROUNDS 12 -%include "avx/aes128_ecb_by4_avx.asm" +%include "include/os.asm" +%include "avx2_t2/aes128_ecb_vaes_avx2.asm" diff --git a/lib/avx/aes256_ecb_by4_avx.asm b/lib/avx2_t2/aes256_ecb_vaes_avx2.asm similarity index 89% rename from lib/avx/aes256_ecb_by4_avx.asm rename to lib/avx2_t2/aes256_ecb_vaes_avx2.asm index 9bc759022fd199c8c077f16a4d47b47e1e6485ec..8c8a730802b0aa79aa56c96c832ae11eba72cf01 100644 --- a/lib/avx/aes256_ecb_by4_avx.asm +++ b/lib/avx2_t2/aes256_ecb_vaes_avx2.asm @@ -1,5 +1,5 @@ ;; -;; Copyright (c) 2021-2022, Intel Corporation +;; Copyright (c) 2022, Intel Corporation ;; ;; Redistribution and use in source and binary forms, with or without ;; modification, are permitted provided that the following conditions are met: @@ -25,9 +25,9 @@ ;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;; -; routine to do AES ECB 256 encrypt/decrypt on 16n bytes doing AES by 4 +; routine to do AES ECB 256 encrypt/decrypt on 16n bytes doing AES by 8 -%define AES_ECB_ENC_256 aes_ecb_enc_256_avx -%define AES_ECB_DEC_256 aes_ecb_dec_256_avx +%define AES_ECB_NROUNDS 14 -%include "avx/aes128_ecb_by4_avx.asm" +%include "include/os.asm" +%include "avx2_t2/aes128_ecb_vaes_avx2.asm" diff --git a/lib/avx2_t2/mb_mgr_avx2_t2.c b/lib/avx2_t2/mb_mgr_avx2_t2.c new file mode 100644 index 0000000000000000000000000000000000000000..55f33beb8b4e3877f6cb078a3465151c3a572a10 --- /dev/null +++ b/lib/avx2_t2/mb_mgr_avx2_t2.c @@ -0,0 +1,485 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include +#include +#include + +#define AVX2 + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/kasumi_interface.h" +#include "include/zuc_internal.h" +#include "include/snow3g.h" +#include "include/snow3g_submit.h" +#include "include/gcm.h" +#include "include/chacha20_poly1305.h" + +#include "include/save_xmms.h" +#include "include/des.h" +#include "include/cpu_feature.h" +#include "include/noaesni.h" +#include "include/aesni_emu.h" +#include "include/error.h" + +#include "include/arch_sse_type1.h" /* poly1305, snow3g */ +#include "include/arch_sse_type2.h" /* shani */ +#include "include/arch_avx_type1.h" +#include "include/arch_avx2_type1.h" +#include "include/arch_avx2_type2.h" + +#include "include/ooo_mgr_reset.h" + +#define SAVE_XMMS save_xmms_avx +#define RESTORE_XMMS restore_xmms_avx + +/* JOB API */ +#define SUBMIT_JOB submit_job_avx2_t2 +#define FLUSH_JOB flush_job_avx2_t2 +#define QUEUE_SIZE queue_size_avx2_t2 +#define SUBMIT_JOB_NOCHECK submit_job_nocheck_avx2_t2 +#define GET_NEXT_JOB get_next_job_avx2_t2 +#define GET_COMPLETED_JOB get_completed_job_avx2_t2 +#define GET_NEXT_BURST get_next_burst_avx2_t2 +#define SUBMIT_BURST submit_burst_avx2_t2 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_avx2_t2 +#define FLUSH_BURST flush_burst_avx2_t2 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_avx2_t2 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_avx2_t2 +#define SUBMIT_HASH_BURST submit_hash_burst_avx2_t2 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_avx2_t2 + +/* Hash */ +#define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_AVX2 +#define FLUSH_JOB_HASH FLUSH_JOB_HASH_AVX2 + +/* Cipher encrypt / decrypt */ +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_AVX2 +#define FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_AVX2 +#define SUBMIT_JOB_CIPHER_DEC SUBMIT_JOB_CIPHER_DEC_AVX2 + +/* AES-GCM */ +#define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_avx_gen4 +#define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_avx_gen4 +#define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_avx_gen4 +#define AES_GCM_ENC_IV_192 aes_gcm_enc_var_iv_192_avx_gen4 +#define AES_GCM_DEC_IV_256 aes_gcm_dec_var_iv_256_avx_gen4 +#define AES_GCM_ENC_IV_256 aes_gcm_enc_var_iv_256_avx_gen4 + +#define SUBMIT_JOB_AES_GCM_DEC submit_job_gcm_dec_avx2 +#define SUBMIT_JOB_AES_GCM_ENC submit_job_gcm_enc_avx2 + +/* AES-CBC */ +#define SUBMIT_JOB_AES_CBC_128_ENC submit_job_aes128_enc_avx +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_avx +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_avx + +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_avx +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_avx +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_avx + +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_avx +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_avx +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_avx + +#define AES_CBC_DEC_128 aes_cbc_dec_128_avx +#define AES_CBC_DEC_192 aes_cbc_dec_192_avx +#define AES_CBC_DEC_256 aes_cbc_dec_256_avx + +/* AES-CBCS */ +#define SUBMIT_JOB_AES128_CBCS_1_9_ENC submit_job_aes128_cbcs_1_9_enc_avx +#define FLUSH_JOB_AES128_CBCS_1_9_ENC flush_job_aes128_cbcs_1_9_enc_avx +#define SUBMIT_JOB_AES128_CBCS_1_9_DEC submit_job_aes128_cbcs_1_9_dec_avx +#define AES_CBCS_1_9_DEC_128 aes_cbcs_1_9_dec_128_avx + +/* AES-ECB */ +#define SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_vaes_avx2 +#define 
SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_vaes_avx2 +#define SUBMIT_JOB_AES_ECB_192_ENC submit_job_aes_ecb_192_enc_vaes_avx2 +#define SUBMIT_JOB_AES_ECB_192_DEC submit_job_aes_ecb_192_dec_vaes_avx2 +#define SUBMIT_JOB_AES_ECB_256_ENC submit_job_aes_ecb_256_enc_vaes_avx2 +#define SUBMIT_JOB_AES_ECB_256_DEC submit_job_aes_ecb_256_dec_vaes_avx2 + +#define AES_ECB_ENC_128 aes_ecb_enc_128_vaes_avx2 +#define AES_ECB_ENC_192 aes_ecb_enc_192_vaes_avx2 +#define AES_ECB_ENC_256 aes_ecb_enc_256_vaes_avx2 +#define AES_ECB_DEC_128 aes_ecb_dec_128_vaes_avx2 +#define AES_ECB_DEC_192 aes_ecb_dec_192_vaes_avx2 +#define AES_ECB_DEC_256 aes_ecb_dec_256_vaes_avx2 + +/* AES-CTR */ +#define AES_CTR_128 aes_cntr_128_avx +#define AES_CTR_192 aes_cntr_192_avx +#define AES_CTR_256 aes_cntr_256_avx +#define AES_CTR_128_BIT aes_cntr_bit_128_avx +#define AES_CTR_192_BIT aes_cntr_bit_192_avx +#define AES_CTR_256_BIT aes_cntr_bit_256_avx + +/* AES-CCM */ +#define AES_CNTR_CCM_128 aes_cntr_ccm_128_avx +#define AES_CNTR_CCM_256 aes_cntr_ccm_256_avx + +#define FLUSH_JOB_AES128_CCM_AUTH flush_job_aes128_ccm_auth_avx +#define SUBMIT_JOB_AES128_CCM_AUTH submit_job_aes128_ccm_auth_avx + +#define FLUSH_JOB_AES256_CCM_AUTH flush_job_aes256_ccm_auth_avx +#define SUBMIT_JOB_AES256_CCM_AUTH submit_job_aes256_ccm_auth_avx + +/* AES-CMAC */ +#define FLUSH_JOB_AES128_CMAC_AUTH flush_job_aes128_cmac_auth_avx +#define SUBMIT_JOB_AES128_CMAC_AUTH submit_job_aes128_cmac_auth_avx + +#define FLUSH_JOB_AES256_CMAC_AUTH flush_job_aes256_cmac_auth_avx +#define SUBMIT_JOB_AES256_CMAC_AUTH submit_job_aes256_cmac_auth_avx + +/* AES-CFB */ +#define AES_CFB_128_ONE aes_cfb_128_one_avx2 +#define AES_CFB_256_ONE aes_cfb_256_one_avx2 + +/* AES-XCBC */ +#define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_avx +#define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_avx + +/* PON */ +#define SUBMIT_JOB_PON_ENC submit_job_pon_enc_avx +#define SUBMIT_JOB_PON_DEC submit_job_pon_dec_avx +#define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_avx +#define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_avx + +/* SHA1/224/256/384/512 */ +/* note: SHA1 MB is better than SHANI on Xeon processors */ +#define SUBMIT_JOB_SHA1 submit_job_sha1_avx2 +#define FLUSH_JOB_SHA1 flush_job_sha1_avx2 +#define SUBMIT_JOB_SHA224 submit_job_sha224_ni_sse +#define FLUSH_JOB_SHA224 flush_job_sha224_ni_sse +#define SUBMIT_JOB_SHA256 submit_job_sha256_ni_sse +#define FLUSH_JOB_SHA256 flush_job_sha256_ni_sse +#define SUBMIT_JOB_SHA384 submit_job_sha384_avx2 +#define FLUSH_JOB_SHA384 flush_job_sha384_avx2 +#define SUBMIT_JOB_SHA512 submit_job_sha512_avx2 +#define FLUSH_JOB_SHA512 flush_job_sha512_avx2 + +/* HMAC-SHA1/224/256/384/512 */ +#define SUBMIT_JOB_HMAC submit_job_hmac_avx2 +#define FLUSH_JOB_HMAC flush_job_hmac_avx2 +#define SUBMIT_JOB_HMAC_SHA_224 submit_job_hmac_sha_224_ni_sse +#define FLUSH_JOB_HMAC_SHA_224 flush_job_hmac_sha_224_ni_sse +#define SUBMIT_JOB_HMAC_SHA_256 submit_job_hmac_sha_256_ni_sse +#define FLUSH_JOB_HMAC_SHA_256 flush_job_hmac_sha_256_ni_sse +#define SUBMIT_JOB_HMAC_SHA_384 submit_job_hmac_sha_384_avx2 +#define FLUSH_JOB_HMAC_SHA_384 flush_job_hmac_sha_384_avx2 +#define SUBMIT_JOB_HMAC_SHA_512 submit_job_hmac_sha_512_avx2 +#define FLUSH_JOB_HMAC_SHA_512 flush_job_hmac_sha_512_avx2 +#define SUBMIT_JOB_HMAC_MD5 submit_job_hmac_md5_avx2 +#define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_avx2 + +/* CHACHA20 & POLY1305 */ +#define SUBMIT_JOB_CHACHA20_ENC_DEC submit_job_chacha20_enc_dec_avx2 +#define SUBMIT_JOB_CHACHA20_POLY1305 aead_chacha20_poly1305_avx2 
+#define SUBMIT_JOB_CHACHA20_POLY1305_SGL aead_chacha20_poly1305_sgl_avx2 +#define POLY1305_MAC poly1305_mac_scalar + +/* ZUC EEA3 & EIA3 */ +#define SUBMIT_JOB_ZUC_EEA3 submit_job_zuc_eea3_avx2 +#define FLUSH_JOB_ZUC_EEA3 flush_job_zuc_eea3_avx2 +#define SUBMIT_JOB_ZUC_EIA3 submit_job_zuc_eia3_avx2 +#define FLUSH_JOB_ZUC_EIA3 flush_job_zuc_eia3_avx2 +#define SUBMIT_JOB_ZUC256_EEA3 submit_job_zuc256_eea3_avx2 +#define FLUSH_JOB_ZUC256_EEA3 flush_job_zuc256_eea3_avx2 +#define SUBMIT_JOB_ZUC256_EIA3 submit_job_zuc256_eia3_avx2 +#define FLUSH_JOB_ZUC256_EIA3 flush_job_zuc256_eia3_avx2 + +/* SNOW-V */ +#define SUBMIT_JOB_SNOW_V snow_v_avx +#define SUBMIT_JOB_SNOW_V_AEAD snow_v_aead_init_avx + +/* SNOW3G UE2 & UIA2 */ +static IMB_JOB * +submit_snow3g_uea2_job_avx2_t2(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + if ((job->msg_len_to_cipher_in_bits & 7) || + (job->cipher_start_offset_in_bits & 7)) + return def_submit_snow3g_uea2_job(state, job); + + return submit_job_snow3g_uea2_sse(snow3g_uea2_ooo, job); +} + +static IMB_JOB * +flush_snow3g_uea2_job_avx2_t2(IMB_MGR *state) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + return flush_job_snow3g_uea2_sse(snow3g_uea2_ooo); +} + +#define SUBMIT_JOB_SNOW3G_UEA2 submit_snow3g_uea2_job_avx2_t2 +#define FLUSH_JOB_SNOW3G_UEA2 flush_snow3g_uea2_job_avx2_t2 + +#define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_sse +#define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_sse + +/* AES-DOCSIS */ +#define ETHERNET_FCS ethernet_fcs_avx_local + +static void reset_ooo_mgrs(IMB_MGR *state) +{ + /* Init AES out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_ooo, 8); + ooo_mgr_aes_reset(state->aes192_ooo, 8); + ooo_mgr_aes_reset(state->aes256_ooo, 8); + + /* DOCSIS SEC BPI (AES CBC + AES CFB for partial block) + * uses same settings as AES CBC. 
+ */ + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 8); + + /* Init ZUC out-of-order fields */ + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 8); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 8); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 8); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 8); + + /* Init HMAC/SHA1 out-of-order fields */ + ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, AVX2_NUM_SHA1_LANES); + + /* Init HMAC/SHA224 out-of-order fields */ + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, 2); + + /* Init HMAC/SHA_256 out-of-order fields */ + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, 2); + + /* Init HMAC/SHA384 out-of-order fields */ + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + AVX2_NUM_SHA512_LANES); + + /* Init HMAC/SHA512 out-of-order fields */ + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + AVX2_NUM_SHA512_LANES); + + /* Init HMAC/MD5 out-of-order fields */ + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, AVX2_NUM_MD5_LANES); + + /* Init AES/XCBC OOO fields */ + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 8); + + /* Init AES-CCM auth out-of-order fields */ + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 8); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 8); + + /* Init AES-CMAC auth out-of-order fields */ + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 8); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 8); + + /* Init AES CBC-S out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 8); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, AVX2_NUM_SHA1_LANES); + + /* Init SHA224 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_224_ooo, 2); + + /* Init SHA256 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, 2); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, AVX2_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, AVX2_NUM_SHA512_LANES); + + /* Init SNOW3G-UEA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 4); + + /* Init SNOW3G-UIA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 4); +} + +IMB_DLL_LOCAL void +init_mb_mgr_avx2_t2_internal(IMB_MGR *state, const int reset_mgrs) +{ + /* Check if CPU flags needed for AVX2 interface are present */ + if ((state->features & IMB_CPUFLAGS_AVX2) != IMB_CPUFLAGS_AVX2) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + + /* Set architecture for future checks */ + state->used_arch = (uint32_t) IMB_ARCH_AVX2; + + if (reset_mgrs) { + reset_ooo_mgrs(state); + + /* Init "in order" components */ + state->next_job = 0; + state->earliest_job = -1; + } + + /* set handlers */ + /* set handlers */ + state->get_next_job = GET_NEXT_JOB; + state->submit_job = SUBMIT_JOB; + state->submit_job_nocheck = SUBMIT_JOB_NOCHECK; + state->get_completed_job = GET_COMPLETED_JOB; + state->flush_job = FLUSH_JOB; + state->queue_size = QUEUE_SIZE; + state->get_next_burst = GET_NEXT_BURST; + state->submit_burst = SUBMIT_BURST; + state->submit_burst_nocheck= SUBMIT_BURST_NOCHECK; + state->flush_burst = FLUSH_BURST; + state->submit_cipher_burst = SUBMIT_CIPHER_BURST; + state->submit_cipher_burst_nocheck = SUBMIT_CIPHER_BURST_NOCHECK; + state->submit_hash_burst = SUBMIT_HASH_BURST; + state->submit_hash_burst_nocheck = SUBMIT_HASH_BURST_NOCHECK; + + state->keyexp_128 = 
aes_keyexp_128_avx2; + state->keyexp_192 = aes_keyexp_192_avx2; + state->keyexp_256 = aes_keyexp_256_avx2; + + state->cmac_subkey_gen_128 = aes_cmac_subkey_gen_avx2; + state->cmac_subkey_gen_256 = aes_cmac_256_subkey_gen_avx2; + + state->xcbc_keyexp = aes_xcbc_expand_key_avx2; + state->des_key_sched = des_key_schedule; + + state->sha1_one_block = sha1_one_block_avx2; + state->sha1 = sha1_avx2; + state->sha224_one_block = sha224_one_block_avx2; + state->sha224 = sha224_avx2; + state->sha256_one_block = sha256_one_block_avx2; + state->sha256 = sha256_avx2; + state->sha384_one_block = sha384_one_block_avx2; + state->sha384 = sha384_avx2; + state->sha512_one_block = sha512_one_block_avx2; + state->sha512 = sha512_avx2; + state->md5_one_block = md5_one_block_avx2; + + state->aes128_cfb_one = aes_cfb_128_one_avx2; + + state->eea3_1_buffer = zuc_eea3_1_buffer_avx2; + state->eea3_4_buffer = zuc_eea3_4_buffer_avx; + state->eea3_n_buffer = zuc_eea3_n_buffer_avx2; + state->eia3_1_buffer = zuc_eia3_1_buffer_avx2; + state->eia3_n_buffer = zuc_eia3_n_buffer_avx2; + + state->f8_1_buffer = kasumi_f8_1_buffer_avx; + state->f8_1_buffer_bit = kasumi_f8_1_buffer_bit_avx; + state->f8_2_buffer = kasumi_f8_2_buffer_avx; + state->f8_3_buffer = kasumi_f8_3_buffer_avx; + state->f8_4_buffer = kasumi_f8_4_buffer_avx; + state->f8_n_buffer = kasumi_f8_n_buffer_avx; + state->f9_1_buffer = kasumi_f9_1_buffer_avx; + state->f9_1_buffer_user = kasumi_f9_1_buffer_user_avx; + state->kasumi_init_f8_key_sched = kasumi_init_f8_key_sched_avx; + state->kasumi_init_f9_key_sched = kasumi_init_f9_key_sched_avx; + state->kasumi_key_sched_size = kasumi_key_sched_size_avx; + + state->snow3g_f8_1_buffer_bit = snow3g_f8_1_buffer_bit_avx2; + state->snow3g_f8_1_buffer = snow3g_f8_1_buffer_avx2; + state->snow3g_f8_2_buffer = snow3g_f8_2_buffer_avx2; + state->snow3g_f8_4_buffer = snow3g_f8_4_buffer_avx2; + state->snow3g_f8_8_buffer = snow3g_f8_8_buffer_avx2; + state->snow3g_f8_n_buffer = snow3g_f8_n_buffer_avx2; + state->snow3g_f8_8_buffer_multikey = snow3g_f8_8_buffer_multikey_avx2; + state->snow3g_f8_n_buffer_multikey = snow3g_f8_n_buffer_multikey_avx2; + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_avx2; + state->snow3g_init_key_sched = snow3g_init_key_sched_avx2; + state->snow3g_key_sched_size = snow3g_key_sched_size_avx2; + + state->hec_32 = hec_32_avx; + state->hec_64 = hec_64_avx; + + state->crc32_ethernet_fcs = ethernet_fcs_avx; + state->crc16_x25 = crc16_x25_avx; + state->crc32_sctp = crc32_sctp_avx; + state->crc24_lte_a = crc24_lte_a_avx; + state->crc24_lte_b = crc24_lte_b_avx; + state->crc16_fp_data = crc16_fp_data_avx; + state->crc11_fp_header = crc11_fp_header_avx; + state->crc7_fp_header = crc7_fp_header_avx; + state->crc10_iuup_data = crc10_iuup_data_avx; + state->crc6_iuup_header = crc6_iuup_header_avx; + state->crc32_wimax_ofdma_data = crc32_wimax_ofdma_data_avx; + state->crc8_wimax_ofdma_hcs = crc8_wimax_ofdma_hcs_avx; + + state->chacha20_poly1305_init = init_chacha20_poly1305_avx; + state->chacha20_poly1305_enc_update = update_enc_chacha20_poly1305_avx2; + state->chacha20_poly1305_dec_update = update_dec_chacha20_poly1305_avx2; + state->chacha20_poly1305_finalize = finalize_chacha20_poly1305_avx; + + state->gcm128_enc = aes_gcm_enc_128_avx_gen4; + state->gcm192_enc = aes_gcm_enc_192_avx_gen4; + state->gcm256_enc = aes_gcm_enc_256_avx_gen4; + state->gcm128_dec = aes_gcm_dec_128_avx_gen4; + state->gcm192_dec = aes_gcm_dec_192_avx_gen4; + state->gcm256_dec = aes_gcm_dec_256_avx_gen4; + state->gcm128_init = 
aes_gcm_init_128_avx_gen4; + state->gcm192_init = aes_gcm_init_192_avx_gen4; + state->gcm256_init = aes_gcm_init_256_avx_gen4; + state->gcm128_init_var_iv = aes_gcm_init_var_iv_128_avx_gen4; + state->gcm192_init_var_iv = aes_gcm_init_var_iv_192_avx_gen4; + state->gcm256_init_var_iv = aes_gcm_init_var_iv_256_avx_gen4; + state->gcm128_enc_update = aes_gcm_enc_128_update_avx_gen4; + state->gcm192_enc_update = aes_gcm_enc_192_update_avx_gen4; + state->gcm256_enc_update = aes_gcm_enc_256_update_avx_gen4; + state->gcm128_dec_update = aes_gcm_dec_128_update_avx_gen4; + state->gcm192_dec_update = aes_gcm_dec_192_update_avx_gen4; + state->gcm256_dec_update = aes_gcm_dec_256_update_avx_gen4; + state->gcm128_enc_finalize = aes_gcm_enc_128_finalize_avx_gen4; + state->gcm192_enc_finalize = aes_gcm_enc_192_finalize_avx_gen4; + state->gcm256_enc_finalize = aes_gcm_enc_256_finalize_avx_gen4; + state->gcm128_dec_finalize = aes_gcm_dec_128_finalize_avx_gen4; + state->gcm192_dec_finalize = aes_gcm_dec_192_finalize_avx_gen4; + state->gcm256_dec_finalize = aes_gcm_dec_256_finalize_avx_gen4; + state->gcm128_precomp = aes_gcm_precomp_128_avx_gen4; + state->gcm192_precomp = aes_gcm_precomp_192_avx_gen4; + state->gcm256_precomp = aes_gcm_precomp_256_avx_gen4; + state->gcm128_pre = aes_gcm_pre_128_avx_gen4; + state->gcm192_pre = aes_gcm_pre_192_avx_gen4; + state->gcm256_pre = aes_gcm_pre_256_avx_gen4; + + state->ghash = ghash_avx_gen4; + state->ghash_pre = ghash_pre_avx_gen2; + + state->gmac128_init = imb_aes_gmac_init_128_avx_gen4; + state->gmac192_init = imb_aes_gmac_init_192_avx_gen4; + state->gmac256_init = imb_aes_gmac_init_256_avx_gen4; + state->gmac128_update = imb_aes_gmac_update_128_avx_gen4; + state->gmac192_update = imb_aes_gmac_update_192_avx_gen4; + state->gmac256_update = imb_aes_gmac_update_256_avx_gen4; + state->gmac128_finalize = imb_aes_gmac_finalize_128_avx_gen4; + state->gmac192_finalize = imb_aes_gmac_finalize_192_avx_gen4; + state->gmac256_finalize = imb_aes_gmac_finalize_256_avx_gen4; +} + +#include "mb_mgr_code.h" diff --git a/lib/avx512/zuc_x16_avx512.asm b/lib/avx512/zuc_x16_avx512.asm deleted file mode 100755 index 36dfb0e5ffe2d015896b160f973a783e53e530fb..0000000000000000000000000000000000000000 --- a/lib/avx512/zuc_x16_avx512.asm +++ /dev/null @@ -1,3087 +0,0 @@ -;; -;; Copyright (c) 2020-2022, Intel Corporation -;; -;; Redistribution and use in source and binary forms, with or without -;; modification, are permitted provided that the following conditions are met: -;; -;; * Redistributions of source code must retain the above copyright notice, -;; this list of conditions and the following disclaimer. -;; * Redistributions in binary form must reproduce the above copyright -;; notice, this list of conditions and the following disclaimer in the -;; documentation and/or other materials provided with the distribution. -;; * Neither the name of Intel Corporation nor the names of its contributors -;; may be used to endorse or promote products derived from this software -;; without specific prior written permission. -;; -;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -;; DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -;; - -%include "include/os.asm" -%include "include/reg_sizes.asm" -%include "include/zuc_sbox.inc" -%include "include/transpose_avx512.asm" -%include "include/const.inc" -%include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" -%define APPEND(a,b) a %+ b -%define APPEND3(a,b,c) a %+ b %+ c - -%ifndef CIPHER_16 -%define USE_GFNI_VAES_VPCLMUL 0 -%define CIPHER_16 asm_ZucCipher_16_avx512 -%define ZUC128_INIT asm_ZucInitialization_16_avx512 -%define ZUC256_INIT asm_Zuc256Initialization_16_avx512 -%define ZUC128_REMAINDER_16 asm_Eia3RemainderAVX512_16 -%define ZUC256_REMAINDER_16 asm_Eia3_256_RemainderAVX512_16 -%define ZUC_KEYGEN64B_16 asm_ZucGenKeystream64B_16_avx512 -%define ZUC_KEYGEN8B_16 asm_ZucGenKeystream8B_16_avx512 -%define ZUC_KEYGEN4B_16 asm_ZucGenKeystream4B_16_avx512 -%define ZUC_KEYGEN_16 asm_ZucGenKeystream_16_avx512 -%define ZUC_KEYGEN64B_SKIP8_16 asm_ZucGenKeystream64B_16_skip8_avx512 -%define ZUC_KEYGEN8B_SKIP8_16 asm_ZucGenKeystream8B_16_skip8_avx512 -%define ZUC_KEYGEN_SKIP8_16 asm_ZucGenKeystream_16_skip8_avx512 -%define ZUC_ROUND64B_16 asm_Eia3Round64BAVX512_16 -%define ZUC_EIA3_N64B asm_Eia3_Nx64B_AVX512_16 -%endif - -mksection .rodata -default rel - -align 64 -EK_d64: -dd 0x0044D700, 0x0026BC00, 0x00626B00, 0x00135E00, 0x00578900, 0x0035E200, 0x00713500, 0x0009AF00 -dd 0x004D7800, 0x002F1300, 0x006BC400, 0x001AF100, 0x005E2600, 0x003C4D00, 0x00789A00, 0x0047AC00 - -align 64 -EK256_d64: -dd 0x00220000, 0x002F0000, 0x00240000, 0x002A0000, 0x006D0000, 0x00400000, 0x00400000, 0x00400000 -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, 0x00400000, 0x00520000, 0x00100000, 0x00300000 - -align 64 -EK256_EIA3_4: -dd 0x00220000, 0x002F0000, 0x00250000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 - -align 64 -EK256_EIA3_8: -dd 0x00230000, 0x002F0000, 0x00240000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 - -align 64 -EK256_EIA3_16: -dd 0x00230000, 0x002F0000, 0x00250000, 0x002A0000, -dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, -dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 - -align 64 -shuf_mask_key: -dd 0x00FFFFFF, 0x01FFFFFF, 0x02FFFFFF, 0x03FFFFFF, 0x04FFFFFF, 0x05FFFFFF, 0x06FFFFFF, 0x07FFFFFF, -dd 0x08FFFFFF, 0x09FFFFFF, 0x0AFFFFFF, 0x0BFFFFFF, 0x0CFFFFFF, 0x0DFFFFFF, 0x0EFFFFFF, 0x0FFFFFFF, - -align 64 -shuf_mask_iv: -dd 0xFFFFFF00, 0xFFFFFF01, 0xFFFFFF02, 0xFFFFFF03, 0xFFFFFF04, 0xFFFFFF05, 0xFFFFFF06, 0xFFFFFF07, -dd 0xFFFFFF08, 0xFFFFFF09, 0xFFFFFF0A, 0xFFFFFF0B, 0xFFFFFF0C, 0xFFFFFF0D, 0xFFFFFF0E, 0xFFFFFF0F, - -align 64 -shuf_mask_key256_first_high: -dd 0x00FFFFFF, 0x01FFFFFF, 0x02FFFFFF, 0x03FFFFFF, 0x04FFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, -dd 0x08FFFFFF, 0x09FFFFFF, 0xFFFFFFFF, 0x0BFFFFFF, 0x0CFFFFFF, 0x0DFFFFFF, 
0x0EFFFFFF, 0x0FFFFFFF, - -align 64 -shuf_mask_key256_first_low: -dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFF05FF, 0xFFFF06FF, 0xFFFF07FF, -dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFF0AFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, - -align 64 -shuf_mask_key256_second: -dd 0xFFFF0500, 0xFFFF0601, 0xFFFF0702, 0xFFFF0803, 0xFFFF0904, 0xFFFFFF0A, 0xFFFFFF0B, 0xFFFFFFFF, -dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFF0C, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFF0FFFFF, 0xFF0F0E0D, - -align 64 -shuf_mask_iv256_first_high: -dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00FFFFFF, 0x01FFFFFF, 0x0AFFFFFF, -dd 0xFFFFFFFF, 0xFFFFFFFF, 0x05FFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, - -align 64 -shuf_mask_iv256_first_low: -dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFF02, -dd 0xFFFF030B, 0xFFFF0C04, 0xFFFFFFFF, 0xFFFF060D, 0xFFFF070E, 0xFFFF0F08, 0xFFFFFF09, 0xFFFFFFFF, - -align 64 -shuf_mask_iv256_second: -dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFF01FFFF, 0xFF02FFFF, 0xFF03FFFF, -dd 0xFF04FFFF, 0xFF05FFFF, 0xFF06FFFF, 0xFF07FFFF, 0xFF08FFFF, 0xFFFFFFFF, 0xFFFF00FF, 0xFFFFFFFF, - -align 64 -key_mask_low_4: -dq 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff -dq 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xff0fffffffff0fff - -align 64 -iv_mask_low_6: -dq 0x3f3f3f3f3f3f3fff, 0x000000000000003f - -align 64 -mask31: -dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, -dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, -dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, -dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, - -align 64 -swap_mask: -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c - -align 64 -S1_S0_shuf: -db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F -db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F -db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F -db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F - -align 64 -S0_S1_shuf: -db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, -db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, -db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, -db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, - -align 64 -rev_S1_S0_shuf: -db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F -db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F -db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F -db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F - -align 64 -rev_S0_S1_shuf: -db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 -db 
0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 -db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 -db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 - -align 64 -bit_reverse_table_l: -db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f -db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f -db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f -db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f - -align 64 -bit_reverse_table_h: -db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 -db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 -db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 -db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 - -align 64 -bit_reverse_and_table: -db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f -db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f -db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f -db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f - -align 64 -bit_reverse_table: -times 8 db 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80 - -align 64 -shuf_mask_tags_0_1_2_3: -dd 0x01, 0x05, 0x09, 0x0D, 0x11, 0x15, 0x19, 0x1D, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF -dd 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x05, 0x09, 0x0D, 0x11, 0x15, 0x19, 0x1D - -align 64 -shuf_mask_tags_0_4_8_12: -dd 0x01, 0x11, 0xFF, 0xFF, 0x05, 0x15, 0xFF, 0xFF, 0x09, 0x19, 0xFF, 0xFF, 0x0D, 0x1D, 0xFF, 0xFF -dd 0xFF, 0xFF, 0x01, 0x11, 0xFF, 0xFF, 0x05, 0x15, 0xFF, 0xFF, 0x09, 0x19, 0xFF, 0xFF, 0x0D, 0x1D - -align 64 -all_ffs: -dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff -dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff -dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff -dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff - -align 64 -all_threes: -dw 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003 -dw 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003 - -align 64 -all_fffcs: -dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc -dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc -dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc -dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc - -align 64 -all_3fs: -dw 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f -dw 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f - -align 16 -bit_mask_table: -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x80 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc0 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe0 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0xf0 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf8 -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc -db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe - -byte64_len_to_mask_table: - dq 0xffffffffffffffff, 0x0000000000000001 - dq 0x0000000000000003, 0x0000000000000007 - dq 0x000000000000000f, 0x000000000000001f - dq 0x000000000000003f, 0x000000000000007f - dq 0x00000000000000ff, 0x00000000000001ff - dq 0x00000000000003ff, 0x00000000000007ff - dq 0x0000000000000fff, 0x0000000000001fff - dq 0x0000000000003fff, 0x0000000000007fff - dq 0x000000000000ffff, 0x000000000001ffff - dq 0x000000000003ffff, 0x000000000007ffff - dq 0x00000000000fffff, 0x00000000001fffff - dq 0x00000000003fffff, 0x00000000007fffff - dq 0x0000000000ffffff, 0x0000000001ffffff - dq 0x0000000003ffffff, 0x0000000007ffffff - dq 0x000000000fffffff, 0x000000001fffffff - dq 0x000000003fffffff, 0x000000007fffffff - dq 0x00000000ffffffff, 0x00000001ffffffff - dq 0x00000003ffffffff, 0x00000007ffffffff - dq 0x0000000fffffffff, 0x0000001fffffffff - dq 0x0000003fffffffff, 0x0000007fffffffff - dq 0x000000ffffffffff, 0x000001ffffffffff - dq 0x000003ffffffffff, 0x000007ffffffffff - dq 0x00000fffffffffff, 0x00001fffffffffff - dq 0x00003fffffffffff, 0x00007fffffffffff - dq 0x0000ffffffffffff, 0x0001ffffffffffff - dq 0x0003ffffffffffff, 0x0007ffffffffffff - dq 0x000fffffffffffff, 0x001fffffffffffff - dq 0x003fffffffffffff, 0x007fffffffffffff - dq 0x00ffffffffffffff, 0x01ffffffffffffff - dq 0x03ffffffffffffff, 0x07ffffffffffffff - dq 0x0fffffffffffffff, 0x1fffffffffffffff - dq 0x3fffffffffffffff, 0x7fffffffffffffff - dq 0xffffffffffffffff - -align 64 -add_64: -dq 64, 64, 64, 64, 64, 64, 64, 64 - -align 32 -all_512w: -dw 512, 512, 512, 512, 512, 512, 512, 512 -dw 512, 512, 512, 512, 512, 512, 512, 512 - -align 64 -bswap_mask: -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c -db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 -db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c - -align 64 -all_31w: -dw 31, 31, 31, 31, 31, 31, 31, 31 -dw 31, 31, 31, 31, 31, 31, 31, 31 - -align 64 -all_ffe0w: -dw 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0 -dw 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0 - -align 32 -permw_mask: -dw 0, 4, 8, 12, 1, 5, 8, 13, 2, 6, 10, 14, 3, 7, 11, 15 - -extr_bits_0_4_8_12: -db 00010001b, 00010001b, 00000000b, 00000000b - -extr_bits_1_5_9_13: -db 00100010b, 00100010b, 00000000b, 00000000b - -extr_bits_2_6_10_14: -db 01000100b, 01000100b, 00000000b, 00000000b - -extr_bits_3_7_11_15: -db 10001000b, 10001000b, 00000000b, 00000000b - -alignr_mask: -dw 0xffff, 0xffff, 0xffff, 0xffff -dw 0x0000, 0xffff, 0xffff, 0xffff -dw 0xffff, 0x0000, 0xffff, 0xffff -dw 0x0000, 0x0000, 0xffff, 0xffff -dw 0xffff, 0xffff, 0x0000, 0xffff -dw 0x0000, 0xffff, 0x0000, 0xffff -dw 0xffff, 0x0000, 0x0000, 0xffff -dw 0x0000, 0x0000, 0x0000, 0xffff -dw 0xffff, 0xffff, 0xffff, 0x0000 -dw 0x0000, 0xffff, 0xffff, 0x0000 -dw 0xffff, 0x0000, 0xffff, 0x0000 -dw 0x0000, 0x0000, 0xffff, 0x0000 -dw 0xffff, 0xffff, 0x0000, 0x0000 -dw 0x0000, 0xffff, 0x0000, 0x0000 -dw 0xffff, 0x0000, 0x0000, 0x0000 -dw 0x0000, 0x0000, 
0x0000, 0x0000 - -mov_mask: -db 10101010b, 10101011b, 10101110b, 10101111b -db 10111010b, 10111011b, 10111110b, 10111111b -db 11101010b, 11101011b, 11101110b, 11101111b -db 11111010b, 11111011b, 11111110b, 11111111b - -;; Calculate address for next bytes of keystream (KS) -;; Memory for KS is laid out in the following way: -;; - There are 128 bytes of KS for each buffer spread in chunks of 16 bytes, -;; interleaving with KS from other 3 buffers, every 512 bytes -;; - There are 16 bytes of KS every 64 bytes, for every buffer - -;; - To access the 512-byte chunk, containing the 128 bytes of KS for the 4 buffers, -;; lane4_idx -;; - To access the next 16 bytes of KS for a buffer, bytes16_idx is used -;; - To access a 16-byte chunk inside a 64-byte chunk, ks_idx is used -%define GET_KS(base, lane4_idx, bytes16_idx, ks_idx) (base + lane4_idx * 512 + bytes16_idx * 64 + ks_idx * 16) - -mksection .text -align 64 - -%ifdef LINUX -%define arg1 rdi -%define arg2 rsi -%define arg3 rdx -%define arg4 rcx -%define arg5 r8 -%define arg6 r9d -%else -%define arg1 rcx -%define arg2 rdx -%define arg3 r8 -%define arg4 r9 -%define arg5 [rsp + 40] -%define arg6 [rsp + 48] -%endif - -%define OFS_R1 (16*(4*16)) -%define OFS_R2 (OFS_R1 + (4*16)) - -%ifidn __OUTPUT_FORMAT__, win64 - %define XMM_STORAGE 16*10 - %define GP_STORAGE 8*8 -%else - %define XMM_STORAGE 0 - %define GP_STORAGE 6*8 -%endif -%define LANE_STORAGE 64 - -%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE + LANE_STORAGE -%define GP_OFFSET XMM_STORAGE - -%macro FUNC_SAVE 0 - mov rax, rsp - sub rsp, VARIABLE_OFFSET - and rsp, ~15 - -%ifidn __OUTPUT_FORMAT__, win64 - ; xmm6:xmm15 need to be maintained for Windows - vmovdqa [rsp + 0*16], xmm6 - vmovdqa [rsp + 1*16], xmm7 - vmovdqa [rsp + 2*16], xmm8 - vmovdqa [rsp + 3*16], xmm9 - vmovdqa [rsp + 4*16], xmm10 - vmovdqa [rsp + 5*16], xmm11 - vmovdqa [rsp + 6*16], xmm12 - vmovdqa [rsp + 7*16], xmm13 - vmovdqa [rsp + 8*16], xmm14 - vmovdqa [rsp + 9*16], xmm15 - mov [rsp + GP_OFFSET + 48], rdi - mov [rsp + GP_OFFSET + 56], rsi -%endif - mov [rsp + GP_OFFSET], r12 - mov [rsp + GP_OFFSET + 8], r13 - mov [rsp + GP_OFFSET + 16], r14 - mov [rsp + GP_OFFSET + 24], r15 - mov [rsp + GP_OFFSET + 32], rbx - mov [rsp + GP_OFFSET + 40], rax ;; rsp pointer -%endmacro - -%macro FUNC_RESTORE 0 - -%ifidn __OUTPUT_FORMAT__, win64 - vmovdqa xmm6, [rsp + 0*16] - vmovdqa xmm7, [rsp + 1*16] - vmovdqa xmm8, [rsp + 2*16] - vmovdqa xmm9, [rsp + 3*16] - vmovdqa xmm10, [rsp + 4*16] - vmovdqa xmm11, [rsp + 5*16] - vmovdqa xmm12, [rsp + 6*16] - vmovdqa xmm13, [rsp + 7*16] - vmovdqa xmm14, [rsp + 8*16] - vmovdqa xmm15, [rsp + 9*16] - mov rdi, [rsp + GP_OFFSET + 48] - mov rsi, [rsp + GP_OFFSET + 56] -%endif - mov r12, [rsp + GP_OFFSET] - mov r13, [rsp + GP_OFFSET + 8] - mov r14, [rsp + GP_OFFSET + 16] - mov r15, [rsp + GP_OFFSET + 24] - mov rbx, [rsp + GP_OFFSET + 32] - mov rsp, [rsp + GP_OFFSET + 40] -%endmacro - -; This macro reorder the LFSR registers -; after N rounds (1 <= N <= 15), since the registers -; are shifted every round -; -; The macro clobbers ZMM0-15 -; -%macro REORDER_LFSR 3 -%define %%STATE %1 -%define %%NUM_ROUNDS %2 -%define %%LANE_MASK %3 - -%if %%NUM_ROUNDS != 16 -%assign i 0 -%rep 16 - vmovdqa32 APPEND(zmm,i){%%LANE_MASK}, [%%STATE + 64*i] -%assign i (i+1) -%endrep - -%assign i 0 -%assign j %%NUM_ROUNDS -%rep 16 - vmovdqa32 [%%STATE + 64*i]{%%LANE_MASK}, APPEND(zmm,j) -%assign i (i+1) -%assign j ((j+1) % 16) -%endrep -%endif ;; %%NUM_ROUNDS != 16 - -%endmacro - -; -; Perform a partial 16x16 transpose (as opposed to a 
full 16x16 transpose), -; where the output is chunks of 16 bytes from 4 different buffers interleaved -; in each register (all ZMM registers) -; -; Input: -; a0 a1 a2 a3 a4 a5 a6 a7 .... a15 -; b0 b1 b2 b3 b4 b5 b6 b7 .... b15 -; c0 c1 c2 c3 c4 c5 c6 c7 .... c15 -; d0 d1 d2 d3 d4 d5 d6 d7 .... d15 -; -; Output: -; a0 b0 c0 d0 a4 b4 c4 d4 .... d12 -; a1 b1 c1 d1 a5 b5 c5 d5 .... d13 -; a2 b2 c2 d2 a6 b6 c6 d6 .... d14 -; a3 b3 c3 d3 a7 b7 c7 d7 .... d15 -; -%macro TRANSPOSE16_U32_INTERLEAVED 26 -%define %%IN00 %1 ; [in/out] Bytes 0-3 for all buffers (in) / Bytes 0-15 for buffers 3,7,11,15 (out) -%define %%IN01 %2 ; [in/out] Bytes 4-7 for all buffers (in) / Bytes 16-31 for buffers 3,7,11,15 (out) -%define %%IN02 %3 ; [in/out] Bytes 8-11 for all buffers (in) / Bytes 32-47 for buffers 3,7,11,15 (out) -%define %%IN03 %4 ; [in/out] Bytes 12-15 for all buffers (in) / Bytes 48-63 for buffers 3,7,11,15 (out) -%define %%IN04 %5 ; [in/clobbered] Bytes 16-19 for all buffers (in) -%define %%IN05 %6 ; [in/clobbered] Bytes 20-23 for all buffers (in) -%define %%IN06 %7 ; [in/clobbered] Bytes 24-27 for all buffers (in) -%define %%IN07 %8 ; [in/clobbered] Bytes 28-31 for all buffers (in) -%define %%IN08 %9 ; [in/clobbered] Bytes 32-35 for all buffers (in) -%define %%IN09 %10 ; [in/clobbered] Bytes 36-39 for all buffers (in) -%define %%IN10 %11 ; [in/clobbered] Bytes 40-43 for all buffers (in) -%define %%IN11 %12 ; [in/clobbered] Bytes 44-47 for all buffers (in) -%define %%IN12 %13 ; [in/out] Bytes 48-51 for all buffers (in) / Bytes 0-15 for buffers 2,6,10,14 (out) -%define %%IN13 %14 ; [in/out] Bytes 52-55 for all buffers (in) / Bytes 16-31 for buffers 2,6,10,14 (out) -%define %%IN14 %15 ; [in/out] Bytes 56-59 for all buffers (in) / Bytes 32-47 for buffers 2,6,10,14 (out) -%define %%IN15 %16 ; [in/out] Bytes 60-63 for all buffers (in) / Bytes 48-63 for buffers 2,6,10,14 (out) -%define %%T0 %17 ; [out] Bytes 32-47 for buffers 1,5,9,13 (out) -%define %%T1 %18 ; [out] Bytes 48-63 for buffers 1,5,9,13 (out) -%define %%T2 %19 ; [out] Bytes 32-47 for buffers 0,4,8,12 (out) -%define %%T3 %20 ; [out] Bytes 48-63 for buffers 0,4,8,12 (out) -%define %%K0 %21 ; [out] Bytes 0-15 for buffers 1,5,9,13 (out) -%define %%K1 %22 ; [out] Bytes 16-31for buffers 1,5,9,13 (out) -%define %%K2 %23 ; [out] Bytes 0-15 for buffers 0,4,8,12 (out) -%define %%K3 %24 ; [out] Bytes 16-31 for buffers 0,4,8,12 (out) -%define %%K4 %25 ; [clobbered] Temporary register -%define %%K5 %26 ; [clobbered] Temporary register - - vpunpckldq %%K0, %%IN00, %%IN01 - vpunpckhdq %%K1, %%IN00, %%IN01 - vpunpckldq %%T0, %%IN02, %%IN03 - vpunpckhdq %%T1, %%IN02, %%IN03 - - vpunpckldq %%IN00, %%IN04, %%IN05 - vpunpckhdq %%IN01, %%IN04, %%IN05 - vpunpckldq %%IN02, %%IN06, %%IN07 - vpunpckhdq %%IN03, %%IN06, %%IN07 - - vpunpcklqdq %%K2, %%K0, %%T0 - vpunpckhqdq %%K3, %%K0, %%T0 - vpunpcklqdq %%T2, %%K1, %%T1 - vpunpckhqdq %%T3, %%K1, %%T1 - - vpunpcklqdq %%K0, %%IN00, %%IN02 - vpunpckhqdq %%K1, %%IN00, %%IN02 - vpunpcklqdq %%T0, %%IN01, %%IN03 - vpunpckhqdq %%T1, %%IN01, %%IN03 - - vpunpckldq %%K4, %%IN08, %%IN09 - vpunpckhdq %%K5, %%IN08, %%IN09 - vpunpckldq %%IN04, %%IN10, %%IN11 - vpunpckhdq %%IN05, %%IN10, %%IN11 - vpunpckldq %%IN06, %%IN12, %%IN13 - vpunpckhdq %%IN07, %%IN12, %%IN13 - vpunpckldq %%IN10, %%IN14, %%IN15 - vpunpckhdq %%IN11, %%IN14, %%IN15 - - vpunpcklqdq %%IN12, %%K4, %%IN04 - vpunpckhqdq %%IN13, %%K4, %%IN04 - vpunpcklqdq %%IN14, %%K5, %%IN05 - vpunpckhqdq %%IN15, %%K5, %%IN05 - vpunpcklqdq %%IN00, %%IN06, %%IN10 - vpunpckhqdq %%IN01, 
%%IN06, %%IN10 - vpunpcklqdq %%IN02, %%IN07, %%IN11 - vpunpckhqdq %%IN03, %%IN07, %%IN11 -%endmacro - -; -; Perform a partial 4x16 transpose -; where the output is chunks of 16 bytes from 4 different buffers interleaved -; in each register (all ZMM registers) -; -; Input: -; a0 a1 a2 a3 a4 a5 a6 a7 .... a15 -; b0 b1 b2 b3 b4 b5 b6 b7 .... b15 -; c0 c1 c2 c3 c4 c5 c6 c7 .... c15 -; d0 d1 d2 d3 d4 d5 d6 d7 .... d15 -; -; Output: -; a0 b0 c0 d0 a4 b4 c4 d4 .... d12 -; a1 b1 c1 d1 a5 b5 c5 d5 .... d13 -; a2 b2 c2 d2 a6 b6 c6 d6 .... d14 -; a3 b3 c3 d3 a7 b7 c7 d7 .... d15 -; -%macro TRANSPOSE4_U32_INTERLEAVED 8 -%define %%IN00 %1 ; [in/out] Bytes 0-3 for all buffers (in) / Bytes 0-15 for buffers 0,4,8,12 (out) -%define %%IN01 %2 ; [in/out] Bytes 4-7 for all buffers (in) / Bytes 0-15 for buffers 1,5,9,13 (out) -%define %%IN02 %3 ; [in/out] Bytes 8-11 for all buffers (in) / Bytes 0-15 for buffers 2,6,10,14 (out) -%define %%IN03 %4 ; [in/out] Bytes 12-15 for all buffers (in) / Bytes 0-15 for buffers 3,7,11,15 (out) -%define %%T0 %5 ; [clobbered] Temporary ZMM register -%define %%T1 %6 ; [clobbered] Temporary ZMM register -%define %%K0 %7 ; [clobbered] Temporary ZMM register -%define %%K1 %8 ; [clobbered] Temporary ZMM register - - vpunpckldq %%K0, %%IN00, %%IN01 - vpunpckhdq %%K1, %%IN00, %%IN01 - vpunpckldq %%T0, %%IN02, %%IN03 - vpunpckhdq %%T1, %%IN02, %%IN03 - - vpunpcklqdq %%IN00, %%K0, %%T0 - vpunpckhqdq %%IN01, %%K0, %%T0 - vpunpcklqdq %%IN02, %%K1, %%T1 - vpunpckhqdq %%IN03, %%K1, %%T1 - -%endmacro - -; -; Calculates X0-X3 from LFSR registers -; -%macro BITS_REORG16 16-17 -%define %%STATE %1 ; [in] ZUC state -%define %%ROUND_NUM %2 ; [in] Round number -%define %%LANE_MASK %3 ; [in] Mask register with lanes to update -%define %%LFSR_0 %4 ; [clobbered] LFSR_0 -%define %%LFSR_2 %5 ; [clobbered] LFSR_2 -%define %%LFSR_5 %6 ; [clobbered] LFSR_5 -%define %%LFSR_7 %7 ; [clobbered] LFSR_7 -%define %%LFSR_9 %8 ; [clobbered] LFSR_9 -%define %%LFSR_11 %9 ; [clobbered] LFSR_11 -%define %%LFSR_14 %10 ; [clobbered] LFSR_14 -%define %%LFSR_15 %11 ; [clobbered] LFSR_15 -%define %%ZTMP %12 ; [clobbered] Temporary ZMM register -%define %%BLEND_KMASK %13 ; [in] Blend K-mask -%define %%X0 %14 ; [out] ZMM register containing X0 of all lanes -%define %%X1 %15 ; [out] ZMM register containing X1 of all lanes -%define %%X2 %16 ; [out] ZMM register containing X2 of all lanes -%define %%X3 %17 ; [out] ZMM register containing X3 of all lanes (only for work mode) - - vmovdqa64 %%LFSR_15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_14, [%%STATE + ((14 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_11, [%%STATE + ((11 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_9, [%%STATE + (( 9 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_7, [%%STATE + (( 7 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_5, [%%STATE + (( 5 + %%ROUND_NUM) % 16)*64] -%if (%0 == 17) ; Only needed when generating X3 (for "working" mode) - vmovdqa64 %%LFSR_2, [%%STATE + (( 2 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*64] -%endif - -%if USE_GFNI_VAES_VPCLMUL == 1 - vpsrld %%LFSR_15, 15 - vpslld %%LFSR_14, 16 - vpslld %%LFSR_9, 1 - vpslld %%LFSR_5, 1 - vpshldd %%X0, %%LFSR_15, %%LFSR_14, 16 - vpshldd %%X1, %%LFSR_11, %%LFSR_9, 16 - vpshldd %%X2, %%LFSR_7, %%LFSR_5, 16 -%if (%0 == 17) - vpslld %%LFSR_0, 1 - vpshldd %%X3, %%LFSR_2, %%LFSR_0, 16 -%endif -%else ; USE_GFNI_VAES_VPCLMUL == 1 - vpxorq %%ZTMP, %%ZTMP - vpslld %%LFSR_15, 1 - vpblendmw %%ZTMP{%%BLEND_KMASK}, %%LFSR_14, %%ZTMP - vpblendmw 
%%X0{%%BLEND_KMASK}, %%ZTMP, %%LFSR_15 - vpslld %%LFSR_11, 16 - vpsrld %%LFSR_9, 15 - vporq %%X1, %%LFSR_11, %%LFSR_9 - vpslld %%LFSR_7, 16 - vpsrld %%LFSR_5, 15 - vporq %%X2, %%LFSR_7, %%LFSR_5 -%if (%0 == 17) - vpslld %%LFSR_2, 16 - vpsrld %%LFSR_0, 15 - vporq %%X3, %%LFSR_2, %%LFSR_0 ; Store BRC_X3 in ZMM register -%endif ; %0 == 17 -%endif ; USE_GFNI_VAES_VPCLMUL == 1 -%endmacro - -; -; Updates R1-R2, using X0-X3 and generates W (if needed) -; -%macro NONLIN_FUN16 13-14 -%define %%STATE %1 ; [in] ZUC state -%define %%LANE_MASK %2 ; [in] Mask register with lanes to update -%define %%X0 %3 ; [in] ZMM register containing X0 of all lanes -%define %%X1 %4 ; [in] ZMM register containing X1 of all lanes -%define %%X2 %5 ; [in] ZMM register containing X2 of all lanes -%define %%R1 %6 ; [in/out] ZMM register to contain R1 for all lanes -%define %%R2 %7 ; [in/out] ZMM register to contain R2 for all lanes -%define %%ZTMP1 %8 ; [clobbered] Temporary ZMM register -%define %%ZTMP2 %9 ; [clobbered] Temporary ZMM register -%define %%ZTMP3 %10 ; [clobbered] Temporary ZMM register -%define %%ZTMP4 %11 ; [clobbered] Temporary ZMM register -%define %%ZTMP5 %12 ; [clobbered] Temporary ZMM register -%define %%ZTMP6 %13 ; [clobbered] Temporary ZMM register -%define %%W %14 ; [out] ZMM register to contain W for all lanes - -%define %%W1 %%ZTMP5 -%define %%W2 %%ZTMP6 - -%if (%0 == 14) - vpxorq %%W, %%X0, %%R1 - vpaddd %%W, %%R2 ; W = (BRC_X0 ^ F_R1) + F_R2 -%endif - - vpaddd %%W1, %%R1, %%X1 ; W1 = F_R1 + BRC_X1 - vpxorq %%W2, %%R2, %%X2 ; W2 = F_R2 ^ BRC_X2 - -%if USE_GFNI_VAES_VPCLMUL == 1 - vpshldd %%ZTMP1, %%W1, %%W2, 16 - vpshldd %%ZTMP2, %%W2, %%W1, 16 -%else - vpslld %%ZTMP3, %%W1, 16 - vpsrld %%ZTMP4, %%W1, 16 - vpslld %%ZTMP5, %%W2, 16 - vpsrld %%ZTMP6, %%W2, 16 - vporq %%ZTMP1, %%ZTMP3, %%ZTMP6 - vporq %%ZTMP2, %%ZTMP4, %%ZTMP5 -%endif - - vprold %%ZTMP3, %%ZTMP1, 10 - vprold %%ZTMP4, %%ZTMP1, 18 - vprold %%ZTMP5, %%ZTMP1, 24 - vprold %%ZTMP6, %%ZTMP1, 2 - ; ZMM1 = U = L1(P) - vpternlogq %%ZTMP1, %%ZTMP3, %%ZTMP4, 0x96 ; (A ^ B) ^ C - vpternlogq %%ZTMP1, %%ZTMP5, %%ZTMP6, 0x96 ; (A ^ B) ^ C - - vprold %%ZTMP3, %%ZTMP2, 8 - vprold %%ZTMP4, %%ZTMP2, 14 - vprold %%ZTMP5, %%ZTMP2, 22 - vprold %%ZTMP6, %%ZTMP2, 30 - ; ZMM2 = V = L2(Q) - vpternlogq %%ZTMP2, %%ZTMP3, %%ZTMP4, 0x96 ; (A ^ B) ^ C - vpternlogq %%ZTMP2, %%ZTMP5, %%ZTMP6, 0x96 ; (A ^ B) ^ C - - ; Shuffle U and V to have all S0 lookups in XMM1 and all S1 lookups in XMM2 - - ; Compress all S0 and S1 input values in each register - ; S0: Bytes 0-7,16-23,32-39,48-55 S1: Bytes 8-15,24-31,40-47,56-63 - vpshufb %%ZTMP1, [rel S0_S1_shuf] - ; S1: Bytes 0-7,16-23,32-39,48-55 S0: Bytes 8-15,24-31,40-47,56-63 - vpshufb %%ZTMP2, [rel S1_S0_shuf] - - vshufpd %%ZTMP3, %%ZTMP1, %%ZTMP2, 0xAA ; All S0 input values - vshufpd %%ZTMP4, %%ZTMP2, %%ZTMP1, 0xAA ; All S1 input values - - ; Compute S0 and S1 values - S0_comput_AVX512 %%ZTMP3, %%ZTMP1, %%ZTMP2, USE_GFNI_VAES_VPCLMUL - S1_comput_AVX512 %%ZTMP4, %%ZTMP1, %%ZTMP2, %%ZTMP5, %%ZTMP6, USE_GFNI_VAES_VPCLMUL - - ; Need to shuffle back %%ZTMP1 & %%ZTMP2 before storing output - ; (revert what was done before S0 and S1 computations) - vshufpd %%ZTMP1, %%ZTMP3, %%ZTMP4, 0xAA - vshufpd %%ZTMP2, %%ZTMP4, %%ZTMP3, 0xAA - - vpshufb %%R1, %%ZTMP1, [rel rev_S0_S1_shuf] - vpshufb %%R2, %%ZTMP2, [rel rev_S1_S0_shuf] -%endmacro - -; -; Function to store 64 bytes of keystream for 16 buffers -; Note: all the 64*16 bytes are not store contiguously, -; the first 256 bytes (containing 64 bytes from 4 buffers) -; are stored in the 
first half of the first 512 bytes, -; then there is a gap of 256 bytes and then the next 256 bytes -; are written, and so on. -; -%macro STORE_KSTR16 18-24 -%define %%KS %1 ; [in] Pointer to keystream -%define %%DATA64B_L0 %2 ; [in] 64 bytes of keystream for lane 0 -%define %%DATA64B_L1 %3 ; [in] 64 bytes of keystream for lane 1 -%define %%DATA64B_L2 %4 ; [in] 64 bytes of keystream for lane 2 -%define %%DATA64B_L3 %5 ; [in] 64 bytes of keystream for lane 3 -%define %%DATA64B_L4 %6 ; [in] 64 bytes of keystream for lane 4 -%define %%DATA64B_L5 %7 ; [in] 64 bytes of keystream for lane 5 -%define %%DATA64B_L6 %8 ; [in] 64 bytes of keystream for lane 6 -%define %%DATA64B_L7 %9 ; [in] 64 bytes of keystream for lane 7 -%define %%DATA64B_L8 %10 ; [in] 64 bytes of keystream for lane 8 -%define %%DATA64B_L9 %11 ; [in] 64 bytes of keystream for lane 9 -%define %%DATA64B_L10 %12 ; [in] 64 bytes of keystream for lane 10 -%define %%DATA64B_L11 %13 ; [in] 64 bytes of keystream for lane 11 -%define %%DATA64B_L12 %14 ; [in] 64 bytes of keystream for lane 12 -%define %%DATA64B_L13 %15 ; [in] 64 bytes of keystream for lane 13 -%define %%DATA64B_L14 %16 ; [in] 64 bytes of keystream for lane 14 -%define %%DATA64B_L15 %17 ; [in] 64 bytes of keystream for lane 15 -%define %%KEY_OFF %18 ; [in] Offset to start writing Keystream -%define %%LANE_MASK %19 ; [in] Lane mask with lanes to generate all keystream words -%define %%ALIGN_MASK %20 ; [in] Address with alignr masks -%define %%MOV_MASK %21 ; [in] Address with move masks -%define %%TMP %22 ; [in] Temporary GP register -%define %%KMASK1 %23 ; [clobbered] Temporary K mask -%define %%KMASK2 %24 ; [clobbered] Temporary K mask - -%if (%0 == 18) - vmovdqu64 [%%KS + %%KEY_OFF*4], %%DATA64B_L0 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 64], %%DATA64B_L1 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 2*64], %%DATA64B_L2 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 3*64], %%DATA64B_L3 - - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512], %%DATA64B_L4 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512 + 64], %%DATA64B_L5 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512 + 2*64], %%DATA64B_L6 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512 + 3*64], %%DATA64B_L7 - - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2], %%DATA64B_L8 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2 + 64], %%DATA64B_L9 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2 + 64*2], %%DATA64B_L10 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2 + 64*3], %%DATA64B_L11 - - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3], %%DATA64B_L12 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3 + 64], %%DATA64B_L13 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3 + 64*2], %%DATA64B_L14 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3 + 64*3], %%DATA64B_L15 -%else - pext DWORD(%%TMP), DWORD(%%LANE_MASK), [rel extr_bits_0_4_8_12] - kmovq %%KMASK1, [%%ALIGN_MASK + 8*%%TMP] - kmovb %%KMASK2, [%%MOV_MASK + %%TMP] - ; Shifting left 8 bytes of KS for lanes which first 8 bytes are skipped - vpalignr %%DATA64B_L3{%%KMASK1}, %%DATA64B_L3, %%DATA64B_L2, 8 - vpalignr %%DATA64B_L2{%%KMASK1}, %%DATA64B_L2, %%DATA64B_L1, 8 - vpalignr %%DATA64B_L1{%%KMASK1}, %%DATA64B_L1, %%DATA64B_L0, 8 - vpalignr %%DATA64B_L0{%%KMASK1}, %%DATA64B_L0, %%DATA64B_L3, 8 - vmovdqu64 [%%KS + %%KEY_OFF*4]{%%KMASK2}, %%DATA64B_L0 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 64], %%DATA64B_L1 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 2*64], %%DATA64B_L2 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 3*64], %%DATA64B_L3 - - pext DWORD(%%TMP), DWORD(%%LANE_MASK), [rel extr_bits_1_5_9_13] - kmovq %%KMASK1, [%%ALIGN_MASK + 8*%%TMP] - kmovb %%KMASK2, [%%MOV_MASK + %%TMP] - vpalignr %%DATA64B_L7{%%KMASK1}, %%DATA64B_L7, %%DATA64B_L6, 8 - 
vpalignr %%DATA64B_L6{%%KMASK1}, %%DATA64B_L6, %%DATA64B_L5, 8 - vpalignr %%DATA64B_L5{%%KMASK1}, %%DATA64B_L5, %%DATA64B_L4, 8 - vpalignr %%DATA64B_L4{%%KMASK1}, %%DATA64B_L4, %%DATA64B_L7, 8 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512]{%%KMASK2}, %%DATA64B_L4 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512 + 64], %%DATA64B_L5 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512 + 64*2], %%DATA64B_L6 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512 + 64*3], %%DATA64B_L7 - - pext DWORD(%%TMP), DWORD(%%LANE_MASK), [rel extr_bits_2_6_10_14] - kmovq %%KMASK1, [%%ALIGN_MASK + 8*%%TMP] - kmovb %%KMASK2, [%%MOV_MASK + %%TMP] - vpalignr %%DATA64B_L11{%%KMASK1}, %%DATA64B_L11, %%DATA64B_L10, 8 - vpalignr %%DATA64B_L10{%%KMASK1}, %%DATA64B_L10, %%DATA64B_L9, 8 - vpalignr %%DATA64B_L9{%%KMASK1}, %%DATA64B_L9, %%DATA64B_L8, 8 - vpalignr %%DATA64B_L8{%%KMASK1}, %%DATA64B_L8, %%DATA64B_L11, 8 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2]{%%KMASK2}, %%DATA64B_L8 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2 + 64], %%DATA64B_L9 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2 + 64*2], %%DATA64B_L10 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2 + 64*3], %%DATA64B_L11 - - pext DWORD(%%TMP), DWORD(%%LANE_MASK), [rel extr_bits_3_7_11_15] - kmovq %%KMASK1, [%%ALIGN_MASK + 8*%%TMP] - kmovb %%KMASK2, [%%MOV_MASK + %%TMP] - vpalignr %%DATA64B_L15{%%KMASK1}, %%DATA64B_L15, %%DATA64B_L14, 8 - vpalignr %%DATA64B_L14{%%KMASK1}, %%DATA64B_L14, %%DATA64B_L13, 8 - vpalignr %%DATA64B_L13{%%KMASK1}, %%DATA64B_L13, %%DATA64B_L12, 8 - vpalignr %%DATA64B_L12{%%KMASK1}, %%DATA64B_L12, %%DATA64B_L15, 8 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3]{%%KMASK2}, %%DATA64B_L12 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3 + 64], %%DATA64B_L13 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3 + 64*2], %%DATA64B_L14 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3 + 64*3], %%DATA64B_L15 -%endif -%endmacro - -; -; Function to store 64 bytes of keystream for 4 buffers -; Note: all the 64*4 bytes are not store contiguously. -; Each 64 bytes are stored every 512 bytes, being written in -; qword index 0, 1, 2 or 3 inside the 512 bytes, depending on the lane. 
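The stores above and below both target the shared keystream area whose layout is spelled out by the GET_KS() helper earlier in this file (base + lane4_idx*512 + bytes16_idx*64 + ks_idx*16). A minimal C sketch of that addressing, handy when reading the store offsets; the function name is illustrative and not part of the library API:

#include <stddef.h>

/*
 * Byte offset of one 16-byte keystream chunk inside the shared KS area,
 * mirroring the GET_KS() macro above:
 *   lane4_idx   - which group of 4 buffers (each group owns a 512-byte block)
 *   bytes16_idx - which 16-byte slice of the buffer's 128 bytes of KS (0..7)
 *   ks_idx      - position of the buffer within its group of 4 (0..3)
 */
static size_t ks_chunk_offset(unsigned lane4_idx, unsigned bytes16_idx,
                              unsigned ks_idx)
{
        return (size_t)lane4_idx * 512 + (size_t)bytes16_idx * 64 +
               (size_t)ks_idx * 16;
}

For example, the third 16-byte slice of the second buffer in group 1 sits at 512 + 2*64 + 16 = 656 bytes from the base.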
-%macro STORE_KSTR4 7 -%define %%KS %1 ; [in] Pointer to keystream -%define %%DATA64B_L0 %2 ; [in] 64 bytes of keystream for lane 0 -%define %%DATA64B_L1 %3 ; [in] 64 bytes of keystream for lane 1 -%define %%DATA64B_L2 %4 ; [in] 64 bytes of keystream for lane 2 -%define %%DATA64B_L3 %5 ; [in] 64 bytes of keystream for lane 3 -%define %%KEY_OFF %6 ; [in] Offset to start writing Keystream -%define %%LANE_GROUP %7 ; [immediate] 0, 1, 2 or 3 - - vmovdqu64 [%%KS + %%KEY_OFF*4 + 64*%%LANE_GROUP], %%DATA64B_L0 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 64*%%LANE_GROUP + 512], %%DATA64B_L1 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 64*%%LANE_GROUP + 512*2], %%DATA64B_L2 - vmovdqu64 [%%KS + %%KEY_OFF*4 + 64*%%LANE_GROUP + 512*3], %%DATA64B_L3 -%endmacro - -; -; Add two 32-bit args and reduce mod (2^31-1) -; -%macro ADD_MOD31 4 -%define %%IN_OUT %1 ; [in/out] ZMM register with first input and output -%define %%IN2 %2 ; [in] ZMM register with second input -%define %%ZTMP %3 ; [clobbered] Temporary ZMM register -%define %%MASK31 %4 ; [in] ZMM register containing 0x7FFFFFFF's in all dwords - - vpaddd %%IN_OUT, %%IN2 - vpsrld %%ZTMP, %%IN_OUT, 31 - vpandq %%IN_OUT, %%MASK31 - vpaddd %%IN_OUT, %%ZTMP -%endmacro - -; -; Rotate (mult by pow of 2) 32-bit arg and reduce mod (2^31-1) -; -%macro ROT_MOD31 4 -%define %%IN_OUT %1 ; [in/out] ZMM register with input and output -%define %%ZTMP %2 ; [clobbered] Temporary ZMM register -%define %%MASK31 %3 ; [in] ZMM register containing 0x7FFFFFFF's in all dwords -%define %%N_BITS %4 ; [immediate] Number of bits to rotate for each dword - - vpslld %%ZTMP, %%IN_OUT, %%N_BITS - vpsrld %%IN_OUT, %%IN_OUT, (31 - %%N_BITS) - vpternlogq %%IN_OUT, %%ZTMP, %%MASK31, 0xA8 ; (A | B) & C -%endmacro - -; -; Update LFSR registers, calculating S_16 -; -; S_16 = [ 2^15*S_15 + 2^17*S_13 + 2^21*S_10 + 2^20*S_4 + (1 + 2^8)*S_0 ] mod (2^31 - 1) -; If init mode, add W to the calculation above. 
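As a scalar reference for the two helper macros just defined and for the S_16 feedback formula in the comment above, a minimal C sketch of one LFSR update per lane in keystream ("work") mode; in "init" mode the code additionally folds W into the sum before the first addition. Function names are illustrative only:

#include <stdint.h>

#define MASK31 0x7FFFFFFFu

/* a + b mod (2^31 - 1), same carry folding as ADD_MOD31 (inputs < 2^31) */
static uint32_t add_mod31(uint32_t a, uint32_t b)
{
        const uint32_t s = a + b;

        return (s & MASK31) + (s >> 31);
}

/* x * 2^n mod (2^31 - 1), i.e. a 31-bit rotation, as in ROT_MOD31 */
static uint32_t rot_mod31(uint32_t x, unsigned n)
{
        return ((x << n) | (x >> (31 - n))) & MASK31;
}

/*
 * One LFSR update in "work" mode for a single lane:
 * S_16 = [ 2^15*S_15 + 2^17*S_13 + 2^21*S_10 + 2^20*S_4 + (1 + 2^8)*S_0 ]
 *        mod (2^31 - 1), and S_16 overwrites the slot holding S_0.
 * 'round' reproduces the circular state indexing used by LFSR_UPDT16.
 */
static void lfsr_update_work(uint32_t s[16], unsigned round)
{
        uint32_t f = s[(0 + round) % 16];

        f = add_mod31(f, rot_mod31(s[(0 + round) % 16], 8));
        f = add_mod31(f, rot_mod31(s[(4 + round) % 16], 20));
        f = add_mod31(f, rot_mod31(s[(10 + round) % 16], 21));
        f = add_mod31(f, rot_mod31(s[(13 + round) % 16], 17));
        f = add_mod31(f, rot_mod31(s[(15 + round) % 16], 15));
        s[(0 + round) % 16] = f;
}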
-; S_16 -> S_15 for next round -; -%macro LFSR_UPDT16 13 -%define %%STATE %1 ; [in] ZUC state -%define %%ROUND_NUM %2 ; [in] Round number -%define %%LANE_MASK %3 ; [in] Mask register with lanes to update -%define %%LFSR_0 %4 ; [clobbered] LFSR_0 -%define %%LFSR_4 %5 ; [clobbered] LFSR_2 -%define %%LFSR_10 %6 ; [clobbered] LFSR_5 -%define %%LFSR_13 %7 ; [clobbered] LFSR_7 -%define %%LFSR_15 %8 ; [clobbered] LFSR_9 -%define %%ZTMP %9 ; [clobbered] Temporary ZMM register -%define %%MASK_31 %10 ; [in] Mask_31 -%define %%W %11 ; [in/clobbered] In init mode, contains W for all 16 lanes -%define %%KTMP %12 ; [clobbered] Temporary K mask -%define %%MODE %13 ; [constant] "init" / "work" mode - - vmovdqa64 %%LFSR_0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_4, [%%STATE + (( 4 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_10, [%%STATE + ((10 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_13, [%%STATE + ((13 + %%ROUND_NUM) % 16)*64] - vmovdqa64 %%LFSR_15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*64] - - ; Calculate LFSR feedback (S_16) - - ; In Init mode, W is added to the S_16 calculation -%ifidn %%MODE, init - ADD_MOD31 %%W, %%LFSR_0, %%ZTMP, %%MASK_31 -%else - vmovdqa64 %%W, %%LFSR_0 -%endif - ROT_MOD31 %%LFSR_0, %%ZTMP, %%MASK_31, 8 - ADD_MOD31 %%W, %%LFSR_0, %%ZTMP, %%MASK_31 - ROT_MOD31 %%LFSR_4, %%ZTMP, %%MASK_31, 20 - ADD_MOD31 %%W, %%LFSR_4, %%ZTMP, %%MASK_31 - ROT_MOD31 %%LFSR_10, %%ZTMP, %%MASK_31, 21 - ADD_MOD31 %%W, %%LFSR_10, %%ZTMP, %%MASK_31 - ROT_MOD31 %%LFSR_13, %%ZTMP, %%MASK_31, 17 - ADD_MOD31 %%W, %%LFSR_13, %%ZTMP, %%MASK_31 - ROT_MOD31 %%LFSR_15, %%ZTMP, %%MASK_31, 15 - ADD_MOD31 %%W, %%LFSR_15, %%ZTMP, %%MASK_31 - - vmovdqa32 [%%STATE + (( 0 + %%ROUND_NUM) % 16)*64]{%%LANE_MASK}, %%W - - ; LFSR_S16 = (LFSR_S15++) = eax -%endmacro - -; -; Initialize LFSR registers for a single lane, for ZUC-128 -; -; From spec, s_i (LFSR) registers need to be loaded as follows: -; -; For 0 <= i <= 15, let s_i= k_i || d_i || iv_i. -; Where k_i is each byte of the key, d_i is a 15-bit constant -; and iv_i is each byte of the IV. 
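A C sketch of the loading rule just described, for one lane. The 16-bit array d[] stands for the spec's 15-bit constants d_i; the EK_d64 table referenced below appears to hold them already shifted into position, which is why the assembly can simply OR it in. Names are illustrative:

#include <stdint.h>

/*
 * ZUC-128 LFSR loading: each 31-bit cell is s_i = k_i || d_i || iv_i,
 * i.e. the key byte in bits 30..23, the 15-bit constant d_i in bits 22..8
 * and the IV byte in bits 7..0.
 */
static void zuc128_load_lfsr(const uint8_t key[16], const uint8_t iv[16],
                             const uint16_t d[16], uint32_t s[16])
{
        for (int i = 0; i < 16; i++)
                s[i] = ((uint32_t)key[i] << 23) |
                       ((uint32_t)d[i] << 8) |
                       (uint32_t)iv[i];
}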
-; -%macro INIT_LFSR_128 4 -%define %%KEY %1 ;; [in] Key pointer -%define %%IV %2 ;; [in] IV pointer -%define %%LFSR %3 ;; [out] ZMM register to contain initialized LFSR regs -%define %%ZTMP %4 ;; [clobbered] ZMM temporary register - - vbroadcasti64x2 %%LFSR, [%%KEY] - vbroadcasti64x2 %%ZTMP, [%%IV] - vpshufb %%LFSR, [rel shuf_mask_key] - vpsrld %%LFSR, 1 - vpshufb %%ZTMP, [rel shuf_mask_iv] - vpternlogq %%LFSR, %%ZTMP, [rel EK_d64], 0xFE ; A OR B OR C - -%endmacro - -; -; Initialize LFSR registers for a single lane, for ZUC-256 -; -%macro INIT_LFSR_256 11 -%define %%KEY %1 ;; [in] Key pointer -%define %%IV %2 ;; [in] IV pointer -%define %%LFSR %3 ;; [out] ZMM register to contain initialized LFSR regs -%define %%ZTMP1 %4 ;; [clobbered] ZMM temporary register -%define %%ZTMP2 %5 ;; [clobbered] ZMM temporary register -%define %%ZTMP3 %6 ;; [clobbered] ZMM temporary register -%define %%ZTMP4 %7 ;; [clobbered] ZMM temporary register -%define %%ZTMP5 %8 ;; [clobbered] ZMM temporary register -%define %%CONSTANTS %9 ;; [in] Address to constants -%define %%SHIFT_MASK %10 ;; [in] Mask register to shift K_31 -%define %%IV_MASK %11 ;; [in] Mask register to read IV (last 10 bytes) - - vmovdqu8 XWORD(%%ZTMP4){%%IV_MASK}, [%%IV + 16] - ; Zero out first 2 bits of IV bytes 17-24 - vpandq XWORD(%%ZTMP4), [rel iv_mask_low_6] - vshufi32x4 %%ZTMP4, %%ZTMP4, 0 - vbroadcasti64x2 %%ZTMP1, [%%KEY] - vbroadcasti64x2 %%ZTMP2, [%%KEY + 16] - vbroadcasti64x2 %%ZTMP3, [%%IV] - - vpshufb %%ZTMP5, %%ZTMP1, [rel shuf_mask_key256_first_high] - vpshufb %%LFSR, %%ZTMP3, [rel shuf_mask_iv256_first_high] - vporq %%LFSR, %%ZTMP5 - vpsrld %%LFSR, 1 - - vpshufb %%ZTMP5, %%ZTMP2, [rel shuf_mask_key256_second] - vpsrld %%ZTMP5{%%SHIFT_MASK}, 4 - vpandq %%ZTMP5, [rel key_mask_low_4] - - vpshufb %%ZTMP1, [rel shuf_mask_key256_first_low] - vpshufb %%ZTMP3, [rel shuf_mask_iv256_first_low] - vpshufb %%ZTMP4, [rel shuf_mask_iv256_second] - - vpternlogq %%LFSR, %%ZTMP5, %%ZTMP1, 0xFE - vpternlogq %%LFSR, %%ZTMP3, %%ZTMP4, 0xFE - - vporq %%LFSR, [%%CONSTANTS] -%endmacro - -%macro INIT_16_AVX512 1 -%define %%KEY_SIZE %1 ; [in] Key size (128 or 256) - -%ifdef LINUX - %define pKe rdi - %define pIv rsi - %define pState rdx - %define lane_mask ecx -%else - %define pKe rcx - %define pIv rdx - %define pState r8 - %define lane_mask r9d -%endif -%define tag_sz r10d ; Only used in ZUC-256 (caller written in assembly, so using a hardcoded register) -%define tag_sz_q r10 - -%define %%X0 zmm10 -%define %%X1 zmm11 -%define %%X2 zmm12 -%define %%W zmm13 -%define %%R1 zmm14 -%define %%R2 zmm15 - - FUNC_SAVE - - mov rax, pState - - kmovw k2, lane_mask - -%if %%KEY_SIZE == 256 - ; Get pointer to constants (depending on tag size, this will point at - ; constants for encryption, authentication with 4-byte, 8-byte or 16-byte tags) - lea r13, [rel EK256_d64] - bsf tag_sz, tag_sz - dec tag_sz - shl tag_sz, 6 - add r13, tag_sz_q - mov r11, 0x4000 ; Mask to shift 4 bits only in the 15th dword - kmovq k1, r11 - mov r11, 0x3ff ; Mask to read 10 bytes of IV - kmovq k3, r11 -%endif - - ; Set LFSR registers for Packet 1 - mov r9, [pKe] ; Load Key 1 pointer - lea r10, [pIv] ; Load IV 1 pointer - -%if %%KEY_SIZE == 128 - INIT_LFSR_128 r9, r10, zmm0, zmm1 -%else - INIT_LFSR_256 r9, r10, zmm0, zmm3, zmm5, zmm7, zmm9, zmm11, r13, k1, k3 -%endif - ; Set LFSR registers for Packets 2-15 -%assign idx 1 -%assign reg_lfsr 2 -%assign reg_tmp 3 -%rep 14 - mov r9, [pKe + 8*idx] ; Load Key N pointer - lea r10, [pIv + 32*idx] ; Load IV N pointer -%if %%KEY_SIZE == 128 - INIT_LFSR_128 
r9, r10, APPEND(zmm, reg_lfsr), APPEND(zmm, reg_tmp) -%else - INIT_LFSR_256 r9, r10, APPEND(zmm, reg_lfsr), zmm3, zmm5, zmm7, zmm9, zmm11, r13, k1, k3 -%endif -%assign idx (idx + 1) -%assign reg_lfsr (reg_lfsr + 2) -%assign reg_tmp (reg_tmp + 2) -%endrep - - ; Set LFSR registers for Packet 16 - mov r9, [pKe + 8*15] ; Load Key 16 pointer - lea r10, [pIv + 32*15] ; Load IV 16 pointer -%if %%KEY_SIZE == 128 - INIT_LFSR_128 r9, r10, zmm30, zmm31 -%else - INIT_LFSR_256 r9, r10, zmm30, zmm3, zmm5, zmm7, zmm9, zmm11, r13, k1, k3 -%endif - ; Store LFSR registers in memory (reordering first, so all S0 regs - ; are together, then all S1 regs... until S15) - TRANSPOSE16_U32 zmm0, zmm2, zmm4, zmm6, zmm8, zmm10, zmm12, zmm14, \ - zmm16, zmm18, zmm20, zmm22, zmm24, zmm26, zmm28, zmm30, \ - zmm1, zmm3, zmm5, zmm7, zmm9, zmm11, zmm13, zmm15, \ - zmm17, zmm19, zmm21, zmm23, zmm25, zmm27 - -%assign i 0 -%assign j 0 -%rep 16 - vmovdqa32 [pState + 64*i]{k2}, APPEND(zmm, j) -%assign i (i+1) -%assign j (j+2) -%endrep - - ; Load read-only registers - vmovdqa64 zmm0, [rel mask31] - mov edx, 0xAAAAAAAA - kmovd k1, edx - - ; Zero out R1, R2 - vpxorq %%R1, %%R1 - vpxorq %%R2, %%R2 - - ; Shift LFSR 32-times, update state variables -%assign N 0 -%rep 32 - BITS_REORG16 rax, N, k2, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, \ - zmm7, zmm8, zmm9, k1, %%X0, %%X1, %%X2 - NONLIN_FUN16 rax, k2, %%X0, %%X1, %%X2, %%R1, %%R2, \ - zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, %%W - vpsrld %%W,1 ; Shift out LSB of W - - LFSR_UPDT16 rax, N, k2, zmm1, zmm2, zmm3, zmm4, zmm5, \ - zmm6, zmm0, %%W, k7, init ; W used in LFSR update -%assign N N+1 -%endrep - - ; And once more, initial round from keygen phase = 33 times - BITS_REORG16 rax, 0, k2, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7, \ - zmm8, zmm9, k1, %%X0, %%X1, %%X2 - NONLIN_FUN16 rax, k2, %%X0, %%X1, %%X2, %%R1, %%R2, \ - zmm1, zmm2, zmm3, zmm4, zmm5, zmm6 - - LFSR_UPDT16 rax, 0, k2, zmm1, zmm2, zmm3, zmm4, zmm5, \ - zmm6, zmm0, %%W, k7, work - - ; Update R1, R2 - vmovdqa32 [rax + OFS_R1]{k2}, %%R1 - vmovdqa32 [rax + OFS_R2]{k2}, %%R2 - FUNC_RESTORE - -%endmacro - -;; -;; void asm_ZucInitialization_16_avx512(ZucKey16_t *pKeys, ZucIv16_t *pIvs, -;; ZucState16_t *pState) -;; -MKGLOBAL(ZUC128_INIT,function,internal) -ZUC128_INIT: - endbranch64 - INIT_16_AVX512 128 - - ret - -;; -;; void asm_Zuc256Initialization_16_avx512(ZucKey16_t *pKeys, ZucIv16_t *pIvs, -;; ZucState16_t *pState, uint32_t tag_sz) -;; -MKGLOBAL(ZUC256_INIT,function,internal) -ZUC256_INIT: - endbranch64 - INIT_16_AVX512 256 - - ret - -; -; Generate N*4 bytes of keystream -; for 16 buffers (where N is number of rounds) -; -%macro KEYGEN_16_AVX512 3-4 -%define %%NUM_ROUNDS %1 ; [in] Number of 4-byte rounds -%define %%STORE_SINGLE %2 ; [in] If 1, KS will be stored continuously in a single buffer -%define %%KEY_OFF %3 ; [in] Offset to start writing Keystream -%define %%LANE_MASK %4 ; [in] Lane mask with lanes to generate all keystream words - - %define pState arg1 - %define pKS arg2 - -%define %%X0 zmm10 -%define %%X1 zmm11 -%define %%X2 zmm12 -%define %%W zmm13 -%define %%R1 zmm14 -%define %%R2 zmm15 - - FUNC_SAVE - - ; Load read-only registers - vmovdqa64 zmm0, [rel mask31] - mov r10d, 0xAAAAAAAA - kmovd k1, r10d - -%if (%0 == 4) - kmovd k2, DWORD(%%LANE_MASK) - knotd k4, k2 - mov r10d, 0x0000FFFF - kmovd k3, r10d -%else - mov r10d, 0x0000FFFF - kmovd k2, r10d - kmovd k3, k2 -%endif - - ; Read R1/R2 - vmovdqa32 %%R1, [pState + OFS_R1] - vmovdqa32 %%R2, [pState + OFS_R2] -; Store all 4 bytes of keystream in a single 64-byte buffer 
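Each 4-byte round below pairs BITS_REORG16 (defined earlier) with NONLIN_FUN16, and the keystream word that gets stored is X3 XOR W ("OFS_X3 XOR W" in the comments, with W the output of the nonlinear function). A scalar sketch of the bit-reorganization step for a single lane, following the ZUC spec; the nonlinear function F and its S-boxes are deliberately left out here. Names are illustrative:

#include <stdint.h>

/* High/low 16-bit halves of a 31-bit LFSR cell (bits 30..15 and 15..0). */
#define HI16(v) (((v) >> 15) & 0xFFFFu)
#define LO16(v) ((v) & 0xFFFFu)

/*
 * Bit reorganization for one lane and one round: builds the four 32-bit
 * words X0..X3; X0..X2 feed F and the round's keystream word is X3 ^ W.
 * 'round' reproduces the circular indexing used by BITS_REORG16.
 */
static void bits_reorg(const uint32_t s[16], unsigned round, uint32_t x[4])
{
        x[0] = (HI16(s[(15 + round) % 16]) << 16) | LO16(s[(14 + round) % 16]);
        x[1] = (LO16(s[(11 + round) % 16]) << 16) | HI16(s[(9 + round) % 16]);
        x[2] = (LO16(s[(7 + round) % 16]) << 16) | HI16(s[(5 + round) % 16]);
        x[3] = (LO16(s[(2 + round) % 16]) << 16) | HI16(s[(0 + round) % 16]);
}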
-%if (%%NUM_ROUNDS == 1) - BITS_REORG16 pState, 1, k2, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, \ - zmm7, zmm8, zmm9, k1, %%X0, %%X1, %%X2, zmm16 - NONLIN_FUN16 pState, k2, %%X0, %%X1, %%X2, %%R1, %%R2, \ - zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7 - ; OFS_X3 XOR W (zmm7) - vpxorq zmm16, zmm7 - LFSR_UPDT16 pState, 1, k2, zmm1, zmm2, zmm3, zmm4, zmm5, \ - zmm6, zmm0, zmm7, k7, work - vmovdqa32 [pState + OFS_R1]{k2}, %%R1 - vmovdqa32 [pState + OFS_R2]{k2}, %%R2 -%else ;; %%NUM_ROUNDS != 1 - ; Generate N*4B of keystream in N rounds - ; Generate first bytes of KS for all lanes -%assign N 1 -%assign idx 16 -%rep (%%NUM_ROUNDS-2) - BITS_REORG16 pState, N, k3, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, \ - zmm7, zmm8, zmm9, k1, %%X0, %%X1, %%X2, APPEND(zmm, idx) - NONLIN_FUN16 pState, k3, %%X0, %%X1, %%X2, %%R1, %%R2, \ - zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7 - ; OFS_X3 XOR W (zmm7) - vpxorq APPEND(zmm, idx), zmm7 - LFSR_UPDT16 pState, N, k3, zmm1, zmm2, zmm3, zmm4, zmm5, \ - zmm6, zmm0, zmm7, k7, work -%assign N N+1 -%assign idx (idx + 1) -%endrep -%if (%%NUM_ROUNDS > 2) - vmovdqa32 [pState + OFS_R1]{k3}, %%R1 - vmovdqa32 [pState + OFS_R2]{k3}, %%R2 -%endif - - ; Generate rest of the KS bytes (last 8 bytes) for selected lanes -%rep 2 - BITS_REORG16 pState, N, k2, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, \ - zmm7, zmm8, zmm9, k1, %%X0, %%X1, %%X2, APPEND(zmm, idx) - NONLIN_FUN16 pState, k2, %%X0, %%X1, %%X2, %%R1, %%R2, \ - zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7 - ; OFS_X3 XOR W (zmm7) - vpxorq APPEND(zmm, idx), zmm7 - LFSR_UPDT16 pState, N, k2, zmm1, zmm2, zmm3, zmm4, zmm5, \ - zmm6, zmm0, zmm7, k7, work -%assign N N+1 -%assign idx (idx + 1) -%endrep - vmovdqa32 [pState + OFS_R1]{k2}, %%R1 - vmovdqa32 [pState + OFS_R2]{k2}, %%R2 -%endif ;; (%%NUM_ROUNDS == 1) - -%if (%%STORE_SINGLE == 1) - vmovdqa32 [pKS]{k2}, zmm16 -%else - ; ZMM16-31 contain the keystreams for each round - ; Perform a 32-bit 16x16 transpose to have up to 64 bytes - ; (NUM_ROUNDS * 4B) of each lane in a different register - TRANSPOSE16_U32_INTERLEAVED zmm16, zmm17, zmm18, zmm19, zmm20, zmm21, zmm22, zmm23, \ - zmm24, zmm25, zmm26, zmm27, zmm28, zmm29, zmm30, zmm31, \ - zmm0, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7, \ - zmm8, zmm9 - -%if (%0 == 4) - lea r12, [rel alignr_mask] - lea r13, [rel mov_mask] - STORE_KSTR16 pKS, zmm6, zmm4, zmm28, zmm16, zmm7, zmm5, zmm29, zmm17, \ - zmm2, zmm0, zmm30, zmm18, zmm3, zmm1, zmm31, zmm19, %%KEY_OFF, \ - %%LANE_MASK, r12, r13, r10, k3, k5 -%else - STORE_KSTR16 pKS, zmm6, zmm4, zmm28, zmm16, zmm7, zmm5, zmm29, zmm17, \ - zmm2, zmm0, zmm30, zmm18, zmm3, zmm1, zmm31, zmm19, %%KEY_OFF -%endif -%endif ;; %%STORE_SINGLE == 1 - - ; Reorder LFSR registers -%if (%0 == 4) - REORDER_LFSR pState, %%NUM_ROUNDS, k2 -%if (%%NUM_ROUNDS >= 2) - REORDER_LFSR pState, (%%NUM_ROUNDS - 2), k4 ; 2 less rounds for "old" buffers -%endif -%else - REORDER_LFSR pState, %%NUM_ROUNDS, k2 -%endif - - FUNC_RESTORE - -%endmacro - -;; -;; Reverse bits of each byte of a XMM register -;; -%macro REVERSE_BITS 7 -%define %%DATA_IN %1 ; [in] Input data -%define %%DATA_OUT %2 ; [out] Output data -%define %%TABLE_L %3 ; [in] Table to shuffle low nibbles -%define %%TABLE_H %4 ; [in] Table to shuffle high nibbles -%define %%REV_AND_TABLE %5 ; [in] Mask to keep low nibble of each byte -%define %%XTMP1 %6 ; [clobbered] Temporary XMM register -%define %%XTMP2 %7 ; [clobbered] Temporary XMM register - - vpandq %%XTMP1, %%DATA_IN, %%REV_AND_TABLE - - vpandnq %%XTMP2, %%REV_AND_TABLE, %%DATA_IN - vpsrld %%XTMP2, 4 - - vpshufb %%DATA_OUT, %%TABLE_H, 
%%XTMP1 ; bit reverse low nibbles (use high table) - vpshufb %%XTMP2, %%TABLE_L, %%XTMP2 ; bit reverse high nibbles (use low table) - - vporq %%DATA_OUT, %%XTMP2 -%endmacro - -;; -;; Set up data and KS bytes and use PCLMUL to digest data, -;; then the result gets XOR'ed with the previous digest. -;; This macro can be used with XMM (for 1 buffer), -;; YMM (for 2 buffers) or ZMM registers (for 4 buffers). -;; To use it with YMM and ZMM registers, VPCMULQDQ must be -;; supported. -;; -%macro DIGEST_DATA 11 -%define %%DATA %1 ; [in] Input data (16 bytes) -%define %%KS_L %2 ; [in] Lower 16 bytes of KS -%define %%KS_H %3 ; [in] Higher 16 bytes of KS -%define %%IN_OUT %4 ; [in/out] Accumulated digest -%define %%KMASK %5 ; [in] Shuffle mask register -%define %%TMP1 %6 ; [clobbered] Temporary XMM/YMM/ZMM register -%define %%TMP2 %7 ; [clobbered] Temporary XMM/YMM/ZMM register -%define %%TMP3 %8 ; [clobbered] Temporary XMM/YMM/ZMM register -%define %%TMP4 %9 ; [clobbered] Temporary XMM/YMM/ZMM register -%define %%TMP5 %10 ; [clobbered] Temporary XMM/YMM/ZMM register -%define %%TMP6 %11 ; [clobbered] Temporary XMM/YMM/ZMM register - - ;; Set up KS - ;; - ;; KS_L contains bytes 15:0 of KS (for 1, 2 or 4 buffers) - ;; KS_H contains bytes 31:16 of KS (for 1, 2 or 4 buffers) - ;; TMP1 to contain bytes in the following order [7:4 11:8 3:0 7:4] - ;; TMP2 to contain bytes in the following order [15:12 19:16 11:8 15:12] - vpalignr %%TMP1, %%KS_H, %%KS_L, 8 - vpshufd %%TMP2, %%KS_L, 0x61 - vpshufd %%TMP1, %%TMP1, 0x61 - - ;; Set up DATA - ;; - ;; DATA contains 16 bytes of input data (for 1, 2 or 4 buffers) - ;; TMP3 to contain bytes in the following order [4*0's 7:4 4*0's 3:0] - ;; TMP3 to contain bytes in the following order [4*0's 15:12 4*0's 11:8] - vpshufd %%TMP3{%%KMASK}{z}, %%DATA, 0x10 - vpshufd %%TMP4{%%KMASK}{z}, %%DATA, 0x32 - - ;; PCMUL the KS's with the DATA - ;; XOR the results from 4 32-bit words together - vpclmulqdq %%TMP5, %%TMP3, %%TMP2, 0x00 - vpclmulqdq %%TMP3, %%TMP3, %%TMP2, 0x11 - vpclmulqdq %%TMP6, %%TMP4, %%TMP1, 0x00 - vpclmulqdq %%TMP4, %%TMP4, %%TMP1, 0x11 - vpternlogq %%TMP5, %%TMP3, %%TMP6, 0x96 - vpternlogq %%IN_OUT, %%TMP5, %%TMP4, 0x96 -%endmacro - -; -; Generate 64 bytes of keystream -; for 16 buffers and authenticate 64 bytes of data -; -%macro ZUC_EIA3_16_64B_AVX512 6 -%define %%STATE %1 ; [in] ZUC state -%define %%KS %2 ; [in] Pointer to keystream (128x16 bytes) -%define %%T %3 ; [in] Pointer to digests -%define %%DATA %4 ; [in] Pointer to array of pointers to data buffers -%define %%LEN %5 ; [in] Pointer to array of remaining length to digest -%define %%NROUNDS %6 ; [in/clobbered] Number of rounds of 64 bytes of data to digest - -%define %%DATA_ADDR0 rbx -%define %%DATA_ADDR1 r12 -%define %%DATA_ADDR2 r13 -%define %%DATA_ADDR3 r14 -%define %%OFFSET r15 - -%define %%DIGEST_0 zmm28 -%define %%DIGEST_1 zmm29 -%define %%DIGEST_2 zmm30 -%define %%DIGEST_3 zmm31 - -%define %%ZTMP1 zmm1 -%define %%ZTMP2 zmm2 -%define %%ZTMP3 zmm3 -%define %%ZTMP4 zmm4 -%define %%ZTMP5 zmm5 -%define %%ZTMP6 zmm6 -%define %%ZTMP7 zmm7 -%define %%ZTMP8 zmm8 -%define %%ZTMP9 zmm9 - -%define %%ZKS_L %%ZTMP9 -%define %%ZKS_H zmm21 - -%define %%XTMP1 xmm1 -%define %%XTMP2 xmm2 -%define %%XTMP3 xmm3 -%define %%XTMP4 xmm4 -%define %%XTMP5 xmm5 -%define %%XTMP6 xmm6 -%define %%XTMP7 xmm7 -%define %%XTMP9 xmm9 -%define %%KS_L %%XTMP9 -%define %%KS_H xmm21 -%define %%XDIGEST_0 xmm13 -%define %%XDIGEST_1 xmm14 -%define %%XDIGEST_2 xmm19 -%define %%XDIGEST_3 xmm20 -%define %%Z_TEMP_DIGEST zmm15 -%define 
%%REV_TABLE_L xmm16 -%define %%REV_TABLE_H xmm17 -%define %%REV_AND_TABLE xmm18 - -; Defines used in KEYGEN -%define %%MASK31 zmm0 - -%define %%X0 zmm10 -%define %%X1 zmm11 -%define %%X2 zmm12 -%define %%R1 zmm22 -%define %%R2 zmm23 - -%define %%KS_0 zmm24 -%define %%KS_1 zmm25 -%define %%KS_2 zmm26 -%define %%KS_3 zmm27 - - xor %%OFFSET, %%OFFSET - - mov r12d, 0xAAAAAAAA - kmovd k1, r12d - - mov r12d, 0x0000FFFF - kmovd k2, r12d - - mov r12d, 0x55555555 - kmovd k3, r12d - - mov r12d, 0x3333 - kmovd k4, r12d - mov r12d, 0xCCCC - kmovd k5, r12d - - vpxorq %%DIGEST_0, %%DIGEST_0 - vpxorq %%DIGEST_1, %%DIGEST_1 - vpxorq %%DIGEST_2, %%DIGEST_2 - vpxorq %%DIGEST_3, %%DIGEST_3 - - ; Load read-only registers - vmovdqa64 %%MASK31, [rel mask31] - -%if USE_GFNI_VAES_VPCLMUL == 0 - vmovdqa64 %%REV_TABLE_L, [bit_reverse_table_l] - vmovdqa64 %%REV_TABLE_H, [bit_reverse_table_h] - vmovdqa64 %%REV_AND_TABLE, [bit_reverse_and_table] -%endif - - ; Read R1/R2 - vmovdqa32 %%R1, [%%STATE + OFS_R1] - vmovdqa32 %%R2, [%%STATE + OFS_R2] - - ;; - ;; Generate keystream and digest 64 bytes on each iteration - ;; -%%_loop: - ;; Generate 64B of keystream in 16 (4x4) rounds - ;; N goes from 1 to 16, within two nested reps of 4 iterations - ;; The outer "rep" loop iterates through 4 groups of lanes (4 buffers each), - ;; the inner "rep" loop iterates through the data for each group: - ;; each iteration digests 16 bytes of data (in case of having VPCLMUL - ;; data from the 4 buffers is digested in one go (using ZMM registers), otherwise, - ;; data is digested in 4 iterations (using XMM registers) -%assign %%N 1 -%assign %%LANE_GROUP 0 -%rep 4 - mov %%DATA_ADDR0, [%%DATA + %%LANE_GROUP*8 + 0*32] - mov %%DATA_ADDR1, [%%DATA + %%LANE_GROUP*8 + 1*32] - mov %%DATA_ADDR2, [%%DATA + %%LANE_GROUP*8 + 2*32] - mov %%DATA_ADDR3, [%%DATA + %%LANE_GROUP*8 + 3*32] - -%assign %%idx 0 -%rep 4 - BITS_REORG16 %%STATE, %%N, k2, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, \ - %%ZTMP7, %%ZTMP8, %%ZTMP9, k1, %%X0, %%X1, %%X2, APPEND(%%KS_, %%idx) - NONLIN_FUN16 %%STATE, k2, %%X0, %%X1, %%X2, %%R1, %%R2, \ - %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%ZTMP7 - ; OFS_X3 XOR W (%%ZTMP7) - vpxorq APPEND(%%KS_, %%idx), %%ZTMP7 - LFSR_UPDT16 %%STATE, %%N, k2, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ - %%ZTMP6, %%MASK31, %%ZTMP7, k7, work - - ;; Transpose and store KS every 16 bytes -%if %%idx == 3 - TRANSPOSE4_U32_INTERLEAVED %%KS_0, %%KS_1, %%KS_2, %%KS_3, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4 - - STORE_KSTR4 %%KS, %%KS_0, %%KS_1, %%KS_2, %%KS_3, 64, %%LANE_GROUP -%endif - - ;; Digest next 16 bytes of data for 4 buffers -%if USE_GFNI_VAES_VPCLMUL == 1 - ;; If VPCMUL is available, read chunks of 16x4 bytes of data - ;; and digest them with 24x4 bytes of KS, then XOR their digest - ;; with previous digest (with DIGEST_DATA) - - ; Read 4 blocks of 16 bytes of data and put them in a register - vmovdqu64 %%XTMP1, [%%DATA_ADDR0 + 16*%%idx + %%OFFSET] - vinserti32x4 %%ZTMP1, [%%DATA_ADDR1 + 16*%%idx + %%OFFSET], 1 - vinserti32x4 %%ZTMP1, [%%DATA_ADDR2 + 16*%%idx + %%OFFSET], 2 - vinserti32x4 %%ZTMP1, [%%DATA_ADDR3 + 16*%%idx + %%OFFSET], 3 - - ; Read 8 blocks of 16 bytes of KS - vmovdqa64 %%ZKS_L, [GET_KS(%%KS, %%LANE_GROUP, %%idx, 0)] - vmovdqa64 %%ZKS_H, [GET_KS(%%KS, %%LANE_GROUP, (%%idx + 1), 0)] - - ; Reverse bits of next 16 bytes from all 4 buffers - vgf2p8affineqb %%ZTMP7, %%ZTMP1, [rel bit_reverse_table], 0x00 - - ; Digest 16 bytes of data with 24 bytes of KS, for 4 buffers - DIGEST_DATA %%ZTMP7, %%ZKS_L, %%ZKS_H, 
APPEND(%%DIGEST_, %%LANE_GROUP), k3, \ - %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6 - -%else ; USE_GFNI_VAES_VPCLMUL == 1 - ;; If VPCMUL is NOT available, read chunks of 16 bytes of data - ;; and digest them with 24 bytes of KS, and repeat this for 4 different buffers - ;; then insert these digests into a ZMM register and XOR with previous digest - -%assign %%J 0 -%rep 4 -%if %%idx == 0 - ; Reset temporary digests (for the first 16 bytes) - vpxorq APPEND(%%XDIGEST_, %%J), APPEND(%%XDIGEST_, %%J) -%endif - ; Read the next 2 blocks of 16 bytes of KS - vmovdqa64 %%KS_L, [GET_KS(%%KS, %%LANE_GROUP, %%idx, %%J)] - vmovdqa64 %%KS_H, [GET_KS(%%KS, %%LANE_GROUP, (%%idx + 1), %%J)] - - ;; read 16 bytes and reverse bits - vmovdqu64 %%XTMP1, [APPEND(%%DATA_ADDR, %%J) + %%idx*16 + %%OFFSET] - REVERSE_BITS %%XTMP1, %%XTMP7, %%REV_TABLE_L, %%REV_TABLE_H, \ - %%REV_AND_TABLE, %%XTMP2, %%XTMP3 - - ; Digest 16 bytes of data with 24 bytes of KS, for one buffer - DIGEST_DATA %%XTMP7, %%KS_L, %%KS_H, APPEND(%%XDIGEST_, %%J), k3, \ - %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6 - - ; Once all 64 bytes of data have been digested, insert them in temporary ZMM register -%if %%idx == 3 - vinserti32x4 %%Z_TEMP_DIGEST, APPEND(%%XDIGEST_, %%J), %%J -%endif -%assign %%J (%%J + 1) -%endrep ; %rep 4 %%J - - ; XOR with previous digest -%if %%idx == 3 - vpxorq APPEND(%%DIGEST_, %%LANE_GROUP), %%Z_TEMP_DIGEST -%endif -%endif ;; USE_GFNI_VAES_VPCLMUL == 0 -%assign %%idx (%%idx + 1) -%assign %%N %%N+1 -%endrep ; %rep 4 %%idx - -%assign %%LANE_GROUP (%%LANE_GROUP + 1) -%endrep ; %rep 4 %%LANE_GROUP - -%assign %%LANE_GROUP 0 -%rep 4 - ; Memcpy KS 64-127 bytes to 0-63 bytes - vmovdqa64 %%ZTMP3, [%%KS + %%LANE_GROUP*512 + 64*4] - vmovdqa64 %%ZTMP4, [%%KS + %%LANE_GROUP*512 + 64*5] - vmovdqa64 %%ZTMP5, [%%KS + %%LANE_GROUP*512 + 64*6] - vmovdqa64 %%ZTMP6, [%%KS + %%LANE_GROUP*512 + 64*7] - vmovdqa64 [%%KS + %%LANE_GROUP*512], %%ZTMP3 - vmovdqa64 [%%KS + %%LANE_GROUP*512 + 64], %%ZTMP4 - vmovdqa64 [%%KS + %%LANE_GROUP*512 + 64*2], %%ZTMP5 - vmovdqa64 [%%KS + %%LANE_GROUP*512 + 64*3], %%ZTMP6 -%assign %%LANE_GROUP (%%LANE_GROUP + 1) -%endrep ; %rep 4 %%LANE_GROUP - - add %%OFFSET, 64 - - dec %%NROUNDS - jnz %%_loop - - ;; - update tags - vmovdqu64 %%ZTMP1, [%%T] ; Input tags - vmovdqa64 %%ZTMP2, [rel shuf_mask_tags_0_4_8_12] - vmovdqa64 %%ZTMP3, [rel shuf_mask_tags_0_4_8_12 + 64] - ; Get result tags for 16 buffers in different position in each lane - ; and blend these tags into an ZMM register. - ; Then, XOR the results with the previous tags and write out the result. 
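For reference, a scalar C sketch of the quantity that each lane's digest accumulator collects in the loop above and that DIGEST_DATA computes with carry-less multiplies: for every set message bit, the 32 keystream bits starting at that bit offset are XORed into the tag. This covers only the per-64-byte accumulation; EIA3's final length-dependent terms are handled elsewhere. The function name is illustrative; z[] is the lane's keystream viewed as 32-bit words, 17 of which cover 64 bytes of data:

#include <stdint.h>

/*
 * ZUC-EIA3 universal-hash step over 64 bytes of data for one buffer:
 * message bits are taken MSB first within each byte, and bit offset i
 * selects the 32 keystream bits starting at bit i of z[0] || z[1] || ...
 */
static uint32_t eia3_round64b_ref(uint32_t tag, const uint8_t data[64],
                                  const uint32_t z[17])
{
        for (unsigned i = 0; i < 64 * 8; i++) {
                if ((data[i / 8] >> (7 - (i % 8))) & 1) {
                        const unsigned k = i / 32, r = i % 32;
                        const uint64_t w = ((uint64_t)z[k] << 32) | z[k + 1];

                        tag ^= (uint32_t)(w >> (32 - r));
                }
        }
        return tag;
}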
- vpermt2d %%DIGEST_0{k4}{z}, %%ZTMP2, %%DIGEST_1 - vpermt2d %%DIGEST_2{k5}{z}, %%ZTMP3, %%DIGEST_3 - vpternlogq %%ZTMP1, %%DIGEST_0, %%DIGEST_2, 0x96 ; A XOR B XOR C - vmovdqu64 [%%T], %%ZTMP1 - - ; Update R1/R2 - vmovdqa64 [%%STATE + OFS_R1], %%R1 - vmovdqa64 [%%STATE + OFS_R2], %%R2 - - ; Update data pointers - vmovdqu64 %%ZTMP1, [%%DATA] - vmovdqu64 %%ZTMP2, [%%DATA + 64] - vpbroadcastq %%ZTMP3, %%OFFSET - vpaddq %%ZTMP1, %%ZTMP3 - vpaddq %%ZTMP2, %%ZTMP3 - vmovdqu64 [%%DATA], %%ZTMP1 - vmovdqu64 [%%DATA + 64], %%ZTMP2 - - ; Update array of lengths (if lane is valid, so length < UINT16_MAX) - vmovdqa64 YWORD(%%ZTMP2), [%%LEN] - vpcmpw k1, YWORD(%%ZTMP2), [rel all_ffs], 4 ; k1 -> valid lanes - shl %%OFFSET, 3 ; Convert to bits - vpbroadcastw YWORD(%%ZTMP1), DWORD(%%OFFSET) - vpsubw YWORD(%%ZTMP2){k1}, YWORD(%%ZTMP1) - vmovdqa64 [%%LEN], YWORD(%%ZTMP2) - -%endmacro - -;; -;; void asm_ZucGenKeystream64B_16_avx512(state16_t *pSta, u32* pKeyStr[16], -;; const u32 key_off) -;; -MKGLOBAL(ZUC_KEYGEN64B_16,function,internal) -ZUC_KEYGEN64B_16: - endbranch64 - KEYGEN_16_AVX512 16, 0, arg3 - - ret -;; -;; void asm_Eia3_Nx64B_AVX512_16(ZucState16_t *pState, -;; uint32_t *pKeyStr, -;; uint32_t *T, -;; const void **data, -;; uint16_t *len); -MKGLOBAL(ZUC_EIA3_N64B,function,internal) -ZUC_EIA3_N64B: -%define STATE arg1 -%define KS arg2 -%define T arg3 -%define DATA arg4 - -%ifdef LINUX -%define LEN arg5 -%define NROUNDS arg6 -%else -%define LEN r10 -%define NROUNDS r11 -%endif - endbranch64 - -%ifndef LINUX - mov LEN, arg5 - mov NROUNDS, arg6 -%endif - - FUNC_SAVE - - ZUC_EIA3_16_64B_AVX512 STATE, KS, T, DATA, LEN, NROUNDS - - FUNC_RESTORE - - ret - -; -;; void asm_ZucGenKeystream64B_16_skip8_avx512(state16_t *pSta, u32* pKeyStr[16], -;; const u32 key_off, -;; const u16 lane_mask) -;; -MKGLOBAL(ZUC_KEYGEN64B_SKIP8_16,function,internal) -ZUC_KEYGEN64B_SKIP8_16: - endbranch64 - KEYGEN_16_AVX512 16, 0, arg3, arg4 - - ret - -;; -;; void asm_ZucGenKeystream8B_16_avx512(state16_t *pSta, u32* pKeyStr[16], -;; const u32 key_off) -;; -MKGLOBAL(ZUC_KEYGEN8B_16,function,internal) -ZUC_KEYGEN8B_16: - endbranch64 - KEYGEN_16_AVX512 2, 0, arg3 - - ret - -;; -;; void asm_ZucGenKeystream4B_16_avx512(state16_t *pSta, u32 pKeyStr[16], -;; const u32 lane_mask) -;; -MKGLOBAL(ZUC_KEYGEN4B_16,function,internal) -ZUC_KEYGEN4B_16: - endbranch64 - KEYGEN_16_AVX512 1, 1, 0, arg3 - - ret - -%macro KEYGEN_VAR_16_AVX512 2-3 -%define %%NUM_ROUNDS %1 ; [in] Number of 4-byte rounds (GP dowrd register) -%define %%KEY_OFF %2 ; [in] Offset to start writing Keystream -%define %%LANE_MASK %3 ; [in] Lane mask with lanes to generate full keystream (rest 2 words less) - - cmp %%NUM_ROUNDS, 16 - je %%_num_rounds_is_16 - cmp %%NUM_ROUNDS, 8 - je %%_num_rounds_is_8 - jb %%_rounds_is_1_7 - - ; Final blocks 9-16 - cmp %%NUM_ROUNDS, 12 - je %%_num_rounds_is_12 - jb %%_rounds_is_9_11 - - ; Final blocks 13-15 - cmp %%NUM_ROUNDS, 14 - je %%_num_rounds_is_14 - ja %%_num_rounds_is_15 - jb %%_num_rounds_is_13 - -%%_rounds_is_9_11: - cmp %%NUM_ROUNDS, 10 - je %%_num_rounds_is_10 - ja %%_num_rounds_is_11 - jb %%_num_rounds_is_9 - -%%_rounds_is_1_7: - cmp %%NUM_ROUNDS, 4 - je %%_num_rounds_is_4 - jb %%_rounds_is_1_3 - - ; Final blocks 5-7 - cmp %%NUM_ROUNDS, 6 - je %%_num_rounds_is_6 - ja %%_num_rounds_is_7 - jb %%_num_rounds_is_5 - -%%_rounds_is_1_3: - cmp %%NUM_ROUNDS, 2 - je %%_num_rounds_is_2 - ja %%_num_rounds_is_3 - - ; Rounds = 1 if fall-through -%assign I 1 -%rep 16 -APPEND(%%_num_rounds_is_,I): -%if (%0 == 3) - KEYGEN_16_AVX512 I, 0, %%KEY_OFF, 
%%LANE_MASK -%else - KEYGEN_16_AVX512 I, 0, %%KEY_OFF -%endif - jmp %%_done - -%assign I (I + 1) -%endrep - -%%_done: -%endmacro - -;; -;; void asm_ZucGenKeystream_16_avx512(state16_t *pSta, u32* pKeyStr[16], -;; const u32 key_off, -;; const u32 numRounds) -;; -MKGLOBAL(ZUC_KEYGEN_16,function,internal) -ZUC_KEYGEN_16: - endbranch64 - - KEYGEN_VAR_16_AVX512 arg4, arg3 - - ret - -;; -;; void asm_ZucGenKeystream_16_skip8_avx512(state16_t *pSta, u32* pKeyStr[16], -;; const u32 key_off, -;; const u16 lane_mask, -;; u32 numRounds) -;; -MKGLOBAL(ZUC_KEYGEN_SKIP8_16,function,internal) -ZUC_KEYGEN_SKIP8_16: -%ifdef LINUX - %define arg5 r8d -%else - %define arg5 [rsp + 40] -%endif - endbranch64 - - mov r10d, arg5 - KEYGEN_VAR_16_AVX512 r10d, arg3, arg4 - - ret - -;; -;; Encrypts up to 64 bytes of data -;; -;; 1 - Reads R1 & R2 -;; 2 - Generates up to 64 bytes of keystream (16 rounds of 4 bytes) -;; 3 - Writes R1 & R2 -;; 4 - Transposes the registers containing chunks of 4 bytes of KS for each buffer -;; 5 - ZMM16-31 will contain 64 bytes of KS for each buffer -;; 6 - Reads 64 bytes of data for each buffer, XOR with KS and writes the ciphertext -;; -%macro CIPHER64B 12 -%define %%NROUNDS %1 -%define %%BYTE_MASK %2 -%define %%LANE_MASK %3 -%define %%OFFSET %4 -%define %%LAST_ROUND %5 -%define %%MASK_31 %6 -%define %%X0 %7 -%define %%X1 %8 -%define %%X2 %9 -%define %%W %10 -%define %%R1 %11 -%define %%R2 %12 - - ; Read R1/R2 - vmovdqa32 %%R1, [rax + OFS_R1] - vmovdqa32 %%R2, [rax + OFS_R2] - - ; Generate N*4B of keystream in N rounds -%assign N 1 -%assign idx 16 -%rep %%NROUNDS - BITS_REORG16 rax, N, %%LANE_MASK, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, \ - zmm7, zmm8, zmm9, k1, %%X0, %%X1, %%X2, APPEND(zmm, idx) - NONLIN_FUN16 rax, %%LANE_MASK, %%X0, %%X1, %%X2, %%R1, %%R2, \ - zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7 - ; OFS_X3 XOR W (zmm7) - vpxorq APPEND(zmm, idx), zmm7 - ; Shuffle bytes within KS words to XOR with plaintext later - vpshufb APPEND(zmm, idx), [rel swap_mask] - LFSR_UPDT16 rax, N, %%LANE_MASK, zmm1, zmm2, zmm3, zmm4, zmm5, \ - zmm6, %%MASK_31, zmm7, k7, work -%assign N (N + 1) -%assign idx (idx + 1) -%endrep - vmovdqa32 [rax + OFS_R1]{%%LANE_MASK}, %%R1 - vmovdqa32 [rax + OFS_R2]{%%LANE_MASK}, %%R2 - - ; ZMM16-31 contain the keystreams for each round - ; Perform a 32-bit 16x16 transpose to have the 64 bytes - ; of each lane in a different register - TRANSPOSE16_U32 zmm16, zmm17, zmm18, zmm19, zmm20, zmm21, zmm22, zmm23, \ - zmm24, zmm25, zmm26, zmm27, zmm28, zmm29, zmm30, zmm31, \ - zmm0, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7, \ - zmm8, zmm9, zmm10, zmm11, zmm12, zmm13 - - ;; XOR Input buffer with keystream -%if %%LAST_ROUND == 1 - lea rbx, [rel byte64_len_to_mask_table] -%endif - ;; Read all 16 streams using registers r12-15 into registers zmm0-15 -%assign i 0 -%assign j 0 -%assign k 12 -%rep 16 -%if %%LAST_ROUND == 1 - ;; Read number of bytes left to encrypt for the lane stored in stack - ;; and construct byte mask to read from input pointer - movzx r12d, word [rsp + j*2] - kmovq %%BYTE_MASK, [rbx + r12*8] -%endif - mov APPEND(r, k), [pIn + i] - vmovdqu8 APPEND(zmm, j){%%BYTE_MASK}{z}, [APPEND(r, k) + %%OFFSET] -%assign k 12 + ((j + 1) % 4) -%assign j (j + 1) -%assign i (i + 8) -%endrep - - ;; XOR Input (zmm0-15) with Keystreams (zmm16-31) -%assign i 0 -%assign j 16 -%rep 16 - vpxorq zmm %+j, zmm %+i -%assign i (i + 1) -%assign j (j + 1) -%endrep - - ;; Write output for all 16 buffers (zmm16-31) using registers r12-15 -%assign i 0 -%assign j 16 -%assign k 12 -%rep 16 -%if 
%%LAST_ROUND == 1 - ;; Read length to encrypt for the lane stored in stack - ;; and construct byte mask to write to output pointer - movzx r12d, word [rsp + (j-16)*2] - kmovq %%BYTE_MASK, [rbx + r12*8] -%endif - mov APPEND(r, k), [pOut + i] - vmovdqu8 [APPEND(r, k) + %%OFFSET]{%%BYTE_MASK}, APPEND(zmm, j) -%assign k 12 + ((j + 1) % 4) -%assign j (j + 1) -%assign i (i + 8) -%endrep - -%endmacro - -;; -;; void asm_ZucCipher_16_avx512(state16_t *pSta, u64 *pIn[16], -;; u64 *pOut[16], u16 lengths[16], -;; u64 min_length); -MKGLOBAL(CIPHER_16,function,internal) -CIPHER_16: - -%ifdef LINUX - %define pState rdi - %define pIn rsi - %define pOut rdx - %define lengths rcx - %define arg5 r8 -%else - %define pState rcx - %define pIn rdx - %define pOut r8 - %define lengths r9 - %define arg5 [rsp + 40] -%endif - -%define min_length r10 -%define buf_idx r11 - - mov min_length, arg5 - - FUNC_SAVE - - ; Convert all lengths set to UINT16_MAX (indicating that lane is not valid) to min length - vpbroadcastw ymm0, min_length - vmovdqa ymm1, [lengths] - vpcmpw k1, ymm1, [rel all_ffs], 0 - vmovdqu16 ymm1{k1}, ymm0 ; YMM1 contain updated lengths - - ; Round up to nearest multiple of 4 bytes - vpaddw ymm0, [rel all_threes] - vpandq ymm0, [rel all_fffcs] - - ; Calculate remaining bytes to encrypt after function call - vpsubw ymm2, ymm1, ymm0 - vpxorq ymm3, ymm3 - vpcmpw k1, ymm2, ymm3, 1 ; Get mask of lengths < 0 - ; Set to zero the lengths of the lanes which are going to be completed - vmovdqu16 ymm2{k1}, ymm3 ; YMM2 contain final lengths - vmovdqa [lengths], ymm2 ; Update in memory the final updated lengths - - ; Calculate number of bytes to encrypt after round of 64 bytes (up to 63 bytes), - ; for each lane, and store it in stack to be used in the last round - vpsubw ymm1, ymm2 ; Bytes to encrypt in all lanes - vpandq ymm1, [rel all_3fs] ; Number of final bytes (up to 63 bytes) for each lane - sub rsp, 32 - vmovdqu [rsp], ymm1 - - ; Load state pointer in RAX - mov rax, pState - - ; Load read-only registers - mov r12d, 0xAAAAAAAA - kmovd k1, r12d - mov r12, 0xFFFFFFFFFFFFFFFF - kmovq k2, r12 - mov r12d, 0x0000FFFF - kmovd k3, r12d - - xor buf_idx, buf_idx - - ;; Perform rounds of 64 bytes, where LFSR reordering is not needed -loop: - cmp min_length, 64 - jl exit_loop - - vmovdqa64 zmm0, [rel mask31] - - CIPHER64B 16, k2, k3, buf_idx, 0, zmm0, zmm10, zmm11, zmm12, zmm13, zmm14, zmm15 - - sub min_length, 64 - add buf_idx, 64 - jmp loop - -exit_loop: - - mov r15, min_length - add r15, 3 - shr r15, 2 ;; numbers of rounds left (round up length to nearest multiple of 4B) - jz _no_final_rounds - - vmovdqa64 zmm0, [rel mask31] - - cmp r15, 8 - je _num_final_rounds_is_8 - jl _final_rounds_is_1_7 - - ; Final blocks 9-16 - cmp r15, 12 - je _num_final_rounds_is_12 - jl _final_rounds_is_9_11 - - ; Final blocks 13-16 - cmp r15, 16 - je _num_final_rounds_is_16 - cmp r15, 15 - je _num_final_rounds_is_15 - cmp r15, 14 - je _num_final_rounds_is_14 - cmp r15, 13 - je _num_final_rounds_is_13 - -_final_rounds_is_9_11: - cmp r15, 11 - je _num_final_rounds_is_11 - cmp r15, 10 - je _num_final_rounds_is_10 - cmp r15, 9 - je _num_final_rounds_is_9 - -_final_rounds_is_1_7: - cmp r15, 4 - je _num_final_rounds_is_4 - jl _final_rounds_is_1_3 - - ; Final blocks 5-7 - cmp r15, 7 - je _num_final_rounds_is_7 - cmp r15, 6 - je _num_final_rounds_is_6 - cmp r15, 5 - je _num_final_rounds_is_5 - -_final_rounds_is_1_3: - cmp r15, 3 - je _num_final_rounds_is_3 - cmp r15, 2 - je _num_final_rounds_is_2 - - jmp _num_final_rounds_is_1 - - ; Perform 
encryption of last bytes (<= 64 bytes) and reorder LFSR registers - ; if needed (if not all 16 rounds of 4 bytes are done) -%assign I 1 -%rep 16 -APPEND(_num_final_rounds_is_,I): - CIPHER64B I, k2, k3, buf_idx, 1, zmm0, zmm10, zmm11, zmm12, zmm13, zmm14, zmm15 - REORDER_LFSR rax, I, k3 - add buf_idx, min_length - jmp _no_final_rounds -%assign I (I + 1) -%endrep - -_no_final_rounds: - add rsp, 32 - ;; update in/out pointers - add buf_idx, 3 - and buf_idx, 0xfffffffffffffffc - vpbroadcastq zmm0, buf_idx - vpaddq zmm1, zmm0, [pIn] - vpaddq zmm2, zmm0, [pIn + 64] - vmovdqa64 [pIn], zmm1 - vmovdqa64 [pIn + 64], zmm2 - vpaddq zmm1, zmm0, [pOut] - vpaddq zmm2, zmm0, [pOut + 64] - vmovdqa64 [pOut], zmm1 - vmovdqa64 [pOut + 64], zmm2 - - FUNC_RESTORE - - ret - -;; -;;extern void asm_Eia3Round64B_16(uint32_t *T, const void *KS, -;; const void **DATA, uint16_t *LEN); -;; -;; Updates authentication tag T of 16 buffers based on keystream KS and DATA. -;; - it processes 64 bytes of DATA of buffers -;; - reads data in 16 byte chunks from different buffers -;; (first buffers 0,4,8,12; then 1,5,9,13; etc) and bit reverses them -;; - reads KS (when utilizing VPCLMUL instructions, it reads 64 bytes directly, -;; containing 16 bytes of KS for 4 different buffers) -;; - employs clmul for the XOR & ROL part -;; - copies top 64 bytes of KS to bottom (for the next round) -;; - Updates Data pointers for next rounds -;; - Updates array of lengths -;; -;; @param [in] T: Array of digests for all 16 buffers -;; @param [in] KS: Pointer to 128 bytes of keystream for all 16 buffers (2048 bytes in total) -;; @param [in] DATA: Array of pointers to data for all 16 buffers -;; @param [in] LEN: Array of lengths for all 16 buffers -;; -align 64 -MKGLOBAL(ZUC_ROUND64B_16,function,internal) -ZUC_ROUND64B_16: - endbranch64 -%ifdef LINUX - %define T rdi - %define KS rsi - %define DATA rdx - %define LEN rcx -%else - %define T rcx - %define KS rdx - %define DATA r8 - %define LEN r9 -%endif - -%if USE_GFNI_VAES_VPCLMUL == 1 -%define DATA_ADDR0 rbx -%define DATA_ADDR1 r10 -%define DATA_ADDR2 r11 -%define DATA_ADDR3 r12 - -%define DATA_TRANS0 zmm19 -%define DATA_TRANS1 zmm20 -%define DATA_TRANS2 zmm21 -%define DATA_TRANS3 zmm22 -%define DATA_TRANS0x xmm19 -%define DATA_TRANS1x xmm20 -%define DATA_TRANS2x xmm21 -%define DATA_TRANS3x xmm22 - -%define KS_TRANS0 zmm23 -%define KS_TRANS1 zmm24 -%define KS_TRANS2 zmm25 -%define KS_TRANS3 zmm26 -%define KS_TRANS4 zmm27 -%define KS_TRANS0x xmm23 -%define KS_TRANS1x xmm24 -%define KS_TRANS2x xmm25 -%define KS_TRANS3x xmm26 -%define KS_TRANS4x xmm27 - -%define DIGEST_0 zmm28 -%define DIGEST_1 zmm29 -%define DIGEST_2 zmm30 -%define DIGEST_3 zmm31 - -%define ZTMP1 zmm0 -%define ZTMP2 zmm1 -%define ZTMP3 zmm2 -%define ZTMP4 zmm3 -%define ZTMP5 zmm4 -%define ZTMP6 zmm5 -%define ZTMP7 zmm6 -%define ZTMP8 zmm7 - -%define YTMP1 YWORD(ZTMP1) - - FUNC_SAVE - - mov r12d, 0x55555555 - kmovd k1, r12d - ;; Read first buffers 0,4,8,12; then 1,5,9,13, and so on, - ;; since the keystream is laid out this way, which chunks of - ;; 16 bytes interleved. 
First the 128 bytes for - ;; buffers 0,4,8,12 (total of 512 bytes), then the 128 bytes - ;; for buffers 1,5,9,13, and so on -%assign IDX 0 -%rep 4 - vpxorq APPEND(DIGEST_, IDX), APPEND(DIGEST_, IDX) - - mov DATA_ADDR0, [DATA + IDX*8 + 0*32] - mov DATA_ADDR1, [DATA + IDX*8 + 1*32] - mov DATA_ADDR2, [DATA + IDX*8 + 2*32] - mov DATA_ADDR3, [DATA + IDX*8 + 3*32] - - vmovdqu64 KS_TRANS0, [KS + IDX*64*2*4] - -%assign I 0 -%assign J 1 -%rep 4 - vmovdqu64 XWORD(APPEND(DATA_TRANS, I)), [DATA_ADDR0 + 16*I] - vinserti32x4 APPEND(DATA_TRANS, I), [DATA_ADDR1 + 16*I], 1 - vinserti32x4 APPEND(DATA_TRANS, I), [DATA_ADDR2 + 16*I], 2 - vinserti32x4 APPEND(DATA_TRANS, I), [DATA_ADDR3 + 16*I], 3 - - vmovdqu64 APPEND(KS_TRANS, J), [KS + IDX*64*2*4 + 64*J] - - ;; Reverse bits of next 16 bytes from all 4 buffers - vgf2p8affineqb ZTMP1, APPEND(DATA_TRANS,I), [rel bit_reverse_table], 0x00 - - ;; ZUC authentication part - ;; - 4x32 data bits - ;; - set up KS - vpalignr ZTMP2, APPEND(KS_TRANS, J), APPEND(KS_TRANS, I), 8 - vpshufd ZTMP3, APPEND(KS_TRANS, I), 0x61 - vpshufd ZTMP4, ZTMP2, 0x61 - - ;; - set up DATA - vpshufd APPEND(DATA_TRANS, I){k1}{z}, ZTMP1, 0x10 - vpshufd ZTMP2{k1}{z}, ZTMP1, 0x32 - - ;; - clmul - ;; - xor the results from 4 32-bit words together - vpclmulqdq ZTMP5, APPEND(DATA_TRANS, I), ZTMP3, 0x00 - vpclmulqdq ZTMP6, APPEND(DATA_TRANS, I), ZTMP3, 0x11 - vpclmulqdq ZTMP7, ZTMP2, ZTMP4, 0x00 - vpclmulqdq ZTMP8, ZTMP2, ZTMP4, 0x11 - - vpternlogq ZTMP5, ZTMP6, ZTMP8, 0x96 - vpternlogq APPEND(DIGEST_, IDX), ZTMP5, ZTMP7, 0x96 - -%assign J (J + 1) -%assign I (I + 1) -%endrep - - ; Memcpy KS 64-127 bytes to 0-63 bytes - vmovdqa64 ZTMP4, [KS + IDX*4*64*2 + 64*4] - vmovdqa64 ZTMP1, [KS + IDX*4*64*2 + 64*5] - vmovdqa64 ZTMP2, [KS + IDX*4*64*2 + 64*6] - vmovdqa64 ZTMP3, [KS + IDX*4*64*2 + 64*7] - vmovdqa64 [KS + IDX*4*64*2], ZTMP4 - vmovdqa64 [KS + IDX*4*64*2 + 64], ZTMP1 - vmovdqa64 [KS + IDX*4*64*2 + 64*2], ZTMP2 - vmovdqa64 [KS + IDX*4*64*2 + 64*3], ZTMP3 - -%assign IDX (IDX + 1) -%endrep - - ;; - update tags - mov r12, 0x3333 - mov r13, 0xCCCC - kmovq k1, r12 - kmovq k2, r13 - - vmovdqu64 ZTMP1, [T] ; Input tags - vmovdqa64 ZTMP2, [rel shuf_mask_tags_0_4_8_12] - vmovdqa64 ZTMP3, [rel shuf_mask_tags_0_4_8_12 + 64] - ; Get result tags for 16 buffers in different position in each lane - ; and blend these tags into an ZMM register. - ; Then, XOR the results with the previous tags and write out the result. - vpermt2d DIGEST_0{k1}{z}, ZTMP2, DIGEST_1 - vpermt2d DIGEST_2{k2}{z}, ZTMP3, DIGEST_3 - vpternlogq ZTMP1, DIGEST_0, DIGEST_2, 0x96 ; A XOR B XOR C - vmovdqu64 [T], ZTMP1 - - ; Update data pointers - vmovdqu64 ZTMP1, [DATA] - vmovdqu64 ZTMP2, [DATA + 64] - vpaddq ZTMP1, [rel add_64] - vpaddq ZTMP2, [rel add_64] - vmovdqu64 [DATA], ZTMP1 - vmovdqu64 [DATA + 64], ZTMP2 - - ; Update array of lengths (subtract 512 bits from all lengths if valid lane) - vmovdqa YTMP1, [LEN] - vpcmpw k1, YTMP1, [rel all_ffs], 4 - vpsubw YTMP1{k1}, [rel all_512w] - vmovdqa [LEN], YTMP1 - -%else ; USE_GFNI_VAES_VPCLMUL == 1 - -%define DIGEST_0 zmm28 -%define DIGEST_1 zmm29 -%define DIGEST_2 zmm30 -%define DIGEST_3 zmm31 - -%define DATA_ADDR r10 - - FUNC_SAVE - - vmovdqa xmm5, [bit_reverse_table_l] - vmovdqa xmm6, [bit_reverse_table_h] - vmovdqa xmm7, [bit_reverse_and_table] - - mov r12d, 0x55555555 - kmovd k1, r12d - - ;; Read first buffers 0,4,8,12; then 1,5,9,13, and so on, - ;; since the keystream is laid out this way, which chunks of - ;; 16 bytes interleved. 
First the 128 bytes for - ;; buffers 0,4,8,12 (total of 512 bytes), then the 128 bytes - ;; for buffers 1,5,9,13, and so on -%assign I 0 -%rep 4 -%assign J 0 -%rep 4 - - vpxor xmm9, xmm9 - mov DATA_ADDR, [DATA + 8*(J*4 + I)] - -%assign K 0 -%rep 4 - ;; read 16 bytes and reverse bits - vmovdqu xmm0, [DATA_ADDR + 16*K] - vpand xmm1, xmm0, xmm7 - - vpandn xmm2, xmm7, xmm0 - vpsrld xmm2, 4 - - vpshufb xmm8, xmm6, xmm1 ; bit reverse low nibbles (use high table) - vpshufb xmm4, xmm5, xmm2 ; bit reverse high nibbles (use low table) - - vpor xmm8, xmm4 - ; xmm8 - bit reversed data bytes - - ;; ZUC authentication part - ;; - 4x32 data bits - ;; - set up KS -%if K != 0 - vmovdqa xmm11, xmm12 - vmovdqu xmm12, [KS + (16*J + I*512) + (K + 1)*(16*4)] -%else - vmovdqu xmm11, [KS + (16*J + I*512)] - vmovdqu xmm12, [KS + (16*J + I*512) + (16*4)] -%endif - vpalignr xmm13, xmm12, xmm11, 8 - vpshufd xmm2, xmm11, 0x61 - vpshufd xmm3, xmm13, 0x61 - - ;; - set up DATA - vpshufd xmm0{k1}{z}, xmm8, 0x10 - vpshufd xmm1{k1}{z}, xmm8, 0x32 - - ;; - clmul - ;; - xor the results from 4 32-bit words together - vpclmulqdq xmm13, xmm0, xmm2, 0x00 - vpclmulqdq xmm14, xmm0, xmm2, 0x11 - vpclmulqdq xmm15, xmm1, xmm3, 0x00 - vpclmulqdq xmm8, xmm1, xmm3, 0x11 - - vpternlogq xmm13, xmm14, xmm8, 0x96 - vpternlogq xmm9, xmm13, xmm15, 0x96 - -%assign K (K + 1) -%endrep - - vinserti32x4 APPEND(DIGEST_, I), xmm9, J -%assign J (J + 1) -%endrep - ; Memcpy KS 64-127 bytes to 0-63 bytes - vmovdqa64 zmm23, [KS + I*4*64*2 + 64*4] - vmovdqa64 zmm24, [KS + I*4*64*2 + 64*5] - vmovdqa64 zmm25, [KS + I*4*64*2 + 64*6] - vmovdqa64 zmm26, [KS + I*4*64*2 + 64*7] - vmovdqa64 [KS + I*4*64*2], zmm23 - vmovdqa64 [KS + I*4*64*2 + 64], zmm24 - vmovdqa64 [KS + I*4*64*2 + 64*2], zmm25 - vmovdqa64 [KS + I*4*64*2 + 64*3], zmm26 -%assign I (I + 1) -%endrep - - ;; - update tags - mov r12, 0x3333 - mov r13, 0xCCCC - kmovq k1, r12 - kmovq k2, r13 - - vmovdqu64 zmm4, [T] ; Input tags - vmovdqa64 zmm0, [rel shuf_mask_tags_0_4_8_12] - vmovdqa64 zmm1, [rel shuf_mask_tags_0_4_8_12 + 64] - ; Get result tags for 16 buffers in different position in each lane - ; and blend these tags into an ZMM register. - ; Then, XOR the results with the previous tags and write out the result. 
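The per-chunk digest math computed above (bit-reverse the data bytes, then combine them with the keystream through VPCLMULQDQ and fold the products with VPTERNLOGQ) is the standard 128-EIA3 update: for every message bit that is set, a 32-bit sliding window of keystream is XORed into the tag. A hedged scalar reference is sketched below; the helper names are illustrative, and the SIMD code reaches the same result differently, bit-reversing the data so that carryless multiplies against the keystream accumulate the selected windows in one go. The tag-blend instructions introduced by the comment just above continue immediately after this sketch.

```c
#include <stdint.h>

/*
 * Scalar reference (a sketch, not library code) for the EIA-3 update
 * the CLMUL sequences vectorize. 'data' holds the next 32 message
 * bits MSB-first; 'ks64' holds 64 keystream bits starting at the same
 * bit offset, MSB-first.
 */
static inline uint32_t eia3_update_32bits(uint32_t tag, uint32_t data,
                                          uint64_t ks64)
{
        for (unsigned j = 0; j < 32; j++)
                if ((data >> (31 - j)) & 1)
                        /* 32-bit keystream window starting at bit j */
                        tag ^= (uint32_t)(ks64 >> (32 - j));

        return tag;
}

/*
 * Byte-wise bit reversal using two 16-entry nibble tables, the scalar
 * counterpart of the VPSHUFB/bit_reverse_table_l/h path used when
 * GFNI is not available.
 */
static inline uint8_t brev8(const uint8_t b)
{
        static const uint8_t rev4[16] = {
                0x0, 0x8, 0x4, 0xc, 0x2, 0xa, 0x6, 0xe,
                0x1, 0x9, 0x5, 0xd, 0x3, 0xb, 0x7, 0xf
        };

        return (uint8_t)((rev4[b & 0xf] << 4) | rev4[b >> 4]);
}
```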
- vpermt2d DIGEST_0{k1}{z}, zmm0, DIGEST_1 - vpermt2d DIGEST_2{k2}{z}, zmm1, DIGEST_3 - vpternlogq zmm4, DIGEST_0, DIGEST_2, 0x96 ; A XOR B XOR C - vmovdqu64 [T], zmm4 - - ; Update data pointers - vmovdqu64 zmm0, [DATA] - vmovdqu64 zmm1, [DATA + 64] - vpaddq zmm0, [rel add_64] - vpaddq zmm1, [rel add_64] - vmovdqu64 [DATA], zmm0 - vmovdqu64 [DATA + 64], zmm1 - - ; Update array of lengths (if lane is valid, so length < UINT16_MAX) - vmovdqa ymm2, [LEN] - vpcmpw k1, ymm2, [rel all_ffs], 4 ; k1 -> valid lanes - vpsubw ymm2{k1}, [rel all_512w] - vmovdqa [LEN], ymm2 - -%endif ;; USE_GFNI_VAES_VPCLMUL == 0 - FUNC_RESTORE - - ret - -%macro REMAINDER_16 1 -%define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) - -%ifdef LINUX - %define T rdi - %define KS rsi - %define DATA rdx - %define LEN rcx - %define arg5 r8d -%else - %define T rcx - %define KS rdx - %define DATA r8 - %define LEN r9 - %define arg5 [rsp + 40] -%endif - -%define DIGEST_0 zmm28 -%define DIGEST_1 zmm29 -%define DIGEST_2 zmm30 -%define DIGEST_3 zmm31 - -%define DATA_ADDR r12 -%define KS_ADDR r13 - -%define N_BYTES r14 -%define OFFSET r15 - -%define MIN_LEN r10d -%define MIN_LEN_Q r10 -%define IDX rax -%define TMP rbx - - mov MIN_LEN, arg5 - - FUNC_SAVE - - vpbroadcastw ymm0, MIN_LEN - ; Get mask of non-NULL lanes (lengths not set to UINT16_MAX, indicating that lane is not valid) - vmovdqa ymm1, [LEN] - vpcmpw k1, ymm1, [rel all_ffs], 4 ; NEQ - - ; Round up to nearest multiple of 32 bits - vpaddw ymm0{k1}, [rel all_31w] - vpandq ymm0, [rel all_ffe0w] - - ; Calculate remaining bits to authenticate after function call - vpcmpuw k2, ymm1, ymm0, 1 ; Get mask of lengths that will be < 0 after subtracting - vpsubw ymm2{k1}, ymm1, ymm0 - vpxorq ymm3, ymm3 - ; Set to zero the lengths of the lanes which are going to be completed - vmovdqu16 ymm2{k2}, ymm3 ; YMM2 contain final lengths - vmovdqu16 [LEN]{k1}, ymm2 ; Update in memory the final updated lengths - - ; Calculate number of bits to authenticate (up to 511 bits), - ; for each lane, and store it in stack to be used later - vpsubw ymm1{k1}{z}, ymm2 ; Bits to authenticate in all lanes (zero out length of NULL lanes) - sub rsp, 32 - vmovdqu [rsp], ymm1 - - xor OFFSET, OFFSET - -%if USE_GFNI_VAES_VPCLMUL != 1 - vmovdqa xmm5, [bit_reverse_table_l] - vmovdqa xmm6, [bit_reverse_table_h] - vmovdqa xmm7, [bit_reverse_and_table] -%endif - - mov r12d, 0x55555555 - kmovd k2, r12d - - ;; Read first buffers 0,4,8,12; then 1,5,9,13, and so on, - ;; since the keystream is laid out this way, which chunks of - ;; 16 bytes interleved. 
First the 128 bytes for - ;; buffers 0,4,8,12 (total of 512 bytes), then the 128 bytes - ;; for buffers 1,5,9,13, and so on -%assign I 0 -%rep 4 -%assign J 0 -%rep 4 - - ; Read length to authenticate for each buffer - movzx TMP, word [rsp + 2*(I*4 + J)] - - vpxor xmm9, xmm9 - - xor OFFSET, OFFSET - mov DATA_ADDR, [DATA + 8*(I*4 + J)] - -%assign K 0 -%rep 4 - cmp TMP, 128 - jb APPEND3(%%Eia3RoundsAVX512_dq_end,I,J) - - ;; read 16 bytes and reverse bits - vmovdqu xmm0, [DATA_ADDR + OFFSET] -%if USE_GFNI_VAES_VPCLMUL == 1 - vgf2p8affineqb xmm8, xmm0, [rel bit_reverse_table], 0x00 -%else - vpand xmm1, xmm0, xmm7 - - vpandn xmm2, xmm7, xmm0 - vpsrld xmm2, 4 - - vpshufb xmm8, xmm6, xmm1 ; bit reverse low nibbles (use high table) - vpshufb xmm4, xmm5, xmm2 ; bit reverse high nibbles (use low table) - - vpor xmm8, xmm4 -%endif - ; xmm8 - bit reversed data bytes - - ;; ZUC authentication part - ;; - 4x32 data bits - ;; - set up KS -%if K != 0 - vmovdqa xmm11, xmm12 - vmovdqu xmm12, [KS + (16*I + J*512) + OFFSET*4 + (16*4)] -%else - vmovdqu xmm11, [KS + (16*I + J*512) + (0*4)] - vmovdqu xmm12, [KS + (16*I + J*512) + (16*4)] -%endif - vpalignr xmm13, xmm12, xmm11, 8 - vpshufd xmm2, xmm11, 0x61 - vpshufd xmm3, xmm13, 0x61 - - ;; - set up DATA - vpshufd xmm0{k2}{z}, xmm8, 0x10 - vpshufd xmm1{k2}{z}, xmm8, 0x32 - - ;; - clmul - ;; - xor the results from 4 32-bit words together - vpclmulqdq xmm13, xmm0, xmm2, 0x00 - vpclmulqdq xmm14, xmm0, xmm2, 0x11 - vpclmulqdq xmm15, xmm1, xmm3, 0x00 - vpclmulqdq xmm8, xmm1, xmm3, 0x11 - - vpternlogq xmm13, xmm14, xmm8, 0x96 - vpternlogq xmm9, xmm13, xmm15, 0x96 - add OFFSET, 16 - sub TMP, 128 -%assign K (K + 1) -%endrep -APPEND3(%%Eia3RoundsAVX512_dq_end,I,J): - - or TMP, TMP - jz APPEND3(%%Eia3RoundsAVX_end,I,J) - - ; Get number of bytes - mov N_BYTES, TMP - add N_BYTES, 7 - shr N_BYTES, 3 - - lea r11, [rel byte64_len_to_mask_table] - kmovq k1, [r11 + N_BYTES*8] - - ;; Set up KS - shl OFFSET, 2 - vmovdqu xmm1, [KS + (16*I + J*512) + OFFSET] - vmovdqu xmm2, [KS + (16*I + J*512) + OFFSET + 16*4] - shr OFFSET, 2 - vpalignr xmm13, xmm2, xmm1, 8 - vpshufd xmm11, xmm1, 0x61 - vpshufd xmm12, xmm13, 0x61 - - ;; read up to 16 bytes of data, zero bits not needed if partial byte and bit-reverse - vmovdqu8 xmm0{k1}{z}, [DATA_ADDR + OFFSET] - ; check if there is a partial byte (less than 8 bits in last byte) - mov rax, TMP - and rax, 0x7 - shl rax, 4 - lea r11, [rel bit_mask_table] - add r11, rax - - ; Get mask to clear last bits - vmovdqa xmm3, [r11] - - ; Shift left 16-N bytes to have the last byte always at the end of the XMM register - ; to apply mask, then restore by shifting right same amount of bytes - mov r11, 16 - sub r11, N_BYTES - ; r13 = DATA_ADDR can be used at this stage - XVPSLLB xmm0, r11, xmm4, r13 - vpandq xmm0, xmm3 - XVPSRLB xmm0, r11, xmm4, r13 - -%if USE_GFNI_VAES_VPCLMUL == 1 - vgf2p8affineqb xmm8, xmm0, [rel bit_reverse_table], 0x00 -%else - ; Bit reverse input data - vpand xmm1, xmm0, xmm7 - - vpandn xmm2, xmm7, xmm0 - vpsrld xmm2, 4 - - vpshufb xmm8, xmm6, xmm1 ; bit reverse low nibbles (use high table) - vpshufb xmm3, xmm5, xmm2 ; bit reverse high nibbles (use low table) - - vpor xmm8, xmm3 -%endif - - ;; - set up DATA - vpshufd xmm0{k2}{z}, xmm8, 0x10 ; D 0-3 || Os || D 4-7 || 0s - vpshufd xmm1{k2}{z}, xmm8, 0x32 ; D 8-11 || 0s || D 12-15 || 0s - - ;; - clmul - ;; - xor the results from 4 32-bit words together - vpclmulqdq xmm13, xmm0, xmm11, 0x00 - vpclmulqdq xmm14, xmm0, xmm11, 0x11 - vpclmulqdq xmm15, xmm1, xmm12, 0x00 - vpclmulqdq xmm8, xmm1, 
xmm12, 0x11 - vpternlogq xmm9, xmm14, xmm13, 0x96 - vpternlogq xmm9, xmm15, xmm8, 0x96 - -APPEND3(%%Eia3RoundsAVX_end,I,J): - vinserti32x4 APPEND(DIGEST_, I), xmm9, J -%assign J (J + 1) -%endrep -%assign I (I + 1) -%endrep - - ;; - update tags - mov TMP, 0x00FF - kmovq k1, TMP - mov TMP, 0xFF00 - kmovq k2, TMP - - vmovdqu64 zmm4, [T] ; Input tags - vmovdqa64 zmm0, [rel shuf_mask_tags_0_1_2_3] - vmovdqa64 zmm1, [rel shuf_mask_tags_0_1_2_3 + 64] - ; Get result tags for 16 buffers in different position in each lane - ; and blend these tags into an ZMM register. - ; Then, XOR the results with the previous tags and write out the result. - vpermt2d DIGEST_0{k1}{z}, zmm0, DIGEST_1 - vpermt2d DIGEST_2{k2}{z}, zmm1, DIGEST_3 - vpternlogq zmm4, DIGEST_0, DIGEST_2, 0x96 ; A XOR B XOR C - - vmovdqa64 [T], zmm4 ; Store temporary digests - - ; These last steps should be done only for the buffers that - ; have no more data to authenticate - xor IDX, IDX -%%start_loop: - ; Update data pointer - movzx r11d, word [rsp + IDX*2] - shr r11d, 3 ; length authenticated in bytes - add [DATA + IDX*8], r11 - - cmp word [LEN + 2*IDX], 0 - jnz %%skip_comput - - mov r11, IDX - and r11, 0x3 - shl r11, 9 ; * 512 - - mov r12, IDX - shr r12, 2 - shl r12, 4 ; * 16 - add r11, r12 - lea KS_ADDR, [KS + r11] - - ; Read digest - mov r12d, [T + 4*IDX] - - ; Read keyStr[MIN_LEN / 32] - movzx TMP, word [rsp + 2*IDX] - mov r15, TMP - shr r15, 5 - mov r11, r15 - shr r15, 2 - shl r15, (4+2) - and r11, 0x3 - shl r11, 2 - add r15, r11 - mov r11, r15 - and r11, 0xf - cmp r11, 12 - je %%_read_2dwords - mov r11, [KS_ADDR + r15] - jmp %%_ks_qword_read - - ;; The 8 bytes of KS are separated -%%_read_2dwords: - mov r11d, [KS_ADDR + r15] - mov r15d, [KS_ADDR + r15 + (4+48)] - shl r15, 32 - or r11, r15 -%%_ks_qword_read: - ; Rotate left by MIN_LEN % 32 - mov r15, rcx - mov rcx, TMP - and rcx, 0x1F - rol r11, cl - mov rcx, r15 - ; XOR with current digest - xor r12d, r11d - -%if %%KEY_SIZE == 128 - ; Read keystr[L - 1] (last dword of keyStr) - add TMP, (31 + 64) - shr TMP, 5 ; L - dec TMP - mov r11, TMP - shr r11, 2 - shl r11, (4+2) - and TMP, 0x3 - shl TMP, 2 - add TMP, r11 - mov r11d, [KS_ADDR + TMP] - ; XOR with current digest - xor r12d, r11d -%endif - - ; byte swap and write digest out - bswap r12d - mov [T + 4*IDX], r12d - -%%skip_comput: - inc IDX - cmp IDX, 16 - jne %%start_loop - - add rsp, 32 - - ; Memcpy last 8 bytes of KS into start - add MIN_LEN, 31 - shr MIN_LEN, 5 - shl MIN_LEN, 2 ; Offset where to copy the last 8 bytes from - - mov r12d, MIN_LEN - shr MIN_LEN, 4 - shl MIN_LEN, (4+2) - and r12d, 0xf - add MIN_LEN, r12d - cmp r12d, 12 - je %%_copy_2dwords - -%assign %%i 0 -%rep 4 -%assign %%j 0 -%rep 4 - mov TMP, [KS + 512*%%i + 16*%%j + MIN_LEN_Q] - mov [KS + 512*%%i + 16*%%j], TMP -%assign %%j (%%j + 1) -%endrep -%assign %%i (%%i + 1) -%endrep - jmp %%_ks_copied - - ;; The 8 bytes of KS are separated -%%_copy_2dwords: -%assign %%i 0 -%rep 4 -%assign %%j 0 -%rep 4 - mov DWORD(TMP), [KS + 512*%%i + 16*%%j + MIN_LEN_Q] - mov [KS + 512*%%i + 16*%%j], DWORD(TMP) - mov DWORD(TMP), [KS + 512*%%i + 16*%%j + (48+4) + MIN_LEN_Q] - mov [KS + 512*%%i + 16*%%j + 4], DWORD(TMP) -%assign %%j (%%j + 1) -%endrep -%assign %%i (%%i + 1) -%endrep -%%_ks_copied: - vzeroupper - FUNC_RESTORE - ret -%endmacro - -;; -;; extern void asm_Eia3RemainderAVX512_16(uint32_t *T, const void **ks, const void **data, uint64_t n_bits) -;; -;; @param [in] T: Array of digests for all 16 buffers -;; @param [in] KS : Array of pointers to key stream for all 16 buffers -;; 
@param [in] DATA : Array of pointers to data for all 16 buffers -;; @param [in] N_BITS (number data bits to process) -;; -align 64 -MKGLOBAL(ZUC128_REMAINDER_16,function,internal) -ZUC128_REMAINDER_16: - endbranch64 - REMAINDER_16 128 - -;; -;; extern void asm_Eia3_256_RemainderAVX512_16(uint32_t *T, const void **ks, const void **data, uint64_t n_bits) -;; -;; @param [in] T: Array of digests for all 16 buffers -;; @param [in] KS : Array of pointers to key stream for all 16 buffers -;; @param [in] DATA : Array of pointers to data for all 16 buffers -;; @param [in] N_BITS (number data bits to process) -;; -align 64 -MKGLOBAL(ZUC256_REMAINDER_16,function,internal) -ZUC256_REMAINDER_16: - endbranch64 - REMAINDER_16 256 - -; Following functions only need AVX512 instructions (no VAES, GFNI, etc.) -%if USE_GFNI_VAES_VPCLMUL == 0 -;; -;; extern void asm_Eia3RemainderAVX512(uint32_t *T, const void *ks, -;; const void *data, uint64_t n_bits) -;; -;; Returns authentication update value to be XOR'ed with current authentication tag -;; -;; @param [in] T (digest pointer) -;; @param [in] KS (key stream pointer) -;; @param [in] DATA (data pointer) -;; @param [in] N_BITS (number data bits to process) -;; -align 64 -MKGLOBAL(asm_Eia3RemainderAVX512,function,internal) -asm_Eia3RemainderAVX512: - endbranch64 -%ifdef LINUX - %define T rdi - %define KS rsi - %define DATA rdx - %define N_BITS rcx -%else - %define T rcx - %define KS rdx - %define DATA r8 - %define N_BITS r9 -%endif - -%define N_BYTES rbx -%define OFFSET r15 - - FUNC_SAVE - - vmovdqa xmm5, [bit_reverse_table_l] - vmovdqa xmm6, [bit_reverse_table_h] - vmovdqa xmm7, [bit_reverse_and_table] - vpxor xmm9, xmm9 - mov r12d, 0x55555555 - kmovd k2, r12d - - xor OFFSET, OFFSET -%assign I 0 -%rep 3 - cmp N_BITS, 128 - jb Eia3RoundsAVX512_dq_end - - ;; read 16 bytes and reverse bits - vmovdqu xmm0, [DATA + OFFSET] - vpand xmm1, xmm0, xmm7 - - vpandn xmm2, xmm7, xmm0 - vpsrld xmm2, 4 - - vpshufb xmm8, xmm6, xmm1 ; bit reverse low nibbles (use high table) - vpshufb xmm4, xmm5, xmm2 ; bit reverse high nibbles (use low table) - - vpor xmm8, xmm4 - ; xmm8 - bit reversed data bytes - - ;; ZUC authentication part - ;; - 4x32 data bits - ;; - set up KS -%if I != 0 - vmovdqa xmm11, xmm12 - vmovdqu xmm12, [KS + OFFSET + (4*4)] -%else - vmovdqu xmm11, [KS + (0*4)] - vmovdqu xmm12, [KS + (4*4)] -%endif - vpalignr xmm13, xmm12, xmm11, 8 - vpshufd xmm2, xmm11, 0x61 - vpshufd xmm3, xmm13, 0x61 - - ;; - set up DATA - vpshufd xmm0{k2}{z}, xmm8, 0x10 - vpshufd xmm1{k2}{z}, xmm8, 0x32 - - ;; - clmul - ;; - xor the results from 4 32-bit words together - vpclmulqdq xmm13, xmm0, xmm2, 0x00 - vpclmulqdq xmm14, xmm0, xmm2, 0x11 - vpclmulqdq xmm15, xmm1, xmm3, 0x00 - vpclmulqdq xmm8, xmm1, xmm3, 0x11 - - vpternlogq xmm13, xmm14, xmm8, 0x96 - vpternlogq xmm9, xmm13, xmm15, 0x96 - - add OFFSET, 16 - sub N_BITS, 128 -%assign I (I + 1) -%endrep -Eia3RoundsAVX512_dq_end: - - or N_BITS, N_BITS - jz Eia3RoundsAVX_end - - ; Get number of bytes - mov N_BYTES, N_BITS - add N_BYTES, 7 - shr N_BYTES, 3 - - lea r10, [rel byte64_len_to_mask_table] - kmovq k1, [r10 + N_BYTES*8] - - ;; Set up KS - vmovdqu xmm1, [KS + OFFSET] - vmovdqu xmm2, [KS + OFFSET + 16] - vpalignr xmm13, xmm2, xmm1, 8 - vpshufd xmm11, xmm1, 0x61 - vpshufd xmm12, xmm13, 0x61 - - ;; read up to 16 bytes of data, zero bits not needed if partial byte and bit-reverse - vmovdqu8 xmm0{k1}{z}, [DATA + OFFSET] - ; check if there is a partial byte (less than 8 bits in last byte) - mov rax, N_BITS - and rax, 0x7 - shl rax, 4 - lea r10, 
[rel bit_mask_table] - add r10, rax - - ; Get mask to clear last bits - vmovdqa xmm3, [r10] - - ; Shift left 16-N bytes to have the last byte always at the end of the XMM register - ; to apply mask, then restore by shifting right same amount of bytes - mov r10, 16 - sub r10, N_BYTES - XVPSLLB xmm0, r10, xmm4, r11 - vpandq xmm0, xmm3 - XVPSRLB xmm0, r10, xmm4, r11 - - ; Bit reverse input data - vpand xmm1, xmm0, xmm7 - - vpandn xmm2, xmm7, xmm0 - vpsrld xmm2, 4 - - vpshufb xmm8, xmm6, xmm1 ; bit reverse low nibbles (use high table) - vpshufb xmm3, xmm5, xmm2 ; bit reverse high nibbles (use low table) - - vpor xmm8, xmm3 - - ;; Set up DATA - vpshufd xmm0{k2}{z}, xmm8, 0x10 ; D 0-3 || Os || D 4-7 || 0s - vpshufd xmm1{k2}{z}, xmm8, 0x32 ; D 8-11 || 0s || D 12-15 || 0s - - ;; - clmul - ;; - xor the results from 4 32-bit words together - vpclmulqdq xmm13, xmm0, xmm11, 0x00 - vpclmulqdq xmm14, xmm0, xmm11, 0x11 - vpclmulqdq xmm15, xmm1, xmm12, 0x00 - vpclmulqdq xmm8, xmm1, xmm12, 0x11 - vpternlogq xmm9, xmm14, xmm13, 0x96 - vpternlogq xmm9, xmm15, xmm8, 0x96 - -Eia3RoundsAVX_end: - mov r11d, [T] - vmovq rax, xmm9 - shr rax, 32 - xor eax, r11d - - ; Read keyStr[N_BITS / 32] - lea r10, [N_BITS + OFFSET*8] ; Restore original N_BITS - shr r10, 5 - mov r11, [KS + r10*4] - - ; Rotate left by N_BITS % 32 - mov r12, rcx ; Save RCX - mov rcx, N_BITS - and rcx, 0x1F - rol r11, cl - mov rcx, r12 ; Restore RCX - - ; XOR with previous digest calculation - xor eax, r11d - - ; Read keyStr[L - 1] (last double word of keyStr) - lea r10, [N_BITS + OFFSET*8] ; Restore original N_BITS - add r10, (31 + 64) - shr r10, 5 ; L - dec r10 - mov r11d, [KS + r10 * 4] - - ; XOR with previous digest calculation and bswap it - xor eax, r11d - bswap eax - mov [T], eax - - FUNC_RESTORE - - ret - -;; -;;extern void asm_Eia3Round64BAVX512(uint32_t *T, const void *KS, const void *DATA) -;; -;; Updates authentication tag T based on keystream KS and DATA. 
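The single-buffer remainder path above closes the 128-EIA3 digest exactly as the specification describes: XOR in the 32-bit keystream window that starts at bit LENGTH (the qword load plus ROL by LENGTH mod 32 in the code is that window extraction), XOR in the last of the L = ceil(LENGTH/32) + 2 keystream words, then byte-swap into the stored tag layout. For ZUC-256 the tail differs, which is why the REMAINDER_16 macro above guards the last-word XOR with the 128-bit key size. A hedged scalar sketch follows (reference model only; `__builtin_bswap32` is a GCC/Clang builtin used for brevity), after which the description of asm_Eia3Round64BAVX512 resumes.

```c
#include <stdint.h>

/* 32-bit keystream window starting at bit 'i'; 'ks' holds the
 * keystream as 32-bit words in processing order. */
static inline uint32_t zuc_get_word(const uint32_t *ks, uint64_t i)
{
        const uint64_t w = ((uint64_t)ks[i / 32] << 32) | ks[i / 32 + 1];

        return (uint32_t)(w >> (32 - (i % 32)));
}

/* Sketch of the 128-EIA3 finalization performed above (not library
 * code): 'tag' is the value accumulated by the per-bit update and
 * 'n_bits' the message length in bits. The byte swap mirrors the
 * in-memory digest convention used by the assembly. */
static inline uint32_t eia3_finalize(uint32_t tag, const uint32_t *ks,
                                     uint64_t n_bits)
{
        const uint64_t L = (n_bits + 31) / 32 + 2; /* keystream words */

        tag ^= zuc_get_word(ks, n_bits);
        tag ^= ks[L - 1];

        return __builtin_bswap32(tag);
}
```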
-;; - it processes 64 bytes of DATA -;; - reads data in 16 byte chunks and bit reverses them -;; - reads and re-arranges KS -;; - employs clmul for the XOR & ROL part -;; -;; @param [in] T (digest pointer) -;; @param [in] KS (key stream pointer) -;; @param [in] DATA (data pointer) -;; -align 64 -MKGLOBAL(asm_Eia3Round64BAVX512,function,internal) -asm_Eia3Round64BAVX512: - endbranch64 -%ifdef LINUX - %define T rdi - %define KS rsi - %define DATA rdx -%else - %define T rcx - %define KS rdx - %define DATA r8 -%endif - - FUNC_SAVE - - vmovdqa xmm5, [bit_reverse_table_l] - vmovdqa xmm6, [bit_reverse_table_h] - vmovdqa xmm7, [bit_reverse_and_table] - vpxor xmm9, xmm9 - - mov r12d, 0x55555555 - kmovd k1, r12d -%assign I 0 -%rep 4 - ;; read 16 bytes and reverse bits - vmovdqu xmm0, [DATA + 16*I] - vpand xmm1, xmm0, xmm7 - - vpandn xmm2, xmm7, xmm0 - vpsrld xmm2, 4 - - vpshufb xmm8, xmm6, xmm1 ; bit reverse low nibbles (use high table) - vpshufb xmm4, xmm5, xmm2 ; bit reverse high nibbles (use low table) - - vpor xmm8, xmm4 - ; xmm8 - bit reversed data bytes - - ;; ZUC authentication part - ;; - 4x32 data bits - ;; - set up KS -%if I != 0 - vmovdqa xmm11, xmm12 - vmovdqu xmm12, [KS + (I*16) + (4*4)] -%else - vmovdqu xmm11, [KS + (I*16) + (0*4)] - vmovdqu xmm12, [KS + (I*16) + (4*4)] -%endif - vpalignr xmm13, xmm12, xmm11, 8 - vpshufd xmm2, xmm11, 0x61 - vpshufd xmm3, xmm13, 0x61 - - ;; - set up DATA - vpshufd xmm0{k1}{z}, xmm8, 0x10 - vpshufd xmm1{k1}{z}, xmm8, 0x32 - - ;; - clmul - ;; - xor the results from 4 32-bit words together - vpclmulqdq xmm13, xmm0, xmm2, 0x00 - vpclmulqdq xmm14, xmm0, xmm2, 0x11 - vpclmulqdq xmm15, xmm1, xmm3, 0x00 - vpclmulqdq xmm8, xmm1, xmm3, 0x11 - - vpternlogq xmm13, xmm14, xmm8, 0x96 - vpternlogq xmm9, xmm13, xmm15, 0x96 - -%assign I (I + 1) -%endrep - - ;; - update T - vmovq rax, xmm9 - shr rax, 32 - mov r10d, [T] - xor eax, r10d - mov [T], eax - - FUNC_RESTORE - - ret - -%endif ; USE_GFNI_VAES_VPCLMUL == 0 - -;---------------------------------------------------------------------------------------- -;---------------------------------------------------------------------------------------- - -mksection stack-noexec diff --git a/lib/avx512_t1/README b/lib/avx512_t1/README new file mode 100644 index 0000000000000000000000000000000000000000..629d62328b65779033686fa06d891f1a52a037b3 --- /dev/null +++ b/lib/avx512_t1/README @@ -0,0 +1,3 @@ +AVX512 TYPE1: +- AVX2 TYPE1 +- AVX512F, AVX512VL, AVX512DQ, AVX512BW, AVX512CD, AESNI, PCLMULQDQ diff --git a/lib/avx512/aes128_gcm_by8_avx512.asm b/lib/avx512_t1/aes128_gcm_by8_avx512.asm similarity index 97% rename from lib/avx512/aes128_gcm_by8_avx512.asm rename to lib/avx512_t1/aes128_gcm_by8_avx512.asm index 79d86495de04e8cfb250d76d5bb5e39babf0bf64..4415ca93f29fd126023e6903395b9eb889e4fc25 100644 --- a/lib/avx512/aes128_gcm_by8_avx512.asm +++ b/lib/avx512_t1/aes128_gcm_by8_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM128_MODE 1 -%include "avx512/gcm_avx512.asm" +%include "avx512_t1/gcm_avx512.asm" diff --git a/lib/avx512/aes192_gcm_by8_avx512.asm b/lib/avx512_t1/aes192_gcm_by8_avx512.asm similarity index 97% rename from lib/avx512/aes192_gcm_by8_avx512.asm rename to lib/avx512_t1/aes192_gcm_by8_avx512.asm index fee431cccec3a8222e0bedec9a93eada3d3cebdd..96ae8046af7c42a89e2eade05ed02c137e35741b 100644 --- a/lib/avx512/aes192_gcm_by8_avx512.asm +++ b/lib/avx512_t1/aes192_gcm_by8_avx512.asm @@ -28,4 +28,4 @@ 
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM192_MODE 1 -%include "avx512/gcm_avx512.asm" +%include "avx512_t1/gcm_avx512.asm" diff --git a/lib/avx512/aes256_gcm_by8_avx512.asm b/lib/avx512_t1/aes256_gcm_by8_avx512.asm similarity index 97% rename from lib/avx512/aes256_gcm_by8_avx512.asm rename to lib/avx512_t1/aes256_gcm_by8_avx512.asm index d25dbd353c0c0f6ffb021098ff49812a49721a52..5083081ac3160863f8f8490867ebc62d0b1c45b0 100644 --- a/lib/avx512/aes256_gcm_by8_avx512.asm +++ b/lib/avx512_t1/aes256_gcm_by8_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM256_MODE 1 -%include "avx512/gcm_avx512.asm" +%include "avx512_t1/gcm_avx512.asm" diff --git a/lib/avx512/chacha20_avx512.asm b/lib/avx512_t1/chacha20_avx512.asm similarity index 96% rename from lib/avx512/chacha20_avx512.asm rename to lib/avx512_t1/chacha20_avx512.asm index d22741871042dab79fb54adb10b01e6f536e067c..845e531d3f38379d08de82f1c8f89b3aa8b9934e 100644 --- a/lib/avx512/chacha20_avx512.asm +++ b/lib/avx512_t1/chacha20_avx512.asm @@ -130,6 +130,13 @@ poly_clamp_r: dq 0x0ffffffc0fffffff, 0x0ffffffc0ffffffc dq 0xffffffffffffffff, 0xffffffffffffffff +struc STACK +_XMM_WIN_SAVE: reso 10 ; Space to store up to 10 XMM registers +_GP_SAVE: resq 7 ; Space to store up to 7 GP registers +_RSP_SAVE: resq 1 ; Space to store rsp pointer +endstruc +%define STACK_SIZE STACK_size + %define APPEND(a,b) a %+ b %define APPEND3(a,b,c) a %+ b %+ c @@ -1302,10 +1309,22 @@ submit_job_chacha20_poly_enc_avx512: %define tmp r13 %define off rax - sub rsp, 16 - mov [rsp], r12 - mov [rsp + 8], r13 + mov rax, rsp + sub rsp, STACK_SIZE + and rsp, -16 + mov [rsp + _GP_SAVE], r12 + mov [rsp + _GP_SAVE + 8], r13 +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep +%endif + mov [rsp + _RSP_SAVE], rax ; save RSP mov added_len, 64 xor off, off @@ -1554,9 +1573,18 @@ no_partial_block_poly: mov rax, job or dword [rax + _status], IMB_STATUS_COMPLETED_CIPHER - mov r12, [rsp] - mov r13, [rsp + 8] - add rsp, 16 + mov r12, [rsp + _GP_SAVE] + mov r13, [rsp + _GP_SAVE + 8] +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep +%endif + mov rsp, [rsp + _RSP_SAVE] ret @@ -1571,6 +1599,19 @@ gen_keystr_poly_key_avx512: %define off rax +%ifndef LINUX + mov rax, rsp + sub rsp, STACK_SIZE + and rsp, -16 +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep + mov [rsp + _RSP_SAVE], rax ; save RSP +%endif ; Generate up to 1KB of keystream ; If less than or equal to 64*8 bytes, prepare directly states for up to 8 blocks @@ -1624,7 +1665,7 @@ gen_keystr_poly_key_avx512: vmovdqa64 [ks + 64*14], zmm30 vmovdqa64 [ks + 64*15], zmm18 - ret + jmp exit_gen_keystr less_than_512_ks: @@ -1648,7 +1689,7 @@ less_than_512_ks: vmovdqa64 [ks + 64*2], zmm17 vmovdqa64 [ks + 64*3], zmm29 - ret + jmp exit_gen_keystr more_than_256_ks: xor off, off @@ -1672,6 +1713,17 @@ more_than_256_ks: vmovdqa64 [ks + 64*6], zmm26 vmovdqa64 [ks + 64*7], zmm23 +exit_gen_keystr: +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep + mov rsp, [rsp + _RSP_SAVE] +%endif ret align 32 @@ -1694,7 +1746,18 @@ submit_job_chacha20_poly_dec_avx512: %define 
len_xor iv %ifndef LINUX - push rsi + mov rax, rsp + sub rsp, STACK_SIZE + and rsp, -16 + mov [rsp + _GP_SAVE], rsi +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep + mov [rsp + _RSP_SAVE], rax ; save RSP %endif mov len_xor, arg3 @@ -1979,7 +2042,15 @@ no_partial_block_dec: or dword [rax + _status], IMB_STATUS_COMPLETED_CIPHER %ifndef LINUX - pop rsi +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep + mov rsi, [rsp + _GP_SAVE] + mov rsp, [rsp + _RSP_SAVE] %endif ret @@ -2016,16 +2087,26 @@ chacha20_enc_dec_ks_avx512: mov ctx, arg5 - sub rsp, 8*7 - mov [rsp], r12 - mov [rsp + 8], r13 - mov [rsp + 16], r14 - mov [rsp + 24], r15 - mov [rsp + 32], rbx - mov [rsp + 40], rbp + mov rax, rsp + sub rsp, STACK_SIZE + and rsp, -16 + mov [rsp + _GP_SAVE], r12 + mov [rsp + _GP_SAVE + 8], r13 + mov [rsp + _GP_SAVE + 16], r14 + mov [rsp + _GP_SAVE + 24], r15 + mov [rsp + _GP_SAVE + 32], rbx + mov [rsp + _GP_SAVE + 40], rbp %ifndef LINUX - mov [rsp + 48], rdi + mov [rsp + _GP_SAVE + 48], rdi +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif + mov [rsp + _RSP_SAVE], rax ; save RSP xor off, off mov blk_cnt, [ctx + LastBlkCount] @@ -2255,16 +2336,23 @@ no_partial_block_ks: mov [ctx + LastBlkCount], blk_cnt - mov r12, [rsp] - mov r13, [rsp + 8] - mov r14, [rsp + 16] - mov r15, [rsp + 24] - mov rbx, [rsp + 32] - mov rbp, [rsp + 40] + mov r12, [rsp + _GP_SAVE] + mov r13, [rsp + _GP_SAVE + 8] + mov r14, [rsp + _GP_SAVE + 16] + mov r15, [rsp + _GP_SAVE + 24] + mov rbx, [rsp + _GP_SAVE + 32] + mov rbp, [rsp + _GP_SAVE + 40] %ifndef LINUX - mov rdi, [rsp + 48] + mov rdi, [rsp + _GP_SAVE + 48] +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif - add rsp, 8*7 + mov rsp, [rsp + _RSP_SAVE]; restore RSP %ifdef SAFE_DATA clear_all_zmms_asm %else diff --git a/lib/avx512/des_x16_avx512.asm b/lib/avx512_t1/des_x16_avx512.asm similarity index 99% rename from lib/avx512/des_x16_avx512.asm rename to lib/avx512_t1/des_x16_avx512.asm index e7a223c135f2ebc311d79a0428c8f13bdfe4dddc..3dedfa2a35b3dab3d5ee1cc46879bbbb883b2aa7 100644 --- a/lib/avx512/des_x16_avx512.asm +++ b/lib/avx512_t1/des_x16_avx512.asm @@ -53,7 +53,7 @@ ;%define DO_DBGPRINT %include "include/dbgprint.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" + %ifdef LINUX %define arg1 rdi %define arg2 rsi @@ -2364,7 +2364,6 @@ mksection .text align 64 MKGLOBAL(des_x16_cbc_enc_avx512,function,internal) des_x16_cbc_enc_avx512: - endbranch64 GENERIC_DES_ENC DES ret @@ -2373,7 +2372,6 @@ des_x16_cbc_enc_avx512: align 64 MKGLOBAL(des_x16_cbc_dec_avx512,function,internal) des_x16_cbc_dec_avx512: - endbranch64 GENERIC_DES_DEC DES ret @@ -2382,7 +2380,6 @@ des_x16_cbc_dec_avx512: align 64 MKGLOBAL(des3_x16_cbc_enc_avx512,function,internal) des3_x16_cbc_enc_avx512: - endbranch64 GENERIC_DES_ENC 3DES ret @@ -2391,7 +2388,6 @@ des3_x16_cbc_enc_avx512: align 64 MKGLOBAL(des3_x16_cbc_dec_avx512,function,internal) des3_x16_cbc_dec_avx512: - endbranch64 GENERIC_DES_DEC 3DES ret @@ -2400,7 +2396,6 @@ des3_x16_cbc_dec_avx512: align 64 MKGLOBAL(docsis_des_x16_enc_avx512,function,internal) docsis_des_x16_enc_avx512: - endbranch64 GENERIC_DES_ENC DOCSIS ret @@ -2409,7 +2404,6 @@ docsis_des_x16_enc_avx512: align 64 
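The chacha20_avx512.asm changes above replace ad hoc `push`/`sub rsp` bookkeeping with a fixed STACK struc so that, on Windows, the callee-saved xmm6-xmm15 registers can be spilled, the clobbered GP registers stored, and the caller's RSP restored directly after the `and rsp, -16` alignment. A C mirror of that frame layout, for illustration only:

```c
#include <stdint.h>

/*
 * Illustrative mirror of the STACK struc added to chacha20_avx512.asm
 * (layout sketch only, not code from the library).
 */
struct chacha20_avx512_stack_frame {
        uint8_t  xmm_win_save[10][16]; /* _XMM_WIN_SAVE: xmm6..xmm15 (Windows ABI) */
        uint64_t gp_save[7];           /* _GP_SAVE: r12, r13, r14, r15, rbx, rbp, rdi */
        uint64_t rsp_save;             /* _RSP_SAVE: caller RSP, restored on exit */
};
```

Keeping the original RSP in `_RSP_SAVE` is what lets each epilogue simply `mov rsp, [rsp + _RSP_SAVE]` instead of undoing the alignment arithmetic.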
MKGLOBAL(docsis_des_x16_dec_avx512,function,internal) docsis_des_x16_dec_avx512: - endbranch64 GENERIC_DES_DEC DOCSIS ret diff --git a/lib/avx512/gcm_avx512.asm b/lib/avx512_t1/gcm_avx512.asm similarity index 99% rename from lib/avx512/gcm_avx512.asm rename to lib/avx512_t1/gcm_avx512.asm index f8c44a8c213551f17356aec30aa6d4c622bb624e..352a89c5aa9177fa9cb69e80d3992940b16084a5 100644 --- a/lib/avx512/gcm_avx512.asm +++ b/lib/avx512_t1/gcm_avx512.asm @@ -483,7 +483,7 @@ default rel %ifidn %%ENC_DEC, DEC vmovdqa xmm3, xmm1 %endif - vpxor xmm9, xmm1 ; Cyphertext XOR E(K, Yn) + vpxor xmm9, xmm1 ; Ciphertext XOR E(K, Yn) mov r15, %%PLAIN_CYPH_LEN add r15, r13 diff --git a/lib/avx512_t1/mb_mgr_avx512.c b/lib/avx512_t1/mb_mgr_avx512.c new file mode 100644 index 0000000000000000000000000000000000000000..ba04cb5b7bfa903cc9e821ca4c88c874faa3c529 --- /dev/null +++ b/lib/avx512_t1/mb_mgr_avx512.c @@ -0,0 +1,101 @@ +/******************************************************************************* + Copyright (c) 2012-2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/error.h" +#include "include/cpu_feature.h" +#include "include/aesni_emu.h" +#include "include/error.h" +#include "include/arch_x86_64.h" /* self-test */ + +IMB_DLL_LOCAL void +init_mb_mgr_avx512_internal(IMB_MGR *state, const int reset_mgrs) +{ +#ifdef SAFE_PARAM + if (state == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); + return; + } +#endif + + if (!(state->features & IMB_FEATURE_AESNI)) { + fallback_no_aesni(state, 1); + return; + } + + /* reset error status */ + imb_set_errno(state, 0); + + state->features = cpu_feature_adjust(state->flags, + cpu_feature_detect()); + + if ((state->features & IMB_CPUFLAGS_AVX512_T2) == + IMB_CPUFLAGS_AVX512_T2) + init_mb_mgr_avx512_t2_internal(state, reset_mgrs); + else + init_mb_mgr_avx512_t1_internal(state, reset_mgrs); +} + +void +init_mb_mgr_avx512(IMB_MGR *state) +{ + init_mb_mgr_avx512_internal(state, 1); + + if (!self_test(state)) + imb_set_errno(state, IMB_ERR_SELFTEST); +} + +IMB_JOB *submit_job_avx512(IMB_MGR *state) +{ + return IMB_SUBMIT_JOB(state); +} + +IMB_JOB *flush_job_avx512(IMB_MGR *state) +{ + return IMB_FLUSH_JOB(state); +} + +uint32_t queue_size_avx512(IMB_MGR *state) +{ + return IMB_QUEUE_SIZE(state); +} + +IMB_JOB *submit_job_nocheck_avx512(IMB_MGR *state) +{ + return IMB_SUBMIT_JOB_NOCHECK(state); +} + +IMB_JOB *get_next_job_avx512(IMB_MGR *state) +{ + return IMB_GET_NEXT_JOB(state); +} + +IMB_JOB *get_completed_job_avx512(IMB_MGR *state) +{ + return IMB_GET_COMPLETED_JOB(state); +} diff --git a/lib/avx512_t1/mb_mgr_avx512_t1.c b/lib/avx512_t1/mb_mgr_avx512_t1.c new file mode 100644 index 0000000000000000000000000000000000000000..3dbbc17820e201c08de9ad2c9ea9ac27d85b3bab --- /dev/null +++ b/lib/avx512_t1/mb_mgr_avx512_t1.c @@ -0,0 +1,590 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include +#include +#include + +#define AVX512 + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/kasumi_interface.h" +#include "include/zuc_internal.h" +#include "include/snow3g.h" +#include "include/gcm.h" +#include "include/chacha20_poly1305.h" +#include "include/snow3g_submit.h" + +#include "include/save_xmms.h" +#include "include/des.h" +#include "include/gcm.h" +#include "include/cpu_feature.h" +#include "include/noaesni.h" +#include "include/aesni_emu.h" +#include "include/error.h" + +#include "include/arch_avx_type1.h" /* AESNI */ +#include "include/arch_avx2_type1.h" /* MD5 */ +#include "include/arch_avx512_type1.h" +#include "include/arch_avx512_type2.h" + +#include "include/ooo_mgr_reset.h" + +#define SAVE_XMMS save_xmms_avx +#define RESTORE_XMMS restore_xmms_avx + +/* JOB API */ +#define SUBMIT_JOB submit_job_avx512_t1 +#define FLUSH_JOB flush_job_avx512_t1 +#define QUEUE_SIZE queue_size_avx512_t1 +#define SUBMIT_JOB_NOCHECK submit_job_nocheck_avx512_t1 +#define GET_NEXT_JOB get_next_job_avx512_t1 +#define GET_COMPLETED_JOB get_completed_job_avx512_t1 +#define GET_NEXT_BURST get_next_burst_avx512_t1 +#define SUBMIT_BURST submit_burst_avx512_t1 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_avx512_t1 +#define FLUSH_BURST flush_burst_avx512_t1 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_avx512_t1 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_avx512_t1 +#define SUBMIT_HASH_BURST submit_hash_burst_avx512_t1 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_avx512_t1 + + +/* Hash */ +#define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_AVX512 +#define FLUSH_JOB_HASH FLUSH_JOB_HASH_AVX512 + +/* Cipher encrypt / decrypt */ +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_AVX512 +#define FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_AVX512 +#define SUBMIT_JOB_CIPHER_DEC SUBMIT_JOB_CIPHER_DEC_AVX512 + +/* AES-GCM */ +#define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_avx512 +#define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_avx512 +#define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_avx512 +#define AES_GCM_ENC_IV_192 aes_gcm_enc_var_iv_192_avx512 +#define AES_GCM_DEC_IV_256 aes_gcm_dec_var_iv_256_avx512 +#define AES_GCM_ENC_IV_256 aes_gcm_enc_var_iv_256_avx512 + +#define SUBMIT_JOB_AES_GCM_DEC submit_job_gcm_dec_avx512 +#define SUBMIT_JOB_AES_GCM_ENC submit_job_gcm_enc_avx512 + +/* AES-CBC */ +#define SUBMIT_JOB_AES_CBC_128_ENC submit_job_aes128_enc_avx +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_avx +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_avx + +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_avx +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_avx +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_avx + +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_avx +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_avx +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_avx + +#define AES_CBC_DEC_128 aes_cbc_dec_128_avx +#define AES_CBC_DEC_192 aes_cbc_dec_192_avx +#define AES_CBC_DEC_256 aes_cbc_dec_256_avx + +/* AES-CBCS */ +#define SUBMIT_JOB_AES128_CBCS_1_9_ENC submit_job_aes128_cbcs_1_9_enc_avx +#define FLUSH_JOB_AES128_CBCS_1_9_ENC flush_job_aes128_cbcs_1_9_enc_avx +#define SUBMIT_JOB_AES128_CBCS_1_9_DEC submit_job_aes128_cbcs_1_9_dec_avx +#define AES_CBCS_1_9_DEC_128 aes_cbcs_1_9_dec_128_avx + +/* AES-ECB */ +#define SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_avx512 
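The dispatcher in mb_mgr_avx512.c above selects the type-1 or type-2 AVX512 implementation when the manager is initialized; applications normally drive it through the top-level job API rather than the per-arch entry points mapped by these defines. A minimal, hedged usage sketch (error handling and job-field setup elided; it assumes the public job API declared in the header this file includes and is not taken verbatim from the repository):

```c
#include "ipsec-mb.h" /* public header, as included by mb_mgr_avx512.c */

int run_one_burst_avx512(void)
{
        IMB_MGR *mgr = alloc_mb_mgr(0);

        if (mgr == NULL)
                return -1;

        init_mb_mgr_avx512(mgr); /* picks the t1 or t2 path internally */

        IMB_JOB *job = IMB_GET_NEXT_JOB(mgr);

        /* ... fill in the cipher/auth fields of 'job' here ... */

        for (job = IMB_SUBMIT_JOB(mgr); job != NULL;
             job = IMB_GET_COMPLETED_JOB(mgr)) {
                /* inspect job->status, e.g. IMB_STATUS_COMPLETED */
        }

        /* flush jobs still held back by the out-of-order managers */
        while (IMB_FLUSH_JOB(mgr) != NULL)
                ;

        free_mb_mgr(mgr);
        return 0;
}
```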
+#define SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_avx512 +#define SUBMIT_JOB_AES_ECB_192_ENC submit_job_aes_ecb_192_enc_avx512 +#define SUBMIT_JOB_AES_ECB_192_DEC submit_job_aes_ecb_192_dec_avx512 +#define SUBMIT_JOB_AES_ECB_256_ENC submit_job_aes_ecb_256_enc_avx512 +#define SUBMIT_JOB_AES_ECB_256_DEC submit_job_aes_ecb_256_dec_avx512 + +#define AES_ECB_ENC_128 aes_ecb_enc_128_avx +#define AES_ECB_ENC_192 aes_ecb_enc_192_avx +#define AES_ECB_ENC_256 aes_ecb_enc_256_avx +#define AES_ECB_DEC_128 aes_ecb_dec_128_avx +#define AES_ECB_DEC_192 aes_ecb_dec_192_avx +#define AES_ECB_DEC_256 aes_ecb_dec_256_avx + +/* AES-CTR */ +#define AES_CTR_128 aes_cntr_128_avx +#define AES_CTR_192 aes_cntr_192_avx +#define AES_CTR_256 aes_cntr_256_avx +#define AES_CTR_128_BIT aes_cntr_bit_128_avx +#define AES_CTR_192_BIT aes_cntr_bit_192_avx +#define AES_CTR_256_BIT aes_cntr_bit_256_avx + +/* AES-CCM */ +#define AES_CNTR_CCM_128 aes_cntr_ccm_128_avx +#define AES_CNTR_CCM_256 aes_cntr_ccm_256_avx + +#define FLUSH_JOB_AES128_CCM_AUTH flush_job_aes128_ccm_auth_avx +#define SUBMIT_JOB_AES128_CCM_AUTH submit_job_aes128_ccm_auth_avx + +#define FLUSH_JOB_AES256_CCM_AUTH flush_job_aes256_ccm_auth_avx +#define SUBMIT_JOB_AES256_CCM_AUTH submit_job_aes256_ccm_auth_avx + +/* AES-CMAC */ +#define FLUSH_JOB_AES128_CMAC_AUTH flush_job_aes128_cmac_auth_avx +#define SUBMIT_JOB_AES128_CMAC_AUTH submit_job_aes128_cmac_auth_avx + +#define FLUSH_JOB_AES256_CMAC_AUTH flush_job_aes256_cmac_auth_avx +#define SUBMIT_JOB_AES256_CMAC_AUTH submit_job_aes256_cmac_auth_avx + +/* AES-CFB */ +#define AES_CFB_128_ONE aes_cfb_128_one_avx512 +#define AES_CFB_256_ONE aes_cfb_256_one_avx512 + +/* AES-XCBC */ +#define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_avx +#define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_avx + +/* PON */ +#define SUBMIT_JOB_PON_ENC submit_job_pon_enc_avx +#define SUBMIT_JOB_PON_DEC submit_job_pon_dec_avx +#define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_avx +#define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_avx + +/* SHA1/224/256/384/512 */ +#define SUBMIT_JOB_SHA1 submit_job_sha1_avx512 +#define FLUSH_JOB_SHA1 flush_job_sha1_avx512 +#define SUBMIT_JOB_SHA224 submit_job_sha224_avx512 +#define FLUSH_JOB_SHA224 flush_job_sha224_avx512 +#define SUBMIT_JOB_SHA256 submit_job_sha256_avx512 +#define FLUSH_JOB_SHA256 flush_job_sha256_avx512 +#define SUBMIT_JOB_SHA384 submit_job_sha384_avx512 +#define FLUSH_JOB_SHA384 flush_job_sha384_avx512 +#define SUBMIT_JOB_SHA512 submit_job_sha512_avx512 +#define FLUSH_JOB_SHA512 flush_job_sha512_avx512 + +/* HMAC-SHA1/224/256/384/512 */ +#define SUBMIT_JOB_HMAC submit_job_hmac_avx512 +#define FLUSH_JOB_HMAC flush_job_hmac_avx512 +#define SUBMIT_JOB_HMAC_SHA_224 submit_job_hmac_sha_224_avx512 +#define FLUSH_JOB_HMAC_SHA_224 flush_job_hmac_sha_224_avx512 +#define SUBMIT_JOB_HMAC_SHA_256 submit_job_hmac_sha_256_avx512 +#define FLUSH_JOB_HMAC_SHA_256 flush_job_hmac_sha_256_avx512 +#define SUBMIT_JOB_HMAC_SHA_384 submit_job_hmac_sha_384_avx512 +#define FLUSH_JOB_HMAC_SHA_384 flush_job_hmac_sha_384_avx512 +#define SUBMIT_JOB_HMAC_SHA_512 submit_job_hmac_sha_512_avx512 +#define FLUSH_JOB_HMAC_SHA_512 flush_job_hmac_sha_512_avx512 +#define SUBMIT_JOB_HMAC_MD5 submit_job_hmac_md5_avx2 +#define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_avx2 + +/* DES & 3DES */ +#define SUBMIT_JOB_DES_CBC_ENC submit_job_des_cbc_enc_avx512 +#define FLUSH_JOB_DES_CBC_ENC flush_job_des_cbc_enc_avx512 + +#define SUBMIT_JOB_DES_CBC_DEC submit_job_des_cbc_dec_avx512 +#define FLUSH_JOB_DES_CBC_DEC 
flush_job_des_cbc_dec_avx512 + +#define SUBMIT_JOB_3DES_CBC_ENC submit_job_3des_cbc_enc_avx512 +#define FLUSH_JOB_3DES_CBC_ENC flush_job_3des_cbc_enc_avx512 + +#define SUBMIT_JOB_3DES_CBC_DEC submit_job_3des_cbc_dec_avx512 +#define FLUSH_JOB_3DES_CBC_DEC flush_job_3des_cbc_dec_avx512 + +/* DES-DOCSIS */ +#define SUBMIT_JOB_DOCSIS_DES_ENC submit_job_docsis_des_enc_avx512 +#define FLUSH_JOB_DOCSIS_DES_ENC flush_job_docsis_des_enc_avx512 + +#define SUBMIT_JOB_DOCSIS_DES_DEC submit_job_docsis_des_dec_avx512 +#define FLUSH_JOB_DOCSIS_DES_DEC flush_job_docsis_des_dec_avx512 + +/* CHACHA20 & POLY1305 */ +#define SUBMIT_JOB_CHACHA20_ENC_DEC submit_job_chacha20_enc_dec_avx512 +#define SUBMIT_JOB_CHACHA20_POLY1305 aead_chacha20_poly1305_avx512 +#define SUBMIT_JOB_CHACHA20_POLY1305_SGL aead_chacha20_poly1305_sgl_avx512 +#define POLY1305_MAC poly1305_mac_plain_avx512 + +/* ZUC EEA3 & EIA3 */ +#define SUBMIT_JOB_ZUC_EEA3 submit_job_zuc_eea3_no_gfni_avx512 +#define FLUSH_JOB_ZUC_EEA3 flush_job_zuc_eea3_no_gfni_avx512 +#define SUBMIT_JOB_ZUC_EIA3 submit_job_zuc_eia3_no_gfni_avx512 +#define FLUSH_JOB_ZUC_EIA3 flush_job_zuc_eia3_no_gfni_avx512 +#define SUBMIT_JOB_ZUC256_EEA3 submit_job_zuc256_eea3_no_gfni_avx512 +#define FLUSH_JOB_ZUC256_EEA3 flush_job_zuc256_eea3_no_gfni_avx512 +#define SUBMIT_JOB_ZUC256_EIA3 submit_job_zuc256_eia3_no_gfni_avx512 +#define FLUSH_JOB_ZUC256_EIA3 flush_job_zuc256_eia3_no_gfni_avx512 + +/* SNOW-V */ +#define SUBMIT_JOB_SNOW_V snow_v_avx +#define SUBMIT_JOB_SNOW_V_AEAD snow_v_aead_init_avx + +/* SNOW3G UE2 & UIA2 */ +static IMB_JOB *submit_snow3g_uea2_job_avx512(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + if ((job->msg_len_to_cipher_in_bits & 7) || + (job->cipher_start_offset_in_bits & 7)) + return def_submit_snow3g_uea2_job(state, job); + + return submit_job_snow3g_uea2_avx512(snow3g_uea2_ooo, job); +} + +static IMB_JOB *flush_snow3g_uea2_job_avx512(IMB_MGR *state) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + return flush_job_snow3g_uea2_avx512(snow3g_uea2_ooo); +} + +#define SUBMIT_JOB_SNOW3G_UEA2 submit_snow3g_uea2_job_avx512 +#define FLUSH_JOB_SNOW3G_UEA2 flush_snow3g_uea2_job_avx512 + +#define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_avx512 +#define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_avx512 + +/* AES-DOCSIS */ +#define ETHERNET_FCS ethernet_fcs_avx_local + +__forceinline +IMB_JOB * +SUBMIT_JOB_DOCSIS_SEC_CRC_ENC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job, + const uint64_t key_size); +__forceinline +IMB_JOB * +FLUSH_JOB_DOCSIS_SEC_CRC_ENC(MB_MGR_DOCSIS_AES_OOO *state, + const uint64_t key_size); + +__forceinline +IMB_JOB * +SUBMIT_JOB_DOCSIS_SEC_CRC_DEC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job, + const uint64_t key_size); + +__forceinline +IMB_JOB * +SUBMIT_JOB_DOCSIS128_SEC_DEC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job); + +__forceinline +IMB_JOB * +SUBMIT_JOB_DOCSIS256_SEC_DEC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job); + +static IMB_JOB * +submit_aes_docsis128_dec_crc32_avx512(MB_MGR_DOCSIS_AES_OOO *state, + IMB_JOB *job) +{ + (void) state; + + if (job->msg_len_to_hash_in_bytes == 0) { + if (job->msg_len_to_cipher_in_bytes == 0) { + /* NO cipher, NO CRC32 */ + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; + } + + /* Cipher, NO CRC32 */ + return SUBMIT_JOB_DOCSIS128_SEC_DEC(state, job); + } + + /* Cipher + CRC32 // CRC32 */ + aes_docsis128_dec_crc32_avx512(job); + + return job; +} + +static IMB_JOB * +submit_aes_docsis256_dec_crc32_avx512(MB_MGR_DOCSIS_AES_OOO 
*state, + IMB_JOB *job) +{ + (void) state; + + if (job->msg_len_to_hash_in_bytes == 0) { + if (job->msg_len_to_cipher_in_bytes == 0) { + /* NO cipher, NO CRC32 */ + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; + } + + /* Cipher, NO CRC32 */ + return SUBMIT_JOB_DOCSIS256_SEC_DEC(state, job); + } + + /* Cipher + CRC32 // CRC32 */ + aes_docsis256_dec_crc32_avx512(job); + + return job; +} + +#define SUBMIT_JOB_DOCSIS128_SEC_CRC_ENC submit_job_aes_docsis128_enc_crc32_avx512 +#define SUBMIT_JOB_DOCSIS256_SEC_CRC_ENC submit_job_aes_docsis256_enc_crc32_avx512 +#define FLUSH_JOB_DOCSIS128_SEC_CRC_ENC flush_job_aes_docsis128_enc_crc32_avx512 +#define FLUSH_JOB_DOCSIS256_SEC_CRC_ENC flush_job_aes_docsis256_enc_crc32_avx512 +#define SUBMIT_JOB_DOCSIS128_SEC_CRC_DEC submit_aes_docsis128_dec_crc32_avx512 +#define SUBMIT_JOB_DOCSIS256_SEC_CRC_DEC submit_aes_docsis256_dec_crc32_avx512 + +/* ====================================================================== */ + +static void +reset_ooo_mgrs(IMB_MGR *state) +{ + /* Init AES out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_ooo, 8); + ooo_mgr_aes_reset(state->aes192_ooo, 8); + ooo_mgr_aes_reset(state->aes256_ooo, 8); + + /* DOCSIS SEC BPI (AES CBC + AES CFB for partial block) + * uses same settings as AES CBC. + */ + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 8); + + /* DES, 3DES and DOCSIS DES (DES CBC + DES CFB for partial block) */ + ooo_mgr_des_reset(state->des_enc_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->des_dec_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->des3_enc_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->des3_dec_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->docsis_des_enc_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->docsis_des_dec_ooo, AVX512_NUM_DES_LANES); + + /* Init ZUC out-of-order fields */ + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 16); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 16); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 16); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 16); + + /* Init HMAC/SHA1 out-of-order fields */ + ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, AVX512_NUM_SHA1_LANES); + + /* Init HMAC/SHA224 out-of-order fields */ + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, + AVX512_NUM_SHA256_LANES); + + /* Init HMAC/SHA256 out-of-order fields */ + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, + AVX512_NUM_SHA256_LANES); + + /* Init HMAC/SHA384 out-of-order fields */ + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + AVX512_NUM_SHA512_LANES); + + /* Init HMAC/SHA512 out-of-order fields */ + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + AVX512_NUM_SHA512_LANES); + + /* Init HMAC/MD5 out-of-order fields */ + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, AVX2_NUM_MD5_LANES); + + /* Init AES/XCBC OOO fields */ + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 8); + + /* Init AES-CCM auth out-of-order fields */ + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 8); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 8); + + /* Init AES-CMAC auth out-of-order fields */ + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 8); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 8); + + /* Init AES CBC-S out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 8); + + /* Init SNOW3G out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 16); + 
ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 16); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, AVX512_NUM_SHA1_LANES); + + /* Init SHA224 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_224_ooo, AVX512_NUM_SHA256_LANES); + + /* Init SHA256 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, AVX512_NUM_SHA256_LANES); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, AVX512_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, AVX512_NUM_SHA512_LANES); +} + +IMB_DLL_LOCAL void +init_mb_mgr_avx512_t1_internal(IMB_MGR *state, const int reset_mgrs) +{ + /* Check if CPU flags needed for AVX512 interface are present */ + if ((state->features & IMB_CPUFLAGS_AVX512) != IMB_CPUFLAGS_AVX512) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + + /* Set architecture for future checks */ + state->used_arch = (uint32_t) IMB_ARCH_AVX512; + + if (reset_mgrs) { + reset_ooo_mgrs(state); + + /* Init "in order" components */ + state->next_job = 0; + state->earliest_job = -1; + } + + /* set handlers */ + state->get_next_job = GET_NEXT_JOB; + state->submit_job = SUBMIT_JOB; + state->submit_job_nocheck = SUBMIT_JOB_NOCHECK; + state->get_completed_job = GET_COMPLETED_JOB; + state->flush_job = FLUSH_JOB; + state->queue_size = QUEUE_SIZE; + state->get_next_burst = GET_NEXT_BURST; + state->submit_burst = SUBMIT_BURST; + state->submit_burst_nocheck= SUBMIT_BURST_NOCHECK; + state->flush_burst = FLUSH_BURST; + state->submit_cipher_burst = SUBMIT_CIPHER_BURST; + state->submit_cipher_burst_nocheck = SUBMIT_CIPHER_BURST_NOCHECK; + state->submit_hash_burst = SUBMIT_HASH_BURST; + state->submit_hash_burst_nocheck = SUBMIT_HASH_BURST_NOCHECK; + + state->keyexp_128 = aes_keyexp_128_avx512; + state->keyexp_192 = aes_keyexp_192_avx512; + state->keyexp_256 = aes_keyexp_256_avx512; + + state->cmac_subkey_gen_128 = aes_cmac_subkey_gen_avx512; + state->cmac_subkey_gen_256 = aes_cmac_256_subkey_gen_avx512; + + state->xcbc_keyexp = aes_xcbc_expand_key_avx512; + state->des_key_sched = des_key_schedule; + state->sha1_one_block = sha1_one_block_avx512; + state->sha1 = sha1_avx512; + state->sha224_one_block = sha224_one_block_avx512; + state->sha224 = sha224_avx512; + state->sha256_one_block = sha256_one_block_avx512; + state->sha256 = sha256_avx512; + state->sha384_one_block = sha384_one_block_avx512; + state->sha384 = sha384_avx512; + state->sha512_one_block = sha512_one_block_avx512; + state->sha512 = sha512_avx512; + state->md5_one_block = md5_one_block_avx512; + + state->aes128_cfb_one = aes_cfb_128_one_avx512; + + state->eea3_1_buffer = zuc_eea3_1_buffer_avx512; + state->eea3_4_buffer = zuc_eea3_4_buffer_avx; + state->eia3_1_buffer = zuc_eia3_1_buffer_avx512; + state->eea3_n_buffer = zuc_eea3_n_buffer_avx512; + state->eia3_n_buffer = zuc_eia3_n_buffer_avx512; + + state->f8_1_buffer = kasumi_f8_1_buffer_avx; + state->f8_1_buffer_bit = kasumi_f8_1_buffer_bit_avx; + state->f8_2_buffer = kasumi_f8_2_buffer_avx; + state->f8_3_buffer = kasumi_f8_3_buffer_avx; + state->f8_4_buffer = kasumi_f8_4_buffer_avx; + state->f8_n_buffer = kasumi_f8_n_buffer_avx; + state->f9_1_buffer = kasumi_f9_1_buffer_avx; + state->f9_1_buffer_user = kasumi_f9_1_buffer_user_avx; + state->kasumi_init_f8_key_sched = kasumi_init_f8_key_sched_avx; + state->kasumi_init_f9_key_sched = kasumi_init_f9_key_sched_avx; + state->kasumi_key_sched_size = kasumi_key_sched_size_avx; + + state->snow3g_f8_1_buffer_bit 
= snow3g_f8_1_buffer_bit_avx512; + state->snow3g_f8_1_buffer = snow3g_f8_1_buffer_avx512; + state->snow3g_f8_2_buffer = snow3g_f8_2_buffer_avx512; + state->snow3g_f8_4_buffer = snow3g_f8_4_buffer_avx512; + state->snow3g_f8_8_buffer = snow3g_f8_8_buffer_avx512; + state->snow3g_f8_n_buffer = snow3g_f8_n_buffer_avx512; + state->snow3g_f8_8_buffer_multikey = snow3g_f8_8_buffer_multikey_avx512; + state->snow3g_f8_n_buffer_multikey = snow3g_f8_n_buffer_multikey_avx512; + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_avx512; + state->snow3g_init_key_sched = snow3g_init_key_sched_avx512; + state->snow3g_key_sched_size = snow3g_key_sched_size_avx512; + + state->hec_32 = hec_32_avx; + state->hec_64 = hec_64_avx; + + state->crc32_ethernet_fcs = ethernet_fcs_avx; + state->crc16_x25 = crc16_x25_avx; + state->crc32_sctp = crc32_sctp_avx; + state->crc24_lte_a = crc24_lte_a_avx; + state->crc24_lte_b = crc24_lte_b_avx; + state->crc16_fp_data = crc16_fp_data_avx; + state->crc11_fp_header = crc11_fp_header_avx; + state->crc7_fp_header = crc7_fp_header_avx; + state->crc10_iuup_data = crc10_iuup_data_avx; + state->crc6_iuup_header = crc6_iuup_header_avx; + state->crc32_wimax_ofdma_data = crc32_wimax_ofdma_data_avx; + state->crc8_wimax_ofdma_hcs = crc8_wimax_ofdma_hcs_avx; + + state->chacha20_poly1305_init = init_chacha20_poly1305_avx512; + state->chacha20_poly1305_enc_update = + update_enc_chacha20_poly1305_avx512; + state->chacha20_poly1305_dec_update = + update_dec_chacha20_poly1305_avx512; + state->chacha20_poly1305_finalize = + finalize_chacha20_poly1305_avx512; + + state->gcm128_enc = aes_gcm_enc_128_avx512; + state->gcm192_enc = aes_gcm_enc_192_avx512; + state->gcm256_enc = aes_gcm_enc_256_avx512; + state->gcm128_dec = aes_gcm_dec_128_avx512; + state->gcm192_dec = aes_gcm_dec_192_avx512; + state->gcm256_dec = aes_gcm_dec_256_avx512; + state->gcm128_init = aes_gcm_init_128_avx512; + state->gcm192_init = aes_gcm_init_192_avx512; + state->gcm256_init = aes_gcm_init_256_avx512; + state->gcm128_init_var_iv = aes_gcm_init_var_iv_128_avx512; + state->gcm192_init_var_iv = aes_gcm_init_var_iv_192_avx512; + state->gcm256_init_var_iv = aes_gcm_init_var_iv_256_avx512; + state->gcm128_enc_update = aes_gcm_enc_128_update_avx512; + state->gcm192_enc_update = aes_gcm_enc_192_update_avx512; + state->gcm256_enc_update = aes_gcm_enc_256_update_avx512; + state->gcm128_dec_update = aes_gcm_dec_128_update_avx512; + state->gcm192_dec_update = aes_gcm_dec_192_update_avx512; + state->gcm256_dec_update = aes_gcm_dec_256_update_avx512; + state->gcm128_enc_finalize = aes_gcm_enc_128_finalize_avx512; + state->gcm192_enc_finalize = aes_gcm_enc_192_finalize_avx512; + state->gcm256_enc_finalize = aes_gcm_enc_256_finalize_avx512; + state->gcm128_dec_finalize = aes_gcm_dec_128_finalize_avx512; + state->gcm192_dec_finalize = aes_gcm_dec_192_finalize_avx512; + state->gcm256_dec_finalize = aes_gcm_dec_256_finalize_avx512; + state->gcm128_precomp = aes_gcm_precomp_128_avx512; + state->gcm192_precomp = aes_gcm_precomp_192_avx512; + state->gcm256_precomp = aes_gcm_precomp_256_avx512; + state->gcm128_pre = aes_gcm_pre_128_avx512; + state->gcm192_pre = aes_gcm_pre_192_avx512; + state->gcm256_pre = aes_gcm_pre_256_avx512; + + state->ghash = ghash_avx512; + state->ghash_pre = ghash_pre_avx_gen2; + + state->gmac128_init = imb_aes_gmac_init_128_avx512; + state->gmac192_init = imb_aes_gmac_init_192_avx512; + state->gmac256_init = imb_aes_gmac_init_256_avx512; + state->gmac128_update = imb_aes_gmac_update_128_avx512; + state->gmac192_update = 
imb_aes_gmac_update_192_avx512; + state->gmac256_update = imb_aes_gmac_update_256_avx512; + state->gmac128_finalize = imb_aes_gmac_finalize_128_avx512; + state->gmac192_finalize = imb_aes_gmac_finalize_192_avx512; + state->gmac256_finalize = imb_aes_gmac_finalize_256_avx512; +} + +#include "mb_mgr_code.h" diff --git a/lib/avx512/mb_mgr_des_avx512.asm b/lib/avx512_t1/mb_mgr_des_avx512.asm similarity index 98% rename from lib/avx512/mb_mgr_des_avx512.asm rename to lib/avx512_t1/mb_mgr_des_avx512.asm index 2e470304dbe70579dc53976c74ed97bb498c2ff7..9a36428fac5e08db6a1270aa51240087c4861d07 100644 --- a/lib/avx512/mb_mgr_des_avx512.asm +++ b/lib/avx512_t1/mb_mgr_des_avx512.asm @@ -49,7 +49,7 @@ ;%define DO_DBGPRINT %include "include/dbgprint.asm" %include "include/const.inc" -%include "include/cet.inc" + extern docsis_des_x16_enc_avx512 extern docsis_des_x16_dec_avx512 extern des_x16_cbc_enc_avx512 @@ -436,7 +436,6 @@ mksection .text align 64 MKGLOBAL(submit_job_des_cbc_enc_avx512,function,internal) submit_job_des_cbc_enc_avx512: - endbranch64 GENERIC_DES_SUBMIT DES, ENC ret @@ -445,7 +444,6 @@ submit_job_des_cbc_enc_avx512: align 64 MKGLOBAL(submit_job_des_cbc_dec_avx512,function,internal) submit_job_des_cbc_dec_avx512: - endbranch64 GENERIC_DES_SUBMIT DES, DEC ret @@ -454,7 +452,6 @@ submit_job_des_cbc_dec_avx512: align 64 MKGLOBAL(submit_job_docsis_des_enc_avx512,function,internal) submit_job_docsis_des_enc_avx512: - endbranch64 GENERIC_DES_SUBMIT DOCSIS, ENC ret @@ -463,7 +460,6 @@ submit_job_docsis_des_enc_avx512: align 64 MKGLOBAL(submit_job_docsis_des_dec_avx512,function,internal) submit_job_docsis_des_dec_avx512: - endbranch64 GENERIC_DES_SUBMIT DOCSIS, DEC ret @@ -472,7 +468,6 @@ submit_job_docsis_des_dec_avx512: align 64 MKGLOBAL(submit_job_3des_cbc_enc_avx512,function,internal) submit_job_3des_cbc_enc_avx512: - endbranch64 GENERIC_DES_SUBMIT 3DES, ENC ret @@ -481,7 +476,6 @@ submit_job_3des_cbc_enc_avx512: align 64 MKGLOBAL(submit_job_3des_cbc_dec_avx512,function,internal) submit_job_3des_cbc_dec_avx512: - endbranch64 GENERIC_DES_SUBMIT 3DES, DEC ret @@ -489,7 +483,6 @@ submit_job_3des_cbc_dec_avx512: align 64 MKGLOBAL(flush_job_des_cbc_enc_avx512,function,internal) flush_job_des_cbc_enc_avx512: - endbranch64 GENERIC_DES_FLUSH DES, ENC ret @@ -497,7 +490,6 @@ flush_job_des_cbc_enc_avx512: align 64 MKGLOBAL(flush_job_des_cbc_dec_avx512,function,internal) flush_job_des_cbc_dec_avx512: - endbranch64 GENERIC_DES_FLUSH DES, DEC ret @@ -505,7 +497,6 @@ flush_job_des_cbc_dec_avx512: align 64 MKGLOBAL(flush_job_docsis_des_enc_avx512,function,internal) flush_job_docsis_des_enc_avx512: - endbranch64 GENERIC_DES_FLUSH DOCSIS, ENC ret @@ -513,7 +504,6 @@ flush_job_docsis_des_enc_avx512: align 64 MKGLOBAL(flush_job_docsis_des_dec_avx512,function,internal) flush_job_docsis_des_dec_avx512: - endbranch64 GENERIC_DES_FLUSH DOCSIS, DEC ret @@ -521,7 +511,6 @@ flush_job_docsis_des_dec_avx512: align 64 MKGLOBAL(flush_job_3des_cbc_enc_avx512,function,internal) flush_job_3des_cbc_enc_avx512: - endbranch64 GENERIC_DES_FLUSH 3DES, ENC ret @@ -529,7 +518,6 @@ flush_job_3des_cbc_enc_avx512: align 64 MKGLOBAL(flush_job_3des_cbc_dec_avx512,function,internal) flush_job_3des_cbc_dec_avx512: - endbranch64 GENERIC_DES_FLUSH 3DES, DEC ret diff --git a/lib/avx512/mb_mgr_hmac_sha1_flush_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha1_flush_avx512.asm similarity index 99% rename from lib/avx512/mb_mgr_hmac_sha1_flush_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha1_flush_avx512.asm index 
1c923c8a1755169558cc72b729034f4309900828..3cc4be23296625a842ba410fe86cb58b5c6ff20b 100644 --- a/lib/avx512/mb_mgr_hmac_sha1_flush_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha1_flush_avx512.asm @@ -44,6 +44,7 @@ %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" +%include "include/clear_regs.asm" ;; %define DO_DBGPRINT %include "include/dbgprint.asm" @@ -349,10 +350,14 @@ APPEND(skip_clear_,I): %endif ;; SAFE_DATA -return: - DBGPRINTL "---------- exit hmac flush avx512 -----------" +%ifdef SAFE_DATA + clear_scratch_zmms_asm +%else vzeroupper +%endif +return: + DBGPRINTL "---------- exit hmac flush avx512 -----------" mov rbp, [rsp + _gpr_save + 8*0] mov r12, [rsp + _gpr_save + 8*1] mov r13, [rsp + _gpr_save + 8*2] diff --git a/lib/avx512/mb_mgr_hmac_sha1_submit_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha1_submit_avx512.asm similarity index 99% rename from lib/avx512/mb_mgr_hmac_sha1_submit_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha1_submit_avx512.asm index 3f7c505edf67136fd2f26b49083ad8b5bf6e61ab..947994bb03c2686302668121d2a7c04548c576aa 100644 --- a/lib/avx512/mb_mgr_hmac_sha1_submit_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha1_submit_avx512.asm @@ -46,10 +46,12 @@ %include "include/reg_sizes.asm" %include "include/memcpy.asm" %include "include/const.inc" -%include "include/cet.inc" +%include "include/clear_regs.asm" ;; %define DO_DBGPRINT %include "include/dbgprint.asm" +%use smartalign + extern sha1_x16_avx512 mksection .rodata @@ -120,7 +122,6 @@ endstruc ; arg 2 : rdx : job MKGLOBAL(submit_job_hmac_avx512,function,internal) submit_job_hmac_avx512: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -32 ; align to 32 byte boundary @@ -212,7 +213,6 @@ ge64_bytes: mov DWORD(num_lanes_inuse), [state + _num_lanes_inuse_sha1] cmp num_lanes_inuse, 0x10 ; all 16 lanes used? jne return_null - jmp start_loop align 16 start_loop: @@ -381,7 +381,11 @@ clear_ret: %endif return: +%ifdef SAFE_DATA + clear_scratch_zmms_asm +%else vzeroupper +%endif DBGPRINTL "---------- exit sha1 submit -----------" mov rbp, [rsp + _gpr_save + 8*0] diff --git a/lib/avx512/mb_mgr_hmac_sha224_flush_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha224_flush_avx512.asm similarity index 96% rename from lib/avx512/mb_mgr_hmac_sha224_flush_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha224_flush_avx512.asm index 582d83cc20b510c52677b63e7a45314a665fc3d8..b7467c311de005c75ce036529db2347cdba7f29d 100644 --- a/lib/avx512/mb_mgr_hmac_sha224_flush_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha224_flush_avx512.asm @@ -25,4 +25,4 @@ ;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;; %define SHA224 -%include "avx512/mb_mgr_hmac_sha256_flush_avx512.asm" +%include "avx512_t1/mb_mgr_hmac_sha256_flush_avx512.asm" diff --git a/lib/avx512/mb_mgr_hmac_sha224_submit_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha224_submit_avx512.asm similarity index 96% rename from lib/avx512/mb_mgr_hmac_sha224_submit_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha224_submit_avx512.asm index 775bdbb47aec5249d479cec662c50091c3e3cddf..d8828b4553a0326114a7fb678c143b9920a67c9c 100644 --- a/lib/avx512/mb_mgr_hmac_sha224_submit_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha224_submit_avx512.asm @@ -25,4 +25,4 @@ ;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;; %define SHA224 -%include "avx512/mb_mgr_hmac_sha256_submit_avx512.asm" +%include "avx512_t1/mb_mgr_hmac_sha256_submit_avx512.asm" diff --git a/lib/avx512/mb_mgr_hmac_sha256_flush_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha256_flush_avx512.asm similarity index 99% rename from lib/avx512/mb_mgr_hmac_sha256_flush_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha256_flush_avx512.asm index 95b5c12d15692d6b755d5c298a2fbdf067053e94..e449e94f496c29c4a2c66eebeccf3a48bb1f5b55 100644 --- a/lib/avx512/mb_mgr_hmac_sha256_flush_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha256_flush_avx512.asm @@ -44,7 +44,7 @@ %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" -%include "include/cet.inc" +%include "include/clear_regs.asm" ;; %define DO_DBGPRINT %include "include/dbgprint.asm" @@ -147,11 +147,9 @@ align 32 %ifdef SHA224 MKGLOBAL(flush_job_hmac_sha_224_avx512,function,internal) flush_job_hmac_sha_224_avx512: - endbranch64 %else MKGLOBAL(flush_job_hmac_sha_256_avx512,function,internal) flush_job_hmac_sha_256_avx512: - endbranch64 %endif mov rax, rsp sub rsp, STACK_size @@ -411,9 +409,13 @@ APPEND(skip_clear_,I): %endif ;; SAFE_DATA -return: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper +%endif +return: mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] diff --git a/lib/avx512/mb_mgr_hmac_sha256_submit_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha256_submit_avx512.asm similarity index 99% rename from lib/avx512/mb_mgr_hmac_sha256_submit_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha256_submit_avx512.asm index 46144a516938ed6aed16a05e391b57091dd98875..0660b19d204e0aa3788d84e2d7b69d0a7a638dcf 100644 --- a/lib/avx512/mb_mgr_hmac_sha256_submit_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha256_submit_avx512.asm @@ -46,10 +46,12 @@ %include "include/reg_sizes.asm" %include "include/memcpy.asm" %include "include/const.inc" -%include "include/cet.inc" +%include "include/clear_regs.asm" ;; %define DO_DBGPRINT %include "include/dbgprint.asm" +%use smartalign + extern sha256_x16_avx512 mksection .rodata @@ -117,11 +119,9 @@ endstruc %ifdef SHA224 MKGLOBAL(submit_job_hmac_sha_224_avx512,function,internal) submit_job_hmac_sha_224_avx512: - endbranch64 %else MKGLOBAL(submit_job_hmac_sha_256_avx512,function,internal) submit_job_hmac_sha_256_avx512: - endbranch64 %endif mov rax, rsp sub rsp, STACK_size @@ -212,7 +212,6 @@ lt64_bytes: ge64_bytes: cmp dword [state + _num_lanes_inuse_sha256], 0x10 ; all 16 lanes used? 
jne return_null - jmp start_loop align 16 start_loop: @@ -427,7 +426,11 @@ clear_ret: %endif ;; SAFE_DATA return: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] diff --git a/lib/avx512/mb_mgr_hmac_sha384_flush_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha384_flush_avx512.asm similarity index 96% rename from lib/avx512/mb_mgr_hmac_sha384_flush_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha384_flush_avx512.asm index edb49f1dfb2d5d1d7b75915eaf2b00f89bbf604c..70037f6db6b9b673dd65e08e67f8dfacee1366f2 100644 --- a/lib/avx512/mb_mgr_hmac_sha384_flush_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha384_flush_avx512.asm @@ -26,4 +26,4 @@ ;; %define SHA384 -%include "avx512/mb_mgr_hmac_sha512_flush_avx512.asm" +%include "avx512_t1/mb_mgr_hmac_sha512_flush_avx512.asm" diff --git a/lib/avx512/mb_mgr_hmac_sha384_submit_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha384_submit_avx512.asm similarity index 96% rename from lib/avx512/mb_mgr_hmac_sha384_submit_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha384_submit_avx512.asm index f2e0f240f0d42c2d822c651c1d2554fde6ad8f85..2fc8063cc7d45f21b09a4cb44c2d79e79b17f43b 100644 --- a/lib/avx512/mb_mgr_hmac_sha384_submit_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha384_submit_avx512.asm @@ -26,4 +26,4 @@ ;; %define SHA384 -%include "avx512/mb_mgr_hmac_sha512_submit_avx512.asm" +%include "avx512_t1/mb_mgr_hmac_sha512_submit_avx512.asm" diff --git a/lib/avx512/mb_mgr_hmac_sha512_flush_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha512_flush_avx512.asm similarity index 99% rename from lib/avx512/mb_mgr_hmac_sha512_flush_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha512_flush_avx512.asm index dd079461ae84e5f4277d484145893b95d5b13624..9b825d68601ffbb4b468e078889df567d54d7069 100644 --- a/lib/avx512/mb_mgr_hmac_sha512_flush_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha512_flush_avx512.asm @@ -36,7 +36,8 @@ %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" -%include "include/cet.inc" +%include "include/clear_regs.asm" + extern sha512_x8_avx512 mksection .rodata @@ -125,7 +126,6 @@ endstruc MKGLOBAL(flush_job_hmac_sha_512_avx512,function,internal) align 64 flush_job_hmac_sha_512_avx512: - endbranch64 %else ; JOB* flush_job_hmac_sha_512_avx512(MB_MGR_HMAC_SHA_512_OOO *state) ; arg 1 : state @@ -133,7 +133,6 @@ flush_job_hmac_sha_512_avx512: MKGLOBAL(flush_job_hmac_sha_384_avx512,function,internal) align 64 flush_job_hmac_sha_384_avx512: - endbranch64 %endif mov rax, rsp sub rsp, STACK_size @@ -364,9 +363,13 @@ APPEND(skip_clear_,I): %endif ;; SAFE_DATA -return: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper +%endif +return: mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] diff --git a/lib/avx512/mb_mgr_hmac_sha512_submit_avx512.asm b/lib/avx512_t1/mb_mgr_hmac_sha512_submit_avx512.asm similarity index 99% rename from lib/avx512/mb_mgr_hmac_sha512_submit_avx512.asm rename to lib/avx512_t1/mb_mgr_hmac_sha512_submit_avx512.asm index eead8a22ad9a3d4a0f037ba4d02f037f465c68fd..504815cc831856c5e487da928cb1c0b8e0b921ef 100644 --- a/lib/avx512/mb_mgr_hmac_sha512_submit_avx512.asm +++ b/lib/avx512_t1/mb_mgr_hmac_sha512_submit_avx512.asm @@ -38,7 +38,10 @@ %include "include/reg_sizes.asm" %include "include/memcpy.asm" %include "include/const.inc" -%include "include/cet.inc" +%include "include/clear_regs.asm" + +%use smartalign + extern sha512_x8_avx512 mksection .rodata @@ -112,12 +115,10 @@ 
align 64 MKGLOBAL(submit_job_hmac_sha_512_avx512,function,internal) %define SHA_X_DIGEST_SIZE 512 submit_job_hmac_sha_512_avx512: - endbranch64 %else MKGLOBAL(submit_job_hmac_sha_384_avx512,function,internal) %define SHA_X_DIGEST_SIZE 384 submit_job_hmac_sha_384_avx512: - endbranch64 %endif mov rax, rsp @@ -206,7 +207,6 @@ lt128_bytes: ge128_bytes: cmp unused_lanes, 0xf jne return_null - jmp start_loop align 32 start_loop: @@ -395,7 +395,11 @@ clear_ret: %endif ;; SAFE_DATA return: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] diff --git a/lib/avx512/mb_mgr_zuc_submit_flush_avx512.asm b/lib/avx512_t1/mb_mgr_zuc_submit_flush_avx512.asm similarity index 87% rename from lib/avx512/mb_mgr_zuc_submit_flush_avx512.asm rename to lib/avx512_t1/mb_mgr_zuc_submit_flush_avx512.asm index 9029e6b773efd3b66f11529210d6ae7e1caa71d0..be795516c31cd19dde5e94cc680a16e553dd9d84 100644 --- a/lib/avx512/mb_mgr_zuc_submit_flush_avx512.asm +++ b/lib/avx512_t1/mb_mgr_zuc_submit_flush_avx512.asm @@ -29,9 +29,9 @@ %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" %include "include/constants.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" +%include "include/clear_regs.asm" %ifndef SUBMIT_JOB_ZUC128_EEA3 %define SUBMIT_JOB_ZUC128_EEA3 submit_job_zuc_eea3_no_gfni_avx512 @@ -48,8 +48,12 @@ %define ZUC_CIPHER asm_ZucCipher_16_avx512 %define ZUC_REMAINDER_16 asm_Eia3RemainderAVX512_16 %define ZUC256_REMAINDER_16 asm_Eia3_256_RemainderAVX512_16 +%define ZUC_KEYGEN_SKIP16_16 asm_ZucGenKeystream_16_skip16_avx512 +%define ZUC_KEYGEN64B_SKIP16_16 asm_ZucGenKeystream64B_16_skip16_avx512 %define ZUC_KEYGEN_SKIP8_16 asm_ZucGenKeystream_16_skip8_avx512 %define ZUC_KEYGEN64B_SKIP8_16 asm_ZucGenKeystream64B_16_skip8_avx512 +%define ZUC_KEYGEN_SKIP4_16 asm_ZucGenKeystream_16_skip4_avx512 +%define ZUC_KEYGEN64B_SKIP4_16 asm_ZucGenKeystream64B_16_skip4_avx512 %define ZUC_KEYGEN_16 asm_ZucGenKeystream_16_avx512 %define ZUC_KEYGEN64B_16 asm_ZucGenKeystream64B_16_avx512 %define ZUC_ROUND64B asm_Eia3Round64BAVX512_16 @@ -69,16 +73,22 @@ extern asm_ZucCipher_16_avx512 extern asm_ZucCipher_16_gfni_avx512 extern asm_Zuc256Initialization_16_avx512 extern asm_Zuc256Initialization_16_gfni_avx512 -extern asm_ZucGenKeystream4B_16_avx512 -extern asm_ZucGenKeystream4B_16_gfni_avx512 extern asm_Eia3RemainderAVX512_16 extern asm_Eia3RemainderAVX512_16_VPCLMUL extern asm_Eia3_256_RemainderAVX512_16 extern asm_Eia3_256_RemainderAVX512_16_VPCLMUL +extern asm_ZucGenKeystream_16_skip16_avx512 +extern asm_ZucGenKeystream_16_skip16_gfni_avx512 +extern asm_ZucGenKeystream64B_16_skip16_avx512 +extern asm_ZucGenKeystream64B_16_skip16_gfni_avx512 extern asm_ZucGenKeystream_16_skip8_avx512 extern asm_ZucGenKeystream_16_skip8_gfni_avx512 extern asm_ZucGenKeystream64B_16_skip8_avx512 extern asm_ZucGenKeystream64B_16_skip8_gfni_avx512 +extern asm_ZucGenKeystream_16_skip4_avx512 +extern asm_ZucGenKeystream_16_skip4_gfni_avx512 +extern asm_ZucGenKeystream64B_16_skip4_avx512 +extern asm_ZucGenKeystream64B_16_skip4_gfni_avx512 extern asm_ZucGenKeystream_16_avx512 extern asm_ZucGenKeystream_16_gfni_avx512 extern asm_ZucGenKeystream64B_16_avx512 @@ -95,13 +105,15 @@ extern asm_Eia3_Nx64B_AVX512_16_VPCLMUL %define arg4 rcx %define arg5 r8 %define arg6 r9 +%define arg7 qword [rsp] %else %define arg1 rcx %define arg2 rdx %define arg3 r8 %define arg4 r9 %define arg5 qword [rsp + 32] -%define arg6 dword [rsp + 40] +%define 
arg6 qword [rsp + 40] +%define arg7 qword [rsp + 48] %endif %define state arg1 @@ -177,14 +189,15 @@ mksection .text ; Read and write next byte mov al, [tmp + 16] mov [state + _zuc_args_IV + lane + 16], al - ; Read next 6 bytes - movzx DWORD(tmp2), word [tmp + 17] - mov DWORD(tmp3), [tmp + 19] - shl tmp2, 32 - or tmp2, tmp3 - ; Expand to 8 bytes and write + ; Read last 8 bytes and keep only the last 6 bytes + mov tmp2, [tmp + 15] + mov tmp3, 0x0000ffffffffffff + bswap tmp2 + and tmp2, tmp3 ; last 6 bytes of IV + ; Expand 6 bytes to 8 bytes and write out mov tmp3, 0x3f3f3f3f3f3f3f3f pdep tmp2, tmp2, tmp3 + bswap tmp2 mov [state + _zuc_args_IV + lane + 17], tmp2 jmp %%_iv_read @@ -229,8 +242,9 @@ mksection .text ;; Find min length for lanes 0-7 vphminposuw xmm2, xmm0 + xor job_rax, job_rax cmp qword [state + _zuc_lanes_in_use], 16 - jne %%return_null_submit_eea3 + jne %%return_submit_eea3 ; Find min length for lanes 8-15 vpextrw DWORD(min_len), xmm2, 0 ; min value @@ -264,7 +278,7 @@ mksection .text lea arg2, [r11 + _zuc_args_IV] lea arg3, [r11 + _zuc_state] movzx DWORD(arg4), word [r11 + _zuc_init_not_done] - mov r10, 2 ; Argument 5 hardcoded to r10, as INIT is expecting it in that register + xor r10, r10 ; Argument 5 hardcoded to r10, as INIT is expecting it in that register call ZUC256_INIT_16 @@ -274,11 +288,8 @@ mksection .text mov word [r11 + _zuc_init_not_done], 0 ; Init done for all lanes - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 40 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r11 + _zuc_state] lea arg2, [r11 + _zuc_args_in] lea arg3, [r11 + _zuc_args_out] @@ -287,9 +298,8 @@ mksection .text call ZUC_CIPHER -%ifndef LINUX - add rsp, 40 -%endif + RESTORE_STACK_SPACE 5 + mov state, [rsp + _gpr_save + 8*8] mov job, [rsp + _gpr_save + 8*9] @@ -320,8 +330,11 @@ mksection .text %endif %%return_submit_eea3: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper - +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -333,12 +346,6 @@ mksection .text mov rdi, [rsp + _gpr_save + 8*7] %endif mov rsp, [rsp + _rsp_save] ; original SP - - ret - -%%return_null_submit_eea3: - xor job_rax, job_rax - jmp %%return_submit_eea3 %endmacro %macro FLUSH_JOB_ZUC_EEA3 1 @@ -377,8 +384,9 @@ mksection .text mov [rsp + _rsp_save], rax ; original SP ; check for empty + xor job_rax, job_rax cmp qword [state + _zuc_lanes_in_use], 0 - jz %%return_null_flush_eea3 + jz %%return_flush_eea3 ; Find lanes with NULL jobs vpxorq zmm0, zmm0 @@ -461,7 +469,7 @@ mksection .text lea arg2, [r12 + _zuc_args_IV] lea arg3, [r12 + _zuc_state] movzx DWORD(arg4), word [r12 + _zuc_init_not_done] - mov r10, 2 ; Argument 5 hardcoded to r10, as INIT is expecting it in that register + xor r10, r10 ; Argument 5 hardcoded to r10, as INIT is expecting it in that register call ZUC256_INIT_16 @@ -488,11 +496,8 @@ mksection .text vpbroadcastd zmm0, DWORD(tmp4) vmovdqa32 [r12 + _zuc_state + OFS_R2]{k1}, zmm0 - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 40 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_state] lea arg2, [r12 + _zuc_args_in] lea arg3, [r12 + _zuc_args_out] @@ -501,9 +506,8 @@ mksection .text call ZUC_CIPHER -%ifndef LINUX - add rsp, 40 -%endif + RESTORE_STACK_SPACE 5 + mov state, [rsp + _gpr_save + 8*8] ; Prepare bitmask to clear ZUC state with lane @@ -549,7 +553,11 @@ mksection .text %endrep %endif 
+%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper +%endif %%return_flush_eea3: @@ -564,12 +572,6 @@ mksection .text mov rdi, [rsp + _gpr_save + 8*7] %endif mov rsp, [rsp + _rsp_save] ; original SP - - ret - -%%return_null_flush_eea3: - xor job_rax, job_rax - jmp %%return_flush_eea3 %endmacro ; JOB* SUBMIT_JOB_ZUC128_EEA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) @@ -577,37 +579,38 @@ mksection .text ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC128_EEA3,function,internal) SUBMIT_JOB_ZUC128_EEA3: - endbranch64 SUBMIT_JOB_ZUC_EEA3 128 + ret ; JOB* SUBMIT_JOB_ZUC256_EEA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) ; arg 1 : state ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC256_EEA3,function,internal) SUBMIT_JOB_ZUC256_EEA3: - endbranch64 SUBMIT_JOB_ZUC_EEA3 256 + ret ; JOB* FLUSH_JOB_ZUC128_EEA3(MB_MGR_ZUC_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_ZUC128_EEA3,function,internal) FLUSH_JOB_ZUC128_EEA3: - endbranch64 FLUSH_JOB_ZUC_EEA3 128 + ret ; JOB* FLUSH_JOB_ZUC256_EEA3(MB_MGR_ZUC_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_ZUC256_EEA3,function,internal) FLUSH_JOB_ZUC256_EEA3: - endbranch64 FLUSH_JOB_ZUC_EEA3 256 + ret -%macro ZUC_EIA3_16_BUFFER 5 +%macro ZUC_EIA3_16_BUFFER 6 %define %%OOO %1 ; [in] Pointer to ZUC OOO manager %define %%KEY_SIZE %2 ; [constant] Key size (16 or 32) -%define %%L %3 ; [clobbered] Temporary GP register (dword) -%define %%REMAIN_BITS %4 ; [clobbered] Temporary GP register (dword) -%define %%TMP %5 ; [clobbered] Temporary GP register +%define %%TAG_SIZE %3 ; [constant] Tag size (4, 8 or 16 bytes) +%define %%L %4 ; [clobbered] Temporary GP register (dword) +%define %%REMAIN_BITS %5 ; [clobbered] Temporary GP register (dword) +%define %%TMP %6 ; [clobbered] Temporary GP register ; Find minimum length vmovdqa xmm0, [%%OOO + _zuc_lens] @@ -619,8 +622,13 @@ FLUSH_JOB_ZUC256_EEA3: cmp DWORD(%%TMP), %%REMAIN_BITS cmovbe %%REMAIN_BITS, DWORD(%%TMP) - ; Get number of KS 32-bit words to generate ([length/32] + 2)) - lea %%L, [%%REMAIN_BITS + 31 + (2 << 5)] + ; Get number of KS 32-bit words to generate ([length/32] + tag_size)) +%if %%KEY_SIZE == 128 + lea %%L, [%%REMAIN_BITS + 31 + 2*(8*%%TAG_SIZE)] +%else ; %%KEY_SIZE == 256 + lea %%L, [%%REMAIN_BITS + 31 + (8*%%TAG_SIZE)] +%endif + shr %%L, 5 cmp %%L, 16 @@ -628,10 +636,8 @@ FLUSH_JOB_ZUC256_EEA3: ; Generate L KS words (less than 16), except for old buffers, which only need L-2, ; since 2 words are reused from previous iteration -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 40 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [%%OOO + _zuc_state] lea arg2, [%%OOO + _zuc_args_KS] xor arg3, arg3 ; offset = 0 @@ -643,11 +649,20 @@ FLUSH_JOB_ZUC256_EEA3: mov [rsp + 32], %%L %endif +%if %%KEY_SIZE == 128 call ZUC_KEYGEN_SKIP8_16 - -%ifndef LINUX - add rsp, 40 +%else ; %%KEY_SIZE == 256 +%if %%TAG_SIZE == 4 + call ZUC_KEYGEN_SKIP4_16 +%elif %%TAG_SIZE == 8 + call ZUC_KEYGEN_SKIP8_16 +%else ;; %%TAG_SIZE == 16 + call ZUC_KEYGEN_SKIP16_16 %endif +%endif ; %%KEY_SIZE + + RESTORE_STACK_SPACE 5 + jmp %%_exit %%_above_eq_16: @@ -658,7 +673,17 @@ FLUSH_JOB_ZUC256_EEA3: xor arg3, arg3 ; offset = 0 movzx DWORD(arg4), word [%%OOO + _zuc_init_not_done] +%if %%KEY_SIZE == 128 + call ZUC_KEYGEN64B_SKIP8_16 +%else ; %%KEY_SIZE == 256 +%if %%TAG_SIZE == 4 + call ZUC_KEYGEN64B_SKIP4_16 +%elif %%TAG_SIZE == 8 call ZUC_KEYGEN64B_SKIP8_16 +%else ;; %%TAG_SIZE == 16 + call ZUC_KEYGEN64B_SKIP16_16 +%endif +%endif sub %%L, 16 %%_loop: @@ -681,6 +706,7 @@ FLUSH_JOB_ZUC256_EEA3: lea arg2, [%%OOO + _zuc_args_KS] lea arg3, [%%OOO + _zuc_args_in] lea arg4, [%%OOO 
+ _zuc_lens] + mov arg5, %%TAG_SIZE call ZUC_ROUND64B @@ -690,10 +716,8 @@ FLUSH_JOB_ZUC256_EEA3: %%_above_eq_16_loop: ; Generate next 16 KS words and digest 64 bytes of data -%ifndef LINUX - ;; 48 bytes for 6 parameters - sub rsp, 48 -%endif + RESERVE_STACK_SPACE 7 + mov DWORD(%%TMP), %%L shr DWORD(%%TMP), 4 ; Number of rounds of 64 bytes @@ -713,13 +737,12 @@ FLUSH_JOB_ZUC256_EEA3: lea %%TMP, [%%OOO + _zuc_lens] mov [rsp + 32], %%TMP %endif + mov arg7, %%TAG_SIZE call ZUC_EIA3_N64B -%ifndef LINUX - ;; 48 bytes for 6 parameters - add rsp, 48 -%endif + RESTORE_STACK_SPACE 7 + and %%L, 0xf ; Remaining words of KS left to generate jmp %%_loop @@ -736,10 +759,12 @@ FLUSH_JOB_ZUC256_EEA3: call ZUC_KEYGEN_16 %%_exit: -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 40 +%if %%KEY_SIZE == 128 + RESERVE_STACK_SPACE 5 +%else + RESERVE_STACK_SPACE 6 %endif + ; Digest final bytes of data and generate tag for finished buffers lea arg1, [%%OOO + _zuc_args_digest] lea arg2, [%%OOO + _zuc_args_KS] @@ -753,18 +778,19 @@ FLUSH_JOB_ZUC256_EEA3: %if %%KEY_SIZE == 128 call ZUC_REMAINDER_16 + RESTORE_STACK_SPACE 5 %else + mov arg6, %%TAG_SIZE call ZUC256_REMAINDER_16 + RESTORE_STACK_SPACE 6 %endif -%ifndef LINUX - add rsp, 40 -%endif mov word [%%OOO + _zuc_init_not_done], 0 %endmacro -%macro SUBMIT_JOB_ZUC_EIA3 1 +%macro SUBMIT_JOB_ZUC_EIA3 2 %define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) +%define %%TAG_SIZE %2 ; [constant] Tag size (4, 8 or 16 bytes) ; idx needs to be in rbp %define len rbp @@ -819,14 +845,15 @@ FLUSH_JOB_ZUC256_EEA3: ; Read and write next byte mov al, [tmp + 16] mov [state + _zuc_args_IV + lane + 16], al - ; Read next 6 bytes - movzx DWORD(tmp2), word [tmp + 17] - mov DWORD(tmp3), [tmp + 19] - shl tmp2, 32 - or tmp2, tmp3 - ; Expand to 8 bytes and write + ; Read last 8 bytes and keep only the last 6 bytes + mov tmp2, [tmp + 15] + mov tmp3, 0x0000ffffffffffff + bswap tmp2 + and tmp2, tmp3 ; last 6 bytes of IV + ; Expand 6 bytes to 8 bytes and write out mov tmp3, 0x3f3f3f3f3f3f3f3f pdep tmp2, tmp2, tmp3 + bswap tmp2 mov [state + _zuc_args_IV + lane + 17], tmp2 jmp %%_iv_read @@ -851,8 +878,9 @@ FLUSH_JOB_ZUC256_EEA3: not tmp and [state + _zuc_unused_lane_bitmask], WORD(tmp) ; Reset temporary digest for the lane +%if %%KEY_SIZE == 128 mov dword [state + _zuc_args_digest + lane*4], 0 - +%endif mov tmp, [job + _src] add tmp, [job + _hash_start_src_offset_in_bytes] mov [state + _zuc_args_in + lane*8], tmp @@ -868,8 +896,9 @@ FLUSH_JOB_ZUC256_EEA3: vmovdqu16 ymm0{k1}, ymm1 vmovdqa64 [state + _zuc_lens], ymm0 + xor job_rax, job_rax cmp qword [state + _zuc_lanes_in_use], 16 - jne %%return_null_submit_eia3 + jne %%return_submit_eia3 ;; Find min length for lanes 0-7 vphminposuw xmm2, xmm0 @@ -906,20 +935,14 @@ FLUSH_JOB_ZUC256_EEA3: lea arg2, [r12 + _zuc_args_IV] lea arg3, [r12 + _zuc_state] movzx DWORD(arg4), word [r12 + _zuc_init_not_done] - mov r10, 4 ; Argument 5 hardcoded to r10, as INIT is expecting it in that register + mov r10, %%TAG_SIZE ; Argument 5 hardcoded to r10, as INIT is expecting it in that register + lea r11, [r12 + _zuc_args_digest] ; Argument 6 hardcoded to r11 call ZUC256_INIT_16 - lea arg1, [r12 + _zuc_state] - lea arg2, [r12 + _zuc_args_digest] - movzx DWORD(arg3), word [r12 + _zuc_init_not_done] - - ; Generate first 4 bytes of keystream, used as the initial value of digests - call ZUC_KEYGEN4B_16 - %endif ;; %%KEY_SIZE == 128 - ZUC_EIA3_16_BUFFER r12, %%KEY_SIZE, DWORD(tmp), DWORD(tmp2), tmp3 + ZUC_EIA3_16_BUFFER r12, %%KEY_SIZE, %%TAG_SIZE, DWORD(tmp), 
DWORD(tmp2), tmp3 mov state, [rsp + _gpr_save + 8*8] mov job, [rsp + _gpr_save + 8*9] @@ -933,9 +956,19 @@ FLUSH_JOB_ZUC256_EEA3: mov qword [state + _zuc_job_in_lane + idx*8], 0 or dword [job_rax + _status], IMB_STATUS_COMPLETED_AUTH ; Copy digest to auth tag output - mov r10d, [state + _zuc_args_digest + idx*4] mov r11, [job_rax + _auth_tag_output] +%if %%TAG_SIZE == 4 + mov r10d, [state + _zuc_args_digest + idx*4] mov [r11], r10d +%elif %%TAG_SIZE == 8 + mov r10, [state + _zuc_args_digest + idx*8] + mov [r11], r10 +%else ; %%TAG_SIZE == 16 + shl idx, 4 + vmovdqa xmm0, [state + _zuc_args_digest + idx] + vmovdqu [r11], xmm0 + shr idx, 4 +%endif shl unused_lanes, 4 or unused_lanes, idx mov [state + _zuc_unused_lanes], unused_lanes @@ -955,7 +988,11 @@ FLUSH_JOB_ZUC256_EEA3: %endif %%return_submit_eia3: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] @@ -968,16 +1005,11 @@ FLUSH_JOB_ZUC256_EEA3: mov rdi, [rsp + _gpr_save + 8*7] %endif mov rsp, [rsp + _rsp_save] ; original SP - - ret - -%%return_null_submit_eia3: - xor job_rax, job_rax - jmp %%return_submit_eia3 %endmacro -%macro FLUSH_JOB_ZUC_EIA3 1 +%macro FLUSH_JOB_ZUC_EIA3 2 %define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) +%define %%TAG_SIZE %2 ; [constant] Tag size (4, 8 or 16 bytes) %define unused_lanes rbx %define tmp1 rbx @@ -1009,8 +1041,9 @@ FLUSH_JOB_ZUC256_EEA3: mov [rsp + _rsp_save], rax ; original SP ; check for empty + xor job_rax, job_rax cmp qword [state + _zuc_lanes_in_use], 0 - jz %%return_null_flush_eia3 + jz %%return_flush_eia3 ; find a lane with a null job vpxorq zmm0, zmm0 @@ -1087,21 +1120,15 @@ FLUSH_JOB_ZUC256_EEA3: lea arg2, [r12 + _zuc_args_IV] lea arg3, [r12 + _zuc_state] movzx DWORD(arg4), word [r12 + _zuc_init_not_done] - mov r10, 4 ; Argument 5 hardcoded to r10, as INIT is expecting it in that register + mov r10, %%TAG_SIZE ; Argument 5 hardcoded to r10, as INIT is expecting it in that register + lea r11, [r12 + _zuc_args_digest] ; Argument 6 hardcoded to r11 call ZUC256_INIT_16 - lea arg1, [r12 + _zuc_state] - lea arg2, [r12 + _zuc_args_digest] - movzx DWORD(arg3), word [r12 + _zuc_init_not_done] - - ; Generate first 4 bytes of keystream, used as the initial value of digests - call ZUC_KEYGEN4B_16 - %endif ;; %%KEY_SIZE == 128 %%skip_init_flush_eia3: - ZUC_EIA3_16_BUFFER r12, %%KEY_SIZE, DWORD(tmp), DWORD(tmp2), tmp4 + ZUC_EIA3_16_BUFFER r12, %%KEY_SIZE, %%TAG_SIZE, DWORD(tmp), DWORD(tmp2), tmp4 mov state, [rsp + _gpr_save + 8*8] @@ -1132,9 +1159,19 @@ FLUSH_JOB_ZUC256_EEA3: mov qword [state + _zuc_job_in_lane + idx*8], 0 or dword [job_rax + _status], IMB_STATUS_COMPLETED_AUTH ; Copy digest to auth tag output - mov r10d, [state + _zuc_args_digest + idx*4] mov r11, [job_rax + _auth_tag_output] +%if %%TAG_SIZE == 4 + mov r10d, [state + _zuc_args_digest + idx*4] mov [r11], r10d +%elif %%TAG_SIZE == 8 + mov r10, [state + _zuc_args_digest + idx*8] + mov [r11], r10 +%else ; %%TAG_SIZE == 16 + shl idx, 4 + vmovdqa xmm0, [state + _zuc_args_digest + idx] + vmovdqu [r11], xmm0 + shr idx, 4 +%endif shl unused_lanes, 4 or unused_lanes, idx mov [state + _zuc_unused_lanes], unused_lanes @@ -1152,7 +1189,11 @@ FLUSH_JOB_ZUC256_EEA3: %endrep %endif +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper +%endif %%return_flush_eia3: @@ -1167,12 +1208,6 @@ FLUSH_JOB_ZUC256_EEA3: mov rdi, [rsp + _gpr_save + 8*7] %endif mov rsp, [rsp + _rsp_save] ; original SP - - ret - -%%return_null_flush_eia3: - xor job_rax, job_rax - jmp 
%%return_flush_eia3 %endmacro ; JOB* SUBMIT_JOB_ZUC128_EIA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) @@ -1180,29 +1215,59 @@ FLUSH_JOB_ZUC256_EEA3: ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC128_EIA3,function,internal) SUBMIT_JOB_ZUC128_EIA3: - endbranch64 - SUBMIT_JOB_ZUC_EIA3 128 + SUBMIT_JOB_ZUC_EIA3 128, 4 + ret -; JOB* SUBMIT_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) +; JOB* SUBMIT_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job, +; const uint64_t tag_sz) ; arg 1 : state ; arg 2 : job +; arg 3 : tag size (4, 8 or 16 bytes) MKGLOBAL(SUBMIT_JOB_ZUC256_EIA3,function,internal) SUBMIT_JOB_ZUC256_EIA3: - endbranch64 - SUBMIT_JOB_ZUC_EIA3 256 + cmp arg3, 8 + je submit_tag_8B + jb submit_tag_4B + +submit_tag_16B: + SUBMIT_JOB_ZUC_EIA3 256, 16 + ret + +submit_tag_8B: + SUBMIT_JOB_ZUC_EIA3 256, 8 + ret + +submit_tag_4B: + SUBMIT_JOB_ZUC_EIA3 256, 4 + ret ; JOB* FLUSH_JOB_ZUC128_EIA3(MB_MGR_ZUC_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_ZUC128_EIA3,function,internal) FLUSH_JOB_ZUC128_EIA3: - endbranch64 - FLUSH_JOB_ZUC_EIA3 128 + FLUSH_JOB_ZUC_EIA3 128, 4 + ret -; JOB* FLUSH_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state) +; JOB* FLUSH_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state, +; const uint64_t tag_sz) ; arg 1 : state +; arg 2 : tag size (4, 8 or 16 bytes) MKGLOBAL(FLUSH_JOB_ZUC256_EIA3,function,internal) FLUSH_JOB_ZUC256_EIA3: - endbranch64 - FLUSH_JOB_ZUC_EIA3 256 + cmp arg2, 8 + je flush_tag_8B + jb flush_tag_4B + +flush_tag_16B: + FLUSH_JOB_ZUC_EIA3 256, 16 + ret + +flush_tag_8B: + FLUSH_JOB_ZUC_EIA3 256, 8 + ret + +flush_tag_4B: + FLUSH_JOB_ZUC_EIA3 256, 4 + ret mksection stack-noexec diff --git a/lib/avx512/poly_avx512.asm b/lib/avx512_t1/poly_avx512.asm similarity index 99% rename from lib/avx512/poly_avx512.asm rename to lib/avx512_t1/poly_avx512.asm index ed58d554f7b179e8fa48d47cbf85fe1f00d62ae0..400dade99f22c195b235a97caa6fc3ef0c86abab 100644 --- a/lib/avx512/poly_avx512.asm +++ b/lib/avx512_t1/poly_avx512.asm @@ -217,6 +217,7 @@ dw 0, 0x1, 0x5, 0x15, 0x55, 0x57, 0x5f, 0x7f, 0xff struc STACKFRAME _r_save: resq 16 ; Memory to save limbs of powers of R _rp_save: resq 8 ; Memory to save limbs of powers of R' +_xmm_save: reso 10 ; Memory to save XMM registers _gpr_save: resq 8 ; Memory to save GP registers _rsp_save: resq 1 ; Memory to save RSP pointer endstruc @@ -1298,6 +1299,13 @@ APPEND(%%_shuffle_blocks_, i): %ifndef LINUX mov [rsp + _gpr_save + 8*6], rsi mov [rsp + _gpr_save + 8*7], rdi +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _xmm_save + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov [rsp + _rsp_save], rax @@ -1308,6 +1316,13 @@ APPEND(%%_shuffle_blocks_, i): ;; Restores registers and removes the stack frame ;; ============================================================================= %macro FUNC_EXIT 0 +%ifdef SAFE_DATA + clear_scratch_gps_asm + clear_all_zmms_asm +%else + vzeroupper +%endif ;; SAFE_DATA + mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -1317,13 +1332,16 @@ APPEND(%%_shuffle_blocks_, i): %ifndef LINUX mov rsi, [rsp + _gpr_save + 8*6] mov rdi, [rsp + _gpr_save + 8*7] +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _xmm_save + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov rsp, [rsp + _rsp_save] -%ifdef SAFE_DATA - clear_scratch_gps_asm -%endif ;; SAFE_DATA - %endmacro ;; ============================================================================= diff --git a/lib/avx512/sha1_x16_avx512.asm b/lib/avx512_t1/sha1_x16_avx512.asm 
similarity index 81% rename from lib/avx512/sha1_x16_avx512.asm rename to lib/avx512_t1/sha1_x16_avx512.asm index e0a43b5f5ce76046058780ac572b9b12492584ea..afa52c5827b908c83111d507477e2ab86ba58a6e 100644 --- a/lib/avx512/sha1_x16_avx512.asm +++ b/lib/avx512_t1/sha1_x16_avx512.asm @@ -44,7 +44,6 @@ %include "include/transpose_avx512.asm" %include "include/reg_sizes.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" mksection .rodata default rel align 64 @@ -94,6 +93,12 @@ mksection .text %define APPEND(a,b) a %+ b +%define XMM_STORAGE 16*10 +%define GP_STORAGE 8*5 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + %ifdef LINUX %define arg1 rdi %define arg2 rsi @@ -229,13 +234,85 @@ mksection .text %endif %endmacro +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET +%ifndef LINUX + vmovdqa32 [rsp + 0*16], xmm6 + vmovdqa32 [rsp + 1*16], xmm7 + vmovdqa32 [rsp + 2*16], xmm8 + vmovdqa32 [rsp + 3*16], xmm9 + vmovdqa32 [rsp + 4*16], xmm10 + vmovdqa32 [rsp + 5*16], xmm11 + vmovdqa32 [rsp + 6*16], xmm12 + vmovdqa32 [rsp + 7*16], xmm13 + vmovdqa32 [rsp + 8*16], xmm14 + vmovdqa32 [rsp + 9*16], xmm15 +%endif + mov [rsp + GP_OFFSET], r12 + mov [rsp + GP_OFFSET + 8], r13 + mov [rsp + GP_OFFSET + 2*8], r14 + mov [rsp + GP_OFFSET + 3*8], r15 + mov [rsp + GP_OFFSET + 4*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 +%ifndef LINUX + vmovdqa32 xmm6, [rsp + 0*16] + vmovdqa32 xmm7, [rsp + 1*16] + vmovdqa32 xmm8, [rsp + 2*16] + vmovdqa32 xmm9, [rsp + 3*16] + vmovdqa32 xmm10, [rsp + 4*16] + vmovdqa32 xmm11, [rsp + 5*16] + vmovdqa32 xmm12, [rsp + 6*16] + vmovdqa32 xmm13, [rsp + 7*16] + vmovdqa32 xmm14, [rsp + 8*16] + vmovdqa32 xmm15, [rsp + 9*16] + +%ifdef SAFE_DATA + vpxord xmm5, xmm5, xmm5 + vmovdqa32 [rsp + 0*16], xmm5 + vmovdqa32 [rsp + 1*16], xmm5 + vmovdqa32 [rsp + 2*16], xmm5 + vmovdqa32 [rsp + 3*16], xmm5 + vmovdqa32 [rsp + 4*16], xmm5 + vmovdqa32 [rsp + 5*16], xmm5 + vmovdqa32 [rsp + 6*16], xmm5 + vmovdqa32 [rsp + 7*16], xmm5 + vmovdqa32 [rsp + 8*16], xmm5 + vmovdqa32 [rsp + 9*16], xmm5 +%endif +%endif + mov r12, [rsp + GP_OFFSET] + mov r13, [rsp + GP_OFFSET + 8] + mov r14, [rsp + GP_OFFSET + 2*8] + mov r15, [rsp + GP_OFFSET + 3*8] + mov rsp, [rsp + GP_OFFSET + 4*8] ;; rsp pointer +%endmacro + +;; FRAMESZ must be an odd multiple of 8 +%define FRAMESZ 16*10 + 8 + align 64 ; void sha1_mult_x16_avx3(void **input_data, UINT128 *digest, UINT32 size) ; arg 1 : pointer to SHA1 args structure ; arg 2 : size (in blocks) ;; assumed to be >= 1 MKGLOBAL(sha1_x16_avx512,function,internal) sha1_x16_avx512: - endbranch64 +%ifndef LINUX + sub rsp, FRAMESZ + + vmovdqa [rsp + 0*16], xmm6 + vmovdqa [rsp + 1*16], xmm7 + vmovdqa [rsp + 2*16], xmm8 + vmovdqa [rsp + 3*16], xmm9 + vmovdqa [rsp + 4*16], xmm10 + vmovdqa [rsp + 5*16], xmm11 + vmovdqa [rsp + 6*16], xmm12 + vmovdqa [rsp + 7*16], xmm13 + vmovdqa [rsp + 8*16], xmm14 + vmovdqa [rsp + 9*16], xmm15 +%endif ;; Initialize digests vmovdqu32 A, [state + 0*SHA1_DIGEST_ROW_SIZE] vmovdqu32 B, [state + 1*SHA1_DIGEST_ROW_SIZE] @@ -436,11 +513,41 @@ lastLoop: mov [state + _data_ptr_sha1 + 15*PTR_SZ], inp7 %ifdef SAFE_DATA - clear_all_zmms_asm + clear_scratch_zmms_asm %else vzeroupper %endif ;; SAFE_DATA +%ifndef LINUX + vmovdqa xmm6, [rsp + 0*16] + vmovdqa xmm7, [rsp + 1*16] + vmovdqa xmm8, [rsp + 2*16] + vmovdqa xmm9, [rsp + 3*16] + vmovdqa xmm10, [rsp + 4*16] + vmovdqa xmm11, [rsp + 5*16] + vmovdqa xmm12, [rsp + 6*16] + vmovdqa xmm13, [rsp + 7*16] + vmovdqa xmm14, [rsp + 8*16] + vmovdqa xmm15, [rsp + 9*16] + 
+%ifdef SAFE_DATA + ; xmm0 already 0 +%assign i 0 +%rep 10 + vmovdqa [rsp + i*16], xmm0 +%assign i (i+1) +%endrep +%endif ;SAFE_DATA + add rsp, FRAMESZ +%endif ; !LINUX + ret + +; void call_sha1_x16_avx512_from_c(SHA1_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha1_x16_avx512_from_c,function,internal) +call_sha1_x16_avx512_from_c: + FUNC_SAVE + call sha1_x16_avx512 + FUNC_RESTORE ret mksection stack-noexec diff --git a/lib/avx512/sha256_x16_avx512.asm b/lib/avx512_t1/sha256_x16_avx512.asm similarity index 92% rename from lib/avx512/sha256_x16_avx512.asm rename to lib/avx512_t1/sha256_x16_avx512.asm index 76c8e0829b9d3c9aabfc341e3cee15976efe79a3..fb060b9dfbfde7534623327d0a0e899f5b32721c 100644 --- a/lib/avx512/sha256_x16_avx512.asm +++ b/lib/avx512_t1/sha256_x16_avx512.asm @@ -44,7 +44,6 @@ %include "include/transpose_avx512.asm" %include "include/reg_sizes.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" ; re-use K256 from sha256_oct_avx2.asm extern K256 @@ -565,6 +564,78 @@ PSHUFFLE_BYTE_FLIP_MASK: mksection .text +%define XMM_STORAGE 10*16 +%define GP_STORAGE 9*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~31 ; align rsp to 32 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 + mov [rsp + 3*8], r13 + mov [rsp + 4*8], r14 + mov [rsp + 5*8], r15 +%ifndef LINUX + mov [rsp + 6*8], rsi + mov [rsp + 7*8], rdi + vmovdqa32 [rsp + 4*16], xmm6 + vmovdqa32 [rsp + 5*16], xmm7 + vmovdqa32 [rsp + 6*16], xmm8 + vmovdqa32 [rsp + 7*16], xmm9 + vmovdqa32 [rsp + 8*16], xmm10 + vmovdqa32 [rsp + 9*16], xmm11 + vmovdqa32 [rsp + 10*16], xmm12 + vmovdqa32 [rsp + 11*16], xmm13 + vmovdqa32 [rsp + 12*16], xmm14 + vmovdqa32 [rsp + 13*16], xmm15 +%endif ; LINUX + mov [rsp + 14*16], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] + mov r13, [rsp + 3*8] + mov r14, [rsp + 4*8] + mov r15, [rsp + 5*8] +%ifndef LINUX + mov rsi, [rsp + 6*8] + mov rdi, [rsp + 7*8] + vmovdqa32 xmm6, [rsp + 4*16] + vmovdqa32 xmm7, [rsp + 5*16] + vmovdqa32 xmm8, [rsp + 6*16] + vmovdqa32 xmm9, [rsp + 7*16] + vmovdqa32 xmm10, [rsp + 8*16] + vmovdqa32 xmm11, [rsp + 9*16] + vmovdqa32 xmm12, [rsp + 10*16] + vmovdqa32 xmm13, [rsp + 11*16] + vmovdqa32 xmm14, [rsp + 12*16] + vmovdqa32 xmm15, [rsp + 13*16] + +%ifdef SAFE_DATA + vpxord xmm5, xmm5, xmm5 + vmovdqa32 xmm5, [rsp + 4*16] + vmovdqa32 xmm5, [rsp + 5*16] + vmovdqa32 xmm5, [rsp + 6*16] + vmovdqa32 xmm5, [rsp + 7*16] + vmovdqa32 xmm5, [rsp + 8*16] + vmovdqa32 xmm5, [rsp + 9*16] + vmovdqa32 xmm5, [rsp + 10*16] + vmovdqa32 xmm5, [rsp + 11*16] + vmovdqa32 xmm5, [rsp + 12*16] + vmovdqa32 xmm5, [rsp + 13*16] +%endif +%endif ; LINUX + mov rsp, [rsp + 14*16] ;; rsp pointer +%endmacro + ;; void sha256_x16_avx512(void **input_data, UINT128 *digest[16], UINT64 size) ;; arg 1 : pointer to SHA256 args structure ;; arg 2 : size (in blocks) ;; assumed to be >= 1 @@ -574,7 +645,6 @@ mksection .text MKGLOBAL(sha256_x16_avx512,function,internal) align 64 sha256_x16_avx512: - endbranch64 mov rax, rsp sub rsp, STACK_SPACE and rsp, ~63 ; align stack to multiple of 64 @@ -759,4 +829,12 @@ lastLoop: mov rsp, [rsp + _rsp] ret +; void call_sha256_x16_avx512_from_c(SHA256_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha256_x16_avx512_from_c,function,internal) +call_sha256_x16_avx512_from_c: + FUNC_SAVE + call sha256_x16_avx512 + FUNC_RESTORE + ret + mksection stack-noexec 
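The call_sha1_x16_avx512_from_c/call_sha256_x16_avx512_from_c wrappers added above exist so that plain C code can invoke the raw x16 transforms, with FUNC_SAVE/FUNC_RESTORE preserving the Windows-ABI non-volatile GP and XMM registers around the call. A minimal illustrative sketch (not part of the patch) of how such a wrapper is consumed through the generic submit_flush_job_sha_256() helper, mirroring the new lib/avx512_t1/sha_mb_avx512.c further below; the example function name is hypothetical and the helper/constant declarations are assumed to come from the same sha_mb_mgr.h / arch_avx512_type1.h headers that file includes:

#include "include/sha_mb_mgr.h"          /* submit_flush_job_sha_256() - assumed */
#include "include/arch_avx512_type1.h"   /* call_sha256_x16_avx512_from_c() - assumed */

/* Hypothetical example: flush any partially filled SHA-256 lanes.
 * Arguments follow the patch: 16 lanes, submit flag = 0 (flush),
 * 256-bit variant, SHA-256 block and pad sizes, x16 AVX512 transform. */
static IMB_JOB *
example_sha256_flush(MB_MGR_SHA_256_OOO *state, IMB_JOB *job)
{
        return submit_flush_job_sha_256(state, job, 16, 0, 256,
                                        IMB_SHA_256_BLOCK_SIZE,
                                        SHA256_PAD_SIZE,
                                        call_sha256_x16_avx512_from_c, 0);
}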
diff --git a/lib/avx512/sha512_x8_avx512.asm b/lib/avx512_t1/sha512_x8_avx512.asm similarity index 92% rename from lib/avx512/sha512_x8_avx512.asm rename to lib/avx512_t1/sha512_x8_avx512.asm index 28d895b241397ead7072feccbf607f75455f4b24..24c03f8a9c02d5f3ee00944fa6718dfa1f53ab73 100644 --- a/lib/avx512/sha512_x8_avx512.asm +++ b/lib/avx512_t1/sha512_x8_avx512.asm @@ -45,7 +45,6 @@ %include "include/mb_mgr_datastruct.asm" %include "include/transpose_avx512.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" %define APPEND(a,b) a %+ b %ifdef LINUX @@ -412,6 +411,78 @@ PSHUFFLE_BYTE_FLIP_MASK: mksection .text +%define XMM_STORAGE 10*16 +%define GP_STORAGE 9*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~31 ; align rsp to 32 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 + mov [rsp + 3*8], r13 + mov [rsp + 4*8], r14 + mov [rsp + 5*8], r15 +%ifndef LINUX + mov [rsp + 6*8], rsi + mov [rsp + 7*8], rdi + vmovdqa32 [rsp + 4*16], xmm6 + vmovdqa32 [rsp + 5*16], xmm7 + vmovdqa32 [rsp + 6*16], xmm8 + vmovdqa32 [rsp + 7*16], xmm9 + vmovdqa32 [rsp + 8*16], xmm10 + vmovdqa32 [rsp + 9*16], xmm11 + vmovdqa32 [rsp + 10*16], xmm12 + vmovdqa32 [rsp + 11*16], xmm13 + vmovdqa32 [rsp + 12*16], xmm14 + vmovdqa32 [rsp + 13*16], xmm15 +%endif ; LINUX + mov [rsp + 14*16], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] + mov r13, [rsp + 3*8] + mov r14, [rsp + 4*8] + mov r15, [rsp + 5*8] +%ifndef LINUX + mov rsi, [rsp + 6*8] + mov rdi, [rsp + 7*8] + vmovdqa32 xmm6, [rsp + 4*16] + vmovdqa32 xmm7, [rsp + 5*16] + vmovdqa32 xmm8, [rsp + 6*16] + vmovdqa32 xmm9, [rsp + 7*16] + vmovdqa32 xmm10, [rsp + 8*16] + vmovdqa32 xmm11, [rsp + 9*16] + vmovdqa32 xmm12, [rsp + 10*16] + vmovdqa32 xmm13, [rsp + 11*16] + vmovdqa32 xmm14, [rsp + 12*16] + vmovdqa32 xmm15, [rsp + 13*16] + +%ifdef SAFE_DATA + vpxord xmm5, xmm5, xmm5 + vmovdqa32 xmm5, [rsp + 4*16] + vmovdqa32 xmm5, [rsp + 5*16] + vmovdqa32 xmm5, [rsp + 6*16] + vmovdqa32 xmm5, [rsp + 7*16] + vmovdqa32 xmm5, [rsp + 8*16] + vmovdqa32 xmm5, [rsp + 9*16] + vmovdqa32 xmm5, [rsp + 10*16] + vmovdqa32 xmm5, [rsp + 11*16] + vmovdqa32 xmm5, [rsp + 12*16] + vmovdqa32 xmm5, [rsp + 13*16] +%endif +%endif ; LINUX + mov rsp, [rsp + 14*16] ;; rsp pointer +%endmacro + ;; void sha512_x8_avx512(void *input_data, UINT64 *digest[NUM_LANES], const int size) ;; arg 1 : rcx : pointer to input data ;; arg 2 : rdx : pointer to UINT64 digest[8][num_lanes] @@ -419,7 +490,6 @@ mksection .text MKGLOBAL(sha512_x8_avx512,function,internal) align 64 sha512_x8_avx512: - endbranch64 mov rax, rsp sub rsp, STACK_SPACE and rsp, ~63 ; align stack to multiple of 64 @@ -591,4 +661,12 @@ lastLoop: ;hash_done: ret +; void call_sha512_x8_avx512_from_c(SHA512_ARGS *args, UINT64 size_in_blocks); +MKGLOBAL(call_sha512_x8_avx512_from_c,function,internal) +call_sha512_x8_avx512_from_c: + FUNC_SAVE + call sha512_x8_avx512 + FUNC_RESTORE + ret + mksection stack-noexec diff --git a/lib/avx512_t1/sha_mb_avx512.c b/lib/avx512_t1/sha_mb_avx512.c new file mode 100644 index 0000000000000000000000000000000000000000..6b283fc2adbca491ed0fc6caf15e83a1be0b8af7 --- /dev/null +++ b/lib/avx512_t1/sha_mb_avx512.c @@ -0,0 +1,141 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or 
without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#include "include/sha_mb_mgr.h" +#include "include/arch_avx512_type1.h" + +IMB_JOB *submit_job_sha384_avx512(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); +IMB_JOB *flush_job_sha384_avx512(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); + +IMB_JOB *submit_job_sha512_avx512(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); +IMB_JOB *flush_job_sha512_avx512(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); + +/* ========================================================================== */ +/* + * SHA1 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha1_avx512(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 16, 1, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_x16_avx512_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha1_avx512(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 16, 0, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_x16_avx512_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA224 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha224_avx512(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 16, 1, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha256_x16_avx512_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha224_avx512(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 16, 0, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha256_x16_avx512_from_c, 0); +} + + +/* ========================================================================== */ +/* + * SHA256 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha256_avx512(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 16, 1, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + call_sha256_x16_avx512_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha256_avx512(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 16, 0, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + 
call_sha256_x16_avx512_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA384 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha384_avx512(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 8, 1, 384, + IMB_SHA_512_BLOCK_SIZE, SHA384_PAD_SIZE, + call_sha512_x8_avx512_from_c); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha384_avx512(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 8, 0, 384, + IMB_SHA_512_BLOCK_SIZE, SHA384_PAD_SIZE, + call_sha512_x8_avx512_from_c); +} + +/* ========================================================================== */ +/* + * SHA512 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha512_avx512(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 8, 1, 512, + IMB_SHA_512_BLOCK_SIZE, SHA512_PAD_SIZE, + call_sha512_x8_avx512_from_c); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha512_avx512(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 8, 0, 512, + IMB_SHA_512_BLOCK_SIZE, SHA512_PAD_SIZE, + call_sha512_x8_avx512_from_c); +} diff --git a/lib/avx512/snow3g_avx512.c b/lib/avx512_t1/snow3g_avx512.c similarity index 100% rename from lib/avx512/snow3g_avx512.c rename to lib/avx512_t1/snow3g_avx512.c diff --git a/lib/avx512/zuc_top_avx512.c b/lib/avx512_t1/zuc_top_avx512.c old mode 100755 new mode 100644 similarity index 98% rename from lib/avx512/zuc_top_avx512.c rename to lib/avx512_t1/zuc_top_avx512.c index e6488e2c059f2d5758e1051b3c5978cb24d30587..918d25318d5a69badfdbb2e42ac81c7f7b72d3d2 --- a/lib/avx512/zuc_top_avx512.c +++ b/lib/avx512_t1/zuc_top_avx512.c @@ -145,12 +145,12 @@ cipher_16(ZucState16_t *pState, const uint64_t *pIn[16], uint64_t *pOut[16], static inline void round64B_16(uint32_t *T, const uint32_t *ks, const void **data, - uint16_t *lens, const unsigned use_gfni) + uint16_t *lens, const unsigned use_gfni, const uint64_t tag_sz) { if (use_gfni) - asm_Eia3Round64B_16_VPCLMUL(T, ks, data, lens); + asm_Eia3Round64B_16_VPCLMUL(T, ks, data, lens, tag_sz); else - asm_Eia3Round64BAVX512_16(T, ks, data, lens); + asm_Eia3Round64BAVX512_16(T, ks, data, lens, tag_sz); } static inline @@ -700,14 +700,11 @@ void _zuc_eia3_16_buffer_avx512(const void * const pKey[NUM_AVX512_BUFS], else keystr_64B_gen_16(&state, keyStr, 64, use_gfni); round64B_16(T, keyStr, - (const void **)pIn8, lens, use_gfni); + (const void **)pIn8, lens, use_gfni, 4); } /* Process each packet separately for the remaining bits */ for (i = 0; i < NUM_AVX512_BUFS; i++) { - const uint32_t N = lengthInBits[i] + (2 * ZUC_WORD_BITS); - uint32_t L = ((N + 31) / ZUC_WORD_BITS) - - numKeyStr*(keyStreamLengthInBits / 32); uint32_t remainBits = lengthInBits[i] - numKeyStr*keyStreamLengthInBits; uint32_t keyStr32[16*2]; @@ -748,7 +745,6 @@ void _zuc_eia3_16_buffer_avx512(const void * const pKey[NUM_AVX512_BUFS], while (remainBits >= keyStreamLengthInBits) { remainBits -= keyStreamLengthInBits; - L -= (keyStreamLengthInBits / 32); /* Generate the next key stream 8 bytes or 64 bytes */ if (!remainBits) diff --git a/lib/avx512_t1/zuc_x16_avx512.asm b/lib/avx512_t1/zuc_x16_avx512.asm new file mode 100644 index 0000000000000000000000000000000000000000..0b1a26d90d5091cc94c364a125152690b3aaa5d6 --- /dev/null +++ b/lib/avx512_t1/zuc_x16_avx512.asm @@ -0,0 +1,4202 @@ +;; +;; Copyright (c) 2020-2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; 
modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +%include "include/os.asm" +%include "include/reg_sizes.asm" +%include "include/zuc_sbox.inc" +%include "include/transpose_avx512.asm" +%include "include/const.inc" +%include "include/mb_mgr_datastruct.asm" +%include "include/cet.inc" +%define APPEND(a,b) a %+ b +%define APPEND3(a,b,c) a %+ b %+ c + +%ifndef CIPHER_16 +%define USE_GFNI_VAES_VPCLMUL 0 +%define CIPHER_16 asm_ZucCipher_16_avx512 +%define ZUC128_INIT asm_ZucInitialization_16_avx512 +%define ZUC256_INIT asm_Zuc256Initialization_16_avx512 +%define ZUC128_REMAINDER_16 asm_Eia3RemainderAVX512_16 +%define ZUC256_REMAINDER_16 asm_Eia3_256_RemainderAVX512_16 +%define ZUC_KEYGEN64B_16 asm_ZucGenKeystream64B_16_avx512 +%define ZUC_KEYGEN8B_16 asm_ZucGenKeystream8B_16_avx512 +%define ZUC_KEYGEN_16 asm_ZucGenKeystream_16_avx512 +%define ZUC_KEYGEN64B_SKIP16_16 asm_ZucGenKeystream64B_16_skip16_avx512 +%define ZUC_KEYGEN_SKIP16_16 asm_ZucGenKeystream_16_skip16_avx512 +%define ZUC_KEYGEN64B_SKIP8_16 asm_ZucGenKeystream64B_16_skip8_avx512 +%define ZUC_KEYGEN_SKIP8_16 asm_ZucGenKeystream_16_skip8_avx512 +%define ZUC_KEYGEN64B_SKIP4_16 asm_ZucGenKeystream64B_16_skip4_avx512 +%define ZUC_KEYGEN_SKIP4_16 asm_ZucGenKeystream_16_skip4_avx512 +%define ZUC_ROUND64B_16 asm_Eia3Round64BAVX512_16 +%define ZUC_EIA3_N64B asm_Eia3_Nx64B_AVX512_16 +%endif + +mksection .rodata +default rel + +align 64 +EK_d64: +dd 0x0044D700, 0x0026BC00, 0x00626B00, 0x00135E00, 0x00578900, 0x0035E200, 0x00713500, 0x0009AF00 +dd 0x004D7800, 0x002F1300, 0x006BC400, 0x001AF100, 0x005E2600, 0x003C4D00, 0x00789A00, 0x0047AC00 + +; Constants to be used to initialize the LFSR registers +; The tables contain four different sets of constants: +; 0-63 bytes: Encryption +; 64-127 bytes: Authentication with tag size = 4 +; 128-191 bytes: Authentication with tag size = 8 +; 192-255 bytes: Authentication with tag size = 16 +align 64 +EK256_d64: +dd 0x00220000, 0x002F0000, 0x00240000, 0x002A0000, 0x006D0000, 0x00400000, 0x00400000, 0x00400000 +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 64 +EK256_EIA3_4: +dd 
0x00220000, 0x002F0000, 0x00250000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 64 +EK256_EIA3_8: +dd 0x00230000, 0x002F0000, 0x00240000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 64 +EK256_EIA3_16: +dd 0x00230000, 0x002F0000, 0x00250000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 64 +shuf_mask_key: +dd 0x00FFFFFF, 0x01FFFFFF, 0x02FFFFFF, 0x03FFFFFF, 0x04FFFFFF, 0x05FFFFFF, 0x06FFFFFF, 0x07FFFFFF, +dd 0x08FFFFFF, 0x09FFFFFF, 0x0AFFFFFF, 0x0BFFFFFF, 0x0CFFFFFF, 0x0DFFFFFF, 0x0EFFFFFF, 0x0FFFFFFF, + +align 64 +shuf_mask_iv: +dd 0xFFFFFF00, 0xFFFFFF01, 0xFFFFFF02, 0xFFFFFF03, 0xFFFFFF04, 0xFFFFFF05, 0xFFFFFF06, 0xFFFFFF07, +dd 0xFFFFFF08, 0xFFFFFF09, 0xFFFFFF0A, 0xFFFFFF0B, 0xFFFFFF0C, 0xFFFFFF0D, 0xFFFFFF0E, 0xFFFFFF0F, + +align 64 +shuf_mask_key256_first_high: +dd 0x00FFFFFF, 0x01FFFFFF, 0x02FFFFFF, 0x03FFFFFF, 0x04FFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, +dd 0x08FFFFFF, 0x09FFFFFF, 0xFFFFFFFF, 0x0BFFFFFF, 0x0CFFFFFF, 0x0DFFFFFF, 0x0EFFFFFF, 0x0FFFFFFF, + +align 64 +shuf_mask_key256_first_low: +dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFF05FF, 0xFFFF06FF, 0xFFFF07FF, +dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFF0AFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, + +align 64 +shuf_mask_key256_second: +dd 0xFFFF0500, 0xFFFF0601, 0xFFFF0702, 0xFFFF0803, 0xFFFF0904, 0xFFFFFF0A, 0xFFFFFF0B, 0xFFFFFFFF, +dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFF0C, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFF0FFFFF, 0xFF0F0E0D, + +align 64 +shuf_mask_iv256_first_high: +dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0x00FFFFFF, 0x01FFFFFF, 0x0AFFFFFF, +dd 0xFFFFFFFF, 0xFFFFFFFF, 0x05FFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, + +align 64 +shuf_mask_iv256_first_low: +dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFF02, +dd 0xFFFF030B, 0xFFFF0C04, 0xFFFFFFFF, 0xFFFF060D, 0xFFFF070E, 0xFFFF0F08, 0xFFFFFF09, 0xFFFFFFFF, + +align 64 +shuf_mask_iv256_second: +dd 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFF01FFFF, 0xFF02FFFF, 0xFF03FFFF, +dd 0xFF04FFFF, 0xFF05FFFF, 0xFF06FFFF, 0xFF07FFFF, 0xFF08FFFF, 0xFFFFFFFF, 0xFFFF00FF, 0xFFFFFFFF, + +align 64 +key_mask_low_4: +dq 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff +dq 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xff0fffffffff0fff + +align 64 +iv_mask_low_6: +dq 0x3f3f3f3f3f3f3fff, 0x000000000000003f + +align 64 +mask31: +dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, +dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, +dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, +dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, + +align 64 +swap_mask: +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c + +align 64 +S1_S0_shuf: +db 0x00, 0x02, 0x04, 0x06, 0x08, 
0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F +db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F +db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F +db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F + +align 64 +S0_S1_shuf: +db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, +db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, +db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, +db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, + +align 64 +rev_S1_S0_shuf: +db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F +db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F +db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F +db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F + +align 64 +rev_S0_S1_shuf: +db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 +db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 +db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 +db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 + +align 64 +bit_reverse_table_l: +db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f +db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f +db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f +db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f + +align 64 +bit_reverse_table_h: +db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 +db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 +db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 +db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 + +align 64 +bit_reverse_and_table: +db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f +db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f +db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f +db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f + +align 64 +bit_reverse_table: +times 8 db 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80 + +align 64 +shuf_mask_4B_tags_0_1_2_3: +dd 0x01, 0x05, 0x09, 0x0D, 0x11, 0x15, 0x19, 0x1D, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF +dd 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x01, 0x05, 0x09, 0x0D, 0x11, 0x15, 0x19, 0x1D + +align 64 +shuf_mask_4B_tags_0_4_8_12: +dd 0x01, 0x11, 0xFF, 0xFF, 0x05, 0x15, 0xFF, 0xFF, 0x09, 0x19, 0xFF, 0xFF, 0x0D, 0x1D, 0xFF, 0xFF +dd 0xFF, 0xFF, 0x01, 0x11, 0xFF, 0xFF, 0x05, 0x15, 0xFF, 0xFF, 0x09, 0x19, 0xFF, 0xFF, 0x0D, 0x1D + +align 64 +shuf_mask_8B_tags_0_1_4_5: +dq 0x00, 0x08, 
0xFF, 0xFF, 0x02, 0x0A, 0xFF, 0xFF + +align 64 +shuf_mask_8B_tags_2_3_6_7: +dq 0xFF, 0xFF, 0x00, 0x08, 0xFF, 0xFF, 0x02, 0x0A + +align 64 +shuf_mask_8B_tags_8_9_12_13: +dq 0x04, 0x0C, 0xFF, 0xFF, 0x06, 0x0E, 0xFF, 0xFF + +align 64 +shuf_mask_8B_tags_10_11_14_15: +dq 0xFF, 0xFF, 0x04, 0x0C, 0xFF, 0xFF, 0x06, 0x0E + +align 64 +shuf_mask_8B_tags: +dq 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E + +align 64 +all_ffs: +dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff +dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff +dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff +dw 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff, 0xffff + +align 64 +all_threes: +dw 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003 +dw 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003 + +align 64 +all_fffcs: +dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc +dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc +dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc +dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc + +align 64 +all_3fs: +dw 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f +dw 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f, 0x003f + +align 16 +bit_mask_table: +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x80 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc0 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe0 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf0 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf8 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe + +byte64_len_to_mask_table: + dq 0xffffffffffffffff, 0x0000000000000001 + dq 0x0000000000000003, 0x0000000000000007 + dq 0x000000000000000f, 0x000000000000001f + dq 0x000000000000003f, 0x000000000000007f + dq 0x00000000000000ff, 0x00000000000001ff + dq 0x00000000000003ff, 0x00000000000007ff + dq 0x0000000000000fff, 0x0000000000001fff + dq 0x0000000000003fff, 0x0000000000007fff + dq 0x000000000000ffff, 0x000000000001ffff + dq 0x000000000003ffff, 0x000000000007ffff + dq 0x00000000000fffff, 0x00000000001fffff + dq 0x00000000003fffff, 0x00000000007fffff + dq 0x0000000000ffffff, 0x0000000001ffffff + dq 0x0000000003ffffff, 0x0000000007ffffff + dq 0x000000000fffffff, 0x000000001fffffff + dq 0x000000003fffffff, 0x000000007fffffff + dq 0x00000000ffffffff, 0x00000001ffffffff + dq 0x00000003ffffffff, 0x00000007ffffffff + dq 0x0000000fffffffff, 0x0000001fffffffff + dq 0x0000003fffffffff, 0x0000007fffffffff + dq 0x000000ffffffffff, 0x000001ffffffffff + dq 0x000003ffffffffff, 0x000007ffffffffff + dq 0x00000fffffffffff, 0x00001fffffffffff + dq 0x00003fffffffffff, 0x00007fffffffffff + dq 0x0000ffffffffffff, 0x0001ffffffffffff + dq 0x0003ffffffffffff, 0x0007ffffffffffff + dq 0x000fffffffffffff, 0x001fffffffffffff + dq 0x003fffffffffffff, 0x007fffffffffffff + dq 0x00ffffffffffffff, 0x01ffffffffffffff + dq 0x03ffffffffffffff, 0x07ffffffffffffff + dq 0x0fffffffffffffff, 0x1fffffffffffffff + dq 0x3fffffffffffffff, 0x7fffffffffffffff + 
dq 0xffffffffffffffff + +align 64 +add_64: +dq 64, 64, 64, 64, 64, 64, 64, 64 + +align 32 +all_512w: +dw 512, 512, 512, 512, 512, 512, 512, 512 +dw 512, 512, 512, 512, 512, 512, 512, 512 + +align 64 +bswap_mask: +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c + +align 64 +all_31w: +dw 31, 31, 31, 31, 31, 31, 31, 31 +dw 31, 31, 31, 31, 31, 31, 31, 31 + +align 64 +all_ffe0w: +dw 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0 +dw 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0, 0xffe0 + +align 32 +permw_mask: +dw 0, 4, 8, 12, 1, 5, 8, 13, 2, 6, 10, 14, 3, 7, 11, 15 + +extr_bits_0_4_8_12: +db 00010001b, 00010001b, 00000000b, 00000000b + +extr_bits_1_5_9_13: +db 00100010b, 00100010b, 00000000b, 00000000b + +extr_bits_2_6_10_14: +db 01000100b, 01000100b, 00000000b, 00000000b + +extr_bits_3_7_11_15: +db 10001000b, 10001000b, 00000000b, 00000000b + +alignr_mask: +dw 0xffff, 0xffff, 0xffff, 0xffff +dw 0x0000, 0xffff, 0xffff, 0xffff +dw 0xffff, 0x0000, 0xffff, 0xffff +dw 0x0000, 0x0000, 0xffff, 0xffff +dw 0xffff, 0xffff, 0x0000, 0xffff +dw 0x0000, 0xffff, 0x0000, 0xffff +dw 0xffff, 0x0000, 0x0000, 0xffff +dw 0x0000, 0x0000, 0x0000, 0xffff +dw 0xffff, 0xffff, 0xffff, 0x0000 +dw 0x0000, 0xffff, 0xffff, 0x0000 +dw 0xffff, 0x0000, 0xffff, 0x0000 +dw 0x0000, 0x0000, 0xffff, 0x0000 +dw 0xffff, 0xffff, 0x0000, 0x0000 +dw 0x0000, 0xffff, 0x0000, 0x0000 +dw 0xffff, 0x0000, 0x0000, 0x0000 +dw 0x0000, 0x0000, 0x0000, 0x0000 + +mov_16B_mask: +dw 0000000000000000b, 0000000000001111b, 0000000011110000b, 0000000011111111b +dw 0000111100000000b, 0000111100001111b, 0000111111110000b, 0000111111111111b +dw 1111000000000000b, 1111000000001111b, 1111000011110000b, 1111000011111111b +dw 1111111100000000b, 1111111100001111b, 1111111111110000b, 1111111111111111b + +mov_8B_mask: +dw 1100110011001100b, 1100110011001111b, 1100110011111100b, 1100110011111111b +dw 1100111111001100b, 1100111111001111b, 1100111111111100b, 1100111111111111b +dw 1111110011001100b, 1111110011001111b, 1111110011111100b, 1111110011111111b +dw 1111111111001100b, 1111111111001111b, 1111111111111100b, 1111111111111111b + +mov_4B_mask: +dw 1110111011101110b, 1110111011101111b, 1110111011111110b, 1110111011111111b +dw 1110111111101110b, 1110111111101111b, 1110111111111110b, 1110111111111111b +dw 1111111011101110b, 1111111011101111b, 1111111011111110b, 1111111011111111b +dw 1111111111101110b, 1111111111101111b, 1111111111111110b, 1111111111111111b + +align 64 +idx_tags_64_0_7: +dd 0x00, 0x10, 0x01, 0x11, 0x02, 0x12, 0x03, 0x13 +dd 0x04, 0x14, 0x05, 0x15, 0x06, 0x16, 0x07, 0x17 + +align 64 +idx_tags_64_8_15: +dd 0x08, 0x18, 0x09, 0x19, 0x0A, 0x1A, 0x0B, 0x1B +dd 0x0C, 0x1C, 0x0D, 0x1D, 0x0E, 0x1E, 0x0F, 0x1F + +align 64 +bits_32_63: +times 4 dd 0x00000000, 0xffffffff, 0x00000000, 0x00000000 + +align 64 +shuf_mask_0_0_0_dw1: +times 4 db 0x04, 0x05, 0x06, 0x07, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff + +expand_mask: +db 0x00, 0x03, 0x0c, 0x0f, 0x30, 0x33, 0x3c, 0x3f +db 0xc0, 0xc3, 0xcc, 0xcf, 0xf0, 0xf3, 0xfc, 0xff + +align 64 +shuf_mask_0_dw1_0_0: +times 4 db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x04, 0x05, 0x06, 0x07, 0xff, 0xff, 0xff, 
0xff + +align 64 +shuf_mask_dw1_0_0_0: +times 4 db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x04, 0x05, 0x06, 0x07 + +;; Calculate address for next bytes of keystream (KS) +;; Memory for KS is laid out in the following way: +;; - There are 128 bytes of KS for each buffer spread in chunks of 16 bytes, +;; interleaving with KS from other 3 buffers, every 512 bytes +;; - There are 16 bytes of KS every 64 bytes, for every buffer + +;; - To access the 512-byte chunk, containing the 128 bytes of KS for the 4 buffers, +;; lane4_idx +;; - To access the next 16 bytes of KS for a buffer, bytes16_idx is used +;; - To access a 16-byte chunk inside a 64-byte chunk, ks_idx is used +%define GET_KS(base, lane4_idx, bytes16_idx, ks_idx) (base + lane4_idx * 512 + bytes16_idx * 64 + ks_idx * 16) + +; Define Stack Layout +START_FIELDS +;;; name size align +FIELD _TEMP_DIGEST_SAVE, 16*64, 64 +FIELD _RSP, 8, 8 +%assign STACK_SPACE _FIELD_OFFSET + +mksection .text +align 64 + +%ifdef LINUX +%define arg1 rdi +%define arg2 rsi +%define arg3 rdx +%define arg4 rcx +%define arg5 r8 +%define arg6 r9d +%define arg7 qword [rsp + 8] +%else +%define arg1 rcx +%define arg2 rdx +%define arg3 r8 +%define arg4 r9 +%define arg5 qword [rsp + 40] +%define arg6 qword [rsp + 48] +%define arg7 qword [rsp + 56] +%endif + +%define OFS_R1 (16*(4*16)) +%define OFS_R2 (OFS_R1 + (4*16)) + +%ifidn __OUTPUT_FORMAT__, win64 + %define XMM_STORAGE 16*10 + %define GP_STORAGE 8*8 +%else + %define XMM_STORAGE 0 + %define GP_STORAGE 6*8 +%endif +%define LANE_STORAGE 64 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE + LANE_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov rax, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 + +%ifidn __OUTPUT_FORMAT__, win64 + ; xmm6:xmm15 need to be maintained for Windows + vmovdqa [rsp + 0*16], xmm6 + vmovdqa [rsp + 1*16], xmm7 + vmovdqa [rsp + 2*16], xmm8 + vmovdqa [rsp + 3*16], xmm9 + vmovdqa [rsp + 4*16], xmm10 + vmovdqa [rsp + 5*16], xmm11 + vmovdqa [rsp + 6*16], xmm12 + vmovdqa [rsp + 7*16], xmm13 + vmovdqa [rsp + 8*16], xmm14 + vmovdqa [rsp + 9*16], xmm15 + mov [rsp + GP_OFFSET + 48], rdi + mov [rsp + GP_OFFSET + 56], rsi +%endif + mov [rsp + GP_OFFSET], r12 + mov [rsp + GP_OFFSET + 8], r13 + mov [rsp + GP_OFFSET + 16], r14 + mov [rsp + GP_OFFSET + 24], r15 + mov [rsp + GP_OFFSET + 32], rbx + mov [rsp + GP_OFFSET + 40], rax ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + +%ifidn __OUTPUT_FORMAT__, win64 + vmovdqa xmm6, [rsp + 0*16] + vmovdqa xmm7, [rsp + 1*16] + vmovdqa xmm8, [rsp + 2*16] + vmovdqa xmm9, [rsp + 3*16] + vmovdqa xmm10, [rsp + 4*16] + vmovdqa xmm11, [rsp + 5*16] + vmovdqa xmm12, [rsp + 6*16] + vmovdqa xmm13, [rsp + 7*16] + vmovdqa xmm14, [rsp + 8*16] + vmovdqa xmm15, [rsp + 9*16] + mov rdi, [rsp + GP_OFFSET + 48] + mov rsi, [rsp + GP_OFFSET + 56] +%endif + mov r12, [rsp + GP_OFFSET] + mov r13, [rsp + GP_OFFSET + 8] + mov r14, [rsp + GP_OFFSET + 16] + mov r15, [rsp + GP_OFFSET + 24] + mov rbx, [rsp + GP_OFFSET + 32] + mov rsp, [rsp + GP_OFFSET + 40] +%endmacro + +; This macro reorder the LFSR registers +; after N rounds (1 <= N <= 15), since the registers +; are shifted every round +; +; The macro clobbers ZMM0-15 +; +%macro REORDER_LFSR 3 +%define %%STATE %1 ; [in] Pointer to LFSR state +%define %%NUM_ROUNDS %2 ; [immediate] Number of key generation rounds +%define %%LANE_MASK %3 ; [in] Mask register with lanes to update + +%if %%NUM_ROUNDS != 16 +%assign i 0 +%rep 16 + vmovdqa32 APPEND(zmm,i){%%LANE_MASK}, [%%STATE + 64*i] +%assign i 
(i+1) +%endrep + +%assign i 0 +%assign j %%NUM_ROUNDS +%rep 16 + vmovdqa32 [%%STATE + 64*i]{%%LANE_MASK}, APPEND(zmm,j) +%assign i (i+1) +%assign j ((j+1) % 16) +%endrep +%endif ;; %%NUM_ROUNDS != 16 + +%endmacro + +; +; Perform a partial 16x16 transpose (as opposed to a full 16x16 transpose), +; where the output is chunks of 16 bytes from 4 different buffers interleaved +; in each register (all ZMM registers) +; +; Input: +; a0 a1 a2 a3 a4 a5 a6 a7 .... a15 +; b0 b1 b2 b3 b4 b5 b6 b7 .... b15 +; c0 c1 c2 c3 c4 c5 c6 c7 .... c15 +; d0 d1 d2 d3 d4 d5 d6 d7 .... d15 +; +; Output: +; a0 b0 c0 d0 a4 b4 c4 d4 .... d12 +; a1 b1 c1 d1 a5 b5 c5 d5 .... d13 +; a2 b2 c2 d2 a6 b6 c6 d6 .... d14 +; a3 b3 c3 d3 a7 b7 c7 d7 .... d15 +; +%macro TRANSPOSE16_U32_INTERLEAVED 26 +%define %%IN00 %1 ; [in/out] Bytes 0-3 for all buffers (in) / Bytes 0-15 for buffers 3,7,11,15 (out) +%define %%IN01 %2 ; [in/out] Bytes 4-7 for all buffers (in) / Bytes 16-31 for buffers 3,7,11,15 (out) +%define %%IN02 %3 ; [in/out] Bytes 8-11 for all buffers (in) / Bytes 32-47 for buffers 3,7,11,15 (out) +%define %%IN03 %4 ; [in/out] Bytes 12-15 for all buffers (in) / Bytes 48-63 for buffers 3,7,11,15 (out) +%define %%IN04 %5 ; [in/clobbered] Bytes 16-19 for all buffers (in) +%define %%IN05 %6 ; [in/clobbered] Bytes 20-23 for all buffers (in) +%define %%IN06 %7 ; [in/clobbered] Bytes 24-27 for all buffers (in) +%define %%IN07 %8 ; [in/clobbered] Bytes 28-31 for all buffers (in) +%define %%IN08 %9 ; [in/clobbered] Bytes 32-35 for all buffers (in) +%define %%IN09 %10 ; [in/clobbered] Bytes 36-39 for all buffers (in) +%define %%IN10 %11 ; [in/clobbered] Bytes 40-43 for all buffers (in) +%define %%IN11 %12 ; [in/clobbered] Bytes 44-47 for all buffers (in) +%define %%IN12 %13 ; [in/out] Bytes 48-51 for all buffers (in) / Bytes 0-15 for buffers 2,6,10,14 (out) +%define %%IN13 %14 ; [in/out] Bytes 52-55 for all buffers (in) / Bytes 16-31 for buffers 2,6,10,14 (out) +%define %%IN14 %15 ; [in/out] Bytes 56-59 for all buffers (in) / Bytes 32-47 for buffers 2,6,10,14 (out) +%define %%IN15 %16 ; [in/out] Bytes 60-63 for all buffers (in) / Bytes 48-63 for buffers 2,6,10,14 (out) +%define %%T0 %17 ; [out] Bytes 32-47 for buffers 1,5,9,13 (out) +%define %%T1 %18 ; [out] Bytes 48-63 for buffers 1,5,9,13 (out) +%define %%T2 %19 ; [out] Bytes 32-47 for buffers 0,4,8,12 (out) +%define %%T3 %20 ; [out] Bytes 48-63 for buffers 0,4,8,12 (out) +%define %%K0 %21 ; [out] Bytes 0-15 for buffers 1,5,9,13 (out) +%define %%K1 %22 ; [out] Bytes 16-31for buffers 1,5,9,13 (out) +%define %%K2 %23 ; [out] Bytes 0-15 for buffers 0,4,8,12 (out) +%define %%K3 %24 ; [out] Bytes 16-31 for buffers 0,4,8,12 (out) +%define %%K4 %25 ; [clobbered] Temporary register +%define %%K5 %26 ; [clobbered] Temporary register + + vpunpckldq %%K0, %%IN00, %%IN01 + vpunpckhdq %%K1, %%IN00, %%IN01 + vpunpckldq %%T0, %%IN02, %%IN03 + vpunpckhdq %%T1, %%IN02, %%IN03 + + vpunpckldq %%IN00, %%IN04, %%IN05 + vpunpckhdq %%IN01, %%IN04, %%IN05 + vpunpckldq %%IN02, %%IN06, %%IN07 + vpunpckhdq %%IN03, %%IN06, %%IN07 + + vpunpcklqdq %%K2, %%K0, %%T0 + vpunpckhqdq %%K3, %%K0, %%T0 + vpunpcklqdq %%T2, %%K1, %%T1 + vpunpckhqdq %%T3, %%K1, %%T1 + + vpunpcklqdq %%K0, %%IN00, %%IN02 + vpunpckhqdq %%K1, %%IN00, %%IN02 + vpunpcklqdq %%T0, %%IN01, %%IN03 + vpunpckhqdq %%T1, %%IN01, %%IN03 + + vpunpckldq %%K4, %%IN08, %%IN09 + vpunpckhdq %%K5, %%IN08, %%IN09 + vpunpckldq %%IN04, %%IN10, %%IN11 + vpunpckhdq %%IN05, %%IN10, %%IN11 + vpunpckldq %%IN06, %%IN12, %%IN13 + vpunpckhdq %%IN07, %%IN12, %%IN13 + 
vpunpckldq %%IN10, %%IN14, %%IN15 + vpunpckhdq %%IN11, %%IN14, %%IN15 + + vpunpcklqdq %%IN12, %%K4, %%IN04 + vpunpckhqdq %%IN13, %%K4, %%IN04 + vpunpcklqdq %%IN14, %%K5, %%IN05 + vpunpckhqdq %%IN15, %%K5, %%IN05 + vpunpcklqdq %%IN00, %%IN06, %%IN10 + vpunpckhqdq %%IN01, %%IN06, %%IN10 + vpunpcklqdq %%IN02, %%IN07, %%IN11 + vpunpckhqdq %%IN03, %%IN07, %%IN11 +%endmacro + +; +; Perform a partial 4x16 transpose +; where the output is chunks of 16 bytes from 4 different buffers interleaved +; in each register (all ZMM registers) +; +; Input: +; a0 a1 a2 a3 a4 a5 a6 a7 .... a15 +; b0 b1 b2 b3 b4 b5 b6 b7 .... b15 +; c0 c1 c2 c3 c4 c5 c6 c7 .... c15 +; d0 d1 d2 d3 d4 d5 d6 d7 .... d15 +; +; Output: +; a0 b0 c0 d0 a4 b4 c4 d4 .... d12 +; a1 b1 c1 d1 a5 b5 c5 d5 .... d13 +; a2 b2 c2 d2 a6 b6 c6 d6 .... d14 +; a3 b3 c3 d3 a7 b7 c7 d7 .... d15 +; +%macro TRANSPOSE4_U32_INTERLEAVED 8 +%define %%IN00 %1 ; [in/out] Bytes 0-3 for all buffers (in) / Bytes 0-15 for buffers 0,4,8,12 (out) +%define %%IN01 %2 ; [in/out] Bytes 4-7 for all buffers (in) / Bytes 0-15 for buffers 1,5,9,13 (out) +%define %%IN02 %3 ; [in/out] Bytes 8-11 for all buffers (in) / Bytes 0-15 for buffers 2,6,10,14 (out) +%define %%IN03 %4 ; [in/out] Bytes 12-15 for all buffers (in) / Bytes 0-15 for buffers 3,7,11,15 (out) +%define %%T0 %5 ; [clobbered] Temporary ZMM register +%define %%T1 %6 ; [clobbered] Temporary ZMM register +%define %%K0 %7 ; [clobbered] Temporary ZMM register +%define %%K1 %8 ; [clobbered] Temporary ZMM register + + vpunpckldq %%K0, %%IN00, %%IN01 + vpunpckhdq %%K1, %%IN00, %%IN01 + vpunpckldq %%T0, %%IN02, %%IN03 + vpunpckhdq %%T1, %%IN02, %%IN03 + + vpunpcklqdq %%IN00, %%K0, %%T0 + vpunpckhqdq %%IN01, %%K0, %%T0 + vpunpcklqdq %%IN02, %%K1, %%T1 + vpunpckhqdq %%IN03, %%K1, %%T1 +%endmacro + +; +; Performs a 4x16 32-bit transpose +; +; Input (each item is a 32-bit word): +; A0 A1 .. A15 +; B0 B1 .. B15 +; C0 C1 .. C15 +; D0 D1 .. D15 +; +; Output (each item is a 32-bit word): +; A0 B0 C0 D0 A1 B1 .. C3 D3 +; A4 B4 C4 D4 A5 B5 .. C7 D7 +; A8 B8 C8 D8 A9 B9 .. C11 D11 +; A12 B12 C12 D12 A13 B13 .. 
C15 D15 +; +%macro TRANSPOSE4_U32 16 +%define %%IN00 %1 ; [in/out] Input row 0 / Output column 0 +%define %%IN01 %2 ; [in/out] Input row 1 / Output column 1 +%define %%IN02 %3 ; [in/out] Input row 2 / Output column 2 +%define %%IN03 %4 ; [in/out] Input row 3 / Output column 3 +%define %%T0 %5 ; [clobbered] Temporary ZMM register +%define %%T1 %6 ; [clobbered] Temporary ZMM register +%define %%T2 %7 ; [clobbered] Temporary ZMM register +%define %%T3 %8 ; [clobbered] Temporary ZMM register +%define %%K0 %9 ; [clobbered] Temporary ZMM register +%define %%K1 %10 ; [clobbered] Temporary ZMM register +%define %%K2 %11 ; [clobbered] Temporary ZMM register +%define %%K3 %12 ; [clobbered] Temporary ZMM register +%define %%H0 %13 ; [clobbered] Temporary ZMM register +%define %%H1 %14 ; [clobbered] Temporary ZMM register +%define %%H2 %15 ; [clobbered] Temporary ZMM register +%define %%H3 %16 ; [clobbered] Temporary ZMM register + + vpunpckldq %%K0, %%IN00, %%IN01 + vpunpckhdq %%K1, %%IN00, %%IN01 + vpunpckldq %%T0, %%IN02, %%IN03 + vpunpckhdq %%T1, %%IN02, %%IN03 + + vpunpcklqdq %%K2, %%K0, %%T0 + vpunpckhqdq %%T2, %%K0, %%T0 + vpunpcklqdq %%K3, %%K1, %%T1 + vpunpckhqdq %%T3, %%K1, %%T1 + + vshufi64x2 %%H0, %%K2, %%T2, 0x44 + vshufi64x2 %%H1, %%K2, %%T2, 0xee + vshufi64x2 %%H2, %%K3, %%T3, 0x44 + vshufi64x2 %%H3, %%K3, %%T3, 0xee + + vshufi64x2 %%IN00, %%H0, %%H2, 0x88 + vshufi64x2 %%IN01, %%H0, %%H2, 0xdd + vshufi64x2 %%IN02, %%H1, %%H3, 0x88 + vshufi64x2 %%IN03, %%H1, %%H3, 0xdd + +%endmacro + +; +; Calculates X0-X3 from LFSR registers +; +%macro BITS_REORG16 16-17 +%define %%STATE %1 ; [in] ZUC state +%define %%ROUND_NUM %2 ; [in] Round number +%define %%LANE_MASK %3 ; [in] Mask register with lanes to update +%define %%LFSR_0 %4 ; [clobbered] LFSR_0 +%define %%LFSR_2 %5 ; [clobbered] LFSR_2 +%define %%LFSR_5 %6 ; [clobbered] LFSR_5 +%define %%LFSR_7 %7 ; [clobbered] LFSR_7 +%define %%LFSR_9 %8 ; [clobbered] LFSR_9 +%define %%LFSR_11 %9 ; [clobbered] LFSR_11 +%define %%LFSR_14 %10 ; [clobbered] LFSR_14 +%define %%LFSR_15 %11 ; [clobbered] LFSR_15 +%define %%ZTMP %12 ; [clobbered] Temporary ZMM register +%define %%BLEND_KMASK %13 ; [in] Blend K-mask +%define %%X0 %14 ; [out] ZMM register containing X0 of all lanes +%define %%X1 %15 ; [out] ZMM register containing X1 of all lanes +%define %%X2 %16 ; [out] ZMM register containing X2 of all lanes +%define %%X3 %17 ; [out] ZMM register containing X3 of all lanes (only for work mode) + + vmovdqa64 %%LFSR_15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*64] + vmovdqa64 %%LFSR_14, [%%STATE + ((14 + %%ROUND_NUM) % 16)*64] + vmovdqa64 %%LFSR_11, [%%STATE + ((11 + %%ROUND_NUM) % 16)*64] + vmovdqa64 %%LFSR_9, [%%STATE + (( 9 + %%ROUND_NUM) % 16)*64] + vmovdqa64 %%LFSR_7, [%%STATE + (( 7 + %%ROUND_NUM) % 16)*64] + vmovdqa64 %%LFSR_5, [%%STATE + (( 5 + %%ROUND_NUM) % 16)*64] +%if (%0 == 17) ; Only needed when generating X3 (for "working" mode) + vmovdqa64 %%LFSR_2, [%%STATE + (( 2 + %%ROUND_NUM) % 16)*64] + vmovdqa64 %%LFSR_0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*64] +%endif + +%if USE_GFNI_VAES_VPCLMUL == 1 + vpsrld %%LFSR_15, 15 + vpslld %%LFSR_14, 16 + vpslld %%LFSR_9, 1 + vpslld %%LFSR_5, 1 + vpshldd %%X0, %%LFSR_15, %%LFSR_14, 16 + vpshldd %%X1, %%LFSR_11, %%LFSR_9, 16 + vpshldd %%X2, %%LFSR_7, %%LFSR_5, 16 +%if (%0 == 17) + vpslld %%LFSR_0, 1 + vpshldd %%X3, %%LFSR_2, %%LFSR_0, 16 +%endif +%else ; USE_GFNI_VAES_VPCLMUL == 1 + vpxorq %%ZTMP, %%ZTMP + vpslld %%LFSR_15, 1 + vpblendmw %%ZTMP{%%BLEND_KMASK}, %%LFSR_14, %%ZTMP + vpblendmw %%X0{%%BLEND_KMASK}, %%ZTMP, 
%%LFSR_15 + vpslld %%LFSR_11, 16 + vpsrld %%LFSR_9, 15 + vporq %%X1, %%LFSR_11, %%LFSR_9 + vpslld %%LFSR_7, 16 + vpsrld %%LFSR_5, 15 + vporq %%X2, %%LFSR_7, %%LFSR_5 +%if (%0 == 17) + vpslld %%LFSR_2, 16 + vpsrld %%LFSR_0, 15 + vporq %%X3, %%LFSR_2, %%LFSR_0 ; Store BRC_X3 in ZMM register +%endif ; %0 == 17 +%endif ; USE_GFNI_VAES_VPCLMUL == 1 +%endmacro + +; +; Updates R1-R2, using X0-X3 and generates W (if needed) +; +%macro NONLIN_FUN16 13-14 +%define %%STATE %1 ; [in] ZUC state +%define %%LANE_MASK %2 ; [in] Mask register with lanes to update +%define %%X0 %3 ; [in] ZMM register containing X0 of all lanes +%define %%X1 %4 ; [in] ZMM register containing X1 of all lanes +%define %%X2 %5 ; [in] ZMM register containing X2 of all lanes +%define %%R1 %6 ; [in/out] ZMM register to contain R1 for all lanes +%define %%R2 %7 ; [in/out] ZMM register to contain R2 for all lanes +%define %%ZTMP1 %8 ; [clobbered] Temporary ZMM register +%define %%ZTMP2 %9 ; [clobbered] Temporary ZMM register +%define %%ZTMP3 %10 ; [clobbered] Temporary ZMM register +%define %%ZTMP4 %11 ; [clobbered] Temporary ZMM register +%define %%ZTMP5 %12 ; [clobbered] Temporary ZMM register +%define %%ZTMP6 %13 ; [clobbered] Temporary ZMM register +%define %%W %14 ; [out] ZMM register to contain W for all lanes + +%define %%W1 %%ZTMP5 +%define %%W2 %%ZTMP6 + +%if (%0 == 14) + vpxorq %%W, %%X0, %%R1 + vpaddd %%W, %%R2 ; W = (BRC_X0 ^ F_R1) + F_R2 +%endif + + vpaddd %%W1, %%R1, %%X1 ; W1 = F_R1 + BRC_X1 + vpxorq %%W2, %%R2, %%X2 ; W2 = F_R2 ^ BRC_X2 + +%if USE_GFNI_VAES_VPCLMUL == 1 + vpshldd %%ZTMP1, %%W1, %%W2, 16 + vpshldd %%ZTMP2, %%W2, %%W1, 16 +%else + vpslld %%ZTMP3, %%W1, 16 + vpsrld %%ZTMP4, %%W1, 16 + vpslld %%ZTMP5, %%W2, 16 + vpsrld %%ZTMP6, %%W2, 16 + vporq %%ZTMP1, %%ZTMP3, %%ZTMP6 + vporq %%ZTMP2, %%ZTMP4, %%ZTMP5 +%endif + + vprold %%ZTMP3, %%ZTMP1, 10 + vprold %%ZTMP4, %%ZTMP1, 18 + vprold %%ZTMP5, %%ZTMP1, 24 + vprold %%ZTMP6, %%ZTMP1, 2 + ; ZMM1 = U = L1(P) + vpternlogq %%ZTMP1, %%ZTMP3, %%ZTMP4, 0x96 ; (A ^ B) ^ C + vpternlogq %%ZTMP1, %%ZTMP5, %%ZTMP6, 0x96 ; (A ^ B) ^ C + + vprold %%ZTMP3, %%ZTMP2, 8 + vprold %%ZTMP4, %%ZTMP2, 14 + vprold %%ZTMP5, %%ZTMP2, 22 + vprold %%ZTMP6, %%ZTMP2, 30 + ; ZMM2 = V = L2(Q) + vpternlogq %%ZTMP2, %%ZTMP3, %%ZTMP4, 0x96 ; (A ^ B) ^ C + vpternlogq %%ZTMP2, %%ZTMP5, %%ZTMP6, 0x96 ; (A ^ B) ^ C + + ; Shuffle U and V to have all S0 lookups in XMM1 and all S1 lookups in XMM2 + + ; Compress all S0 and S1 input values in each register + ; S0: Bytes 0-7,16-23,32-39,48-55 S1: Bytes 8-15,24-31,40-47,56-63 + vpshufb %%ZTMP1, [rel S0_S1_shuf] + ; S1: Bytes 0-7,16-23,32-39,48-55 S0: Bytes 8-15,24-31,40-47,56-63 + vpshufb %%ZTMP2, [rel S1_S0_shuf] + + vshufpd %%ZTMP3, %%ZTMP1, %%ZTMP2, 0xAA ; All S0 input values + vshufpd %%ZTMP4, %%ZTMP2, %%ZTMP1, 0xAA ; All S1 input values + + ; Compute S0 and S1 values + S0_comput_AVX512 %%ZTMP3, %%ZTMP1, %%ZTMP2, USE_GFNI_VAES_VPCLMUL + S1_comput_AVX512 %%ZTMP4, %%ZTMP1, %%ZTMP2, %%ZTMP5, %%ZTMP6, USE_GFNI_VAES_VPCLMUL + + ; Need to shuffle back %%ZTMP1 & %%ZTMP2 before storing output + ; (revert what was done before S0 and S1 computations) + vshufpd %%ZTMP1, %%ZTMP3, %%ZTMP4, 0xAA + vshufpd %%ZTMP2, %%ZTMP4, %%ZTMP3, 0xAA + + vpshufb %%R1, %%ZTMP1, [rel rev_S0_S1_shuf] + vpshufb %%R2, %%ZTMP2, [rel rev_S1_S0_shuf] +%endmacro + +; +; Function to store 64 bytes of keystream for 16 buffers +; Note: all the 64*16 bytes are not store contiguously, +; the first 256 bytes (containing 64 bytes from 4 buffers) +; are stored in the first half of the first 512 
bytes, +; then there is a gap of 256 bytes and then the next 256 bytes +; are written, and so on. +; +%macro STORE_KSTR16 18-25 +%define %%KS %1 ; [in] Pointer to keystream +%define %%DATA64B_L0 %2 ; [in] 64 bytes of keystream for lane 0 +%define %%DATA64B_L1 %3 ; [in] 64 bytes of keystream for lane 1 +%define %%DATA64B_L2 %4 ; [in] 64 bytes of keystream for lane 2 +%define %%DATA64B_L3 %5 ; [in] 64 bytes of keystream for lane 3 +%define %%DATA64B_L4 %6 ; [in] 64 bytes of keystream for lane 4 +%define %%DATA64B_L5 %7 ; [in] 64 bytes of keystream for lane 5 +%define %%DATA64B_L6 %8 ; [in] 64 bytes of keystream for lane 6 +%define %%DATA64B_L7 %9 ; [in] 64 bytes of keystream for lane 7 +%define %%DATA64B_L8 %10 ; [in] 64 bytes of keystream for lane 8 +%define %%DATA64B_L9 %11 ; [in] 64 bytes of keystream for lane 9 +%define %%DATA64B_L10 %12 ; [in] 64 bytes of keystream for lane 10 +%define %%DATA64B_L11 %13 ; [in] 64 bytes of keystream for lane 11 +%define %%DATA64B_L12 %14 ; [in] 64 bytes of keystream for lane 12 +%define %%DATA64B_L13 %15 ; [in] 64 bytes of keystream for lane 13 +%define %%DATA64B_L14 %16 ; [in] 64 bytes of keystream for lane 14 +%define %%DATA64B_L15 %17 ; [in] 64 bytes of keystream for lane 15 +%define %%KEY_OFF %18 ; [in] Offset to start writing Keystream +%define %%LANE_MASK %19 ; [in] Lane mask with lanes to generate all keystream words +%define %%ALIGN_MASK %20 ; [in] Address with alignr masks +%define %%MOV_MASK %21 ; [in] Address with move masks +%define %%TMP %22 ; [in] Temporary GP register +%define %%KMASK1 %23 ; [clobbered] Temporary K mask +%define %%KMASK2 %24 ; [clobbered] Temporary K mask +%define %%SKIP_ROUNDS %25 ; [constant] Number of rounds to skip (1, 2 or 4) + +%if (%0 == 18) + vmovdqu64 [%%KS + %%KEY_OFF*4], %%DATA64B_L0 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 64], %%DATA64B_L1 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 2*64], %%DATA64B_L2 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 3*64], %%DATA64B_L3 + + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512], %%DATA64B_L4 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512 + 64], %%DATA64B_L5 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512 + 2*64], %%DATA64B_L6 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512 + 3*64], %%DATA64B_L7 + + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2], %%DATA64B_L8 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2 + 64], %%DATA64B_L9 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2 + 64*2], %%DATA64B_L10 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*2 + 64*3], %%DATA64B_L11 + + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3], %%DATA64B_L12 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3 + 64], %%DATA64B_L13 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3 + 64*2], %%DATA64B_L14 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 512*3 + 64*3], %%DATA64B_L15 +%else + pext DWORD(%%TMP), DWORD(%%LANE_MASK), [rel extr_bits_0_4_8_12] + kmovq %%KMASK1, [%%ALIGN_MASK + 8*%%TMP] + kmovw %%KMASK2, [%%MOV_MASK + 2*%%TMP] + ; Shifting left 4/8/16 bytes of KS for lanes which first 4/8/16 bytes are skipped +%if %%SKIP_ROUNDS == 4 + vmovdqu8 %%DATA64B_L3{%%KMASK1}, %%DATA64B_L2 + vmovdqu8 %%DATA64B_L2{%%KMASK1}, %%DATA64B_L1 + vmovdqu8 %%DATA64B_L1{%%KMASK1}, %%DATA64B_L0 +%else + vpalignr %%DATA64B_L3{%%KMASK1}, %%DATA64B_L3, %%DATA64B_L2, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L2{%%KMASK1}, %%DATA64B_L2, %%DATA64B_L1, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L1{%%KMASK1}, %%DATA64B_L1, %%DATA64B_L0, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L0{%%KMASK1}, %%DATA64B_L0, %%DATA64B_L3, (16 - %%SKIP_ROUNDS * 4) +%endif + vmovdqu32 [%%KS + %%KEY_OFF*4]{%%KMASK2}, %%DATA64B_L0 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 64], 
%%DATA64B_L1 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 2*64], %%DATA64B_L2 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 3*64], %%DATA64B_L3 + + pext DWORD(%%TMP), DWORD(%%LANE_MASK), [rel extr_bits_1_5_9_13] + kmovq %%KMASK1, [%%ALIGN_MASK + 8*%%TMP] + kmovw %%KMASK2, [%%MOV_MASK + 2*%%TMP] +%if %%SKIP_ROUNDS == 4 + vmovdqu8 %%DATA64B_L7{%%KMASK1}, %%DATA64B_L6 + vmovdqu8 %%DATA64B_L6{%%KMASK1}, %%DATA64B_L5 + vmovdqu8 %%DATA64B_L5{%%KMASK1}, %%DATA64B_L4 +%else + vpalignr %%DATA64B_L7{%%KMASK1}, %%DATA64B_L7, %%DATA64B_L6, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L6{%%KMASK1}, %%DATA64B_L6, %%DATA64B_L5, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L5{%%KMASK1}, %%DATA64B_L5, %%DATA64B_L4, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L4{%%KMASK1}, %%DATA64B_L4, %%DATA64B_L7, (16 - %%SKIP_ROUNDS * 4) +%endif + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512]{%%KMASK2}, %%DATA64B_L4 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512 + 64], %%DATA64B_L5 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512 + 64*2], %%DATA64B_L6 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512 + 64*3], %%DATA64B_L7 + + pext DWORD(%%TMP), DWORD(%%LANE_MASK), [rel extr_bits_2_6_10_14] + kmovq %%KMASK1, [%%ALIGN_MASK + 8*%%TMP] + kmovw %%KMASK2, [%%MOV_MASK + 2*%%TMP] +%if %%SKIP_ROUNDS == 4 + vmovdqu8 %%DATA64B_L11{%%KMASK1}, %%DATA64B_L10 + vmovdqu8 %%DATA64B_L10{%%KMASK1}, %%DATA64B_L9 + vmovdqu8 %%DATA64B_L9{%%KMASK1}, %%DATA64B_L8 +%else + vpalignr %%DATA64B_L11{%%KMASK1}, %%DATA64B_L11, %%DATA64B_L10, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L10{%%KMASK1}, %%DATA64B_L10, %%DATA64B_L9, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L9{%%KMASK1}, %%DATA64B_L9, %%DATA64B_L8, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L8{%%KMASK1}, %%DATA64B_L8, %%DATA64B_L11, (16 - %%SKIP_ROUNDS * 4) +%endif + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512*2]{%%KMASK2}, %%DATA64B_L8 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512*2 + 64], %%DATA64B_L9 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512*2 + 64*2], %%DATA64B_L10 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512*2 + 64*3], %%DATA64B_L11 + + pext DWORD(%%TMP), DWORD(%%LANE_MASK), [rel extr_bits_3_7_11_15] + kmovq %%KMASK1, [%%ALIGN_MASK + 8*%%TMP] + kmovw %%KMASK2, [%%MOV_MASK + 2*%%TMP] +%if %%SKIP_ROUNDS == 4 + vmovdqu8 %%DATA64B_L15{%%KMASK1}, %%DATA64B_L14 + vmovdqu8 %%DATA64B_L14{%%KMASK1}, %%DATA64B_L13 + vmovdqu8 %%DATA64B_L13{%%KMASK1}, %%DATA64B_L12 +%else + vpalignr %%DATA64B_L15{%%KMASK1}, %%DATA64B_L15, %%DATA64B_L14, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L14{%%KMASK1}, %%DATA64B_L14, %%DATA64B_L13, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L13{%%KMASK1}, %%DATA64B_L13, %%DATA64B_L12, (16 - %%SKIP_ROUNDS * 4) + vpalignr %%DATA64B_L12{%%KMASK1}, %%DATA64B_L12, %%DATA64B_L15, (16 - %%SKIP_ROUNDS * 4) +%endif + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512*3]{%%KMASK2}, %%DATA64B_L12 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512*3 + 64], %%DATA64B_L13 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512*3 + 64*2], %%DATA64B_L14 + vmovdqu32 [%%KS + %%KEY_OFF*4 + 512*3 + 64*3], %%DATA64B_L15 +%endif +%endmacro + +; +; Function to store 64 bytes of keystream for 4 buffers +; Note: all the 64*4 bytes are not store contiguously. +; Each 64 bytes are stored every 512 bytes, being written in +; qword index 0, 1, 2 or 3 inside the 512 bytes, depending on the lane. 
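+; E.g. with KEY_OFF = 0 and LANE_GROUP = 2, the four 64-byte blocks below are written at
+; byte offsets 128, 128 + 512, 128 + 512*2 and 128 + 512*3 from the start of the KS buffer.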
+%macro STORE_KSTR4 7 +%define %%KS %1 ; [in] Pointer to keystream +%define %%DATA64B_L0 %2 ; [in] 64 bytes of keystream for lane 0 +%define %%DATA64B_L1 %3 ; [in] 64 bytes of keystream for lane 1 +%define %%DATA64B_L2 %4 ; [in] 64 bytes of keystream for lane 2 +%define %%DATA64B_L3 %5 ; [in] 64 bytes of keystream for lane 3 +%define %%KEY_OFF %6 ; [in] Offset to start writing Keystream +%define %%LANE_GROUP %7 ; [immediate] 0, 1, 2 or 3 + + vmovdqu64 [%%KS + %%KEY_OFF*4 + 64*%%LANE_GROUP], %%DATA64B_L0 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 64*%%LANE_GROUP + 512], %%DATA64B_L1 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 64*%%LANE_GROUP + 512*2], %%DATA64B_L2 + vmovdqu64 [%%KS + %%KEY_OFF*4 + 64*%%LANE_GROUP + 512*3], %%DATA64B_L3 +%endmacro + +; +; Add two 32-bit args and reduce mod (2^31-1) +; +%macro ADD_MOD31 4 +%define %%IN_OUT %1 ; [in/out] ZMM register with first input and output +%define %%IN2 %2 ; [in] ZMM register with second input +%define %%ZTMP %3 ; [clobbered] Temporary ZMM register +%define %%MASK31 %4 ; [in] ZMM register containing 0x7FFFFFFF's in all dwords + + vpaddd %%IN_OUT, %%IN2 + vpsrld %%ZTMP, %%IN_OUT, 31 + vpandq %%IN_OUT, %%MASK31 + vpaddd %%IN_OUT, %%ZTMP +%endmacro + +; +; Rotate (mult by pow of 2) 32-bit arg and reduce mod (2^31-1) +; +%macro ROT_MOD31 4 +%define %%IN_OUT %1 ; [in/out] ZMM register with input and output +%define %%ZTMP %2 ; [clobbered] Temporary ZMM register +%define %%MASK31 %3 ; [in] ZMM register containing 0x7FFFFFFF's in all dwords +%define %%N_BITS %4 ; [immediate] Number of bits to rotate for each dword + + vpslld %%ZTMP, %%IN_OUT, %%N_BITS + vpsrld %%IN_OUT, %%IN_OUT, (31 - %%N_BITS) + vpternlogq %%IN_OUT, %%ZTMP, %%MASK31, 0xA8 ; (A | B) & C +%endmacro + +; +; Update LFSR registers, calculating S_16 +; +; S_16 = [ 2^15*S_15 + 2^17*S_13 + 2^21*S_10 + 2^20*S_4 + (1 + 2^8)*S_0 ] mod (2^31 - 1) +; If init mode, add W to the calculation above. 
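+; (ADD_MOD31 and ROT_MOD31 above provide the mod (2^31 - 1) arithmetic used here:
+; ADD_MOD31 adds with an end-around carry, i.e. sum = a + b, result = (sum & 0x7FFFFFFF) + (sum >> 31),
+; and ROT_MOD31 by n computes ((s << n) | (s >> (31 - n))) & 0x7FFFFFFF, i.e. 2^n * s mod (2^31 - 1))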
+; S_16 -> S_15 for next round
+;
+%macro LFSR_UPDT16 12
+%define %%STATE %1 ; [in] ZUC state
+%define %%ROUND_NUM %2 ; [in] Round number
+%define %%LANE_MASK %3 ; [in] Mask register with lanes to update
+%define %%LFSR_0 %4 ; [clobbered] LFSR_0
+%define %%LFSR_4 %5 ; [clobbered] LFSR_4
+%define %%LFSR_10 %6 ; [clobbered] LFSR_10
+%define %%LFSR_13 %7 ; [clobbered] LFSR_13
+%define %%LFSR_15 %8 ; [clobbered] LFSR_15
+%define %%ZTMP %9 ; [clobbered] Temporary ZMM register
+%define %%MASK_31 %10 ; [in] Mask_31
+%define %%W %11 ; [in/clobbered] In init mode, contains W for all 16 lanes
+%define %%MODE %12 ; [constant] "init" / "work" mode
+
+ vmovdqa64 %%LFSR_0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*64]
+ vmovdqa64 %%LFSR_4, [%%STATE + (( 4 + %%ROUND_NUM) % 16)*64]
+ vmovdqa64 %%LFSR_10, [%%STATE + ((10 + %%ROUND_NUM) % 16)*64]
+ vmovdqa64 %%LFSR_13, [%%STATE + ((13 + %%ROUND_NUM) % 16)*64]
+ vmovdqa64 %%LFSR_15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*64]
+
+ ; Calculate LFSR feedback (S_16)
+
+ ; In Init mode, W is added to the S_16 calculation
+%ifidn %%MODE, init
+ ADD_MOD31 %%W, %%LFSR_0, %%ZTMP, %%MASK_31
+%else
+ vmovdqa64 %%W, %%LFSR_0
+%endif
+ ROT_MOD31 %%LFSR_0, %%ZTMP, %%MASK_31, 8
+ ADD_MOD31 %%W, %%LFSR_0, %%ZTMP, %%MASK_31
+ ROT_MOD31 %%LFSR_4, %%ZTMP, %%MASK_31, 20
+ ADD_MOD31 %%W, %%LFSR_4, %%ZTMP, %%MASK_31
+ ROT_MOD31 %%LFSR_10, %%ZTMP, %%MASK_31, 21
+ ADD_MOD31 %%W, %%LFSR_10, %%ZTMP, %%MASK_31
+ ROT_MOD31 %%LFSR_13, %%ZTMP, %%MASK_31, 17
+ ADD_MOD31 %%W, %%LFSR_13, %%ZTMP, %%MASK_31
+ ROT_MOD31 %%LFSR_15, %%ZTMP, %%MASK_31, 15
+ ADD_MOD31 %%W, %%LFSR_15, %%ZTMP, %%MASK_31
+
+ vmovdqa32 [%%STATE + (( 0 + %%ROUND_NUM) % 16)*64]{%%LANE_MASK}, %%W
+
+ ; The new S_16 is written to the S_0 slot, which becomes S_15 in the next round
+%endmacro
+
+;
+; Initialize LFSR registers for a single lane, for ZUC-128
+;
+; From spec, s_i (LFSR) registers need to be loaded as follows:
+;
+; For 0 <= i <= 15, let s_i = k_i || d_i || iv_i,
+; where k_i is the i-th byte of the key, d_i is a 15-bit constant
+; and iv_i is the i-th byte of the IV.
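+; E.g. for i = 0: s_0 = k_0 || 0x44D7 || iv_0, i.e. k_0 in bits 30:23,
+; d_0 = 0x44D7 in bits 22:8 (stored as 0x0044D700 in EK_d64) and iv_0 in bits 7:0.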
+; +%macro INIT_LFSR_128 4 +%define %%KEY %1 ;; [in] Key pointer +%define %%IV %2 ;; [in] IV pointer +%define %%LFSR %3 ;; [out] ZMM register to contain initialized LFSR regs +%define %%ZTMP %4 ;; [clobbered] ZMM temporary register + + vbroadcasti64x2 %%LFSR, [%%KEY] + vbroadcasti64x2 %%ZTMP, [%%IV] + vpshufb %%LFSR, [rel shuf_mask_key] + vpsrld %%LFSR, 1 + vpshufb %%ZTMP, [rel shuf_mask_iv] + vpternlogq %%LFSR, %%ZTMP, [rel EK_d64], 0xFE ; A OR B OR C + +%endmacro + +; +; Initialize LFSR registers for a single lane, for ZUC-256 +; +%macro INIT_LFSR_256 11 +%define %%KEY %1 ;; [in] Key pointer +%define %%IV %2 ;; [in] IV pointer +%define %%LFSR %3 ;; [out] ZMM register to contain initialized LFSR regs +%define %%ZTMP1 %4 ;; [clobbered] ZMM temporary register +%define %%ZTMP2 %5 ;; [clobbered] ZMM temporary register +%define %%ZTMP3 %6 ;; [clobbered] ZMM temporary register +%define %%ZTMP4 %7 ;; [clobbered] ZMM temporary register +%define %%ZTMP5 %8 ;; [clobbered] ZMM temporary register +%define %%SHIFT_MASK %9 ;; [in] Mask register to shift K_31 +%define %%IV_MASK %10 ;; [in] Mask register to read IV (last 10 bytes) +%define %%TAG_SIZE %11 ;; [in] Tag size (0, 4, 8 or 16 bytes) + +%if %%TAG_SIZE == 0 +%define %%CONSTANTS rel EK256_d64 +%elif %%TAG_SIZE == 4 +%define %%CONSTANTS rel EK256_EIA3_4 +%elif %%TAG_SIZE == 8 +%define %%CONSTANTS rel EK256_EIA3_8 +%elif %%TAG_SIZE == 16 +%define %%CONSTANTS rel EK256_EIA3_16 +%endif + vmovdqu8 XWORD(%%ZTMP4){%%IV_MASK}, [%%IV + 16] + ; Zero out first 2 bits of IV bytes 17-24 + vpandq XWORD(%%ZTMP4), [rel iv_mask_low_6] + vshufi32x4 %%ZTMP4, %%ZTMP4, 0 + vbroadcasti64x2 %%ZTMP1, [%%KEY] + vbroadcasti64x2 %%ZTMP2, [%%KEY + 16] + vbroadcasti64x2 %%ZTMP3, [%%IV] + + vpshufb %%ZTMP5, %%ZTMP1, [rel shuf_mask_key256_first_high] + vpshufb %%LFSR, %%ZTMP3, [rel shuf_mask_iv256_first_high] + vporq %%LFSR, %%ZTMP5 + vpsrld %%LFSR, 1 + + vpshufb %%ZTMP5, %%ZTMP2, [rel shuf_mask_key256_second] + vpsrld %%ZTMP5{%%SHIFT_MASK}, 4 + vpandq %%ZTMP5, [rel key_mask_low_4] + + vpshufb %%ZTMP1, [rel shuf_mask_key256_first_low] + vpshufb %%ZTMP3, [rel shuf_mask_iv256_first_low] + vpshufb %%ZTMP4, [rel shuf_mask_iv256_second] + + vpternlogq %%LFSR, %%ZTMP5, %%ZTMP1, 0xFE + vpternlogq %%LFSR, %%ZTMP3, %%ZTMP4, 0xFE + + vporq %%LFSR, [%%CONSTANTS] +%endmacro + +%macro INIT_16_AVX512 8-9 +%define %%KEY %1 ; [in] Array of 16 key pointers +%define %%IV %2 ; [in] Array of 16 IV pointers +%define %%STATE %3 ; [in] State +%define %%LANE_MASK %4 ; [in] Mask register with lanes to update +%define %%TMP %5 ; [clobbered] Temporary GP register +%define %%TMP2 %6 ; [clobbered] Temporary GP register +%define %%KEY_SIZE %7 ; [in] Key size (128 or 256) +%define %%TAG_SIZE %8 ; [in] Tag size (0, 4, 8 or 16 bytes) +%define %%TAGS %9 ; [in] Array of temporary tags + +%define %%TMP r14 +%define %%TMP2 r15 + +%define %%ZTMP1 zmm0 +%define %%ZTMP2 zmm1 +%define %%ZTMP3 zmm2 +%define %%ZTMP4 zmm3 +%define %%ZTMP5 zmm4 +%define %%ZTMP6 zmm5 +%define %%ZTMP7 zmm6 +%define %%ZTMP8 zmm7 +%define %%ZTMP9 zmm8 +%define %%ZTMP10 zmm9 +%define %%ZTMP11 zmm10 +%define %%ZTMP12 zmm11 +%define %%ZTMP13 zmm12 +%define %%ZTMP14 zmm13 +%define %%ZTMP15 zmm14 +%define %%ZTMP16 zmm15 + +%define %%LFSR1 zmm16 +%define %%LFSR2 zmm17 +%define %%LFSR3 zmm18 +%define %%LFSR4 zmm19 +%define %%LFSR5 zmm20 +%define %%LFSR6 zmm21 +%define %%LFSR7 zmm22 +%define %%LFSR8 zmm23 +%define %%LFSR9 zmm24 +%define %%LFSR10 zmm25 +%define %%LFSR11 zmm26 +%define %%LFSR12 zmm27 +%define %%LFSR13 zmm28 +%define %%LFSR14 zmm29 
+%define %%LFSR15 zmm30 +%define %%LFSR16 zmm31 + +%define %%X0 %%ZTMP10 +%define %%X1 %%ZTMP11 +%define %%X2 %%ZTMP12 +%define %%W %%ZTMP13 +%define %%R1 %%ZTMP14 +%define %%R2 %%ZTMP15 +%define %%MASK31 %%ZTMP16 + +%define %%KSTR1 zmm16 +%define %%KSTR2 zmm17 +%define %%KSTR3 zmm18 +%define %%KSTR4 zmm19 + +%define %%BLEND_KMASK k1 ; Mask to blend LFSRs 14&15 +%define %%INIT_LANE_KMASK k2 ; Mask containing lanes to initialize +%define %%SHIFT_KMASK k3 ; Mask to shift 4 bytes only in the 15th dword +%define %%IV_KMASK k4 ; Mask to read 10 bytes of IV + +%define %%TMP_KMASK1 k3 +%define %%TMP_KMASK2 k4 +%define %%TMP_KMASK3 k5 +%define %%TMP_KMASK4 k6 + + kmovw %%INIT_LANE_KMASK, DWORD(%%LANE_MASK) + +%if %%KEY_SIZE == 256 + mov %%TMP, 0x4000 ; Mask to shift 4 bits only in the 15th dword + kmovq %%SHIFT_KMASK, %%TMP + mov %%TMP, 0x3ff ; Mask to read 10 bytes of IV + kmovq %%IV_KMASK, %%TMP +%endif + + ; Set LFSR registers for Packets 1-16 +%assign %%IDX 0 +%assign %%LFSR_IDX 1 +%rep 16 + mov %%TMP, [pKe + 8*%%IDX] ; Load Key N pointer + lea %%TMP2, [pIv + 32*%%IDX] ; Load IV N pointer +%if %%KEY_SIZE == 128 + INIT_LFSR_128 %%TMP, %%TMP2, APPEND(%%LFSR, %%LFSR_IDX), %%ZTMP1 +%else + INIT_LFSR_256 %%TMP, %%TMP2, APPEND(%%LFSR, %%LFSR_IDX), %%ZTMP1, \ + %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ + %%SHIFT_KMASK, %%IV_KMASK, %%TAG_SIZE +%endif +%assign %%IDX (%%IDX + 1) +%assign %%LFSR_IDX (%%LFSR_IDX + 1) +%endrep + + ; Store LFSR registers in memory (reordering first, so all S0 regs + ; are together, then all S1 regs... until S15) + TRANSPOSE16_U32 %%LFSR1, %%LFSR2, %%LFSR3, %%LFSR4, %%LFSR5, %%LFSR6, %%LFSR7, %%LFSR8, \ + %%LFSR9, %%LFSR10, %%LFSR11, %%LFSR12, %%LFSR13, %%LFSR14, %%LFSR15, %%LFSR16, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%ZTMP7, %%ZTMP8, \ + %%ZTMP9, %%ZTMP10, %%ZTMP11, %%ZTMP12, %%ZTMP13, %%ZTMP14 + +%assign %%IDX 0 +%assign %%LFSR_IDX 1 +%rep 16 + vmovdqa32 [pState + 64*%%IDX]{%%INIT_LANE_KMASK}, APPEND(%%LFSR, %%LFSR_IDX) +%assign %%IDX (%%IDX+1) +%assign %%LFSR_IDX (%%LFSR_IDX+1) +%endrep + + ; Load read-only registers + vmovdqa64 %%MASK31, [rel mask31] + mov DWORD(%%TMP), 0xAAAAAAAA + kmovd %%BLEND_KMASK, DWORD(%%TMP) + + ; Zero out R1, R2 + vpxorq %%R1, %%R1 + vpxorq %%R2, %%R2 + + ; Shift LFSR 32-times, update state variables +%assign %%N 0 +%rep 32 + BITS_REORG16 %%STATE, %%N, %%INIT_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, \ + %%ZTMP7, %%ZTMP8, %%ZTMP9, %%BLEND_KMASK, %%X0, %%X1, %%X2 + NONLIN_FUN16 %%STATE, %%INIT_LANE_KMASK, %%X0, %%X1, %%X2, %%R1, %%R2, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%W + vpsrld %%W, 1 ; Shift out LSB of W + + LFSR_UPDT16 %%STATE, %%N, %%INIT_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ + %%ZTMP6, %%MASK31, %%W, init ; W used in LFSR update +%assign %%N (%%N + 1) +%endrep + + ; And once more, initial round from keygen phase = 33 times + BITS_REORG16 %%STATE, 0, %%INIT_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%ZTMP7, \ + %%ZTMP8, %%ZTMP9, %%BLEND_KMASK, %%X0, %%X1, %%X2 + NONLIN_FUN16 %%STATE, %%INIT_LANE_KMASK, %%X0, %%X1, %%X2, %%R1, %%R2, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6 + + LFSR_UPDT16 %%STATE, 0, %%INIT_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ + %%ZTMP6, %%MASK31, %%W, work + + ; Generate extra 4, 8 or 16 bytes of KS for initial tags +%if %%TAG_SIZE == 4 +%define %%NUM_ROUNDS 1 +%elif %%TAG_SIZE == 8 +%define %%NUM_ROUNDS 2 +%elif %%TAG_SIZE == 16 +%define %%NUM_ROUNDS 4 +%else +%define 
%%NUM_ROUNDS 0 +%endif + +%assign %%N 1 +%rep %%NUM_ROUNDS + BITS_REORG16 %%STATE, %%N, %%INIT_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, \ + %%ZTMP7, %%ZTMP8, %%ZTMP9, %%BLEND_KMASK, %%X0, %%X1, %%X2, APPEND(%%KSTR, %%N) + NONLIN_FUN16 %%STATE, %%INIT_LANE_KMASK, %%X0, %%X1, %%X2, %%R1, %%R2, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%W + ; OFS_X3 XOR W + vpxorq APPEND(%%KSTR, %%N), %%W + LFSR_UPDT16 %%STATE, %%N, %%INIT_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ + %%ZTMP6, %%MASK31, %%ZTMP7, work +%assign %%N %%N+1 +%endrep + + ; Update R1, R2 + vmovdqa32 [%%STATE + OFS_R1]{%%INIT_LANE_KMASK}, %%R1 + vmovdqa32 [%%STATE + OFS_R2]{%%INIT_LANE_KMASK}, %%R2 + + ; Transpose (if needed) the keystream generated and store it + ; for each lane as their initial digest +%if %%TAG_SIZE == 4 + vmovdqa32 [%%TAGS]{%%INIT_LANE_KMASK}, %%KSTR1 + REORDER_LFSR %%STATE, 1, %%INIT_LANE_KMASK +%elif %%TAG_SIZE == 8 + mov DWORD(%%TMP), 0xff + kmovd %%TMP_KMASK1, DWORD(%%TMP) + kandd %%TMP_KMASK1, %%TMP_KMASK1, %%INIT_LANE_KMASK ; First 8 lanes + kshiftrd %%TMP_KMASK2, %%INIT_LANE_KMASK, 8 ; Second 8 lanes + vmovdqa64 %%ZTMP1, [rel idx_tags_64_0_7] + vmovdqa64 %%ZTMP2, [rel idx_tags_64_8_15] + vpermi2d %%ZTMP1, %%KSTR1, %%KSTR2 + vpermi2d %%ZTMP2, %%KSTR1, %%KSTR2 + vmovdqa64 [%%TAGS]{%%TMP_KMASK1}, %%ZTMP1 + vmovdqa64 [%%TAGS + 64]{%%TMP_KMASK2}, %%ZTMP2 + REORDER_LFSR %%STATE, 2, %%INIT_LANE_KMASK +%elif %%TAG_SIZE == 16 + lea %%TMP, [rel expand_mask] + kmovd DWORD(%%TMP2), %%INIT_LANE_KMASK + and DWORD(%%TMP2), 0xf + kmovb %%TMP_KMASK1, [%%TMP + %%TMP2] ; First 4 lanes + kmovd DWORD(%%TMP2), %%INIT_LANE_KMASK + shr DWORD(%%TMP2), 4 + and DWORD(%%TMP2), 0xf + kmovb %%TMP_KMASK2, [%%TMP + %%TMP2] ; Second 4 lanes + + kmovd DWORD(%%TMP2), %%INIT_LANE_KMASK + shr DWORD(%%TMP2), 8 + and DWORD(%%TMP2), 0xf + kmovb %%TMP_KMASK3, [%%TMP + %%TMP2] ; Third 4 lanes + kmovd DWORD(%%TMP2), %%INIT_LANE_KMASK + shr DWORD(%%TMP2), 12 + kmovb %%TMP_KMASK4, [%%TMP + %%TMP2] ; Fourth 4 lanes + + TRANSPOSE4_U32 %%KSTR1, %%KSTR2, %%KSTR3, %%KSTR4, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, \ + %%ZTMP5, %%ZTMP6, %%ZTMP7, %%ZTMP8, \ + %%ZTMP9, %%ZTMP10, %%ZTMP11, %%ZTMP12 + vmovdqa64 [%%TAGS]{%%TMP_KMASK1}, %%KSTR1 + vmovdqa64 [%%TAGS + 64]{%%TMP_KMASK2}, %%KSTR2 + vmovdqa64 [%%TAGS + 64*2]{%%TMP_KMASK3}, %%KSTR3 + vmovdqa64 [%%TAGS + 64*3]{%%TMP_KMASK4}, %%KSTR4 + REORDER_LFSR %%STATE, 4, %%INIT_LANE_KMASK +%endif + +%endmacro ; INIT_16_AVX512 + +;; +;; void asm_ZucInitialization_16_avx512(ZucKey16_t *pKeys, ZucIv16_t *pIvs, +;; ZucState16_t *pState, +;; const uint64_t lane_mask) +;; +MKGLOBAL(ZUC128_INIT,function,internal) +ZUC128_INIT: +%define pKe arg1 +%define pIv arg2 +%define pState arg3 +%define lane_mask arg4 + + endbranch64 + + FUNC_SAVE + + INIT_16_AVX512 pKe, pIv, pState, lane_mask, r12, r13, 128, 0 + + FUNC_RESTORE + + ret + +;; +;; void asm_Zuc256Initialization_16_avx512(ZucKey16_t *pKeys, ZucIv16_t *pIvs, +;; ZucState16_t *pState, +;; const uint64_t lane_mask, +;; const uint32_t tag_sz, +;; void *tags) +;; +MKGLOBAL(ZUC256_INIT,function,internal) +ZUC256_INIT: +%define pKe arg1 +%define pIv arg2 +%define pState arg3 +%define lane_mask arg4 +%define tag_sz r10 +%define tags r11 + + endbranch64 + + or tag_sz, tag_sz + jz init_for_cipher + + cmp tag_sz, 8 + je init_for_auth_tag_8B + jb init_for_auth_tag_4B + +init_for_auth_tag_16B: + FUNC_SAVE + + INIT_16_AVX512 pKe, pIv, pState, lane_mask, r12, r13, 256, 16, tags + + FUNC_RESTORE + + ret + +init_for_cipher: + 
FUNC_SAVE + + INIT_16_AVX512 pKe, pIv, pState, lane_mask, r12, r13, 256, 0, tags + + FUNC_RESTORE + + ret + +init_for_auth_tag_4B: + FUNC_SAVE + + INIT_16_AVX512 pKe, pIv, pState, lane_mask, r12, r13, 256, 4, tags + + FUNC_RESTORE + + ret + +init_for_auth_tag_8B: + FUNC_SAVE + + INIT_16_AVX512 pKe, pIv, pState, lane_mask, r12, r13, 256, 8, tags + + FUNC_RESTORE + + ret + +; +; Generate N*4 bytes of keystream +; for 16 buffers (where N is number of rounds) +; +%macro KEYGEN_16_AVX512 3-4 +%define %%NUM_ROUNDS %1 ; [in] Number of 4-byte rounds +%define %%KEY_OFF %2 ; [in] Offset to start writing Keystream +%define %%SKIP_ROUNDS %3 ; [constant] Number of rounds to skip (1, 2 or 4) +%define %%LANE_MASK %4 ; [in] Lane mask with lanes to generate all keystream words + +%define pState arg1 +%define pKS arg2 + +%define %%TMP1 r10 +%define %%TMP2 r12 +%define %%TMP3 r13 + +%define %%ZTMP1 zmm0 +%define %%ZTMP2 zmm1 +%define %%ZTMP3 zmm2 +%define %%ZTMP4 zmm3 +%define %%ZTMP5 zmm4 +%define %%ZTMP6 zmm5 +%define %%ZTMP7 zmm6 +%define %%ZTMP8 zmm7 +%define %%ZTMP9 zmm8 +%define %%ZTMP10 zmm9 +%define %%ZTMP11 zmm10 +%define %%ZTMP12 zmm11 +%define %%ZTMP13 zmm12 +%define %%ZTMP14 zmm13 +%define %%ZTMP15 zmm14 +%define %%ZTMP16 zmm15 + +%define %%KSTR1 zmm16 +%define %%KSTR2 zmm17 +%define %%KSTR3 zmm18 +%define %%KSTR4 zmm19 +%define %%KSTR5 zmm20 +%define %%KSTR6 zmm21 +%define %%KSTR7 zmm22 +%define %%KSTR8 zmm23 +%define %%KSTR9 zmm24 +%define %%KSTR10 zmm25 +%define %%KSTR11 zmm26 +%define %%KSTR12 zmm27 +%define %%KSTR13 zmm28 +%define %%KSTR14 zmm29 +%define %%KSTR15 zmm30 +%define %%KSTR16 zmm31 + +%define %%X0 %%ZTMP10 +%define %%X1 %%ZTMP11 +%define %%X2 %%ZTMP12 +%define %%W %%ZTMP13 +%define %%R1 %%ZTMP14 +%define %%R2 %%ZTMP15 +%define %%MASK31 %%ZTMP16 + +%define %%BLEND_KMASK k1 ; Mask to blend LFSRs 14&15 +%define %%FULL_LANE_KMASK k2 ; Mask with lanes to generate all keystream words +%define %%ALL_KMASK k3 ; Mask with all 1's +%define %%SKIP_LANE_KMASK k4 ; Mask with lanes to skip some keystream words +%define %%TMP_KMASK1 k5 +%define %%TMP_KMASK2 k6 + + ; Load read-only registers + vmovdqa64 %%MASK31, [rel mask31] + mov DWORD(%%TMP1), 0xAAAAAAAA + kmovd %%BLEND_KMASK, DWORD(%%TMP1) + +%if (%0 == 4) + kmovd %%FULL_LANE_KMASK, DWORD(%%LANE_MASK) + knotd %%SKIP_LANE_KMASK, %%FULL_LANE_KMASK + mov DWORD(%%TMP1), 0x0000FFFF + kmovd %%ALL_KMASK, DWORD(%%TMP1) +%else + mov DWORD(%%TMP1), 0x0000FFFF + kmovd %%FULL_LANE_KMASK, DWORD(%%TMP1) + kmovd %%ALL_KMASK, %%FULL_LANE_KMASK +%endif + + ; Read R1/R2 + vmovdqa32 %%R1, [pState + OFS_R1] + vmovdqa32 %%R2, [pState + OFS_R2] + + ; Store all 4 bytes of keystream in a single 64-byte buffer +%if (%%NUM_ROUNDS <= %%SKIP_ROUNDS) + BITS_REORG16 pState, 1, %%FULL_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, \ + %%ZTMP7, %%ZTMP8, %%ZTMP9, %%BLEND_KMASK, %%X0, %%X1, %%X2, %%KSTR1 + NONLIN_FUN16 pState, %%FULL_LANE_KMASK, %%X0, %%X1, %%X2, %%R1, %%R2, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%W + ; OFS_X3 XOR W + vpxorq %%KSTR1, %%W + LFSR_UPDT16 pState, 1, %%FULL_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ + %%ZTMP6, %%MASK31, %%ZTMP7, work + vmovdqa32 [pState + OFS_R1]{%%FULL_LANE_KMASK}, %%R1 + vmovdqa32 [pState + OFS_R2]{%%FULL_LANE_KMASK}, %%R2 +%else ;; %%NUM_ROUNDS > %%SKIP_ROUNDS + ; Generate N*4B of keystream in N rounds + ; Generate first bytes of KS for all lanes +%assign %%N 1 +%assign %%IDX 1 +%rep (%%NUM_ROUNDS-%%SKIP_ROUNDS) + BITS_REORG16 pState, %%N, %%ALL_KMASK, %%ZTMP1, %%ZTMP2, 
%%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, \ + %%ZTMP7, %%ZTMP8, %%ZTMP9, %%BLEND_KMASK, %%X0, %%X1, %%X2, APPEND(%%KSTR, %%IDX) + NONLIN_FUN16 pState, %%ALL_KMASK, %%X0, %%X1, %%X2, %%R1, %%R2, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%W + ; OFS_X3 XOR W + vpxorq APPEND(%%KSTR, %%IDX), %%W + LFSR_UPDT16 pState, %%N, %%ALL_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ + %%ZTMP6, %%MASK31, %%ZTMP7, work +%assign %%N %%N+1 +%assign %%IDX (%%IDX + 1) +%endrep +%if (%%NUM_ROUNDS > %%SKIP_ROUNDS) + vmovdqa32 [pState + OFS_R1]{%%ALL_KMASK}, %%R1 + vmovdqa32 [pState + OFS_R2]{%%ALL_KMASK}, %%R2 +%endif + + ; Generate rest of the KS bytes (last 8 bytes) for selected lanes +%rep %%SKIP_ROUNDS + BITS_REORG16 pState, %%N, %%FULL_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, \ + %%ZTMP7, %%ZTMP8, %%ZTMP9, %%BLEND_KMASK, %%X0, %%X1, %%X2, APPEND(%%KSTR, %%IDX) + NONLIN_FUN16 pState, %%FULL_LANE_KMASK, %%X0, %%X1, %%X2, %%R1, %%R2, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%W + ; OFS_X3 XOR W + vpxorq APPEND(%%KSTR, %%IDX), %%W + LFSR_UPDT16 pState, %%N, %%FULL_LANE_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ + %%ZTMP6, %%MASK31, %%ZTMP7, work +%assign %%N %%N+1 +%assign %%IDX (%%IDX + 1) +%endrep + vmovdqa32 [pState + OFS_R1]{%%FULL_LANE_KMASK}, %%R1 + vmovdqa32 [pState + OFS_R2]{%%FULL_LANE_KMASK}, %%R2 +%endif ;; (%%NUM_ROUNDS == 1) + + ; Perform a 32-bit 16x16 transpose to have up to 64 bytes + ; (NUM_ROUNDS * 4B) of each lane in a different register + TRANSPOSE16_U32_INTERLEAVED %%KSTR1, %%KSTR2, %%KSTR3, %%KSTR4, %%KSTR5, %%KSTR6, %%KSTR7, %%KSTR8, \ + %%KSTR9, %%KSTR10, %%KSTR11, %%KSTR12, %%KSTR13, %%KSTR14, %%KSTR15, %%KSTR16, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%ZTMP7, %%ZTMP8, \ + %%ZTMP9, %%ZTMP10 + +%if (%0 == 4) + lea %%TMP1, [rel alignr_mask] +%if %%SKIP_ROUNDS == 1 + lea %%TMP2, [rel mov_4B_mask] +%elif %%SKIP_ROUNDS == 2 + lea %%TMP2, [rel mov_8B_mask] +%else ; %%SKIP_ROUNDS == 4 + lea %%TMP2, [rel mov_16B_mask] +%endif + STORE_KSTR16 pKS, %%ZTMP7, %%ZTMP5, %%KSTR13, %%KSTR1, %%ZTMP8, %%ZTMP6, %%KSTR14, %%KSTR2, \ + %%ZTMP3, %%ZTMP1, %%KSTR15, %%KSTR3, %%ZTMP4, %%ZTMP2, %%KSTR16, %%KSTR4, %%KEY_OFF, \ + %%LANE_MASK, %%TMP1, %%TMP2, %%TMP3, %%TMP_KMASK1, %%TMP_KMASK2, %%SKIP_ROUNDS +%else + STORE_KSTR16 pKS, %%ZTMP7, %%ZTMP5, %%KSTR13, %%KSTR1, %%ZTMP8, %%ZTMP6, %%KSTR14, %%KSTR2, \ + %%ZTMP3, %%ZTMP1, %%KSTR15, %%KSTR3, %%ZTMP4, %%ZTMP2, %%KSTR16, %%KSTR4, %%KEY_OFF +%endif + + ; Reorder LFSR registers +%if (%0 == 4) + REORDER_LFSR pState, %%NUM_ROUNDS, %%FULL_LANE_KMASK +%if (%%NUM_ROUNDS >= %%SKIP_ROUNDS) + REORDER_LFSR pState, (%%NUM_ROUNDS - %%SKIP_ROUNDS), %%SKIP_LANE_KMASK ; 1/2/4 less rounds for "old" buffers +%endif +%else + REORDER_LFSR pState, %%NUM_ROUNDS, %%FULL_LANE_KMASK +%endif + +%endmacro ; KEYGEN_16_AVX512 + +;; +;; Reverse bits of each byte of a XMM register +;; +%macro REVERSE_BITS 7 +%define %%DATA_IN %1 ; [in] Input data +%define %%DATA_OUT %2 ; [out] Output data +%define %%TABLE_L %3 ; [in] Table to shuffle low nibbles +%define %%TABLE_H %4 ; [in] Table to shuffle high nibbles +%define %%REV_AND_TABLE %5 ; [in] Mask to keep low nibble of each byte +%define %%XTMP1 %6 ; [clobbered] Temporary XMM register +%define %%XTMP2 %7 ; [clobbered] Temporary XMM register + + vpandq %%XTMP1, %%DATA_IN, %%REV_AND_TABLE + + vpandnq %%XTMP2, %%REV_AND_TABLE, %%DATA_IN + vpsrld %%XTMP2, 4 + + vpshufb %%DATA_OUT, %%TABLE_H, %%XTMP1 ; bit reverse low nibbles (use high table) + vpshufb %%XTMP2, 
%%TABLE_L, %%XTMP2 ; bit reverse high nibbles (use low table) + + vporq %%DATA_OUT, %%XTMP2 +%endmacro + +;; +;; Set up data and KS bytes and use PCLMUL to digest data, +;; then the result gets XOR'ed with the previous digest. +;; This macro can be used with XMM (for 1 buffer), +;; YMM (for 2 buffers) or ZMM registers (for 4 buffers). +;; To use it with YMM and ZMM registers, VPCMULQDQ must be +;; supported. +;; +%macro DIGEST_DATA 14-16 +%define %%DATA %1 ; [in] Input data (16 bytes) per buffer +%define %%KS_L %2 ; [in/clobbered] Lower 16 bytes of KS per buffer +%define %%KS_H %3 ; [in/clobbered] Higher 16 bytes of KS per buffer +%define %%KS_M1 %4 ; [clobbered] Temporary XMM/YMM/ZMM register +%define %%KS_M2 %5 ; [cloberred] Temporary XMM/YMM/ZMM register +%define %%IN_OUT %6 ; [in/out] Accumulated digest +%define %%KMASK %7 ; [in] Shuffle mask register +%define %%TMP1 %8 ; [clobbered] Temporary XMM/YMM/ZMM register +%define %%TMP2 %9 ; [clobbered] Temporary XMM/YMM/ZMM register +%define %%TMP3 %10 ; [clobbered] Temporary XMM/YMM/ZMM register +%define %%TMP4 %11 ; [clobbered] Temporary XMM/YMM/ZMM register +%define %%TMP5 %12 ; [clobbered] Temporary XMM/YMM/ZMM register +%define %%TMP6 %13 ; [clobbered] Temporary XMM/YMM/ZMM register +%define %%TAG_SIZE %14 ; [constant] Tag size (4, 8 or 16 bytes) +%define %%LANE_GROUP %15 ; [constant] Lane group (0-3) +%define %%IDX %16 ; [constant] Index inside lane group (0-3) + +%if %0 == 15 +%define %%IDX 0 +%endif + ;; Set up KS + ;; + ;; KS_L contains bytes 15:0 of KS (for 1, 2 or 4 buffers) + ;; KS_H contains bytes 31:16 of KS (for 1, 2 or 4 buffers) + ;; TMP1 to contain bytes in the following order [7:4 11:8 3:0 7:4] + ;; TMP2 to contain bytes in the following order [15:12 19:16 11:8 15:12] + vpalignr %%TMP1, %%KS_H, %%KS_L, 8 +%if %%TAG_SIZE != 4 ;; TAG_SIZE == 8 or 16 + vpshufd %%KS_M2, %%KS_H, 0x61 ; KS bits [191:160 159:128 223:192 191:160] +%endif +%if %%TAG_SIZE == 16 + vpshufd %%KS_H, %%KS_H, 0xBB ; KS bits [255:224 223:192 255:224 223:192] +%endif + vpshufd %%KS_L, %%KS_L, 0x61 + vpshufd %%KS_M1, %%TMP1, 0x61 + + ;; Set up DATA + ;; + ;; DATA contains 16 bytes of input data (for 1, 2 or 4 buffers) + ;; TMP3 to contain bytes in the following order [4*0's 7:4 4*0's 3:0] + ;; TMP3 to contain bytes in the following order [4*0's 15:12 4*0's 11:8] + vpshufd %%TMP1{%%KMASK}{z}, %%DATA, 0x10 + vpshufd %%TMP2{%%KMASK}{z}, %%DATA, 0x32 + + ;; PCMUL the KS's with the DATA + ;; XOR the results from 4 32-bit words together + vpclmulqdq %%TMP3, %%TMP1, %%KS_L, 0x00 + vpclmulqdq %%TMP4, %%TMP1, %%KS_L, 0x11 + vpclmulqdq %%TMP5, %%TMP2, %%KS_M1, 0x00 + vpclmulqdq %%TMP6, %%TMP2, %%KS_M1, 0x11 + vpternlogq %%TMP5, %%TMP3, %%TMP4, 0x96 +%if %%TAG_SIZE == 4 + vpternlogq %%IN_OUT, %%TMP5, %%TMP6, 0x96 +%endif ; %%TAG_SIZE == 4 +%if %%TAG_SIZE >= 8 + ; Move previous result to low 32 bits and XOR with previous digest +%if %0 > 14 + vpternlogq %%TMP5, %%TMP6, [rsp + 256*%%LANE_GROUP + %%IDX*16], 0x96 + vmovdqa64 [rsp + 256*%%LANE_GROUP + %%IDX*16], %%TMP5 +%else + vpxorq %%TMP5, %%TMP5, %%TMP6 + vpshufb %%TMP5, %%TMP5, [rel shuf_mask_0_0_0_dw1] + vpxorq %%IN_OUT, %%IN_OUT, %%TMP5 +%endif + + vpclmulqdq %%TMP3, %%TMP1, %%KS_L, 0x10 + vpclmulqdq %%TMP4, %%TMP1, %%KS_M1, 0x01 + vpclmulqdq %%TMP5, %%TMP2, %%KS_M1, 0x10 + vpclmulqdq %%TMP6, %%TMP2, %%KS_M2, 0x01 + + ; XOR all the products and keep only 32-63 bits + vpternlogq %%TMP5, %%TMP3, %%TMP4, 0x96 +%if %0 > 14 + vpternlogq %%TMP5, %%TMP6, [rsp + 256*%%LANE_GROUP + 64 + %%IDX*16], 0x96 + vmovdqa64 [rsp + 
256*%%LANE_GROUP + 64 + %%IDX*16], %%TMP5 +%else + vpxorq %%TMP5, %%TMP5, %%TMP6 + vpandq %%TMP5, %%TMP5, [rel bits_32_63] + + ; XOR with bits 32-63 of previous digest + vpxorq %%IN_OUT, %%TMP5 +%endif +%if %%TAG_SIZE == 16 + ; Prepare data and calculate bits 95-64 of tag + vpclmulqdq %%TMP3, %%TMP1, %%KS_M1, 0x00 + vpclmulqdq %%TMP4, %%TMP1, %%KS_M1, 0x11 + vpclmulqdq %%TMP5, %%TMP2, %%KS_M2, 0x00 + vpclmulqdq %%TMP6, %%TMP2, %%KS_M2, 0x11 + + ; XOR all the products and move bits 63-32 to bits 95-64 + vpternlogq %%TMP5, %%TMP3, %%TMP4, 0x96 +%if %0 > 14 + vpternlogq %%TMP5, %%TMP6, [rsp + 256*%%LANE_GROUP + 64*2 + %%IDX*16], 0x96 + vmovdqa64 [rsp + 256*%%LANE_GROUP + 64*2 + %%IDX*16], %%TMP5 +%else + vpxorq %%TMP5, %%TMP5, %%TMP6 + vpshufb %%TMP5, %%TMP5, [rel shuf_mask_0_dw1_0_0] + + ; XOR with previous bits 64-95 of previous digest + vpxorq %%IN_OUT, %%TMP5 +%endif + + ; Prepare data and calculate bits 127-96 of tag + vpclmulqdq %%TMP3, %%TMP1, %%KS_M1, 0x10 + vpclmulqdq %%TMP4, %%TMP1, %%KS_M2, 0x01 + vpclmulqdq %%TMP5, %%TMP2, %%KS_M2, 0x10 + vpclmulqdq %%TMP6, %%TMP2, %%KS_H, 0x01 + + ; XOR all the products and move bits 63-32 to bits 127-96 + vpternlogq %%TMP5, %%TMP3, %%TMP4, 0x96 +%if %0 > 14 + vpternlogq %%TMP5, %%TMP6, [rsp + 256*%%LANE_GROUP + 64*3 + %%IDX*16], 0x96 + vmovdqa64 [rsp + 256*%%LANE_GROUP + 64*3 + %%IDX*16], %%TMP5 +%else + vpxorq %%TMP5, %%TMP5, %%TMP6 + vpshufb %%TMP5, %%TMP5, [rel shuf_mask_dw1_0_0_0] + + ; XOR with lower 96 bits, to construct 128 bits of tag + vpxorq %%IN_OUT, %%TMP5 +%endif + +%endif ; %%TAG_SIZE == 16 +%endif ; %%TAG_SIZE >= 8 +%endmacro + +%macro UPDATE_TAGS 13-14 +%define %%T %1 ; [in] Pointer to digests +%define %%TAG_SIZE %2 ; [constant] Tag size (4, 8 or 16 bytes) +%define %%ORDER_TAGS %3 ; [constant] Order of tags (order_0_4_8_12 or order_0_1_2_3) +%define %%TMP %4 ; [clobbered] Temporary GP register +%define %%PERM_DIGEST_KMASK1 %5 ; [clobbered] Permutation mask for digests +%define %%PERM_DIGEST_KMASK2 %6 ; [clobbered] Permulation mask for digests +%define %%DIGEST_0 %7 ; [in/clobbered] Digests for lanes 0,4,8,12 or 0,1,2,3 +%define %%DIGEST_1 %8 ; [in] Digests for lanes 1,5,9,13 or 4,5,6,7 +%define %%DIGEST_2 %9 ; [in/clobbered] Digests for lanes 2,6,10,14 or 8,9,10,11 +%define %%DIGEST_3 %10 ; [in] Digests for lanes 3,7,11,15 or 12,13,14,15 +%define %%ZTMP1 %11 ; [clobbered] Temporary ZMM register +%define %%ZTMP2 %12 ; [clobbered] Temporary ZMM register +%define %%ZTMP3 %13 ; [clobbered] Temporary ZMM register +%define %%ZTMP4 %14 ; [clobbered] Temporary ZMM register + +%if %%TAG_SIZE == 4 +%ifidn %%ORDER_TAGS, order_0_4_8_12 + mov DWORD(%%TMP), 0x3333 + kmovd %%PERM_DIGEST_KMASK1, DWORD(%%TMP) + kshiftld %%PERM_DIGEST_KMASK2, %%PERM_DIGEST_KMASK1, 2 + vmovdqa64 %%ZTMP2, [rel shuf_mask_4B_tags_0_4_8_12] + vmovdqa64 %%ZTMP3, [rel shuf_mask_4B_tags_0_4_8_12 + 64] +%else + mov DWORD(%%TMP), 0x00FF + kmovd %%PERM_DIGEST_KMASK1, DWORD(%%TMP) + kshiftld %%PERM_DIGEST_KMASK2, %%PERM_DIGEST_KMASK1, 8 + vmovdqa64 %%ZTMP2, [rel shuf_mask_4B_tags_0_1_2_3] + vmovdqa64 %%ZTMP3, [rel shuf_mask_4B_tags_0_1_2_3 + 64] +%endif + ; Get result tags for 16 buffers in different position in each lane + ; and blend these tags into an ZMM register. + ; Then, XOR the results with the previous tags and write out the result. 
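;; For reference, a hedged scalar sketch (C-style, inside comments; not part
;; of the library API) of what this 4-byte tag update achieves. Viewing
;; DIGEST_i as d[i][], where d[i][j] is the 32-bit digest of lane i + 4*j
;; for order_0_4_8_12 (or of lane 4*i + j for order_0_1_2_3):
;;
;;   for (int lane = 0; lane < 16; lane++)
;;       T[lane] ^= d[lane & 3][lane >> 2];   /* order_0_4_8_12          */
;;    /* T[lane] ^= d[lane >> 2][lane & 3];      order_0_1_2_3 variant   */
;;
;; The two vpermt2d gathers below perform the re-ordering and the
;; vpternlogq (imm 0x96) folds in the previous tags as a three-way XOR.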
+ vpermt2d %%DIGEST_0{%%PERM_DIGEST_KMASK1}{z}, %%ZTMP2, %%DIGEST_1 + vpermt2d %%DIGEST_2{%%PERM_DIGEST_KMASK2}{z}, %%ZTMP3, %%DIGEST_3 + vpternlogq %%DIGEST_0, %%DIGEST_2, [%%T], 0x96 ; A XOR B XOR C + vmovdqu64 [%%T], %%DIGEST_0 + +%elif %%TAG_SIZE == 8 +%ifidn %%ORDER_TAGS, order_0_4_8_12 + mov DWORD(%%TMP), 0x33 + kmovd %%PERM_DIGEST_KMASK1, DWORD(%%TMP) + kshiftld %%PERM_DIGEST_KMASK2, %%PERM_DIGEST_KMASK1, 2 + + vmovdqa64 %%ZTMP1, [rel shuf_mask_8B_tags_0_1_4_5] + vmovdqa64 %%ZTMP2, [rel shuf_mask_8B_tags_2_3_6_7] + vmovdqa64 %%ZTMP3, [rel shuf_mask_8B_tags_8_9_12_13] + vmovdqa64 %%ZTMP4, [rel shuf_mask_8B_tags_10_11_14_15] + + ; Get result tags for 16 buffers in different positions in each lane + ; and blend these tags into two ZMM registers + ; Then, XOR the results with the previous tags and write out the result. + + vpermi2q %%ZTMP1{%%PERM_DIGEST_KMASK1}{z}, %%DIGEST_0, %%DIGEST_1 + vpermi2q %%ZTMP2{%%PERM_DIGEST_KMASK2}{z}, %%DIGEST_2, %%DIGEST_3 + vpermi2q %%ZTMP3{%%PERM_DIGEST_KMASK1}{z}, %%DIGEST_0, %%DIGEST_1 + vpermi2q %%ZTMP4{%%PERM_DIGEST_KMASK2}{z}, %%DIGEST_2, %%DIGEST_3 + + vpternlogq %%ZTMP1, %%ZTMP2, [%%T], 0x96 ; A XOR B XOR C + vpternlogq %%ZTMP3, %%ZTMP4, [%%T + 64], 0x96 ; A XOR B XOR C + +%else ; %%ORDER_TAGS == order_0_1_2_3 + vmovdqa64 %%ZTMP3, [rel shuf_mask_8B_tags] + ; Get result tags for 16 buffers in different position in each lane + ; and blend these tags into an ZMM register. + ; Then, XOR the results with the previous tags and write out the result. + vpermt2q %%DIGEST_0, %%ZTMP3, %%DIGEST_1 + vpermt2q %%DIGEST_2, %%ZTMP3, %%DIGEST_3 + vpxorq %%ZTMP1, %%DIGEST_0, [%%T] + vpxorq %%ZTMP3, %%DIGEST_2, [%%T + 64] +%endif + vmovdqu64 [%%T], %%ZTMP1 + vmovdqu64 [%%T + 64], %%ZTMP3 +%else ;; %%TAG_SIZE == 16 +%ifidn %%ORDER_TAGS, order_0_4_8_12 + ; Get result tags for 16 buffers in different positions in each lane + ; from 0,4,8,12 to 0,1,2,3 + ; Then, XOR the results with the previous tags and write out the result. 
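;; The four ZMM registers can be viewed as a 4x4 matrix of 128-bit tags,
;; where row i holds the tags of lanes i, i+4, i+8, i+12. The re-ordering
;; below is then a plain 128-bit transpose; hedged C-style sketch of the
;; intent (t[][] is a hypothetical flat view, not an actual structure):
;;
;;   /* t[i][j] = 128-bit tag of lane i + 4*j */
;;   for (int i = 0; i < 4; i++)
;;       for (int j = 0; j < 4; j++)
;;           out[j][i] = t[i][j];   /* row j now holds lanes 4*j .. 4*j+3 */
;;
;; after which each row lines up with 64 bytes of previous tags in T.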
+ + TRANSPOSE4_U128_INPLACE %%DIGEST_0, %%DIGEST_1, %%DIGEST_2, %%DIGEST_3, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4 + +%endif + + ; XOR with previous tags and store + vpxorq %%DIGEST_0, [%%T] + vpxorq %%DIGEST_1, [%%T + 64] + vpxorq %%DIGEST_2, [%%T + 64*2] + vpxorq %%DIGEST_3, [%%T + 64*3] + vmovdqa64 [%%T], %%DIGEST_0 + vmovdqa64 [%%T + 64], %%DIGEST_1 + vmovdqa64 [%%T + 64*2], %%DIGEST_2 + vmovdqa64 [%%T + 64*3], %%DIGEST_3 +%endif ; %%TAG_SIZE +%endmacro +; +; Generate 64 bytes of keystream +; for 16 buffers and authenticate 64 bytes of data +; +%macro ZUC_EIA3_16_64B_AVX512 7 +%define %%STATE %1 ; [in] ZUC state +%define %%KS %2 ; [in] Pointer to keystream (128x16 bytes) +%define %%T %3 ; [in] Pointer to digests +%define %%DATA %4 ; [in] Pointer to array of pointers to data buffers +%define %%LEN %5 ; [in] Pointer to array of remaining length to digest +%define %%NROUNDS %6 ; [in/clobbered] Number of rounds of 64 bytes of data to digest +%define %%TAG_SIZE %7 ; [in] Tag size (4 or 8 bytes) + +%define %%TMP r12 +%define %%DATA_ADDR0 rbx +%define %%DATA_ADDR1 r12 +%define %%DATA_ADDR2 r13 +%define %%DATA_ADDR3 r14 +%define %%OFFSET r15 + +%define %%DIGEST_0 zmm28 +%define %%DIGEST_1 zmm29 +%define %%DIGEST_2 zmm30 +%define %%DIGEST_3 zmm31 + +%define %%ZTMP1 zmm1 +%define %%ZTMP2 zmm2 +%define %%ZTMP3 zmm3 +%define %%ZTMP4 zmm4 +%define %%ZTMP5 zmm5 +%define %%ZTMP6 zmm6 +%define %%ZTMP7 zmm7 +%define %%ZTMP8 zmm8 +%define %%ZTMP9 zmm9 +%define %%ZTMP10 zmm0 + +%define %%ZKS_L %%ZTMP9 +%define %%ZKS_H zmm21 + +%define %%XTMP1 xmm1 +%define %%XTMP2 xmm2 +%define %%XTMP3 xmm3 +%define %%XTMP4 xmm4 +%define %%XTMP5 xmm5 +%define %%XTMP6 xmm6 +%define %%XTMP7 xmm7 +%define %%XTMP8 xmm8 +%define %%XTMP9 xmm9 +%define %%XTMP10 xmm0 +%define %%KS_L %%XTMP9 +%define %%KS_H xmm15 +%define %%XDIGEST_0 xmm13 +%define %%XDIGEST_1 xmm14 +%define %%XDIGEST_2 xmm19 +%define %%XDIGEST_3 xmm20 +%define %%Z_TEMP_DIGEST zmm21 +%define %%REV_TABLE_L xmm16 +%define %%REV_TABLE_H xmm17 +%define %%REV_AND_TABLE xmm18 + +; Defines used in KEYGEN +%define %%MASK31 zmm0 + +%define %%X0 zmm10 +%define %%X1 zmm11 +%define %%X2 zmm12 +%define %%R1 zmm22 +%define %%R2 zmm23 + +%define %%KS_0 zmm24 +%define %%KS_1 zmm25 +%define %%KS_2 zmm26 +%define %%KS_3 zmm27 + +%define %%BLEND_KMASK k1 ; Mask to blend LFSRs 14&15 +%define %%ALL_KMASK k2 ; Mask with all 1's +%define %%SHUF_DATA_KMASK k3 ; Mask to shuffle data +%define %%TMP_KMASK1 k4 +%define %%TMP_KMASK2 k5 + +%if %%TAG_SIZE != 4 + mov %%TMP, rsp + ; Reserve stack space to store temporary digest products + sub rsp, STACK_SPACE + and rsp, ~63 + mov [rsp + _RSP], %%TMP + + vpxorq %%ZTMP1, %%ZTMP1 +%assign %%I 0 +%rep 16 + vmovdqa64 [rsp + 64*%%I], %%ZTMP1 +%assign %%I (%%I + 1) +%endrep +%endif + + xor %%OFFSET, %%OFFSET + + mov DWORD(%%TMP), 0xAAAAAAAA + kmovd %%BLEND_KMASK, DWORD(%%TMP) + + mov DWORD(%%TMP), 0x0000FFFF + kmovd %%ALL_KMASK, DWORD(%%TMP) + + mov DWORD(%%TMP), 0x55555555 + kmovd %%SHUF_DATA_KMASK, DWORD(%%TMP) + +%if %%TAG_SIZE == 4 + vpxorq %%DIGEST_0, %%DIGEST_0 + vpxorq %%DIGEST_1, %%DIGEST_1 + vpxorq %%DIGEST_2, %%DIGEST_2 + vpxorq %%DIGEST_3, %%DIGEST_3 +%endif + +%if USE_GFNI_VAES_VPCLMUL == 0 + vmovdqa64 %%REV_TABLE_L, [rel bit_reverse_table_l] + vmovdqa64 %%REV_TABLE_H, [rel bit_reverse_table_h] + vmovdqa64 %%REV_AND_TABLE, [rel bit_reverse_and_table] +%endif + + ; Read R1/R2 + vmovdqa32 %%R1, [%%STATE + OFS_R1] + vmovdqa32 %%R2, [%%STATE + OFS_R2] + + ;; + ;; Generate keystream and digest 64 bytes on each iteration + ;; +%%_loop: + ;; 
Generate 64B of keystream in 16 (4x4) rounds + ;; N goes from 1 to 16, within two nested reps of 4 iterations + ;; The outer "rep" loop iterates through 4 groups of lanes (4 buffers each), + ;; the inner "rep" loop iterates through the data for each group: + ;; each iteration digests 16 bytes of data (in case of having VPCLMUL + ;; data from the 4 buffers is digested in one go (using ZMM registers), otherwise, + ;; data is digested in 4 iterations (using XMM registers) +%assign %%N 1 +%assign %%LANE_GROUP 0 +%rep 4 + mov %%DATA_ADDR0, [%%DATA + %%LANE_GROUP*8 + 0*32] + mov %%DATA_ADDR1, [%%DATA + %%LANE_GROUP*8 + 1*32] + mov %%DATA_ADDR2, [%%DATA + %%LANE_GROUP*8 + 2*32] + mov %%DATA_ADDR3, [%%DATA + %%LANE_GROUP*8 + 3*32] + +%assign %%idx 0 +%rep 4 + ; Load read-only registers + vmovdqa64 %%MASK31, [rel mask31] + + BITS_REORG16 %%STATE, %%N, %%ALL_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, \ + %%ZTMP7, %%ZTMP8, %%ZTMP9, %%BLEND_KMASK, %%X0, %%X1, %%X2, APPEND(%%KS_, %%idx) + NONLIN_FUN16 %%STATE, %%ALL_KMASK, %%X0, %%X1, %%X2, %%R1, %%R2, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, %%ZTMP7 + ; OFS_X3 XOR W (%%ZTMP7) + vpxorq APPEND(%%KS_, %%idx), %%ZTMP7 + LFSR_UPDT16 %%STATE, %%N, %%ALL_KMASK, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ + %%ZTMP6, %%MASK31, %%ZTMP7, work + + ;; Transpose and store KS every 16 bytes +%if %%idx == 3 + TRANSPOSE4_U32_INTERLEAVED %%KS_0, %%KS_1, %%KS_2, %%KS_3, %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4 + + STORE_KSTR4 %%KS, %%KS_0, %%KS_1, %%KS_2, %%KS_3, 64, %%LANE_GROUP +%endif + + ;; Digest next 16 bytes of data for 4 buffers +%if USE_GFNI_VAES_VPCLMUL == 1 + ;; If VPCMUL is available, read chunks of 16x4 bytes of data + ;; and digest them with 24x4 bytes of KS, then XOR their digest + ;; with previous digest (with DIGEST_DATA) + + ; Read 4 blocks of 16 bytes of data and put them in a register + vmovdqu64 %%XTMP1, [%%DATA_ADDR0 + 16*%%idx + %%OFFSET] + vinserti32x4 %%ZTMP1, [%%DATA_ADDR1 + 16*%%idx + %%OFFSET], 1 + vinserti32x4 %%ZTMP1, [%%DATA_ADDR2 + 16*%%idx + %%OFFSET], 2 + vinserti32x4 %%ZTMP1, [%%DATA_ADDR3 + 16*%%idx + %%OFFSET], 3 + + ; Read 8 blocks of 16 bytes of KS + vmovdqa64 %%ZKS_L, [GET_KS(%%KS, %%LANE_GROUP, %%idx, 0)] + vmovdqa64 %%ZKS_H, [GET_KS(%%KS, %%LANE_GROUP, (%%idx + 1), 0)] + + ; Reverse bits of next 16 bytes from all 4 buffers + vgf2p8affineqb %%ZTMP7, %%ZTMP1, [rel bit_reverse_table], 0x00 + + ; Digest 16 bytes of data with 24 bytes of KS, for 4 buffers + DIGEST_DATA %%ZTMP7, %%ZKS_L, %%ZKS_H, %%ZTMP8, %%ZTMP10, \ + APPEND(%%DIGEST_, %%LANE_GROUP), %%SHUF_DATA_KMASK, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, %%ZTMP6, \ + %%TAG_SIZE, %%LANE_GROUP + +%else ; USE_GFNI_VAES_VPCLMUL == 1 + ;; If VPCMUL is NOT available, read chunks of 16 bytes of data + ;; and digest them with 24 bytes of KS, and repeat this for 4 different buffers + ;; then insert these digests into a ZMM register and XOR with previous digest + +%assign %%J 0 +%rep 4 +%if %%TAG_SIZE == 4 +%if %%idx == 0 + ; Reset temporary digests (for the first 16 bytes) + vpxorq APPEND(%%XDIGEST_, %%J), APPEND(%%XDIGEST_, %%J) +%endif +%endif + ; Read the next 2 blocks of 16 bytes of KS + vmovdqa64 %%KS_L, [GET_KS(%%KS, %%LANE_GROUP, %%idx, %%J)] + vmovdqa64 %%KS_H, [GET_KS(%%KS, %%LANE_GROUP, (%%idx + 1), %%J)] + + ;; read 16 bytes and reverse bits + vmovdqu64 %%XTMP1, [APPEND(%%DATA_ADDR, %%J) + %%idx*16 + %%OFFSET] + REVERSE_BITS %%XTMP1, %%XTMP7, %%REV_TABLE_L, %%REV_TABLE_H, \ + %%REV_AND_TABLE, %%XTMP2, %%XTMP3 + + ; Digest 16 bytes of 
data with 24 bytes of KS, for one buffer + DIGEST_DATA %%XTMP7, %%KS_L, %%KS_H, %%XTMP8, %%XTMP10, \ + APPEND(%%XDIGEST_, %%J), %%SHUF_DATA_KMASK, \ + %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6, %%TAG_SIZE, \ + %%LANE_GROUP, %%J + + ; Once all 64 bytes of data have been digested, insert them in temporary ZMM register +%if %%TAG_SIZE == 4 +%if %%idx == 3 + vinserti32x4 %%Z_TEMP_DIGEST, APPEND(%%XDIGEST_, %%J), %%J +%endif +%endif +%assign %%J (%%J + 1) +%endrep ; %rep 4 %%J + + ; XOR with previous digest +%if %%TAG_SIZE == 4 +%if %%idx == 3 + vpxorq APPEND(%%DIGEST_, %%LANE_GROUP), %%Z_TEMP_DIGEST +%endif +%endif +%endif ;; USE_GFNI_VAES_VPCLMUL == 0 +%assign %%idx (%%idx + 1) +%assign %%N %%N+1 +%endrep ; %rep 4 %%idx + +%assign %%LANE_GROUP (%%LANE_GROUP + 1) +%endrep ; %rep 4 %%LANE_GROUP + +%assign %%LANE_GROUP 0 +%rep 4 + ; Memcpy KS 64-127 bytes to 0-63 bytes + vmovdqa64 %%ZTMP3, [%%KS + %%LANE_GROUP*512 + 64*4] + vmovdqa64 %%ZTMP4, [%%KS + %%LANE_GROUP*512 + 64*5] + vmovdqa64 %%ZTMP5, [%%KS + %%LANE_GROUP*512 + 64*6] + vmovdqa64 %%ZTMP6, [%%KS + %%LANE_GROUP*512 + 64*7] + vmovdqa64 [%%KS + %%LANE_GROUP*512], %%ZTMP3 + vmovdqa64 [%%KS + %%LANE_GROUP*512 + 64], %%ZTMP4 + vmovdqa64 [%%KS + %%LANE_GROUP*512 + 64*2], %%ZTMP5 + vmovdqa64 [%%KS + %%LANE_GROUP*512 + 64*3], %%ZTMP6 +%assign %%LANE_GROUP (%%LANE_GROUP + 1) +%endrep ; %rep 4 %%LANE_GROUP + + add %%OFFSET, 64 + + dec %%NROUNDS + jnz %%_loop + + ; Read from stack to extract the products and arrange them to XOR later + ; against previous digests (only for 8-byte and 16-byte tag) +%if %%TAG_SIZE != 4 +%assign %%I 0 +%rep 4 + vmovdqa64 %%ZTMP1, [rsp + %%I*256] + vmovdqa64 %%ZTMP2, [rsp + %%I*256 + 64] + vpshufb %%ZTMP1, %%ZTMP1, [rel shuf_mask_0_0_0_dw1] + vpandq %%ZTMP2, %%ZTMP2, [rel bits_32_63] +%if %%TAG_SIZE == 16 + vmovdqa64 %%ZTMP3, [rsp + %%I*256 + 64*2] + vmovdqa64 %%ZTMP4, [rsp + %%I*256 + 64*3] + vpshufb %%ZTMP3, %%ZTMP3, [rel shuf_mask_0_dw1_0_0] + vpshufb %%ZTMP4, %%ZTMP4, [rel shuf_mask_dw1_0_0_0] + vpternlogq %%ZTMP1, %%ZTMP2, %%ZTMP3, 0x96 + vpxorq APPEND(%%DIGEST_, %%I), %%ZTMP1, %%ZTMP4 +%else ; %%TAG_SIZE == 8 + vpxorq APPEND(%%DIGEST_, %%I), %%ZTMP1, %%ZTMP2 +%endif +%assign %%I (%%I + 1) +%endrep +%endif ; %%TAG_SIZE != 4 + + UPDATE_TAGS %%T, %%TAG_SIZE, order_0_4_8_12, %%TMP, %%TMP_KMASK1, %%TMP_KMASK2, \ + %%DIGEST_0, %%DIGEST_1, %%DIGEST_2, %%DIGEST_3, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4 + + ; Update R1/R2 + vmovdqa64 [%%STATE + OFS_R1], %%R1 + vmovdqa64 [%%STATE + OFS_R2], %%R2 + + ; Update data pointers + vmovdqu64 %%ZTMP1, [%%DATA] + vmovdqu64 %%ZTMP2, [%%DATA + 64] + vpbroadcastq %%ZTMP3, %%OFFSET + vpaddq %%ZTMP1, %%ZTMP3 + vpaddq %%ZTMP2, %%ZTMP3 + vmovdqu64 [%%DATA], %%ZTMP1 + vmovdqu64 [%%DATA + 64], %%ZTMP2 + + ; Update array of lengths (if lane is valid, so length < UINT16_MAX) + vmovdqa64 YWORD(%%ZTMP2), [%%LEN] + vpcmpw %%TMP_KMASK1, YWORD(%%ZTMP2), [rel all_ffs], 4 ; valid lanes + shl %%OFFSET, 3 ; Convert to bits + vpbroadcastw YWORD(%%ZTMP1), DWORD(%%OFFSET) + vpsubw YWORD(%%ZTMP2){%%TMP_KMASK1}, YWORD(%%ZTMP1) + vmovdqa64 [%%LEN], YWORD(%%ZTMP2) + +%if %%TAG_SIZE != 4 +%ifdef SAFE_DATA + vpxorq %%ZTMP1, %%ZTMP1 +%assign %%I 0 +%rep 16 + vmovdqa64 [rsp + %%I*64], %%ZTMP1 +%assign %%I (%%I + 1) +%endrep +%endif + + mov rsp, [rsp + _RSP] +%endif +%endmacro + +;; +;; void asm_ZucGenKeystream64B_16_avx512(state16_t *pSta, u32* pKeyStr[16], +;; const u32 key_off) +;; +MKGLOBAL(ZUC_KEYGEN64B_16,function,internal) +ZUC_KEYGEN64B_16: + endbranch64 + + FUNC_SAVE + + KEYGEN_16_AVX512 16, 
arg3, 0 + + FUNC_RESTORE + + ret +;; +;; void asm_Eia3_Nx64B_AVX512_16(ZucState16_t *pState, +;; uint32_t *pKeyStr, +;; uint32_t *T, +;; const void **data, +;; uint16_t *len, +;; const uint64_t numRounds, +;; const uint64_t tag_size); +MKGLOBAL(ZUC_EIA3_N64B,function,internal) +ZUC_EIA3_N64B: +%define STATE arg1 +%define KS arg2 +%define T arg3 +%define DATA arg4 + +%ifdef LINUX +%define LEN arg5 +%define NROUNDS arg6 +%else +%define LEN r10 +%define NROUNDS r11 +%endif +%define TAG_SIZE arg7 + + endbranch64 + +%ifndef LINUX + mov LEN, arg5 + mov NROUNDS, arg6 +%endif + + cmp TAG_SIZE, 8 + je Eia3_N64B_tag_8B + ja Eia3_N64B_tag_16B + + ; Fall-through for 4 bytes +Eia3_N64B_tag_4B: + FUNC_SAVE + + ZUC_EIA3_16_64B_AVX512 STATE, KS, T, DATA, LEN, NROUNDS, 4 + + FUNC_RESTORE + + ret + +Eia3_N64B_tag_8B: + FUNC_SAVE + + ZUC_EIA3_16_64B_AVX512 STATE, KS, T, DATA, LEN, NROUNDS, 8 + + FUNC_RESTORE + + ret + +Eia3_N64B_tag_16B: + FUNC_SAVE + + ZUC_EIA3_16_64B_AVX512 STATE, KS, T, DATA, LEN, NROUNDS, 16 + + FUNC_RESTORE + + ret + +; +;; void asm_ZucGenKeystream64B_16_skip16_avx512(state16_t *pSta, u32* pKeyStr[16], +;; const u32 key_off, +;; const u16 lane_mask) +;; +MKGLOBAL(ZUC_KEYGEN64B_SKIP16_16,function,internal) +ZUC_KEYGEN64B_SKIP16_16: + endbranch64 + + FUNC_SAVE + + KEYGEN_16_AVX512 16, arg3, 4, arg4 + + FUNC_RESTORE + + ret + +; +;; void asm_ZucGenKeystream64B_16_skip8_avx512(state16_t *pSta, u32* pKeyStr[16], +;; const u32 key_off, +;; const u16 lane_mask) +;; +MKGLOBAL(ZUC_KEYGEN64B_SKIP8_16,function,internal) +ZUC_KEYGEN64B_SKIP8_16: + endbranch64 + + FUNC_SAVE + + KEYGEN_16_AVX512 16, arg3, 2, arg4 + + FUNC_RESTORE + + ret + +;; void asm_ZucGenKeystream64B_16_skip4_avx512(state16_t *pSta, u32* pKeyStr[16], +;; const u32 key_off, +;; const u16 lane_mask) +;; +MKGLOBAL(ZUC_KEYGEN64B_SKIP4_16,function,internal) +ZUC_KEYGEN64B_SKIP4_16: + endbranch64 + + FUNC_SAVE + + KEYGEN_16_AVX512 16, arg3, 1, arg4 + + FUNC_RESTORE + + ret + +;; +;; void asm_ZucGenKeystream8B_16_avx512(state16_t *pSta, u32* pKeyStr[16], +;; const u32 key_off) +;; +MKGLOBAL(ZUC_KEYGEN8B_16,function,internal) +ZUC_KEYGEN8B_16: + endbranch64 + + FUNC_SAVE + + KEYGEN_16_AVX512 2, arg3, 0 + + FUNC_RESTORE + + ret + +%macro KEYGEN_VAR_16_AVX512 3-4 +%define %%NUM_ROUNDS %1 ; [in] Number of 4-byte rounds (GP dowrd register) +%define %%KEY_OFF %2 ; [in] Offset to start writing Keystream +%define %%SKIP_ROUNDS %3 ; [constant] Number of rounds to skip (1, 2 or 4) +%define %%LANE_MASK %4 ; [in] Lane mask with lanes to generate full keystream (rest 1-2 words less) + + cmp %%NUM_ROUNDS, 16 + je %%_num_rounds_is_16 + cmp %%NUM_ROUNDS, 8 + je %%_num_rounds_is_8 + jb %%_rounds_is_1_7 + + ; Final blocks 9-16 + cmp %%NUM_ROUNDS, 12 + je %%_num_rounds_is_12 + jb %%_rounds_is_9_11 + + ; Final blocks 13-15 + cmp %%NUM_ROUNDS, 14 + je %%_num_rounds_is_14 + ja %%_num_rounds_is_15 + jb %%_num_rounds_is_13 + +%%_rounds_is_9_11: + cmp %%NUM_ROUNDS, 10 + je %%_num_rounds_is_10 + ja %%_num_rounds_is_11 + jb %%_num_rounds_is_9 + +%%_rounds_is_1_7: + cmp %%NUM_ROUNDS, 4 + je %%_num_rounds_is_4 + jb %%_rounds_is_1_3 + + ; Final blocks 5-7 + cmp %%NUM_ROUNDS, 6 + je %%_num_rounds_is_6 + ja %%_num_rounds_is_7 + jb %%_num_rounds_is_5 + +%%_rounds_is_1_3: + cmp %%NUM_ROUNDS, 2 + je %%_num_rounds_is_2 + ja %%_num_rounds_is_3 + + ; Rounds = 1 if fall-through +%assign %%I 1 +%rep 16 +APPEND(%%_num_rounds_is_,%%I): +%if (%0 == 4) + KEYGEN_16_AVX512 %%I, %%KEY_OFF, %%SKIP_ROUNDS, %%LANE_MASK +%else + KEYGEN_16_AVX512 %%I, %%KEY_OFF, 0 +%endif + jmp %%_done + 
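;; (Note: the surrounding %rep expands 16 specialized KEYGEN_16_AVX512
;; bodies and the compare/branch tree above selects one of them at run
;; time; a hedged C-style equivalent of this dispatch would be:
;;   switch (num_rounds) { case 1: keygen(1); break; /* ... */ case 16: keygen(16); break; }
;; i.e. an unrolled switch over the variable number of 4-byte rounds.)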
+%assign %%I (%%I + 1) +%endrep + +%%_done: +%endmacro + +;; +;; void asm_ZucGenKeystream_16_avx512(state16_t *pSta, u32* pKeyStr[16], +;; const u32 key_off, +;; const u32 numRounds) +;; +MKGLOBAL(ZUC_KEYGEN_16,function,internal) +ZUC_KEYGEN_16: + endbranch64 + + FUNC_SAVE + + KEYGEN_VAR_16_AVX512 arg4, arg3, 0 + + FUNC_RESTORE + + ret + +;; +;; void asm_ZucGenKeystream_16_skip16_avx512(state16_t *pSta, u32* pKeyStr[16], +;; const u32 key_off, +;; const u16 lane_mask, +;; u32 numRounds) +;; +MKGLOBAL(ZUC_KEYGEN_SKIP16_16,function,internal) +ZUC_KEYGEN_SKIP16_16: + endbranch64 + + mov r10, arg5 + + FUNC_SAVE + + KEYGEN_VAR_16_AVX512 r10d, arg3, 4, arg4 + + FUNC_RESTORE + + ret +;; +;; void asm_ZucGenKeystream_16_skip8_avx512(state16_t *pSta, u32* pKeyStr[16], +;; const u32 key_off, +;; const u16 lane_mask, +;; u32 numRounds) +;; +MKGLOBAL(ZUC_KEYGEN_SKIP8_16,function,internal) +ZUC_KEYGEN_SKIP8_16: + endbranch64 + + mov r10, arg5 + + FUNC_SAVE + + KEYGEN_VAR_16_AVX512 r10d, arg3, 2, arg4 + + FUNC_RESTORE + + ret + +;; +;; void asm_ZucGenKeystream_16_skip4_avx512(state16_t *pSta, u32* pKeyStr[16], +;; const u32 key_off, +;; const u16 lane_mask, +;; u32 numRounds) +;; +MKGLOBAL(ZUC_KEYGEN_SKIP4_16,function,internal) +ZUC_KEYGEN_SKIP4_16: + endbranch64 + + mov r10, arg5 + + FUNC_SAVE + + KEYGEN_VAR_16_AVX512 r10d, arg3, 1, arg4 + + FUNC_RESTORE + + ret + +;; +;; Encrypts up to 64 bytes of data +;; +;; 1 - Reads R1 & R2 +;; 2 - Generates up to 64 bytes of keystream (16 rounds of 4 bytes) +;; 3 - Writes R1 & R2 +;; 4 - Transposes the registers containing chunks of 4 bytes of KS for each buffer +;; 5 - ZMM16-31 will contain 64 bytes of KS for each buffer +;; 6 - Reads 64 bytes of data for each buffer, XOR with KS and writes the ciphertext +;; +%macro CIPHER64B 12 +%define %%NROUNDS %1 +%define %%BYTE_MASK %2 +%define %%LANE_MASK %3 +%define %%OFFSET %4 +%define %%LAST_ROUND %5 +%define %%MASK_31 %6 +%define %%X0 %7 +%define %%X1 %8 +%define %%X2 %9 +%define %%W %10 +%define %%R1 %11 +%define %%R2 %12 + + ; Read R1/R2 + vmovdqa32 %%R1, [rax + OFS_R1] + vmovdqa32 %%R2, [rax + OFS_R2] + + ; Generate N*4B of keystream in N rounds +%assign N 1 +%assign idx 16 +%rep %%NROUNDS + BITS_REORG16 rax, N, %%LANE_MASK, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, \ + zmm7, zmm8, zmm9, k1, %%X0, %%X1, %%X2, APPEND(zmm, idx) + NONLIN_FUN16 rax, %%LANE_MASK, %%X0, %%X1, %%X2, %%R1, %%R2, \ + zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7 + ; OFS_X3 XOR W (zmm7) + vpxorq APPEND(zmm, idx), zmm7 + ; Shuffle bytes within KS words to XOR with plaintext later + vpshufb APPEND(zmm, idx), [rel swap_mask] + LFSR_UPDT16 rax, N, %%LANE_MASK, zmm1, zmm2, zmm3, zmm4, zmm5, \ + zmm6, %%MASK_31, zmm7, work +%assign N (N + 1) +%assign idx (idx + 1) +%endrep + vmovdqa32 [rax + OFS_R1]{%%LANE_MASK}, %%R1 + vmovdqa32 [rax + OFS_R2]{%%LANE_MASK}, %%R2 + + ; ZMM16-31 contain the keystreams for each round + ; Perform a 32-bit 16x16 transpose to have the 64 bytes + ; of each lane in a different register + TRANSPOSE16_U32 zmm16, zmm17, zmm18, zmm19, zmm20, zmm21, zmm22, zmm23, \ + zmm24, zmm25, zmm26, zmm27, zmm28, zmm29, zmm30, zmm31, \ + zmm0, zmm1, zmm2, zmm3, zmm4, zmm5, zmm6, zmm7, \ + zmm8, zmm9, zmm10, zmm11, zmm12, zmm13 + + ;; XOR Input buffer with keystream +%if %%LAST_ROUND == 1 + lea rbx, [rel byte64_len_to_mask_table] +%endif + ;; Read all 16 streams using registers r12-15 into registers zmm0-15 +%assign i 0 +%assign j 0 +%assign k 12 +%rep 16 +%if %%LAST_ROUND == 1 + ;; Read number of bytes left to encrypt for the lane stored in stack + 
;; and construct byte mask to read from input pointer + movzx r12d, word [rsp + j*2] + kmovq %%BYTE_MASK, [rbx + r12*8] +%endif + mov APPEND(r, k), [pIn + i] + vmovdqu8 APPEND(zmm, j){%%BYTE_MASK}{z}, [APPEND(r, k) + %%OFFSET] +%assign k 12 + ((j + 1) % 4) +%assign j (j + 1) +%assign i (i + 8) +%endrep + + ;; XOR Input (zmm0-15) with Keystreams (zmm16-31) +%assign i 0 +%assign j 16 +%rep 16 + vpxorq zmm %+j, zmm %+i +%assign i (i + 1) +%assign j (j + 1) +%endrep + + ;; Write output for all 16 buffers (zmm16-31) using registers r12-15 +%assign i 0 +%assign j 16 +%assign k 12 +%rep 16 +%if %%LAST_ROUND == 1 + ;; Read length to encrypt for the lane stored in stack + ;; and construct byte mask to write to output pointer + movzx r12d, word [rsp + (j-16)*2] + kmovq %%BYTE_MASK, [rbx + r12*8] +%endif + mov APPEND(r, k), [pOut + i] + vmovdqu8 [APPEND(r, k) + %%OFFSET]{%%BYTE_MASK}, APPEND(zmm, j) +%assign k 12 + ((j + 1) % 4) +%assign j (j + 1) +%assign i (i + 8) +%endrep + +%endmacro + +;; +;; void asm_ZucCipher_16_avx512(state16_t *pSta, u64 *pIn[16], +;; u64 *pOut[16], u16 lengths[16], +;; u64 min_length); +MKGLOBAL(CIPHER_16,function,internal) +CIPHER_16: + +%define pState arg1 +%define pIn arg2 +%define pOut arg3 +%define lengths arg4 + +%define min_length r10 +%define buf_idx r11 + + mov min_length, arg5 + + FUNC_SAVE + + ; Convert all lengths set to UINT16_MAX (indicating that lane is not valid) to min length + vpbroadcastw ymm0, min_length + vmovdqa ymm1, [lengths] + vpcmpw k1, ymm1, [rel all_ffs], 0 + vmovdqu16 ymm1{k1}, ymm0 ; YMM1 contain updated lengths + + ; Round up to nearest multiple of 4 bytes + vpaddw ymm0, [rel all_threes] + vpandq ymm0, [rel all_fffcs] + + ; Calculate remaining bytes to encrypt after function call + vpsubw ymm2, ymm1, ymm0 + vpxorq ymm3, ymm3 + vpcmpw k1, ymm2, ymm3, 1 ; Get mask of lengths < 0 + ; Set to zero the lengths of the lanes which are going to be completed + vmovdqu16 ymm2{k1}, ymm3 ; YMM2 contain final lengths + vmovdqa [lengths], ymm2 ; Update in memory the final updated lengths + + ; Calculate number of bytes to encrypt after round of 64 bytes (up to 63 bytes), + ; for each lane, and store it in stack to be used in the last round + vpsubw ymm1, ymm2 ; Bytes to encrypt in all lanes + vpandq ymm1, [rel all_3fs] ; Number of final bytes (up to 63 bytes) for each lane + sub rsp, 32 + vmovdqu [rsp], ymm1 + + ; Load state pointer in RAX + mov rax, pState + + ; Load read-only registers + mov r12d, 0xAAAAAAAA + kmovd k1, r12d + mov r12, 0xFFFFFFFFFFFFFFFF + kmovq k2, r12 + mov r12d, 0x0000FFFF + kmovd k3, r12d + + xor buf_idx, buf_idx + + ;; Perform rounds of 64 bytes, where LFSR reordering is not needed +loop: + cmp min_length, 64 + jl exit_loop + + vmovdqa64 zmm0, [rel mask31] + + CIPHER64B 16, k2, k3, buf_idx, 0, zmm0, zmm10, zmm11, zmm12, zmm13, zmm14, zmm15 + + sub min_length, 64 + add buf_idx, 64 + jmp loop + +exit_loop: + + mov r15, min_length + add r15, 3 + shr r15, 2 ;; numbers of rounds left (round up length to nearest multiple of 4B) + jz _no_final_rounds + + vmovdqa64 zmm0, [rel mask31] + + cmp r15, 8 + je _num_final_rounds_is_8 + jl _final_rounds_is_1_7 + + ; Final blocks 9-16 + cmp r15, 12 + je _num_final_rounds_is_12 + jl _final_rounds_is_9_11 + + ; Final blocks 13-16 + cmp r15, 16 + je _num_final_rounds_is_16 + cmp r15, 15 + je _num_final_rounds_is_15 + cmp r15, 14 + je _num_final_rounds_is_14 + cmp r15, 13 + je _num_final_rounds_is_13 + +_final_rounds_is_9_11: + cmp r15, 11 + je _num_final_rounds_is_11 + cmp r15, 10 + je 
_num_final_rounds_is_10 + cmp r15, 9 + je _num_final_rounds_is_9 + +_final_rounds_is_1_7: + cmp r15, 4 + je _num_final_rounds_is_4 + jl _final_rounds_is_1_3 + + ; Final blocks 5-7 + cmp r15, 7 + je _num_final_rounds_is_7 + cmp r15, 6 + je _num_final_rounds_is_6 + cmp r15, 5 + je _num_final_rounds_is_5 + +_final_rounds_is_1_3: + cmp r15, 3 + je _num_final_rounds_is_3 + cmp r15, 2 + je _num_final_rounds_is_2 + + jmp _num_final_rounds_is_1 + + ; Perform encryption of last bytes (<= 64 bytes) and reorder LFSR registers + ; if needed (if not all 16 rounds of 4 bytes are done) +%assign I 1 +%rep 16 +APPEND(_num_final_rounds_is_,I): + CIPHER64B I, k2, k3, buf_idx, 1, zmm0, zmm10, zmm11, zmm12, zmm13, zmm14, zmm15 + REORDER_LFSR rax, I, k3 + add buf_idx, min_length + jmp _no_final_rounds +%assign I (I + 1) +%endrep + +_no_final_rounds: + add rsp, 32 + ;; update in/out pointers + add buf_idx, 3 + and buf_idx, 0xfffffffffffffffc + vpbroadcastq zmm0, buf_idx + vpaddq zmm1, zmm0, [pIn] + vpaddq zmm2, zmm0, [pIn + 64] + vmovdqa64 [pIn], zmm1 + vmovdqa64 [pIn + 64], zmm2 + vpaddq zmm1, zmm0, [pOut] + vpaddq zmm2, zmm0, [pOut + 64] + vmovdqa64 [pOut], zmm1 + vmovdqa64 [pOut + 64], zmm2 + + FUNC_RESTORE + + ret + + +;; +;; Updates authentication tag T of 16 buffers based on keystream KS and DATA +;; (GFNI/VAES/VPCLMULQDQ version) +;; +%macro ROUND64B_16_GFNI 11 +%define %%T %1 ; [in] Pointer to digests +%define %%KS %2 ; [in] Pointer to keystream (128x16 bytes) +%define %%DATA %3 ; [in] Pointer to array of pointers to data buffers +%define %%LEN %4 ; [in] Pointer to array of remaining length to digest +%define %%TMP1 %5 ; [clobbered] Temporary GP register +%define %%TMP2 %6 ; [clobbered] Temporary GP register +%define %%TMP3 %7 ; [clobbered] Temporary GP register +%define %%TMP4 %8 ; [clobbered] Temporary GP register +%define %%TMP5 %9 ; [clobbered] Temporary GP register +%define %%TMP6 %10 ; [clobbered] Temporary GP register +%define %%TAG_SIZE %11 ; [constant] Tag size (4, 8 or 16 bytes) + +%define %%SHUF_DATA_KMASK k1 ; Mask to shuffle data +%define %%TMP_KMASK1 k2 +%define %%TMP_KMASK2 k3 + +%define %%DATA_ADDR0 %%TMP3 +%define %%DATA_ADDR1 %%TMP4 +%define %%DATA_ADDR2 %%TMP5 +%define %%DATA_ADDR3 %%TMP6 + +%define %%DATA_TRANS0 zmm19 +%define %%DATA_TRANS1 zmm20 +%define %%DATA_TRANS2 zmm21 +%define %%DATA_TRANS3 zmm22 +%define %%DATA_TRANS0x xmm19 +%define %%DATA_TRANS1x xmm20 +%define %%DATA_TRANS2x xmm21 +%define %%DATA_TRANS3x xmm22 + +%define %%KS_TRANS0 zmm23 +%define %%KS_TRANS1 zmm24 +%define %%KS_TRANS2 zmm25 +%define %%KS_TRANS3 zmm26 +%define %%KS_TRANS4 zmm27 +%define %%KS_TRANS0x xmm23 +%define %%KS_TRANS1x xmm24 +%define %%KS_TRANS2x xmm25 +%define %%KS_TRANS3x xmm26 +%define %%KS_TRANS4x xmm27 + +%define %%DIGEST_0 zmm28 +%define %%DIGEST_1 zmm29 +%define %%DIGEST_2 zmm30 +%define %%DIGEST_3 zmm31 + +%define %%ZTMP1 zmm0 +%define %%ZTMP2 zmm1 +%define %%ZTMP3 zmm2 +%define %%ZTMP4 zmm3 +%define %%ZTMP5 zmm4 +%define %%ZTMP6 zmm5 +%define %%ZTMP7 zmm6 +%define %%ZTMP8 zmm7 +%define %%ZTMP9 zmm8 + +%define %%YTMP1 YWORD(%%ZTMP1) + +%if %%TAG_SIZE != 4 + mov %%TMP1, rsp + ; Reserve stack space to store temporary digest products + sub rsp, STACK_SPACE + and rsp, ~63 + mov [rsp + _RSP], %%TMP1 + + vpxorq %%ZTMP1, %%ZTMP1 +%assign %%I 0 +%rep 16 + vmovdqa64 [rsp + 64*%%I], %%ZTMP1 +%assign %%I (%%I + 1) +%endrep +%endif ; %%TAG_SIZE != 4 + + mov DWORD(%%TMP1), 0x55555555 + kmovd %%SHUF_DATA_KMASK, DWORD(%%TMP1) + ;; Read first buffers 0,4,8,12; then 1,5,9,13, and so on, + ;; since the 
keystream is laid out this way, with chunks of + ;; 16 bytes interleaved. First the 128 bytes for + ;; buffers 0,4,8,12 (total of 512 bytes), then the 128 bytes + ;; for buffers 1,5,9,13, and so on. +%assign %%IDX 0 +%rep 4 +%if %%TAG_SIZE == 4 + vpxorq APPEND(%%DIGEST_, %%IDX), APPEND(%%DIGEST_, %%IDX) +%endif + mov %%DATA_ADDR0, [%%DATA + %%IDX*8 + 0*32] + mov %%DATA_ADDR1, [%%DATA + %%IDX*8 + 1*32] + mov %%DATA_ADDR2, [%%DATA + %%IDX*8 + 2*32] + mov %%DATA_ADDR3, [%%DATA + %%IDX*8 + 3*32] + +%assign %%I 0 +%assign %%J 1 +%rep 4 + vmovdqu64 XWORD(APPEND(%%DATA_TRANS, %%I)), [%%DATA_ADDR0 + 16*%%I] + vinserti32x4 APPEND(%%DATA_TRANS, %%I), [%%DATA_ADDR1 + 16*%%I], 1 + vinserti32x4 APPEND(%%DATA_TRANS, %%I), [%%DATA_ADDR2 + 16*%%I], 2 + vinserti32x4 APPEND(%%DATA_TRANS, %%I), [%%DATA_ADDR3 + 16*%%I], 3 + + vmovdqu64 APPEND(%%KS_TRANS, %%I), [%%KS + %%IDX*64*2*4 + 64*%%I] + vmovdqu64 APPEND(%%KS_TRANS, %%J), [%%KS + %%IDX*64*2*4 + 64*%%J] + + ;; Reverse bits of next 16 bytes from all 4 buffers + vgf2p8affineqb %%ZTMP1, APPEND(%%DATA_TRANS,%%I), [rel bit_reverse_table], 0x00 + + ; Digest 16 bytes of data with 24 bytes of KS, for 4 buffers + DIGEST_DATA %%ZTMP1, APPEND(%%KS_TRANS, %%I), APPEND(%%KS_TRANS, %%J), \ + %%ZTMP8, %%ZTMP9, APPEND(%%DIGEST_, %%IDX), %%SHUF_DATA_KMASK, \ + %%ZTMP2, %%ZTMP3, %%ZTMP4, %%ZTMP5, \ + %%ZTMP6, %%ZTMP7, %%TAG_SIZE, %%IDX + +%assign %%J (%%J + 1) +%assign %%I (%%I + 1) +%endrep + + ; Memcpy KS 64-127 bytes to 0-63 bytes + vmovdqa64 %%ZTMP4, [%%KS + %%IDX*4*64*2 + 64*4] + vmovdqa64 %%ZTMP1, [%%KS + %%IDX*4*64*2 + 64*5] + vmovdqa64 %%ZTMP2, [%%KS + %%IDX*4*64*2 + 64*6] + vmovdqa64 %%ZTMP3, [%%KS + %%IDX*4*64*2 + 64*7] + vmovdqa64 [%%KS + %%IDX*4*64*2], %%ZTMP4 + vmovdqa64 [%%KS + %%IDX*4*64*2 + 64], %%ZTMP1 + vmovdqa64 [%%KS + %%IDX*4*64*2 + 64*2], %%ZTMP2 + vmovdqa64 [%%KS + %%IDX*4*64*2 + 64*3], %%ZTMP3 + +%assign %%IDX (%%IDX + 1) +%endrep + + ; Read from stack to extract the products and arrange them to XOR later + ; against previous digests (only for 8-byte and 16-byte tag) +%if %%TAG_SIZE != 4 +%assign %%I 0 +%rep 4 + vmovdqa64 %%ZTMP1, [rsp + %%I*256] + vmovdqa64 %%ZTMP2, [rsp + %%I*256 + 64] + vpshufb %%ZTMP1, %%ZTMP1, [rel shuf_mask_0_0_0_dw1] + vpandq %%ZTMP2, %%ZTMP2, [rel bits_32_63] +%if %%TAG_SIZE == 16 + vmovdqa64 %%ZTMP3, [rsp + %%I*256 + 64*2] + vmovdqa64 %%ZTMP4, [rsp + %%I*256 + 64*3] + vpshufb %%ZTMP3, %%ZTMP3, [rel shuf_mask_0_dw1_0_0] + vpshufb %%ZTMP4, %%ZTMP4, [rel shuf_mask_dw1_0_0_0] + vpternlogq %%ZTMP1, %%ZTMP2, %%ZTMP3, 0x96 + vpxorq APPEND(%%DIGEST_, %%I), %%ZTMP1, %%ZTMP4 +%else ; %%TAG_SIZE == 8 + vpxorq APPEND(%%DIGEST_, %%I), %%ZTMP1, %%ZTMP2 +%endif +%assign %%I (%%I + 1) +%endrep +%endif ; %%TAG_SIZE != 4 + + UPDATE_TAGS %%T, %%TAG_SIZE, order_0_4_8_12, %%TMP1, %%TMP_KMASK1, %%TMP_KMASK2, \ + %%DIGEST_0, %%DIGEST_1, %%DIGEST_2, %%DIGEST_3, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4 + + ; Update data pointers + vmovdqu64 %%ZTMP1, [%%DATA] + vmovdqu64 %%ZTMP2, [%%DATA + 64] + vpaddq %%ZTMP1, [rel add_64] + vpaddq %%ZTMP2, [rel add_64] + vmovdqu64 [%%DATA], %%ZTMP1 + vmovdqu64 [%%DATA + 64], %%ZTMP2 + + ; Update array of lengths (subtract 512 bits from all lengths if valid lane) + vmovdqa64 %%YTMP1, [LEN] + vpcmpw %%TMP_KMASK1, %%YTMP1, [rel all_ffs], 4 + vpsubw %%YTMP1{%%TMP_KMASK1}, [rel all_512w] + vmovdqa64 [%%LEN], %%YTMP1 + +%if %%TAG_SIZE != 4 +%ifdef SAFE_DATA + vpxorq %%ZTMP1, %%ZTMP1 +%assign %%I 0 +%rep 16 + vmovdqa64 [rsp + %%I*64], %%ZTMP1 +%assign %%I (%%I + 1) +%endrep +%endif + + mov rsp, [rsp + _RSP] +%endif ; 
%%TAG_SIZE != 4 +%endmacro + + +;; +;; Updates authentication tag T of 16 buffers based on keystream KS and DATA. +;; +%macro ROUND64B_16_NO_GFNI 11 +%define %%T %1 ; [in] Pointer to digests +%define %%KS %2 ; [in] Pointer to keystream (128x16 bytes) +%define %%DATA %3 ; [in] Pointer to array of pointers to data buffers +%define %%LEN %4 ; [in] Pointer to array of remaining length to digest +%define %%TMP1 %5 ; [clobbered] Temporary GP register +%define %%TMP2 %6 ; [clobbered] Temporary GP register +%define %%TMP3 %7 ; [clobbered] Temporary GP register +%define %%TMP4 %8 ; [clobbered] Temporary GP register +%define %%TMP5 %9 ; [clobbered] Temporary GP register +%define %%TMP6 %10 ; [clobbered] Temporary GP register +%define %%TAG_SIZE %11 ; [constant] Tag size (4, 8 or 16 bytes) + +%define %%SHUF_DATA_KMASK k1 ; Mask to shuffle data +%define %%TMP_KMASK1 k2 +%define %%TMP_KMASK2 k3 + +%define %%REV_TABLE_L xmm0 +%define %%REV_TABLE_H xmm1 +%define %%REV_AND_TABLE xmm2 +%define %%TEMP_DIGEST xmm3 +%define %%KS_L xmm4 +%define %%KS_H xmm5 +%define %%XDATA xmm6 +%define %%XTMP1 xmm7 +%define %%XTMP2 xmm8 +%define %%XTMP3 xmm9 +%define %%XTMP4 xmm10 +%define %%XTMP5 xmm11 +%define %%XTMP6 xmm12 +%define %%XTMP7 xmm13 +%define %%XTMP8 xmm14 + +%define %%ZTMP1 zmm22 +%define %%ZTMP2 zmm23 +%define %%ZTMP3 zmm24 +%define %%ZTMP4 zmm25 +%define %%DIGEST_0 zmm28 +%define %%DIGEST_1 zmm29 +%define %%DIGEST_2 zmm30 +%define %%DIGEST_3 zmm31 + +%define %%YTMP1 ymm24 + +%define %%DATA_ADDR %%TMP3 + +%if %%TAG_SIZE != 4 + mov %%TMP1, rsp + ; Reserve stack space to store temporary digest products + sub rsp, STACK_SPACE + and rsp, ~63 + mov [rsp + _RSP], %%TMP1 + + vpxorq %%ZTMP1, %%ZTMP1 +%assign %%I 0 +%rep 16 + vmovdqa64 [rsp + 64*%%I], %%ZTMP1 +%assign %%I (%%I + 1) +%endrep +%endif ; %%TAG_SIZE != 4 + + vmovdqa %%REV_TABLE_L, [rel bit_reverse_table_l] + vmovdqa %%REV_TABLE_H, [rel bit_reverse_table_h] + vmovdqa %%REV_AND_TABLE, [rel bit_reverse_and_table] + + mov DWORD(%%TMP1), 0x55555555 + kmovd %%SHUF_DATA_KMASK, DWORD(%%TMP1) + + ;; Read first buffers 0,4,8,12; then 1,5,9,13, and so on, + ;; since the keystream is laid out this way, which chunks of + ;; 16 bytes interleved. 
First the 128 bytes for + ;; buffers 0,4,8,12 (total of 512 bytes), then the 128 bytes + ;; for buffers 1,5,9,13, and so on +%assign %%I 0 +%rep 4 +%assign %%J 0 +%rep 4 + +%if %%TAG_SIZE == 4 + vpxor %%TEMP_DIGEST, %%TEMP_DIGEST +%endif + mov %%DATA_ADDR, [%%DATA + 8*(%%J*4 + %%I)] + +%assign %%K 0 +%rep 4 + ;; read 16 bytes and reverse bits + vmovdqu %%XTMP1, [%%DATA_ADDR + 16*%%K] + vpand %%XTMP2, %%XTMP1, %%REV_AND_TABLE + + vpandn %%XTMP3, %%REV_AND_TABLE, %%XTMP1 + vpsrld %%XTMP3, 4 + + vpshufb %%XDATA, %%REV_TABLE_H, %%XTMP2 ; bit reverse low nibbles (use high table) + vpshufb %%XTMP4, %%REV_TABLE_L, %%XTMP3 ; bit reverse high nibbles (use low table) + + vpor %%XDATA, %%XDATA, %%XTMP4 ; %%DATA - bit reversed data bytes + + ; Read the next 2 blocks of 16 bytes of KS + vmovdqu %%KS_L, [%%KS + (16*%%J + %%I*512) + %%K*(16*4)] + vmovdqu %%KS_H, [%%KS + (16*%%J + %%I*512) + (%%K + 1)*(16*4)] + ; Digest 16 bytes of data with 24 bytes of KS, for 4 buffers + DIGEST_DATA %%XDATA, %%KS_L, %%KS_H, %%XTMP7, %%XTMP8, %%TEMP_DIGEST, %%SHUF_DATA_KMASK, \ + %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6, %%TAG_SIZE, %%I, %%J + +%assign %%K (%%K + 1) +%endrep + + vinserti32x4 APPEND(%%DIGEST_, %%I), %%TEMP_DIGEST, %%J +%assign %%J (%%J + 1) +%endrep + ; Memcpy KS 64-127 bytes to 0-63 bytes + vmovdqa64 %%ZTMP1, [%%KS + %%I*4*64*2 + 64*4] + vmovdqa64 %%ZTMP2, [%%KS + %%I*4*64*2 + 64*5] + vmovdqa64 %%ZTMP3, [%%KS + %%I*4*64*2 + 64*6] + vmovdqa64 %%ZTMP4, [%%KS + %%I*4*64*2 + 64*7] + vmovdqa64 [%%KS + %%I*4*64*2], %%ZTMP1 + vmovdqa64 [%%KS + %%I*4*64*2 + 64], %%ZTMP2 + vmovdqa64 [%%KS + %%I*4*64*2 + 64*2], %%ZTMP3 + vmovdqa64 [%%KS + %%I*4*64*2 + 64*3], %%ZTMP4 +%assign %%I (%%I + 1) +%endrep + + ; Read from stack to extract the products and arrange them to XOR later + ; against previous digests (only for 8-byte and 16-byte tag) +%if %%TAG_SIZE != 4 +%assign %%I 0 +%rep 4 + vmovdqa64 %%ZTMP1, [rsp + %%I*256] + vmovdqa64 %%ZTMP2, [rsp + %%I*256 + 64] + vpshufb %%ZTMP1, %%ZTMP1, [rel shuf_mask_0_0_0_dw1] + vpandq %%ZTMP2, %%ZTMP2, [rel bits_32_63] +%if %%TAG_SIZE == 16 + vmovdqa64 %%ZTMP3, [rsp + %%I*256 + 64*2] + vmovdqa64 %%ZTMP4, [rsp + %%I*256 + 64*3] + vpshufb %%ZTMP3, %%ZTMP3, [rel shuf_mask_0_dw1_0_0] + vpshufb %%ZTMP4, %%ZTMP4, [rel shuf_mask_dw1_0_0_0] + vpternlogq %%ZTMP1, %%ZTMP2, %%ZTMP3, 0x96 + vpxorq APPEND(%%DIGEST_, %%I), %%ZTMP1, %%ZTMP4 +%else ; %%TAG_SIZE == 8 + vpxorq APPEND(%%DIGEST_, %%I), %%ZTMP1, %%ZTMP2 +%endif +%assign %%I (%%I + 1) +%endrep +%endif ; %%TAG_SIZE != 4 + UPDATE_TAGS %%T, %%TAG_SIZE, order_0_4_8_12, %%TMP1, %%TMP_KMASK1, %%TMP_KMASK2, \ + %%DIGEST_0, %%DIGEST_1, %%DIGEST_2, %%DIGEST_3, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4 + + ; Update data pointers + vmovdqu64 %%ZTMP2, [%%DATA] + vmovdqu64 %%ZTMP3, [%%DATA + 64] + vpaddq %%ZTMP2, [rel add_64] + vpaddq %%ZTMP3, [rel add_64] + vmovdqu64 [%%DATA], %%ZTMP2 + vmovdqu64 [%%DATA + 64], %%ZTMP3 + + ; Update array of lengths (if lane is valid, so length < UINT16_MAX) + vmovdqa64 %%YTMP1, [%%LEN] + vpcmpw %%TMP_KMASK1, %%YTMP1, [rel all_ffs], 4 ; valid lanes + vpsubw %%YTMP1{%%TMP_KMASK1}, [rel all_512w] + vmovdqa64 [%%LEN], %%YTMP1 + +%if %%TAG_SIZE != 4 +%ifdef SAFE_DATA + vpxorq %%ZTMP1, %%ZTMP1 +%assign %%I 0 +%rep 16 + vmovdqa64 [rsp + %%I*64], %%ZTMP1 +%assign %%I (%%I + 1) +%endrep +%endif + + mov rsp, [rsp + _RSP] +%endif ; %%TAG_SIZE != 4 +%endmacro + +;; +;;extern void asm_Eia3Round64B_16(void *T, const void *KS, +;; const void **DATA, uint16_t *LEN); +;; +;; Updates authentication tag T of 16 buffers based 
on keystream KS and DATA. +;; - it processes 64 bytes of DATA of buffers +;; - reads data in 16 byte chunks from different buffers +;; (first buffers 0,4,8,12; then 1,5,9,13; etc) and bit reverses them +;; - reads KS (when utilizing VPCLMUL instructions, it reads 64 bytes directly, +;; containing 16 bytes of KS for 4 different buffers) +;; - employs clmul for the XOR & ROL part +;; - copies top 64 bytes of KS to bottom (for the next round) +;; - Updates Data pointers for next rounds +;; - Updates array of lengths +;; +;; @param [in] T: Array of digests for all 16 buffers +;; @param [in] KS: Pointer to 128 bytes of keystream for all 16 buffers (2048 bytes in total) +;; @param [in] DATA: Array of pointers to data for all 16 buffers +;; @param [in] LEN: Array of lengths for all 16 buffers +;; @param [in] TAG_SZ: Tag size (4, 8 or 16 bytes) +;; +align 64 +MKGLOBAL(ZUC_ROUND64B_16,function,internal) +ZUC_ROUND64B_16: +%define T arg1 +%define KS arg2 +%define DATA arg3 +%define LEN arg4 +%define TAG_SIZE arg5 + + endbranch64 + + cmp TAG_SIZE, 8 + je round_8B + jb round_4B + + ;; Fall-through for 16-byte tag +round_16B: + + FUNC_SAVE + +%if USE_GFNI_VAES_VPCLMUL == 1 + ROUND64B_16_GFNI T, KS, DATA, LEN, rbx, r10, r11, r12, r13, r14, 16 +%else + ROUND64B_16_NO_GFNI T, KS, DATA, LEN, rbx, r10, r11, r12, r13, r14, 16 +%endif + + FUNC_RESTORE + + ret + +round_8B: + + FUNC_SAVE + +%if USE_GFNI_VAES_VPCLMUL == 1 + ROUND64B_16_GFNI T, KS, DATA, LEN, rbx, r10, r11, r12, r13, r14, 8 +%else + ROUND64B_16_NO_GFNI T, KS, DATA, LEN, rbx, r10, r11, r12, r13, r14, 8 +%endif + + FUNC_RESTORE + + ret +round_4B: + + FUNC_SAVE + +%if USE_GFNI_VAES_VPCLMUL == 1 + ROUND64B_16_GFNI T, KS, DATA, LEN, rbx, r10, r11, r12, r13, r14, 4 +%else + ROUND64B_16_NO_GFNI T, KS, DATA, LEN, rbx, r10, r11, r12, r13, r14, 4 +%endif + + FUNC_RESTORE + + ret + + +; +; Reads a qword of KS, rotates it by LEN % 32, and store the results as a single dword +; +%macro READ_AND_ROTATE_KS_DWORD 4 +%define %%KS_ADDR %1 ; [in] Base address of KS to read +%define %%LEN_BUF %2 ; [in] Remaining bytes of data +%define %%IN_OFFSET_OUT_KS %3 ; [in/out] Offset to read qwords of KS +%define %%TMP1 %4 ; [clobbered] Temporary GP register + + mov %%TMP1, %%IN_OFFSET_OUT_KS + and %%TMP1, 0xf + ; Read last two dwords of KS, which can be scattered or contiguous + ; (First dword can be at the end of a 16-byte chunk) + cmp %%TMP1, 12 + je %%_read_2dwords + mov %%IN_OFFSET_OUT_KS, [%%KS_ADDR + %%IN_OFFSET_OUT_KS] + jmp %%_ks_qword_read + + ;; The 8 bytes of %%KS are separated +%%_read_2dwords: + mov DWORD(%%TMP1), [%%KS_ADDR + %%IN_OFFSET_OUT_KS] + mov DWORD(%%IN_OFFSET_OUT_KS), [%%KS_ADDR + %%IN_OFFSET_OUT_KS + (4+48)] + shl %%IN_OFFSET_OUT_KS, 32 + or %%IN_OFFSET_OUT_KS, %%TMP1 +%%_ks_qword_read: + ; Rotate left by MIN_LEN % 32 + mov %%TMP1, rcx + mov rcx, %%LEN_BUF + and rcx, 0x1F + rol %%IN_OFFSET_OUT_KS, cl + mov rcx, %%TMP1 +%endmacro +; +; Reads two qwords of KS, overlapped by 4 bytes (e.g. KS[0-7] and KS[4-11]), +; rotates both qwords by LEN % 32, and store the results as a single qword, +; where lower dword is the result of rotation on first qword, and upper dword +; is the rotation on second dword. 
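;; Hedged C sketch of these helpers (assuming a flat, non-interleaved
;; keystream view; the real macros must also handle the case where the
;; bytes straddle two interleaved 16-byte chunks, hence the scattered
;; read paths). Needs <stdint.h>/<string.h>; names are illustrative only:
;;
;;   static uint32_t read_rot_ks_dword(const uint8_t *ks, size_t off, uint64_t len)
;;   {
;;           uint64_t q;
;;           unsigned r = (unsigned) (len & 31);
;;
;;           memcpy(&q, ks + off, 8);                /* KS bytes off .. off+7 */
;;           if (r)
;;                   q = (q << r) | (q >> (64 - r)); /* rol64 by len % 32     */
;;           return (uint32_t) q;                    /* low dword is the result */
;;   }
;;
;; The QWORD variant applies the same rotation to KS[off..off+7] and
;; KS[off+4..off+11] and packs the two low dwords into a single 64-bit
;; result (first rotation in the low dword, second in the high dword).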
+; +%macro READ_AND_ROTATE_KS_QWORD 5 +%define %%KS_ADDR %1 ; [in] Base address of KS to read +%define %%LEN_BUF %2 ; [in] Remaining bytes of data +%define %%IN_OFFSET_OUT_KS %3 ; [in/out] Offset to read qwords of KS +%define %%TMP1 %4 ; [clobbered] Temporary GP register +%define %%TMP2 %5 ; [clobbered] Temporary GP register + + mov %%TMP2, %%IN_OFFSET_OUT_KS + and %%TMP2, 0xf + ; Read last three dwords of KS, which can be scattered or contiguous + ; (First dword can be at the end of a 16-byte chunk and the other + ; two dwords in the next chunk; first two dwords can be at the end of + ; a 16-byte chunk and the other dword in the next chunk; or all three + ; dwords can be in the same 16-byte chunk) + cmp %%TMP2, 8 + je %%_read_8B_4B + cmp %%TMP2, 12 + je %%_read_4B_8B + + ;; All 12 bytes of KS are contiguous +%%_read_12B: + mov %%TMP1, [%%KS_ADDR + %%IN_OFFSET_OUT_KS] + mov %%IN_OFFSET_OUT_KS, [%%KS_ADDR + %%IN_OFFSET_OUT_KS + 4] + jmp %%_ks_qwords_read + + ;; The first 8 bytes of KS are contiguous, the other 4 are separated +%%_read_8B_4B: + mov %%TMP1, [%%KS_ADDR + %%IN_OFFSET_OUT_KS] + ; Read last 4 bytes of first segment and first 4 bytes of second segment + mov DWORD(%%TMP2), [%%KS_ADDR + %%IN_OFFSET_OUT_KS + 4] + mov DWORD(%%IN_OFFSET_OUT_KS), [%%KS_ADDR + %%IN_OFFSET_OUT_KS + (8+48)] + shl %%IN_OFFSET_OUT_KS, 32 + or %%IN_OFFSET_OUT_KS, %%TMP2 + + jmp %%_ks_qwords_read + ;; The first 8 bytes of KS are separated, the other 8 are contiguous +%%_read_4B_8B: + mov DWORD(%%TMP1), [%%KS_ADDR + %%IN_OFFSET_OUT_KS] + mov DWORD(%%TMP2), [%%KS_ADDR + %%IN_OFFSET_OUT_KS + (4+48)] + shl %%TMP2, 32 + or %%TMP1, %%TMP2 + mov %%IN_OFFSET_OUT_KS, [%%KS_ADDR + %%IN_OFFSET_OUT_KS + (4+48)] +%%_ks_qwords_read: + ; Rotate left by LEN_BUF % 32 + mov %%TMP2, rcx + mov rcx, %%LEN_BUF + and rcx, 0x1F + rol %%TMP1, cl + rol %%IN_OFFSET_OUT_KS, cl + mov rcx, %%TMP2 + + shl %%IN_OFFSET_OUT_KS, 32 + mov DWORD(%%TMP1), DWORD(%%TMP1) ; Clear top 32 bits + or %%IN_OFFSET_OUT_KS, %%TMP1 +%endmacro + +%macro REMAINDER_16 14 +%define %%T %1 ; [in] Pointer to digests +%define %%KS %2 ; [in] Pointer to keystream (128x16 bytes) +%define %%DATA %3 ; [in] Pointer to array of pointers to data buffers +%define %%LEN %4 ; [in] Pointer to array of remaining length to digest +%define %%MIN_LEN %5 ; [in] Minimum common length +%define %%TMP1 %6 ; [clobbered] Temporary GP register +%define %%TMP2 %7 ; [clobbered] Temporary GP register +%define %%TMP3 %8 ; [clobbered] Temporary GP register +%define %%TMP4 %9 ; [clobbered] Temporary GP register +%define %%TMP5 %10 ; [clobbered] Temporary GP register +%define %%TMP6 %11 ; [clobbered] Temporary GP register +%define %%TMP7 %12 ; [clobbered] Temporary GP register +%define %%KEY_SIZE %13 ; [constant] Key size (128 or 256) +%define %%TAG_SIZE %14 ; [constant] Tag size (4, 8 or 16 bytes) + +%define %%DIGEST_0 zmm28 +%define %%DIGEST_1 zmm29 +%define %%DIGEST_2 zmm30 +%define %%DIGEST_3 zmm31 + +;; +;; There are two main parts in this code: +;; - 1st part: digest data +;; - 2nd part: reading final KS words and XOR'ing with digest +;; +%define %%DATA_ADDR %%TMP2 ; %%DATA_ADDR only used in 1st part / %%TMP2 only used in 2nd part +%define %%OFFSET %%TMP3 ; %%OFFSET only used in 1st part / %%TMP3 only used in 2nd part +%define %%KS_ADDR %%TMP7 ; %%KS_ADDR used in all code +%define %%N_BYTES %%TMP6 ; %%N_BYTES only used in 1st part + +%define %%LEN_BUF %%TMP4 ; %%LEN_BUF only used in 2nd part +%define %%IDX %%TMP5 ; %%IDX Only used in 2nd part +%define %%DIGEST %%TMP6 ; %%DIGEST only used in 
2nd part + +%define %%YTMP1 ymm7 +%define %%YTMP2 ymm8 +%define %%YTMP3 ymm9 +%define %%YTMP4 ymm10 + +%define %%REV_TABLE_L xmm0 +%define %%REV_TABLE_H xmm1 +%define %%REV_AND_TABLE xmm2 +%define %%TEMP_DIGEST xmm3 +%define %%KS_L xmm4 +%define %%KS_H xmm5 +%define %%XDATA xmm6 +%define %%XTMP1 xmm7 +%define %%XTMP2 xmm8 +%define %%XTMP3 xmm9 +%define %%XTMP4 xmm10 +%define %%XTMP5 xmm11 +%define %%XTMP6 xmm12 +%define %%XTMP7 xmm13 +%define %%XTMP8 xmm14 + +%define %%ZTMP1 zmm7 +%define %%ZTMP2 zmm8 +%define %%ZTMP3 zmm9 +%define %%ZTMP4 zmm10 + +%define %%VALID_KMASK k1 ; Mask with valid lanes +%define %%SHUF_DATA_KMASK k2 ; Mask to shuffle data +%define %%TMP_KMASK1 k3 +%define %%TMP_KMASK2 k4 + + vpbroadcastw %%YTMP1, DWORD(%%MIN_LEN) + ; Get mask of non-NULL lanes (lengths not set to UINT16_MAX, indicating that lane is not valid) + vmovdqa %%YTMP2, [%%LEN] + vpcmpw %%VALID_KMASK, %%YTMP2, [rel all_ffs], 4 ; NEQ + + ; Round up to nearest multiple of 32 bits + vpaddw %%YTMP1{%%VALID_KMASK}, [rel all_31w] + vpandq %%YTMP1, [rel all_ffe0w] + + ; Calculate remaining bits to authenticate after function call + vpcmpuw %%TMP_KMASK1, %%YTMP2, %%YTMP1, 1 ; Get mask of lengths that will be < 0 after subtracting + vpsubw %%YTMP3{%%VALID_KMASK}, %%YTMP2, %%YTMP1 + vpxorq %%YTMP4, %%YTMP4 + ; Set to zero the lengths of the lanes which are going to be completed + vmovdqu16 %%YTMP3{%%TMP_KMASK1}, %%YTMP4 ; YMM2 contain final lengths + vmovdqu16 [%%LEN]{%%VALID_KMASK}, %%YTMP3 ; Update in memory the final updated lengths + + ; Calculate number of bits to authenticate (up to 511 bits), + ; for each lane, and store it in stack to be used later + vpsubw %%YTMP2{%%VALID_KMASK}{z}, %%YTMP3 ; Bits to authenticate in all lanes (zero out length of NULL lanes) + sub rsp, 32 + vmovdqu [rsp], %%YTMP2 + + xor %%OFFSET, %%OFFSET + +%if USE_GFNI_VAES_VPCLMUL != 1 + vmovdqa %%REV_TABLE_L, [rel bit_reverse_table_l] + vmovdqa %%REV_TABLE_H, [rel bit_reverse_table_h] + vmovdqa %%REV_AND_TABLE, [rel bit_reverse_and_table] +%endif + + mov r12d, 0x55555555 + kmovd %%SHUF_DATA_KMASK, r12d + + ;; Read first buffers 0,4,8,12; then 1,5,9,13, and so on, + ;; since the keystream is laid out this way, which chunks of + ;; 16 bytes interleved. 
First the 128 bytes for + ;; buffers 0,4,8,12 (total of 512 bytes), then the 128 bytes + ;; for buffers 1,5,9,13, and so on +%assign I 0 +%rep 4 +%assign J 0 +%rep 4 + + ; Read length to authenticate for each buffer + movzx %%LEN_BUF, word [rsp + 2*(I*4 + J)] + + vpxor %%TEMP_DIGEST, %%TEMP_DIGEST + + xor %%OFFSET, %%OFFSET + mov %%DATA_ADDR, [%%DATA + 8*(I*4 + J)] + +%assign K 0 +%rep 4 + cmp %%LEN_BUF, 128 + jb APPEND3(%%Eia3RoundsAVX512_dq_end,I,J) + + ;; read 16 bytes and reverse bits + vmovdqu %%XTMP1, [%%DATA_ADDR + %%OFFSET] +%if USE_GFNI_VAES_VPCLMUL == 1 + vgf2p8affineqb %%XDATA, %%XTMP1, [rel bit_reverse_table], 0x00 +%else + vpand %%XTMP2, %%XTMP1, %%REV_AND_TABLE + + vpandn %%XTMP3, %%REV_AND_TABLE, %%XTMP1 + vpsrld %%XTMP3, 4 + + vpshufb %%XDATA, %%REV_TABLE_H, %%XTMP2 ; bit reverse low nibbles (use high table) + vpshufb %%XTMP4, %%REV_TABLE_L, %%XTMP3 ; bit reverse high nibbles (use low table) + + vpor %%XDATA, %%XTMP4 +%endif + ; %%XDATA - bit reversed data bytes + + ; Read the next 2 blocks of 16 bytes of %%KS + vmovdqu %%KS_L, [%%KS + (16*I + J*512) + %%OFFSET*4] + vmovdqu %%KS_H, [%%KS + (16*I + J*512) + %%OFFSET*4 + (16*4)] + ; Digest 16 bytes of data with 24 bytes of KS, for 4 buffers + DIGEST_DATA %%XDATA, %%KS_L, %%KS_H, %%XTMP7, %%XTMP8, %%TEMP_DIGEST, %%SHUF_DATA_KMASK, \ + %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6, %%TAG_SIZE + add %%OFFSET, 16 + sub %%LEN_BUF, 128 +%assign K (K + 1) +%endrep +APPEND3(%%Eia3RoundsAVX512_dq_end,I,J): + + or %%LEN_BUF, %%LEN_BUF + jz APPEND3(%%Eia3RoundsAVX_end,I,J) + + ; Get number of bytes + mov %%N_BYTES, %%LEN_BUF + add %%N_BYTES, 7 + shr %%N_BYTES, 3 + + lea %%TMP1, [rel byte64_len_to_mask_table] + kmovq %%TMP_KMASK1, [%%TMP1 + %%N_BYTES*8] + + ;; read up to 16 bytes of data, zero bits not needed if partial byte and bit-reverse + vmovdqu8 %%XTMP1{%%TMP_KMASK1}{z}, [%%DATA_ADDR + %%OFFSET] + ; check if there is a partial byte (less than 8 bits in last byte) + mov %%TMP2, %%LEN_BUF + and %%TMP2, 0x7 + shl %%TMP2, 4 + lea %%TMP1, [rel bit_mask_table] + add %%TMP1, %%TMP2 + + ; Get mask to clear last bits + vmovdqa %%XTMP4, [%%TMP1] + + ; Shift left 16-N bytes to have the last byte always at the end of the XMM register + ; to apply mask, then restore by shifting right same amount of bytes + mov %%TMP1, 16 + sub %%TMP1, %%N_BYTES + XVPSLLB %%XTMP1, %%TMP1, %%XTMP5, %%TMP2 + vpandq %%XTMP1, %%XTMP4 + XVPSRLB %%XTMP1, %%TMP1, %%XTMP5, %%TMP2 + +%if USE_GFNI_VAES_VPCLMUL == 1 + vgf2p8affineqb %%XDATA, %%XTMP1, [rel bit_reverse_table], 0x00 +%else + ; Bit reverse input data + vpand %%XTMP2, %%XTMP1, %%REV_AND_TABLE + + vpandn %%XTMP3, %%REV_AND_TABLE, %%XTMP1 + vpsrld %%XTMP3, 4 + + vpshufb %%XDATA, %%REV_TABLE_H, %%XTMP2 ; bit reverse low nibbles (use high table) + vpshufb %%XTMP4, %%REV_TABLE_L, %%XTMP3 ; bit reverse high nibbles (use low table) + + vpor %%XDATA, %%XTMP4 +%endif + + ; Read the next 2 blocks of 16 bytes of KS + shl %%OFFSET, 2 + vmovdqu %%KS_L, [%%KS + (16*I + J*512) + %%OFFSET] + vmovdqu %%KS_H, [%%KS + (16*I + J*512) + %%OFFSET + 16*4] + shr %%OFFSET, 2 + + ; Digest 16 bytes of data with 24 bytes of KS, for 4 buffers + DIGEST_DATA %%XDATA, %%KS_L, %%KS_H, %%XTMP7, %%XTMP8, %%TEMP_DIGEST, %%SHUF_DATA_KMASK, \ + %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6, %%TAG_SIZE +APPEND3(%%Eia3RoundsAVX_end,I,J): + vinserti32x4 APPEND(%%DIGEST_, I), %%TEMP_DIGEST, J +%assign J (J + 1) +%endrep +%assign I (I + 1) +%endrep + + UPDATE_TAGS %%T, %%TAG_SIZE, order_0_1_2_3, %%TMP1, %%TMP_KMASK1, %%TMP_KMASK2, \ + 
%%DIGEST_0, %%DIGEST_1, %%DIGEST_2, %%DIGEST_3, \ + %%ZTMP1, %%ZTMP2, %%ZTMP3, %%ZTMP4 + + ; These last steps should be done only for the buffers that + ; have no more data to authenticate + xor %%IDX, %%IDX +%%start_loop: + ; Update data pointer + movzx DWORD(%%TMP1), word [rsp + %%IDX*2] + shr DWORD(%%TMP1), 3 ; length authenticated in bytes + add [%%DATA + %%IDX*8], %%TMP1 + + cmp word [%%LEN + 2*%%IDX], 0 + jnz %%skip_comput + + ; Load base address of keystream for lane %%IDX + ; Fist, find the offset for the 512-byte set (containing the 128-byte KS for 4 lanes) + mov %%TMP1, %%IDX + and %%TMP1, 0x3 + shl %%TMP1, 9 ; * 512 + + ; Then, find the offset within the 512-byte set, based on the lane, + ; and add to the previous offset + mov %%TMP2, %%IDX + shr %%TMP2, 2 + shl %%TMP2, 4 ; * 16 + add %%TMP1, %%TMP2 + ;; Load pointer to the base address of keystream for lane %%IDX + lea %%KS_ADDR, [%%KS + %%TMP1] + + ; Read keyStr[MIN_LEN / 32] (last dwords of KS, based on tag_size) + movzx %%LEN_BUF, word [rsp + 2*%%IDX] + mov %%TMP2, %%LEN_BUF + shr %%TMP2, 5 + mov %%TMP3, %%TMP2 + shr %%TMP2, 2 + shl %%TMP2, (4+2) + and %%TMP3, 0x3 + shl %%TMP3, 2 + add %%TMP2, %%TMP3 ;; Offset to last dwords of KS, from base address +%if %%TAG_SIZE == 4 + ; Read 4-byte digest + mov DWORD(%%DIGEST), [%%T + 4*%%IDX] + + READ_AND_ROTATE_KS_DWORD %%KS_ADDR, %%LEN_BUF, %%TMP2, %%TMP1 + ; XOR with current digest + xor DWORD(%%DIGEST), DWORD(%%TMP2) + +%if %%KEY_SIZE == 128 + ; Read keystr[L - 1] (last dword of keyStr) + add %%LEN_BUF, (31 + 64) + shr %%LEN_BUF, 5 ; L + dec %%LEN_BUF + mov %%TMP2, %%LEN_BUF + shr %%TMP2, 2 + shl %%TMP2, (4+2) + and %%LEN_BUF, 0x3 + shl %%LEN_BUF, 2 + add %%LEN_BUF, %%TMP2 + mov DWORD(%%TMP2), [%%KS_ADDR + %%LEN_BUF] + ; XOR with current digest + xor DWORD(%%DIGEST), DWORD(%%TMP2) +%endif + + ; byte swap and write digest out + bswap DWORD(%%DIGEST) + mov [%%T + 4*%%IDX], DWORD(%%DIGEST) +%elif %%TAG_SIZE == 8 + ; Read 8-byte digest + mov %%DIGEST, [%%T + 8*%%IDX] + + READ_AND_ROTATE_KS_QWORD %%KS_ADDR, %%LEN_BUF, %%TMP2, %%TMP1, %%TMP3 + + ; XOR with current digest + xor %%DIGEST, %%TMP2 + + ; byte swap and write digest out + bswap %%DIGEST + ror %%DIGEST, 32 + mov [%%T + 8*%%IDX], %%DIGEST +%else ; %%TAG_SIZE == 16 + ;; Update digest in two steps: + ;; - First, read the first 12 bytes of KS[MIN_LEN/32], + ;; rotate them and XOR the qword with first qword of digest + ;; - Last, skip 8 bytes of KS[MIN_LEN/32] and read another 12 bytes, + ;; rotate them and XOR the qword with second qword of digest + shl %%IDX, 4 + ; Read first 8 bytes of digest + mov %%DIGEST, [%%T + %%IDX] + + READ_AND_ROTATE_KS_QWORD %%KS_ADDR, %%LEN_BUF, %%TMP2, %%TMP1, %%TMP3 + + ; XOR with current first half of digest + xor %%DIGEST, %%TMP2 + + ; byte swap and write first half of digest out + bswap %%DIGEST + ror %%DIGEST, 32 + mov [%%T + %%IDX], %%DIGEST + + ; Read next 8 bytes after keyStr[MIN_LEN / 32] + mov %%TMP2, %%LEN_BUF + shr %%TMP2, 5 + add %%TMP2, 2 ; Add 2 dwords to offset + mov %%TMP3, %%TMP2 + shr %%TMP2, 2 + shl %%TMP2, (4+2) + and %%TMP3, 0x3 + shl %%TMP3, 2 + add %%TMP2, %%TMP3 ;; Offset to last dwords of KS, from base address + + ; Read second 8 bytes of digest + mov %%DIGEST, [%%T + %%IDX + 8] + + READ_AND_ROTATE_KS_QWORD %%KS_ADDR, %%LEN_BUF, %%TMP2, %%TMP1, %%TMP3 + + ; XOR with current second half of digest + xor %%DIGEST, %%TMP2 + + ; byte swap and write second half of digest out + bswap %%DIGEST + ror %%DIGEST, 32 + mov [%%T + %%IDX + 8], %%DIGEST + shr %%IDX, 4 +%endif + +%%skip_comput: 
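;; (For reference: with a 4-byte tag and a 128-bit key, the block above
;; follows the EIA3 finalization: T ^= z[LEN] (the 32-bit keystream word
;; starting at bit LEN, obtained via the rotate-by-LEN%32 read), then
;; MAC = T ^ z[32*(L-1)] with L = ceil(LEN/32) + 2, byte-swapped to big
;; endian on output. The 8/16-byte tag paths apply the analogous
;; per-32-bit-word update, without the extra last-word XOR.)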
+ inc %%IDX + cmp %%IDX, 16 + jne %%start_loop + + add rsp, 32 + + add DWORD(%%MIN_LEN), 31 + shr DWORD(%%MIN_LEN), 5 + shl DWORD(%%MIN_LEN), 2 ; Offset where to copy the last 4/8 bytes from + +%if %%KEY_SIZE == 128 +%define %%KS_WORDS_TO_COPY 2 +%else ;; %%KEY_SIZE == 256 +%if %%TAG_SIZE == 4 +%define %%KS_WORDS_TO_COPY 1 +%elif %%TAG_SIZE == 8 +%define %%KS_WORDS_TO_COPY 2 +%else ;; %%TAG_SIZE == 16 +%define %%KS_WORDS_TO_COPY 4 +%endif +%endif ;; %%KEY_SIZE + + mov DWORD(%%TMP1), DWORD(%%MIN_LEN) + shr DWORD(%%MIN_LEN), 4 + shl DWORD(%%MIN_LEN), (4+2) + and DWORD(%%TMP1), 0xf + add DWORD(%%MIN_LEN), DWORD(%%TMP1) +%if %%KS_WORDS_TO_COPY == 4 + ; Memcpy last 16 bytes of KS into start + or DWORD(%%TMP1), DWORD(%%TMP1) + jz %%_copy_16bytes + + cmp DWORD(%%TMP1), 8 + je %%_copy_8bytes_8bytes + ja %%_copy_4bytes_12bytes + + ; Fall-through if 16 bytes to copy are 12 contiguous bytes and 4 separated bytes +%%_copy_12bytes_4bytes: +%assign %%i 0 +%rep 4 +%assign %%j 0 +%rep 4 + mov %%TMP1, [%%KS + 512*%%i + 16*%%j + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j], %%TMP1 + mov DWORD(%%TMP1), [%%KS + 512*%%i + 16*%%j + %%MIN_LEN + 8] + mov [%%KS + 512*%%i + 16*%%j + 8], DWORD(%%TMP1) + mov DWORD(%%TMP1), [%%KS + 512*%%i + 16*%%j + (48+12) + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j + 12], DWORD(%%TMP1) +%assign %%j (%%j + 1) +%endrep +%assign %%i (%%i + 1) +%endrep + jmp %%_ks_copied + +%%_copy_8bytes_8bytes: +%assign %%i 0 +%rep 4 +%assign %%j 0 +%rep 4 + mov %%TMP1, [%%KS + 512*%%i + 16*%%j + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j], %%TMP1 + mov %%TMP1, [%%KS + 512*%%i + 16*%%j + (48+8) + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j + 8], %%TMP1 +%assign %%j (%%j + 1) +%endrep +%assign %%i (%%i + 1) +%endrep + jmp %%_ks_copied +%%_copy_4bytes_12bytes: +%assign %%i 0 +%rep 4 +%assign %%j 0 +%rep 4 + mov DWORD(%%TMP1), [%%KS + 512*%%i + 16*%%j + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j], DWORD(%%TMP1) + mov %%TMP1, [%%KS + 512*%%i + 16*%%j + (48+4) + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j + 4], %%TMP1 + mov DWORD(%%TMP1), [%%KS + 512*%%i + 16*%%j + (48+4) + %%MIN_LEN + 8] + mov [%%KS + 512*%%i + 16*%%j + 12], DWORD(%%TMP1) +%assign %%j (%%j + 1) +%endrep +%assign %%i (%%i + 1) +%endrep + jmp %%_ks_copied +%%_copy_16bytes: +%assign %%i 0 +%rep 4 +%assign %%j 0 +%rep 4 + vmovdqa64 %%XTMP1, [%%KS + 512*%%i + 16*%%j + %%MIN_LEN] + vmovdqa64 [%%KS + 512*%%i + 16*%%j], %%XTMP1 +%assign %%j (%%j + 1) +%endrep +%assign %%i (%%i + 1) +%endrep + +%elif %%KS_WORDS_TO_COPY == 2 + ; Memcpy last 8 bytes of KS into start + cmp DWORD(%%TMP1), 12 + je %%_copy_2dwords + +%assign %%i 0 +%rep 4 +%assign %%j 0 +%rep 4 + mov %%TMP1, [%%KS + 512*%%i + 16*%%j + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j], %%TMP1 +%assign %%j (%%j + 1) +%endrep +%assign %%i (%%i + 1) +%endrep + jmp %%_ks_copied + + ;; The 8 bytes of %%KS are separated +%%_copy_2dwords: +%assign %%i 0 +%rep 4 +%assign %%j 0 +%rep 4 + mov DWORD(%%TMP1), [%%KS + 512*%%i + 16*%%j + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j], DWORD(%%TMP1) + mov DWORD(%%TMP1), [%%KS + 512*%%i + 16*%%j + (48+4) + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j + 4], DWORD(%%TMP1) +%assign %%j (%%j + 1) +%endrep +%assign %%i (%%i + 1) +%endrep +%elif %%KS_WORDS_TO_COPY == 1 + ; Memcpy last 4 bytes of KS into start +%assign %%i 0 +%rep 4 +%assign %%j 0 +%rep 4 + mov DWORD(%%TMP1), [%%KS + 512*%%i + 16*%%j + %%MIN_LEN] + mov [%%KS + 512*%%i + 16*%%j], DWORD(%%TMP1) +%assign %%j (%%j + 1) +%endrep +%assign %%i (%%i + 1) +%endrep +%endif ; %%KS_WORDS_TO_COPY +%%_ks_copied: + vzeroupper 
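;; (Keystream addressing used throughout this macro, for reference: the
;; 32-bit KS word w of lane l is read from byte offset
;;     512*(l & 3) + 16*(l >> 2) + 64*(w >> 2) + 4*(w & 3)
;; i.e. 16-byte KS chunks of four lanes (0,4,8,12 / 1,5,9,13 / ...) are
;; interleaved within 64-byte rows. The copy above moves the last, still
;; needed KS words of every lane down to word offset 0, so the next call
;; can keep reading from the base of each lane's keystream.)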
+%endmacro ; REMAINDER_16
+
+;;
+;; extern void asm_Eia3RemainderAVX512_16(uint32_t *T, const void **ks,
+;;                                        const void **data, uint16_t *len,
+;;                                        const uint64_t n_bits)
+;;
+;; @param [in] T : Array of digests for all 16 buffers
+;; @param [in] KS : Array of pointers to key stream for all 16 buffers
+;; @param [in] DATA : Array of pointers to data for all 16 buffers
+;; @param [in] LEN : Array of lengths (in bits) for all 16 buffers
+;; @param [in] N_BITS : Number of common data bits to process
+;;
+align 64
+MKGLOBAL(ZUC128_REMAINDER_16,function,internal)
+ZUC128_REMAINDER_16:
+
+%define T arg1
+%define KS arg2
+%define DATA arg3
+%define LEN arg4
+
+%define N_BITS r10
+
+        endbranch64
+
+        mov     N_BITS, arg5
+
+        FUNC_SAVE
+
+        REMAINDER_16 T, KS, DATA, LEN, N_BITS, rax, rbx, r11, r12, r13, r14, r15, 128, 4
+
+        FUNC_RESTORE
+
+        ret
+;;
+;; extern void asm_Eia3_256_RemainderAVX512_16(void *T, const void **ks,
+;;                                             const void **data, uint16_t *len,
+;;                                             const uint64_t n_bits,
+;;                                             const uint64_t tag_size)
+;;
+;; @param [in] T : Array of digests for all 16 buffers
+;; @param [in] KS : Array of pointers to key stream for all 16 buffers
+;; @param [in] DATA : Array of pointers to data for all 16 buffers
+;; @param [in] LEN : Array of lengths (in bits) for all 16 buffers
+;; @param [in] N_BITS : Number of data bits to process
+;; @param [in] TAG_SIZE : Tag size (4, 8 or 16 bytes)
+;;
+align 64
+MKGLOBAL(ZUC256_REMAINDER_16,function,internal)
+ZUC256_REMAINDER_16:
+
+%define T arg1
+%define KS arg2
+%define DATA arg3
+%define LEN arg4
+
+%define N_BITS r10
+
+%define TAG_SIZE arg6
+
+        endbranch64
+
+        mov     N_BITS, arg5
+
+        cmp     TAG_SIZE, 8
+        je      remainder_8B
+        jb      remainder_4B
+
+        ; Fall-through for 16-byte tag
+remainder_16B:
+        FUNC_SAVE
+
+        REMAINDER_16 T, KS, DATA, LEN, N_BITS, rax, rbx, r11, r12, r13, r14, r15, 256, 16
+
+        FUNC_RESTORE
+
+        ret
+remainder_8B:
+        FUNC_SAVE
+
+        REMAINDER_16 T, KS, DATA, LEN, N_BITS, rax, rbx, r11, r12, r13, r14, r15, 256, 8
+
+        FUNC_RESTORE
+
+        ret
+remainder_4B:
+        FUNC_SAVE
+
+        REMAINDER_16 T, KS, DATA, LEN, N_BITS, rax, rbx, r11, r12, r13, r14, r15, 256, 4
+
+        FUNC_RESTORE
+
+        ret
+
+; Following functions only need AVX512 instructions (no VAES, GFNI, etc.)
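Per buffer, the finalization done by REMAINDER_16 (and by the scalar path below) corresponds to the usual 128-EIA3 ending step. A minimal C sketch of the 4-byte-tag case, illustrative only (helper names are not part of the library, and keystream byte ordering is glossed over):

    #include <stdint.h>

    /* 32 keystream bits starting at bit offset 'bit' (MSB-first words) */
    static uint32_t ks_word_at_bit(const uint32_t *ks, uint64_t bit)
    {
            const uint64_t i = bit / 32, j = bit % 32;

            if (j == 0)
                    return ks[i];
            return (ks[i] << j) | (ks[i + 1] >> (32 - j));
    }

    /* 't' is the CLMUL-accumulated digest, 'n_bits' the message length */
    static uint32_t eia3_128_finalize(uint32_t t, const uint32_t *ks,
                                      uint64_t n_bits)
    {
            const uint64_t L = (n_bits + 31) / 32 + 2; /* keystream words */

            t ^= ks_word_at_bit(ks, n_bits); /* keyStr[n_bits/32] rotated by n_bits%32 */
            t ^= ks[L - 1];                  /* last keystream word (ZUC-128 only) */
            return t;                        /* written out byte-swapped */
    }

For ZUC-256 tags only the rotated keystream word is XORed in, matching the %if KEY_SIZE == 128 guard in the macro above.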
+%if USE_GFNI_VAES_VPCLMUL == 0 +;; +;; extern void asm_Eia3RemainderAVX512(uint32_t *T, const void *ks, +;; const void *data, uint64_t n_bits) +;; +;; Returns authentication update value to be XOR'ed with current authentication tag +;; +;; @param [in] T (digest pointer) +;; @param [in] KS (key stream pointer) +;; @param [in] DATA (data pointer) +;; @param [in] N_BITS (number data bits to process) +;; +align 64 +MKGLOBAL(asm_Eia3RemainderAVX512,function,internal) +asm_Eia3RemainderAVX512: +%ifdef LINUX + %define T rdi + %define KS rsi + %define DATA rdx + %define N_BITS rcx +%else + %define T rcx + %define KS rdx + %define DATA r8 + %define N_BITS r9 +%endif + +%define N_BYTES rbx +%define OFFSET r15 + + endbranch64 + + FUNC_SAVE + + vmovdqa xmm5, [rel bit_reverse_table_l] + vmovdqa xmm6, [rel bit_reverse_table_h] + vmovdqa xmm7, [rel bit_reverse_and_table] + vpxor xmm9, xmm9 + mov r12d, 0x55555555 + kmovd k2, r12d + + xor OFFSET, OFFSET +%assign I 0 +%rep 3 + cmp N_BITS, 128 + jb Eia3RoundsAVX512_dq_end + + ;; read 16 bytes and reverse bits + vmovdqu xmm0, [DATA + OFFSET] + vpand xmm1, xmm0, xmm7 + + vpandn xmm2, xmm7, xmm0 + vpsrld xmm2, 4 + + vpshufb xmm8, xmm6, xmm1 ; bit reverse low nibbles (use high table) + vpshufb xmm4, xmm5, xmm2 ; bit reverse high nibbles (use low table) + + vpor xmm8, xmm4 + ; xmm8 - bit reversed data bytes + + ;; ZUC authentication part + ;; - 4x32 data bits + ;; - set up KS +%if I != 0 + vmovdqa xmm11, xmm12 + vmovdqu xmm12, [KS + OFFSET + (4*4)] +%else + vmovdqu xmm11, [KS + (0*4)] + vmovdqu xmm12, [KS + (4*4)] +%endif + vpalignr xmm13, xmm12, xmm11, 8 + vpshufd xmm2, xmm11, 0x61 + vpshufd xmm3, xmm13, 0x61 + + ;; - set up DATA + vpshufd xmm0{k2}{z}, xmm8, 0x10 + vpshufd xmm1{k2}{z}, xmm8, 0x32 + + ;; - clmul + ;; - xor the results from 4 32-bit words together + vpclmulqdq xmm13, xmm0, xmm2, 0x00 + vpclmulqdq xmm14, xmm0, xmm2, 0x11 + vpclmulqdq xmm15, xmm1, xmm3, 0x00 + vpclmulqdq xmm8, xmm1, xmm3, 0x11 + + vpternlogq xmm13, xmm14, xmm8, 0x96 + vpternlogq xmm9, xmm13, xmm15, 0x96 + + add OFFSET, 16 + sub N_BITS, 128 +%assign I (I + 1) +%endrep +Eia3RoundsAVX512_dq_end: + + or N_BITS, N_BITS + jz Eia3RoundsAVX_end + + ; Get number of bytes + mov N_BYTES, N_BITS + add N_BYTES, 7 + shr N_BYTES, 3 + + lea r10, [rel byte64_len_to_mask_table] + kmovq k1, [r10 + N_BYTES*8] + + ;; Set up KS + vmovdqu xmm1, [KS + OFFSET] + vmovdqu xmm2, [KS + OFFSET + 16] + vpalignr xmm13, xmm2, xmm1, 8 + vpshufd xmm11, xmm1, 0x61 + vpshufd xmm12, xmm13, 0x61 + + ;; read up to 16 bytes of data, zero bits not needed if partial byte and bit-reverse + vmovdqu8 xmm0{k1}{z}, [DATA + OFFSET] + ; check if there is a partial byte (less than 8 bits in last byte) + mov rax, N_BITS + and rax, 0x7 + shl rax, 4 + lea r10, [rel bit_mask_table] + add r10, rax + + ; Get mask to clear last bits + vmovdqa xmm3, [r10] + + ; Shift left 16-N bytes to have the last byte always at the end of the XMM register + ; to apply mask, then restore by shifting right same amount of bytes + mov r10, 16 + sub r10, N_BYTES + XVPSLLB xmm0, r10, xmm4, r11 + vpandq xmm0, xmm3 + XVPSRLB xmm0, r10, xmm4, r11 + + ; Bit reverse input data + vpand xmm1, xmm0, xmm7 + + vpandn xmm2, xmm7, xmm0 + vpsrld xmm2, 4 + + vpshufb xmm8, xmm6, xmm1 ; bit reverse low nibbles (use high table) + vpshufb xmm3, xmm5, xmm2 ; bit reverse high nibbles (use low table) + + vpor xmm8, xmm3 + + ;; Set up DATA + vpshufd xmm0{k2}{z}, xmm8, 0x10 ; D 0-3 || Os || D 4-7 || 0s + vpshufd xmm1{k2}{z}, xmm8, 0x32 ; D 8-11 || 0s || D 12-15 || 0s + + ;; - clmul 
+ ;; - xor the results from 4 32-bit words together + vpclmulqdq xmm13, xmm0, xmm11, 0x00 + vpclmulqdq xmm14, xmm0, xmm11, 0x11 + vpclmulqdq xmm15, xmm1, xmm12, 0x00 + vpclmulqdq xmm8, xmm1, xmm12, 0x11 + vpternlogq xmm9, xmm14, xmm13, 0x96 + vpternlogq xmm9, xmm15, xmm8, 0x96 + +Eia3RoundsAVX_end: + mov r11d, [T] + vmovq rax, xmm9 + shr rax, 32 + xor eax, r11d + + ; Read keyStr[N_BITS / 32] + lea r10, [N_BITS + OFFSET*8] ; Restore original N_BITS + shr r10, 5 + mov r11, [KS + r10*4] + + ; Rotate left by N_BITS % 32 + mov r12, rcx ; Save RCX + mov rcx, N_BITS + and rcx, 0x1F + rol r11, cl + mov rcx, r12 ; Restore RCX + + ; XOR with previous digest calculation + xor eax, r11d + + ; Read keyStr[L - 1] (last double word of keyStr) + lea r10, [N_BITS + OFFSET*8] ; Restore original N_BITS + add r10, (31 + 64) + shr r10, 5 ; L + dec r10 + mov r11d, [KS + r10 * 4] + + ; XOR with previous digest calculation and bswap it + xor eax, r11d + bswap eax + mov [T], eax + + FUNC_RESTORE + + ret + +;; +;;extern void asm_Eia3Round64BAVX512(uint32_t *T, const void *KS, const void *DATA) +;; +;; Updates authentication tag T based on keystream KS and DATA. +;; - it processes 64 bytes of DATA +;; - reads data in 16 byte chunks and bit reverses them +;; - reads and re-arranges KS +;; - employs clmul for the XOR & ROL part +;; +;; @param [in] T (digest pointer) +;; @param [in] KS (key stream pointer) +;; @param [in] DATA (data pointer) +;; +align 64 +MKGLOBAL(asm_Eia3Round64BAVX512,function,internal) +asm_Eia3Round64BAVX512: +%ifdef LINUX + %define T rdi + %define KS rsi + %define DATA rdx +%else + %define T rcx + %define KS rdx + %define DATA r8 +%endif + + endbranch64 + + FUNC_SAVE + + vmovdqa xmm5, [rel bit_reverse_table_l] + vmovdqa xmm6, [rel bit_reverse_table_h] + vmovdqa xmm7, [rel bit_reverse_and_table] + vpxor xmm9, xmm9 + + mov r12d, 0x55555555 + kmovd k1, r12d +%assign I 0 +%rep 4 + ;; read 16 bytes and reverse bits + vmovdqu xmm0, [DATA + 16*I] + vpand xmm1, xmm0, xmm7 + + vpandn xmm2, xmm7, xmm0 + vpsrld xmm2, 4 + + vpshufb xmm8, xmm6, xmm1 ; bit reverse low nibbles (use high table) + vpshufb xmm4, xmm5, xmm2 ; bit reverse high nibbles (use low table) + + vpor xmm8, xmm4 + ; xmm8 - bit reversed data bytes + + ;; ZUC authentication part + ;; - 4x32 data bits + ;; - set up KS +%if I != 0 + vmovdqa xmm11, xmm12 + vmovdqu xmm12, [KS + (I*16) + (4*4)] +%else + vmovdqu xmm11, [KS + (I*16) + (0*4)] + vmovdqu xmm12, [KS + (I*16) + (4*4)] +%endif + vpalignr xmm13, xmm12, xmm11, 8 + vpshufd xmm2, xmm11, 0x61 + vpshufd xmm3, xmm13, 0x61 + + ;; - set up DATA + vpshufd xmm0{k1}{z}, xmm8, 0x10 + vpshufd xmm1{k1}{z}, xmm8, 0x32 + + ;; - clmul + ;; - xor the results from 4 32-bit words together + vpclmulqdq xmm13, xmm0, xmm2, 0x00 + vpclmulqdq xmm14, xmm0, xmm2, 0x11 + vpclmulqdq xmm15, xmm1, xmm3, 0x00 + vpclmulqdq xmm8, xmm1, xmm3, 0x11 + + vpternlogq xmm13, xmm14, xmm8, 0x96 + vpternlogq xmm9, xmm13, xmm15, 0x96 + +%assign I (I + 1) +%endrep + + ;; - update T + vmovq rax, xmm9 + shr rax, 32 + mov r10d, [T] + xor eax, r10d + mov [T], eax + + FUNC_RESTORE + + ret + +%endif ; USE_GFNI_VAES_VPCLMUL == 0 + +;---------------------------------------------------------------------------------------- +;---------------------------------------------------------------------------------------- + +mksection stack-noexec diff --git a/lib/avx512_t2/README b/lib/avx512_t2/README new file mode 100644 index 0000000000000000000000000000000000000000..48aac0db7c7db6bef66d61c3c475807f03b355b3 --- /dev/null +++ b/lib/avx512_t2/README @@ 
-0,0 +1,3 @@ +AVX512 TYPE2: +- AVX512 TYPE1: AVX512F, AVX512VL, AVX512DQ, AVX512BW, AVX512CD, AESNI, PCLMULQDQ +- AVX512IFMA, AVX512VBMI, AVX512VBMI2, AVX512BITALG, AVX512VPOPCNTDQ, VAES, VPCLMULQDQ, GFNI, SHANI diff --git a/lib/avx512/aes128_gcm_by48_api_vaes_avx512.asm b/lib/avx512_t2/aes128_gcm_by48_api_vaes_avx512.asm similarity index 97% rename from lib/avx512/aes128_gcm_by48_api_vaes_avx512.asm rename to lib/avx512_t2/aes128_gcm_by48_api_vaes_avx512.asm index dbf1da3c369e3847933cee27ba8c2387b4962dfb..90dbca2a355592b14b619f274983b29384c1d771 100644 --- a/lib/avx512/aes128_gcm_by48_api_vaes_avx512.asm +++ b/lib/avx512_t2/aes128_gcm_by48_api_vaes_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM128_MODE 1 -%include "avx512/gcm_api_vaes_avx512.inc" +%include "avx512_t2/gcm_api_vaes_avx512.inc" diff --git a/lib/avx512/aes128_gcm_by48_sgl_api_vaes_avx512.asm b/lib/avx512_t2/aes128_gcm_by48_sgl_api_vaes_avx512.asm similarity index 97% rename from lib/avx512/aes128_gcm_by48_sgl_api_vaes_avx512.asm rename to lib/avx512_t2/aes128_gcm_by48_sgl_api_vaes_avx512.asm index 6c29161388bc0f5a944a8e60544a357a751cb207..cd55ad0cea470a129c5910a11f87b68d0ae723c0 100644 --- a/lib/avx512/aes128_gcm_by48_sgl_api_vaes_avx512.asm +++ b/lib/avx512_t2/aes128_gcm_by48_sgl_api_vaes_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM128_MODE 1 -%include "avx512/gcm_sgl_api_vaes_avx512.inc" +%include "avx512_t2/gcm_sgl_api_vaes_avx512.inc" diff --git a/lib/avx512/aes128_gmac_by48_api_vaes_avx512.asm b/lib/avx512_t2/aes128_gmac_by48_api_vaes_avx512.asm similarity index 97% rename from lib/avx512/aes128_gmac_by48_api_vaes_avx512.asm rename to lib/avx512_t2/aes128_gmac_by48_api_vaes_avx512.asm index 6e050422cd798c42c4625995646fffbc1d89b387..7915728b86975c7dc4b3238c79ac11100edafd69 100644 --- a/lib/avx512/aes128_gmac_by48_api_vaes_avx512.asm +++ b/lib/avx512_t2/aes128_gmac_by48_api_vaes_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM128_MODE 1 -%include "avx512/gcm_gmac_api_vaes_avx512.inc" +%include "avx512_t2/gcm_gmac_api_vaes_avx512.inc" diff --git a/lib/avx512/aes192_gcm_by48_api_vaes_avx512.asm b/lib/avx512_t2/aes192_gcm_by48_api_vaes_avx512.asm similarity index 97% rename from lib/avx512/aes192_gcm_by48_api_vaes_avx512.asm rename to lib/avx512_t2/aes192_gcm_by48_api_vaes_avx512.asm index 8e7f4c6f1c37cc9ea9ff11298646255267c822d5..05693ed933ba933fb41b5db4f0af00141451838d 100644 --- a/lib/avx512/aes192_gcm_by48_api_vaes_avx512.asm +++ b/lib/avx512_t2/aes192_gcm_by48_api_vaes_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM192_MODE 1 -%include "avx512/gcm_api_vaes_avx512.inc" +%include "avx512_t2/gcm_api_vaes_avx512.inc" diff --git a/lib/avx512/aes192_gcm_by48_sgl_api_vaes_avx512.asm b/lib/avx512_t2/aes192_gcm_by48_sgl_api_vaes_avx512.asm similarity index 97% rename from lib/avx512/aes192_gcm_by48_sgl_api_vaes_avx512.asm rename to lib/avx512_t2/aes192_gcm_by48_sgl_api_vaes_avx512.asm index cff7c9a7b2ba3a1cab1bcd426a6d9bbbd8200554..f48c2aaa5986f2586842e417d2afdbaa141f1a18 100644 --- a/lib/avx512/aes192_gcm_by48_sgl_api_vaes_avx512.asm +++ b/lib/avx512_t2/aes192_gcm_by48_sgl_api_vaes_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM192_MODE 1 -%include "avx512/gcm_sgl_api_vaes_avx512.inc" +%include 
"avx512_t2/gcm_sgl_api_vaes_avx512.inc" diff --git a/lib/avx512/aes192_gmac_by48_api_vaes_avx512.asm b/lib/avx512_t2/aes192_gmac_by48_api_vaes_avx512.asm similarity index 97% rename from lib/avx512/aes192_gmac_by48_api_vaes_avx512.asm rename to lib/avx512_t2/aes192_gmac_by48_api_vaes_avx512.asm index 8b5bcea6ccedfa93ba702f6419017f0e25d9ecee..71d785fd0a9bb5a2804d250bf358b977a90b5cd5 100644 --- a/lib/avx512/aes192_gmac_by48_api_vaes_avx512.asm +++ b/lib/avx512_t2/aes192_gmac_by48_api_vaes_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM192_MODE 1 -%include "avx512/gcm_gmac_api_vaes_avx512.inc" +%include "avx512_t2/gcm_gmac_api_vaes_avx512.inc" diff --git a/lib/avx512/aes256_gcm_by48_api_vaes_avx512.asm b/lib/avx512_t2/aes256_gcm_by48_api_vaes_avx512.asm similarity index 97% rename from lib/avx512/aes256_gcm_by48_api_vaes_avx512.asm rename to lib/avx512_t2/aes256_gcm_by48_api_vaes_avx512.asm index 5661f5be9f52969cecf221560329609bf28a04d9..b302958cba4b6f490b651f9bdc4dce9efa256ec3 100644 --- a/lib/avx512/aes256_gcm_by48_api_vaes_avx512.asm +++ b/lib/avx512_t2/aes256_gcm_by48_api_vaes_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM256_MODE 1 -%include "avx512/gcm_api_vaes_avx512.inc" +%include "avx512_t2/gcm_api_vaes_avx512.inc" diff --git a/lib/avx512/aes256_gcm_by48_sgl_api_vaes_avx512.asm b/lib/avx512_t2/aes256_gcm_by48_sgl_api_vaes_avx512.asm similarity index 97% rename from lib/avx512/aes256_gcm_by48_sgl_api_vaes_avx512.asm rename to lib/avx512_t2/aes256_gcm_by48_sgl_api_vaes_avx512.asm index 1f49cfa37c240d2e530704e8e5e7c276af450818..bf705eb8f8d8f79d4cb1ebde31c5a34dc0bcb65d 100644 --- a/lib/avx512/aes256_gcm_by48_sgl_api_vaes_avx512.asm +++ b/lib/avx512_t2/aes256_gcm_by48_sgl_api_vaes_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM256_MODE 1 -%include "avx512/gcm_sgl_api_vaes_avx512.inc" +%include "avx512_t2/gcm_sgl_api_vaes_avx512.inc" diff --git a/lib/avx512/aes256_gmac_by48_api_vaes_avx512.asm b/lib/avx512_t2/aes256_gmac_by48_api_vaes_avx512.asm similarity index 97% rename from lib/avx512/aes256_gmac_by48_api_vaes_avx512.asm rename to lib/avx512_t2/aes256_gmac_by48_api_vaes_avx512.asm index 43b7d8306eae288c40cbeae810ddc1b39a3170f1..bf648ffb2dd19138798b30bf47b16fe1516822c8 100644 --- a/lib/avx512/aes256_gmac_by48_api_vaes_avx512.asm +++ b/lib/avx512_t2/aes256_gmac_by48_api_vaes_avx512.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM256_MODE 1 -%include "avx512/gcm_gmac_api_vaes_avx512.inc" +%include "avx512_t2/gcm_gmac_api_vaes_avx512.inc" diff --git a/lib/avx512/aes_cbc_dec_by16_vaes_avx512.asm b/lib/avx512_t2/aes_cbc_dec_by16_vaes_avx512.asm similarity index 99% rename from lib/avx512/aes_cbc_dec_by16_vaes_avx512.asm rename to lib/avx512_t2/aes_cbc_dec_by16_vaes_avx512.asm index b81784f783ded03c9b0c8456aaa5557efe59c5c9..39ee837b2b512600e43c162f6719c513984a6f4b 100644 --- a/lib/avx512/aes_cbc_dec_by16_vaes_avx512.asm +++ b/lib/avx512_t2/aes_cbc_dec_by16_vaes_avx512.asm @@ -29,7 +29,7 @@ %include "include/reg_sizes.asm" %include "include/aes_common.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" + %define zIV zmm0 %define zBLK_0_3 zmm1 %define zBLK_4_7 zmm2 @@ -471,7 +471,6 @@ mksection .text ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; 
MKGLOBAL(aes_cbc_dec_128_vaes_avx512,function,internal) aes_cbc_dec_128_vaes_avx512: - endbranch64 %ifndef LINUX mov num_bytes, [rsp + 8*5] %endif @@ -490,7 +489,6 @@ aes_cbc_dec_128_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cbc_dec_192_vaes_avx512,function,internal) aes_cbc_dec_192_vaes_avx512: - endbranch64 %ifndef LINUX mov num_bytes, [rsp + 8*5] %endif @@ -509,7 +507,6 @@ aes_cbc_dec_192_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cbc_dec_256_vaes_avx512,function,internal) aes_cbc_dec_256_vaes_avx512: - endbranch64 %ifndef LINUX mov num_bytes, [rsp + 8*5] %endif diff --git a/lib/avx512/aes_cbc_enc_vaes_avx512.asm b/lib/avx512_t2/aes_cbc_enc_vaes_avx512.asm similarity index 64% rename from lib/avx512/aes_cbc_enc_vaes_avx512.asm rename to lib/avx512_t2/aes_cbc_enc_vaes_avx512.asm index baad52d8fbeb21a232238593bb9248823ceed73a..c916a83dd21597da5c6d6bbf1dbd3e54616c905f 100644 --- a/lib/avx512/aes_cbc_enc_vaes_avx512.asm +++ b/lib/avx512_t2/aes_cbc_enc_vaes_avx512.asm @@ -31,33 +31,39 @@ %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" + struc STACK _gpr_save: resq 4 +_lane_masks: resw 16 endstruc %define GPR_SAVE_AREA rsp + _gpr_save +%define LANE_MASKS rsp + _lane_masks %ifdef LINUX %define arg1 rdi %define arg2 rsi -%define arg3 rcx -%define arg4 rdx +%define arg3 rdx +%define arg4 rcx +%define IA0 arg3 +%define IN arg4 %else %define arg1 rcx %define arg2 rdx -%define arg3 rdi -%define arg4 rsi +%define arg3 r8 +%define arg4 r9 +%define IA0 rsi +%define IN rdi %endif +%define IA1 rbx +%define IA2 rax +%define OUT rbp + +%define VALID_LANES k7 %define ARG arg1 %define LEN arg2 -%define IA0 rax -%define IA1 rbx -%define IA2 arg3 -%define IN arg4 -%define OUT rbp %define IN_L0 r8 %define IN_L1 r9 %define IN_L2 r10 @@ -108,6 +114,9 @@ endstruc %define MAC_TYPE_CBC 1 %define MAC_TYPE_XCBC 2 +%define SUBMIT 0 +%define FLUSH 1 + ;; Save registers states %macro FUNC_SAVE 0 sub rsp, STACK_size @@ -129,7 +138,6 @@ endstruc %endif mov r15, [GPR_SAVE_AREA + 8*3] add rsp, STACK_size - vzeroupper %endmacro ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -165,6 +173,104 @@ endstruc vshufi64x2 %%IN_OUT_3, %%ZTMP_2, %%ZTMP_3, 0xee %endmacro +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; LOAD_STORE_VAR_x4 - variable load/store of 0-4 blocks (16 bytes) for 4 lanes +; Number of blocks determined by masks in LANE_MASKS table on the stack +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro LOAD_STORE_VAR_x4 15 +%define %%LANE_A %1 ; [in] lane index to load/store (numerical) +%define %%LANE_B %2 ; [in] lane index to load/store (numerical) +%define %%LANE_C %3 ; [in] lane index to load/store (numerical) +%define %%LANE_D %4 ; [in] lane index to load/store (numerical) +%define %%DATA_PTR %5 ; [in] GP reg with ptr to lane input table +%define %%OFFSET %6 ; [in] GP reg input/output buffer offset +%define %%ZDATA0 %7 ; [in/out] ZMM reg to load/store data +%define %%ZDATA1 %8 ; [in/out] ZMM reg to load/store data +%define %%ZDATA2 %9 ; [in/out] ZMM reg to load/store data +%define %%ZDATA3 %10 ; [in/out] ZMM reg to load/store data +%define %%GP0 %11 ; [clobbered] tmp GP reg +%define %%GP1 %12 ; [clobbered] tmp GP reg +%define %%LOAD_STORE %13 ; [in] string value to select 
LOAD or STORE +%define %%M1 %14 ; [clobbered] mask reg +%define %%M2 %15 ; [clobbered] mask reg + + mov %%GP0, [%%DATA_PTR + 8*(%%LANE_A)] + mov %%GP1, [%%DATA_PTR + 8*(%%LANE_B)] + + kmovw %%M1, [LANE_MASKS + 2*(%%LANE_A)] + kmovw %%M2, [LANE_MASKS + 2*(%%LANE_B)] + +%ifidn %%LOAD_STORE, LOAD + vmovdqu32 %%ZDATA0{%%M1}{z}, [%%GP0 + %%OFFSET] + vmovdqu32 %%ZDATA1{%%M2}{z}, [%%GP1 + %%OFFSET] + + mov %%GP0, [%%DATA_PTR + 8*(%%LANE_C)] + mov %%GP1, [%%DATA_PTR + 8*(%%LANE_D)] + + kmovw %%M1, [LANE_MASKS + 2*(%%LANE_C)] + kmovw %%M2, [LANE_MASKS + 2*(%%LANE_D)] + + vmovdqu32 %%ZDATA2{%%M1}{z}, [%%GP0 + %%OFFSET] + vmovdqu32 %%ZDATA3{%%M2}{z}, [%%GP1 + %%OFFSET] +%else ; STORE + vmovdqu32 [%%GP0 + %%OFFSET]{%%M1}, %%ZDATA0 + vmovdqu32 [%%GP1 + %%OFFSET]{%%M2}, %%ZDATA1 + + mov %%GP0, [%%DATA_PTR + 8*(%%LANE_C)] + mov %%GP1, [%%DATA_PTR + 8*(%%LANE_D)] + + kmovw %%M1, [LANE_MASKS + 2*(%%LANE_C)] + kmovw %%M2, [LANE_MASKS + 2*(%%LANE_D)] + + vmovdqu32 [%%GP0 + %%OFFSET]{%%M1}, %%ZDATA2 + vmovdqu32 [%%GP1 + %%OFFSET]{%%M2}, %%ZDATA3 +%endif +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +; PRELOADED_LOAD_STORE_VAR_x4 +; - Variable size load/store of 0-4 blocks for 4 lanes +; - Input pointers are already loaded into GP registers +; - Number of blocks determined by masks in LANE_MASKS table on the stack +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro PRELOADED_LOAD_STORE_VAR_x4 18 +%define %%IN0 %1 ; [in] GP reg with lane input pointer +%define %%IN1 %2 ; [in] GP reg with lane input pointer +%define %%IN2 %3 ; [in] GP reg with lane input pointer +%define %%IN3 %4 ; [in] GP reg with lane input pointer +%define %%OFFSET %5 ; [in] GP reg input/output buffer offset +%define %%ZDATA0 %6 ; [in/out] ZMM reg to load/store data +%define %%ZDATA1 %7 ; [in/out] ZMM reg to load/store data +%define %%ZDATA2 %8 ; [in/out] ZMM reg to load/store data +%define %%ZDATA3 %9 ; [in/out] ZMM reg to load/store data +%define %%LOAD_STORE %10 ; [in] string value to select LOAD or STORE +%define %%LANE_A %11 ; [in] lane ID +%define %%LANE_B %12 ; [in] lane ID +%define %%LANE_C %13 ; [in] lane ID +%define %%LANE_D %14 ; [in] lane ID +%define %%M1 %15 ; [clobbered] mask reg +%define %%M2 %16 ; [clobbered] mask reg +%define %%M3 %17 ; [clobbered] mask reg +%define %%M4 %18 ; [clobbered] mask reg + + kmovw %%M1, [LANE_MASKS + 2*(%%LANE_A)] + kmovw %%M2, [LANE_MASKS + 2*(%%LANE_B)] + kmovw %%M3, [LANE_MASKS + 2*(%%LANE_C)] + kmovw %%M4, [LANE_MASKS + 2*(%%LANE_D)] + +%ifidn %%LOAD_STORE, LOAD + vmovdqu32 %%ZDATA0{%%M1}{z}, [%%IN0 + %%OFFSET] + vmovdqu32 %%ZDATA1{%%M2}{z}, [%%IN1 + %%OFFSET] + vmovdqu32 %%ZDATA2{%%M3}{z}, [%%IN2 + %%OFFSET] + vmovdqu32 %%ZDATA3{%%M4}{z}, [%%IN3 + %%OFFSET] +%else ; STORE + vmovdqu32 [%%IN0 + %%OFFSET]{%%M1}, %%ZDATA0 + vmovdqu32 [%%IN1 + %%OFFSET]{%%M2}, %%ZDATA1 + vmovdqu32 [%%IN2 + %%OFFSET]{%%M3}, %%ZDATA2 + vmovdqu32 [%%IN3 + %%OFFSET]{%%M4}, %%ZDATA3 +%endif +%endmacro + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; LOAD_STORE - loads/stores 1-4 blocks (16 bytes) for 4 lanes into ZMM registers ; - Loads 4 blocks by default @@ -192,43 +298,43 @@ endstruc %if %%NUM_ARGS <= 13 ;; %%MASK_REG not set, assume 4 block load/store %ifidn %%LOAD_STORE, LOAD - vmovdqu8 %%ZDATA0, [%%GP0 + %%OFFSET] - vmovdqu8 %%ZDATA1, [%%GP1 + %%OFFSET] + vmovdqu32 %%ZDATA0, [%%GP0 + %%OFFSET] + vmovdqu32 %%ZDATA1, [%%GP1 + %%OFFSET] mov %%GP0, [%%DATA_PTR + 8*(%%LANE_C)] mov %%GP1, [%%DATA_PTR 
+ 8*(%%LANE_D)] - vmovdqu8 %%ZDATA2, [%%GP0 + %%OFFSET] - vmovdqu8 %%ZDATA3, [%%GP1 + %%OFFSET] + vmovdqu32 %%ZDATA2, [%%GP0 + %%OFFSET] + vmovdqu32 %%ZDATA3, [%%GP1 + %%OFFSET] %else ; STORE - vmovdqu8 [%%GP0 + %%OFFSET], %%ZDATA0 - vmovdqu8 [%%GP1 + %%OFFSET], %%ZDATA1 + vmovdqu32 [%%GP0 + %%OFFSET], %%ZDATA0 + vmovdqu32 [%%GP1 + %%OFFSET], %%ZDATA1 mov %%GP0, [%%DATA_PTR + 8*(%%LANE_C)] mov %%GP1, [%%DATA_PTR + 8*(%%LANE_D)] - vmovdqu8 [%%GP0 + %%OFFSET], %%ZDATA2 - vmovdqu8 [%%GP1 + %%OFFSET], %%ZDATA3 + vmovdqu32 [%%GP0 + %%OFFSET], %%ZDATA2 + vmovdqu32 [%%GP1 + %%OFFSET], %%ZDATA3 %endif %else ;; %%MASK_REG argument passed - 1, 2, or 3 block load/store %ifidn %%LOAD_STORE, LOAD - vmovdqu8 %%ZDATA0{%%MASK_REG}{z}, [%%GP0 + %%OFFSET] - vmovdqu8 %%ZDATA1{%%MASK_REG}{z}, [%%GP1 + %%OFFSET] + vmovdqu32 %%ZDATA0{%%MASK_REG}{z}, [%%GP0 + %%OFFSET] + vmovdqu32 %%ZDATA1{%%MASK_REG}{z}, [%%GP1 + %%OFFSET] mov %%GP0, [%%DATA_PTR + 8*(%%LANE_C)] mov %%GP1, [%%DATA_PTR + 8*(%%LANE_D)] - vmovdqu8 %%ZDATA2{%%MASK_REG}{z}, [%%GP0 + %%OFFSET] - vmovdqu8 %%ZDATA3{%%MASK_REG}{z}, [%%GP1 + %%OFFSET] + vmovdqu32 %%ZDATA2{%%MASK_REG}{z}, [%%GP0 + %%OFFSET] + vmovdqu32 %%ZDATA3{%%MASK_REG}{z}, [%%GP1 + %%OFFSET] %else ; STORE - vmovdqu8 [%%GP0 + %%OFFSET]{%%MASK_REG}, %%ZDATA0 - vmovdqu8 [%%GP1 + %%OFFSET]{%%MASK_REG}, %%ZDATA1 + vmovdqu32 [%%GP0 + %%OFFSET]{%%MASK_REG}, %%ZDATA0 + vmovdqu32 [%%GP1 + %%OFFSET]{%%MASK_REG}, %%ZDATA1 mov %%GP0, [%%DATA_PTR + 8*(%%LANE_C)] mov %%GP1, [%%DATA_PTR + 8*(%%LANE_D)] - vmovdqu8 [%%GP0 + %%OFFSET]{%%MASK_REG}, %%ZDATA2 - vmovdqu8 [%%GP1 + %%OFFSET]{%%MASK_REG}, %%ZDATA3 + vmovdqu32 [%%GP0 + %%OFFSET]{%%MASK_REG}, %%ZDATA2 + vmovdqu32 [%%GP1 + %%OFFSET]{%%MASK_REG}, %%ZDATA3 %endif %endif ;; %%NUM_ARGS %endmacro @@ -255,31 +361,102 @@ endstruc %if %%NUM_ARGS <= 10 ;; %%MASK_REG not set, assume 4 block load/store %ifidn %%LOAD_STORE, LOAD - vmovdqu8 %%ZDATA0, [%%IN0 + %%OFFSET] - vmovdqu8 %%ZDATA1, [%%IN1 + %%OFFSET] - vmovdqu8 %%ZDATA2, [%%IN2 + %%OFFSET] - vmovdqu8 %%ZDATA3, [%%IN3 + %%OFFSET] + vmovdqu32 %%ZDATA0, [%%IN0 + %%OFFSET] + vmovdqu32 %%ZDATA1, [%%IN1 + %%OFFSET] + vmovdqu32 %%ZDATA2, [%%IN2 + %%OFFSET] + vmovdqu32 %%ZDATA3, [%%IN3 + %%OFFSET] %else ; STORE - vmovdqu8 [%%IN0 + %%OFFSET], %%ZDATA0 - vmovdqu8 [%%IN1 + %%OFFSET], %%ZDATA1 - vmovdqu8 [%%IN2 + %%OFFSET], %%ZDATA2 - vmovdqu8 [%%IN3 + %%OFFSET], %%ZDATA3 + vmovdqu32 [%%IN0 + %%OFFSET], %%ZDATA0 + vmovdqu32 [%%IN1 + %%OFFSET], %%ZDATA1 + vmovdqu32 [%%IN2 + %%OFFSET], %%ZDATA2 + vmovdqu32 [%%IN3 + %%OFFSET], %%ZDATA3 %endif %else ;; %%MASK_REG argument passed - 1, 2, or 3 block load/store %ifidn %%LOAD_STORE, LOAD - vmovdqu8 %%ZDATA0{%%MASK_REG}{z}, [%%IN0 + %%OFFSET] - vmovdqu8 %%ZDATA1{%%MASK_REG}{z}, [%%IN1 + %%OFFSET] - vmovdqu8 %%ZDATA2{%%MASK_REG}{z}, [%%IN2 + %%OFFSET] - vmovdqu8 %%ZDATA3{%%MASK_REG}{z}, [%%IN3 + %%OFFSET] + vmovdqu32 %%ZDATA0{%%MASK_REG}{z}, [%%IN0 + %%OFFSET] + vmovdqu32 %%ZDATA1{%%MASK_REG}{z}, [%%IN1 + %%OFFSET] + vmovdqu32 %%ZDATA2{%%MASK_REG}{z}, [%%IN2 + %%OFFSET] + vmovdqu32 %%ZDATA3{%%MASK_REG}{z}, [%%IN3 + %%OFFSET] %else ; STORE - vmovdqu8 [%%IN0 + %%OFFSET]{%%MASK_REG}, %%ZDATA0 - vmovdqu8 [%%IN1 + %%OFFSET]{%%MASK_REG}, %%ZDATA1 - vmovdqu8 [%%IN2 + %%OFFSET]{%%MASK_REG}, %%ZDATA2 - vmovdqu8 [%%IN3 + %%OFFSET]{%%MASK_REG}, %%ZDATA3 + vmovdqu32 [%%IN0 + %%OFFSET]{%%MASK_REG}, %%ZDATA0 + vmovdqu32 [%%IN1 + %%OFFSET]{%%MASK_REG}, %%ZDATA1 + vmovdqu32 [%%IN2 + %%OFFSET]{%%MASK_REG}, %%ZDATA2 + vmovdqu32 [%%IN3 + %%OFFSET]{%%MASK_REG}, %%ZDATA3 %endif %endif ;; 
%%NUM_ARGS %endmacro +;; LOAD_STORE wrapper used to select load/store operation mode +%macro LOAD_STORE_4 14 +%define %%LANE_A %1 ; [in] lane index to load/store (numerical) +%define %%LANE_B %2 ; [in] lane index to load/store (numerical) +%define %%LANE_C %3 ; [in] lane index to load/store (numerical) +%define %%LANE_D %4 ; [in] lane index to load/store (numerical) +%define %%DATA_PTR %5 ; [in] GP reg with ptr to lane input table +%define %%OFFSET %6 ; [in] GP reg input/output buffer offset +%define %%ZDATA0 %7 ; [in/out] ZMM reg to load/store data +%define %%ZDATA1 %8 ; [in/out] ZMM reg to load/store data +%define %%ZDATA2 %9 ; [in/out] ZMM reg to load/store data +%define %%ZDATA3 %10 ; [in/out] ZMM reg to load/store data +%define %%GP0 %11 ; [clobbered] tmp GP reg +%define %%GP1 %12 ; [clobbered] tmp GP reg +%define %%LOAD_STORE %13 ; [in] string value to select LOAD or STORE +%define %%MODE %14 ; [in] load/store operation mode + +%ifidn %%MODE, SUBMIT_MODE + LOAD_STORE_x4 %%LANE_A, %%LANE_B, %%LANE_C, %%LANE_D, %%DATA_PTR, \ + %%OFFSET, %%ZDATA0, %%ZDATA1, %%ZDATA2, %%ZDATA3, \ + %%GP0, %%GP1, %%LOAD_STORE +%endif + +%ifidn %%MODE, SUBMIT_FINAL_MODE + LOAD_STORE_x4 %%LANE_A, %%LANE_B, %%LANE_C, %%LANE_D, %%DATA_PTR, \ + %%OFFSET, %%ZDATA0, %%ZDATA1, %%ZDATA2, %%ZDATA3, \ + %%GP0, %%GP1, %%LOAD_STORE, k1 +%endif +%ifidn %%MODE, FLUSH_MODE + LOAD_STORE_VAR_x4 %%LANE_A, %%LANE_B, %%LANE_C, %%LANE_D, %%DATA_PTR, \ + %%OFFSET, %%ZDATA0, %%ZDATA1, %%ZDATA2, %%ZDATA3, \ + %%GP0, %%GP1, %%LOAD_STORE, k1, k2 +%endif +%endmacro + +;; PRELOADED_LOAD_STORE wrapper used to select load/store operation mode +%macro PRELOADED_LOAD_STORE_4 11-15 +%define %%IN0 %1 ; [in] GP reg with lane input pointer +%define %%IN1 %2 ; [in] GP reg with lane input pointer +%define %%IN2 %3 ; [in] GP reg with lane input pointer +%define %%IN3 %4 ; [in] GP reg with lane input pointer +%define %%OFFSET %5 ; [in] GP reg input/output buffer offset +%define %%ZDATA0 %6 ; [in/out] ZMM reg to load/store data +%define %%ZDATA1 %7 ; [in/out] ZMM reg to load/store data +%define %%ZDATA2 %8 ; [in/out] ZMM reg to load/store data +%define %%ZDATA3 %9 ; [in/out] ZMM reg to load/store data +%define %%LOAD_STORE %10 ; [in] string value to select LOAD or STORE +%define %%MODE %11 ; [in] load/store operation mode +%define %%LANE_A %12 ; [in] lane ID +%define %%LANE_B %13 ; [in] lane ID +%define %%LANE_C %14 ; [in] lane ID +%define %%LANE_D %15 ; [in] lane ID + +%ifidn %%MODE, SUBMIT_MODE + PRELOADED_LOAD_STORE_x4 %%IN0, %%IN1, %%IN2, %%IN3, %%OFFSET, \ + %%ZDATA0, %%ZDATA1, %%ZDATA2, %%ZDATA3, \ + %%LOAD_STORE +%endif +%ifidn %%MODE, SUBMIT_FINAL_MODE + PRELOADED_LOAD_STORE_x4 %%IN0, %%IN1, %%IN2, %%IN3, %%OFFSET, \ + %%ZDATA0, %%ZDATA1, %%ZDATA2, %%ZDATA3, \ + %%LOAD_STORE, k1 +%endif +%ifidn %%MODE, FLUSH_MODE + PRELOADED_LOAD_STORE_VAR_x4 %%IN0, %%IN1, %%IN2, %%IN3, %%OFFSET, \ + %%ZDATA0, %%ZDATA1, %%ZDATA2, %%ZDATA3, \ + %%LOAD_STORE, %%LANE_A, %%LANE_B, \ + %%LANE_C, %%LANE_D, k1, k2, k3, k4 +%endif +%endmacro + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; AESENC_ROUNDS_x16 macro ; - 16 lanes, 1 block per lane @@ -335,7 +512,7 @@ endstruc ; - each loop encrypts 4 blocks across 16 lanes ; - stop when %%LENGTH is less than 64 bytes (4 blocks) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%macro ENCRYPT_16_PARALLEL 30 +%macro ENCRYPT_16_PARALLEL 31 %define %%ZIV00_03 %1 ;; [in] lane 0-3 IVs %define %%ZIV04_07 %2 ;; [in] lane 4-7 IVs %define %%ZIV08_11 %3 ;; [in] 
lane 8-11 IVs @@ -365,12 +542,13 @@ endstruc %define %%ZTMP3 %27 ;; [clobbered] tmp ZMM register %define %%TMP0 %28 ;; [clobbered] tmp GP register %define %%TMP1 %29 ;; [clobbered] tmp GP register -%define %%MAC_TYPE %30 ;; MAC_TYPE_NONE/CBC/XCBC flag +%define %%MAC_TYPE %30 ;; [in] MAC_TYPE_NONE/CBC/XCBC flag +%define %%SUBMIT_FLUSH %31 ;; [in] SUBMIT/FLUSH flag %if %%MAC_TYPE == MAC_TYPE_XCBC %define %%KP ARG + _aes_xcbc_args_key_tab %else -%define %%KP ARG + _aesarg_key_tab +%define %%KP ARG + _aes_args_key_tab %endif %define %%K00_03_OFFSET 0 %define %%K04_07_OFFSET 64 @@ -381,6 +559,17 @@ endstruc cmp %%LENGTH, 64 jl %%encrypt_16_done +%if %%SUBMIT_FLUSH == FLUSH +%define %%MODE FLUSH_MODE + + ;; update lane mask table + mov WORD(%%TMP0), 0xffff + vpbroadcastw YWORD(%%ZTMP0){VALID_LANES}{z}, WORD(%%TMP0) + vmovdqu16 [LANE_MASKS], YWORD(%%ZTMP0) + +%else +%define %%MODE SUBMIT_MODE +%endif xor %%IDX, %%IDX ;; skip length check on first loop jmp %%encrypt_16_first @@ -390,32 +579,33 @@ endstruc jl %%encrypt_16_end %%encrypt_16_first: + ;; load 4 plaintext blocks for lanes 0-3 - PRELOADED_LOAD_STORE_x4 IN_L0, IN_L1, IN_L2, IN_L3, %%IDX, \ - %%B0L00_03, %%B1L00_03, %%B2L00_03, \ - %%B3L00_03, LOAD + PRELOADED_LOAD_STORE_4 IN_L0, IN_L1, IN_L2, IN_L3, %%IDX, \ + %%B0L00_03, %%B1L00_03, %%B2L00_03, \ + %%B3L00_03, LOAD, %%MODE, 0, 1, 2, 3 TRANSPOSE_4x4 %%B0L00_03, %%B1L00_03, %%B2L00_03, %%B3L00_03, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 ;; load 4 plaintext blocks for lanes 4-7 - LOAD_STORE_x4 4, 5, 6, 7, IN, %%IDX, %%B0L04_07, %%B1L04_07, \ - %%B2L04_07, %%B3L04_07, %%TMP0, %%TMP1, LOAD + LOAD_STORE_4 4, 5, 6, 7, IN, %%IDX, %%B0L04_07, %%B1L04_07, \ + %%B2L04_07, %%B3L04_07, %%TMP0, %%TMP1, LOAD, %%MODE TRANSPOSE_4x4 %%B0L04_07, %%B1L04_07, %%B2L04_07, %%B3L04_07, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 ;; load 4 plaintext blocks for lanes 8-11 - PRELOADED_LOAD_STORE_x4 IN_L8, IN_L9, IN_L10, IN_L11, %%IDX, \ + PRELOADED_LOAD_STORE_4 IN_L8, IN_L9, IN_L10, IN_L11, %%IDX, \ %%B0L08_11, %%B1L08_11, %%B2L08_11, \ - %%B3L08_11, LOAD + %%B3L08_11, LOAD, %%MODE, 8, 9, 10, 11 TRANSPOSE_4x4 %%B0L08_11, %%B1L08_11, %%B2L08_11, %%B3L08_11, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 ;; load 4 plaintext blocks for lanes 12-15 - LOAD_STORE_x4 12, 13, 14, 15, IN, %%IDX, %%B0L12_15, %%B1L12_15, \ - %%B2L12_15, %%B3L12_15, %%TMP0, %%TMP1, LOAD + LOAD_STORE_4 12, 13, 14, 15, IN, %%IDX, %%B0L12_15, %%B1L12_15, \ + %%B2L12_15, %%B3L12_15, %%TMP0, %%TMP1, LOAD, %%MODE TRANSPOSE_4x4 %%B0L12_15, %%B1L12_15, %%B2L12_15, %%B3L12_15, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 @@ -468,29 +658,29 @@ endstruc TRANSPOSE_4x4 %%B0L00_03, %%B1L00_03, %%B2L00_03, %%B3L00_03, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 - LOAD_STORE_x4 0, 1, 2, 3, OUT, %%IDX, %%B0L00_03, %%B1L00_03, \ - %%B2L00_03, %%B3L00_03, %%TMP0, %%TMP1, STORE + LOAD_STORE_4 0, 1, 2, 3, OUT, %%IDX, %%B0L00_03, %%B1L00_03, \ + %%B2L00_03, %%B3L00_03, %%TMP0, %%TMP1, STORE, %%MODE ;; write back cipher text for lanes 4-7 TRANSPOSE_4x4 %%B0L04_07, %%B1L04_07, %%B2L04_07, %%B3L04_07, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 - LOAD_STORE_x4 4, 5, 6, 7, OUT, %%IDX, %%B0L04_07, %%B1L04_07, \ - %%B2L04_07, %%B3L04_07, %%TMP0, %%TMP1, STORE + LOAD_STORE_4 4, 5, 6, 7, OUT, %%IDX, %%B0L04_07, %%B1L04_07, \ + %%B2L04_07, %%B3L04_07, %%TMP0, %%TMP1, STORE, %%MODE ;; write back cipher text for lanes 8-11 TRANSPOSE_4x4 %%B0L08_11, %%B1L08_11, %%B2L08_11, %%B3L08_11, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 - LOAD_STORE_x4 8, 9, 10, 11, OUT, %%IDX, %%B0L08_11, %%B1L08_11, \ - %%B2L08_11, %%B3L08_11, 
%%TMP0, %%TMP1, STORE + LOAD_STORE_4 8, 9, 10, 11, OUT, %%IDX, %%B0L08_11, %%B1L08_11, \ + %%B2L08_11, %%B3L08_11, %%TMP0, %%TMP1, STORE, %%MODE ;; write back cipher text for lanes 12-15 TRANSPOSE_4x4 %%B0L12_15, %%B1L12_15, %%B2L12_15, %%B3L12_15, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 - LOAD_STORE_x4 12, 13, 14, 15, OUT, %%IDX, %%B0L12_15, %%B1L12_15, \ - %%B2L12_15, %%B3L12_15, %%TMP0, %%TMP1, STORE + LOAD_STORE_4 12, 13, 14, 15, OUT, %%IDX, %%B0L12_15, %%B1L12_15, \ + %%B2L12_15, %%B3L12_15, %%TMP0, %%TMP1, STORE, %%MODE %endif ;; MAC_TYPE sub %%LENGTH, 64 @@ -522,10 +712,11 @@ endstruc %%encrypt_16_done: %endmacro + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; ENCRYPT_16_FINAL Encodes final blocks (less than 4) across 16 lanes ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%macro ENCRYPT_16_FINAL 30 +%macro ENCRYPT_16_FINAL 31 %define %%ZIV00_03 %1 ;; [in] lane 0-3 IVs %define %%ZIV04_07 %2 ;; [in] lane 4-7 IVs %define %%ZIV08_11 %3 ;; [in] lane 8-11 IVs @@ -556,11 +747,13 @@ endstruc %define %%TMP1 %28 ;; [clobbered] tmp GP register %define %%NUM_BLKS %29 ;; [in] number of blocks (numerical value) %define %%MAC_TYPE %30 ;; MAC_TYPE_NONE/CBC/XCBC flag +%define %%SUBMIT_FLUSH %31 ;; SUBMIT/FLUSH flag + %if %%MAC_TYPE == MAC_TYPE_XCBC -%define %%KP ARG + _aesxcbcarg_key_tab +%define %%KP ARG + _aes_xcbc_args_key_tab %else -%define %%KP ARG + _aesarg_key_tab +%define %%KP ARG + _aes_args_key_tab %endif %define %%K00_03_OFFSET 0 %define %%K04_07_OFFSET 64 @@ -568,43 +761,53 @@ endstruc %define %%K12_15_OFFSET 192 %if %%NUM_BLKS == 1 - mov %%TMP0, 0x0000_0000_0000_ffff - kmovq k1, %%TMP0 + mov DWORD(%%TMP0), 0xf %elif %%NUM_BLKS == 2 - mov %%TMP0, 0x0000_0000_ffff_ffff - kmovq k1, %%TMP0 + mov DWORD(%%TMP0), 0xff %elif %%NUM_BLKS == 3 - mov %%TMP0, 0x0000_ffff_ffff_ffff - kmovq k1, %%TMP0 + mov DWORD(%%TMP0), 0xfff +%endif + +%if %%SUBMIT_FLUSH == FLUSH +%define %%MODE FLUSH_MODE + + ;; update lane mask table + vpbroadcastw YWORD(%%ZTMP0){VALID_LANES}{z}, WORD(%%TMP0) + vmovdqu16 [LANE_MASKS], YWORD(%%ZTMP0) + +%else +%define %%MODE SUBMIT_FINAL_MODE + kmovw k1, DWORD(%%TMP0) + %endif xor %%IDX, %%IDX ;; load 4 plaintext blocks for lanes 0-3 - PRELOADED_LOAD_STORE_x4 IN_L0, IN_L1, IN_L2, IN_L3, %%IDX, \ + PRELOADED_LOAD_STORE_4 IN_L0, IN_L1, IN_L2, IN_L3, %%IDX, \ %%B0L00_03, %%B1L00_03, %%B2L00_03, \ - %%B3L00_03, LOAD, k1 + %%B3L00_03, LOAD, %%MODE, 0, 1, 2, 3 TRANSPOSE_4x4 %%B0L00_03, %%B1L00_03, %%B2L00_03, %%B3L00_03, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 ;; load 4 plaintext blocks for lanes 4-7 - LOAD_STORE_x4 4, 5, 6, 7, IN, %%IDX, %%B0L04_07, %%B1L04_07, \ - %%B2L04_07, %%B3L04_07, %%TMP0, %%TMP1, LOAD, k1 + LOAD_STORE_4 4, 5, 6, 7, IN, %%IDX, %%B0L04_07, %%B1L04_07, \ + %%B2L04_07, %%B3L04_07, %%TMP0, %%TMP1, LOAD, %%MODE TRANSPOSE_4x4 %%B0L04_07, %%B1L04_07, %%B2L04_07, %%B3L04_07, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 ;; load 4 plaintext blocks for lanes 8-11 - PRELOADED_LOAD_STORE_x4 IN_L8, IN_L9, IN_L10, IN_L11, %%IDX, \ + PRELOADED_LOAD_STORE_4 IN_L8, IN_L9, IN_L10, IN_L11, %%IDX, \ %%B0L08_11, %%B1L08_11, %%B2L08_11, \ - %%B3L08_11, LOAD, k1 + %%B3L08_11, LOAD, %%MODE, 8, 9, 10, 11 TRANSPOSE_4x4 %%B0L08_11, %%B1L08_11, %%B2L08_11, %%B3L08_11, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 ;; load 4 plaintext blocks for lanes 12-15 - LOAD_STORE_x4 12, 13, 14, 15, IN, %%IDX, %%B0L12_15, %%B1L12_15, \ - %%B2L12_15, %%B3L12_15, %%TMP0, %%TMP1, LOAD, k1 + LOAD_STORE_4 12, 13, 14, 15, IN, %%IDX, %%B0L12_15, %%B1L12_15, \ + 
%%B2L12_15, %%B3L12_15, %%TMP0, %%TMP1, LOAD, %%MODE TRANSPOSE_4x4 %%B0L12_15, %%B1L12_15, %%B2L12_15, %%B3L12_15, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 @@ -667,29 +870,29 @@ endstruc TRANSPOSE_4x4 %%B0L00_03, %%B1L00_03, %%B2L00_03, %%B3L00_03, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 - LOAD_STORE_x4 0, 1, 2, 3, OUT, %%IDX, %%B0L00_03, %%B1L00_03, \ - %%B2L00_03, %%B3L00_03, %%TMP0, %%TMP1, STORE, k1 + LOAD_STORE_4 0, 1, 2, 3, OUT, %%IDX, %%B0L00_03, %%B1L00_03, \ + %%B2L00_03, %%B3L00_03, %%TMP0, %%TMP1, STORE, %%MODE ;; write back cipher text for lanes 4-7 TRANSPOSE_4x4 %%B0L04_07, %%B1L04_07, %%B2L04_07, %%B3L04_07, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 - LOAD_STORE_x4 4, 5, 6, 7, OUT, %%IDX, %%B0L04_07, %%B1L04_07, \ - %%B2L04_07, %%B3L04_07, %%TMP0, %%TMP1, STORE, k1 + LOAD_STORE_4 4, 5, 6, 7, OUT, %%IDX, %%B0L04_07, %%B1L04_07, \ + %%B2L04_07, %%B3L04_07, %%TMP0, %%TMP1, STORE, %%MODE ;; write back cipher text for lanes 8-11 TRANSPOSE_4x4 %%B0L08_11, %%B1L08_11, %%B2L08_11, %%B3L08_11, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 - LOAD_STORE_x4 8, 9, 10, 11, OUT, %%IDX, %%B0L08_11, %%B1L08_11, \ - %%B2L08_11, %%B3L08_11, %%TMP0, %%TMP1, STORE, k1 + LOAD_STORE_4 8, 9, 10, 11, OUT, %%IDX, %%B0L08_11, %%B1L08_11, \ + %%B2L08_11, %%B3L08_11, %%TMP0, %%TMP1, STORE, %%MODE ;; write back cipher text for lanes 12-15 TRANSPOSE_4x4 %%B0L12_15, %%B1L12_15, %%B2L12_15, %%B3L12_15, \ %%ZTMP0, %%ZTMP1, %%ZTMP2, %%ZTMP3 - LOAD_STORE_x4 12, 13, 14, 15, OUT, %%IDX, %%B0L12_15, %%B1L12_15, \ - %%B2L12_15, %%B3L12_15, %%TMP0, %%TMP1, STORE, k1 + LOAD_STORE_4 12, 13, 14, 15, OUT, %%IDX, %%B0L12_15, %%B1L12_15, \ + %%B2L12_15, %%B3L12_15, %%TMP0, %%TMP1, STORE, %%MODE %endif ;; !CBC_MAC ;; update in/out pointers @@ -716,9 +919,10 @@ endstruc ; First encrypts block up to multiple of 4 ; Then encrypts final blocks (less than 4) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%macro CBC_ENC 2 +%macro CBC_ENC 3 %define %%ROUNDS %1 %define %%MAC_TYPE %2 +%define %%SUBMIT_FLUSH %3 %define %%K00_03_OFFSET 0 %define %%K04_07_OFFSET 64 @@ -729,10 +933,15 @@ endstruc %define %%IV ARG + _aes_xcbc_args_ICV %define %%IN ARG + _aes_xcbc_args_in %else -%define %%KP ARG + _aesarg_key_tab -%define %%IV ARG + _aesarg_IV -%define %%IN ARG + _aesarg_in -%define %%OUT ARG + _aesarg_out +%define %%KP ARG + _aes_args_key_tab +%define %%IV ARG + _aes_args_IV +%define %%IN ARG + _aes_args_in +%define %%OUT ARG + _aes_args_out +%endif + +;; check if flush +%if %%SUBMIT_FLUSH == FLUSH + kmovw VALID_LANES, DWORD(arg3) ;; store valid lane mask %endif ;; load transpose tables @@ -770,7 +979,8 @@ endstruc ENCRYPT_16_PARALLEL ZIV00_03, ZIV04_07, ZIV08_11, ZIV12_15, \ LEN, %%ROUNDS, IA0, ZT0, ZT1, ZT2, ZT3, ZT4, ZT5, \ ZT6, ZT7, ZT8, ZT9, ZT10, ZT11, ZT12, ZT13, ZT14, \ - ZT15, ZT16, ZT17, ZT18, ZT19, IA1, IA2, %%MAC_TYPE + ZT15, ZT16, ZT17, ZT18, ZT19, IA1, IA2, \ + %%MAC_TYPE, %%SUBMIT_FLUSH ;; get num remaining blocks shr LEN, 4 @@ -785,19 +995,19 @@ endstruc ENCRYPT_16_FINAL ZIV00_03, ZIV04_07, ZIV08_11, ZIV12_15, \ %%ROUNDS, IA0, ZT0, ZT1, ZT2, ZT3, ZT4, ZT5, ZT6, ZT7, \ ZT8, ZT9, ZT10, ZT11, ZT12, ZT13, ZT14, ZT15, ZT16, ZT17, \ - ZT18, ZT19, IA1, IA2, 3, %%MAC_TYPE - jmp %%_cbc_enc_done + ZT18, ZT19, IA1, IA2, 3, %%MAC_TYPE, %%SUBMIT_FLUSH + jmp %%_cbc_enc_done %%_final_blocks_1: ENCRYPT_16_FINAL ZIV00_03, ZIV04_07, ZIV08_11, ZIV12_15, \ %%ROUNDS, IA0, ZT0, ZT1, ZT2, ZT3, ZT4, ZT5, ZT6, ZT7, \ ZT8, ZT9, ZT10, ZT11, ZT12, ZT13, ZT14, ZT15, ZT16, ZT17, \ - ZT18, ZT19, IA1, IA2, 1, %%MAC_TYPE - jmp 
%%_cbc_enc_done + ZT18, ZT19, IA1, IA2, 1, %%MAC_TYPE, %%SUBMIT_FLUSH + jmp %%_cbc_enc_done %%_final_blocks_2: ENCRYPT_16_FINAL ZIV00_03, ZIV04_07, ZIV08_11, ZIV12_15, \ %%ROUNDS, IA0, ZT0, ZT1, ZT2, ZT3, ZT4, ZT5, ZT6, ZT7, \ ZT8, ZT9, ZT10, ZT11, ZT12, ZT13, ZT14, ZT15, ZT16, ZT17, \ - ZT18, ZT19, IA1, IA2, 2, %%MAC_TYPE + ZT18, ZT19, IA1, IA2, 2, %%MAC_TYPE, %%SUBMIT_FLUSH %%_cbc_enc_done: ;; store IV's per lane vmovdqa64 [%%IV + 16*0], ZIV00_03 @@ -827,17 +1037,9 @@ mksection .text ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cbc_enc_128_vaes_avx512,function,internal) aes_cbc_enc_128_vaes_avx512: - endbranch64 FUNC_SAVE - CBC_ENC 9, MAC_TYPE_NONE + CBC_ENC 9, MAC_TYPE_NONE, SUBMIT FUNC_RESTORE - -%ifdef SAFE_DATA - clear_all_zmms_asm -%else - vzeroupper -%endif ;; SAFE_DATA - ret ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -845,26 +1047,18 @@ aes_cbc_enc_128_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cbc_enc_192_vaes_avx512,function,internal) aes_cbc_enc_192_vaes_avx512: - endbranch64 FUNC_SAVE - CBC_ENC 11, MAC_TYPE_NONE + CBC_ENC 11, MAC_TYPE_NONE, SUBMIT FUNC_RESTORE - -%ifdef SAFE_DATA - clear_all_zmms_asm -%else - vzeroupper -%endif ;; SAFE_DATA - ret + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; void aes_cbc_enc_256_vaes_avx512(AES_ARGS *args, uint64_t len_in_bytes); ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cbc_enc_256_vaes_avx512,function,internal) aes_cbc_enc_256_vaes_avx512: - endbranch64 FUNC_SAVE - CBC_ENC 13, MAC_TYPE_NONE + CBC_ENC 13, MAC_TYPE_NONE, SUBMIT FUNC_RESTORE ret @@ -873,17 +1067,9 @@ aes_cbc_enc_256_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes128_cbc_mac_vaes_avx512,function,internal) aes128_cbc_mac_vaes_avx512: - endbranch64 FUNC_SAVE - CBC_ENC 9, MAC_TYPE_CBC + CBC_ENC 9, MAC_TYPE_CBC, SUBMIT FUNC_RESTORE - -%ifdef SAFE_DATA - clear_all_zmms_asm -%else - vzeroupper -%endif ;; SAFE_DATA - ret ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -891,17 +1077,9 @@ aes128_cbc_mac_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes256_cbc_mac_vaes_avx512,function,internal) aes256_cbc_mac_vaes_avx512: - endbranch64 FUNC_SAVE - CBC_ENC 13, MAC_TYPE_CBC + CBC_ENC 13, MAC_TYPE_CBC, SUBMIT FUNC_RESTORE - -%ifdef SAFE_DATA - clear_all_zmms_asm -%else - vzeroupper -%endif ;; SAFE_DATA - ret ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -909,17 +1087,81 @@ aes256_cbc_mac_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_xcbc_mac_128_vaes_avx512,function,internal) aes_xcbc_mac_128_vaes_avx512: - endbranch64 FUNC_SAVE - CBC_ENC 9, MAC_TYPE_XCBC + CBC_ENC 9, MAC_TYPE_XCBC, SUBMIT FUNC_RESTORE + ret -%ifdef SAFE_DATA - clear_all_zmms_asm -%else - vzeroupper -%endif ;; SAFE_DATA +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void aes_cbc_enc_128_flush_vaes_avx512(AES_ARGS *args, +;; uint64_t len_in_bytes, +;; uint16_t valid_lane_mask); +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +MKGLOBAL(aes_cbc_enc_128_flush_vaes_avx512,function,internal) +aes_cbc_enc_128_flush_vaes_avx512: + FUNC_SAVE + CBC_ENC 9, MAC_TYPE_NONE, FLUSH + FUNC_RESTORE + 
ret + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void aes_cbc_enc_192_flush_vaes_avx512(AES_ARGS *args, +;; uint64_t len_in_bytes, +;; uint16_t valid_lane_mask); +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +MKGLOBAL(aes_cbc_enc_192_flush_vaes_avx512,function,internal) +aes_cbc_enc_192_flush_vaes_avx512: + FUNC_SAVE + CBC_ENC 11, MAC_TYPE_NONE, FLUSH + FUNC_RESTORE + ret +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void aes_cbc_enc_256_flush_vaes_avx512(AES_ARGS *args, +;; uint64_t len_in_bytes, +;; uint16_t valid_lane_mask); +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +MKGLOBAL(aes_cbc_enc_256_flush_vaes_avx512,function,internal) +aes_cbc_enc_256_flush_vaes_avx512: + FUNC_SAVE + CBC_ENC 13, MAC_TYPE_NONE, FLUSH + FUNC_RESTORE + ret + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void aes128_cbc_mac_flush_vaes_avx512(AES_ARGS *args, +;; uint64_t len_in_bytes, +;; uint16_t valid_lane_mask); +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +MKGLOBAL(aes128_cbc_mac_flush_vaes_avx512,function,internal) +aes128_cbc_mac_flush_vaes_avx512: + FUNC_SAVE + CBC_ENC 9, MAC_TYPE_CBC, FLUSH + FUNC_RESTORE + ret + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void aes256_cbc_mac_flush_vaes_avx512(AES_ARGS *args, +;; uint64_t len_in_bytes, +;; uint16_t valid_lane_mask); +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +MKGLOBAL(aes256_cbc_mac_flush_vaes_avx512,function,internal) +aes256_cbc_mac_flush_vaes_avx512: + FUNC_SAVE + CBC_ENC 13, MAC_TYPE_CBC, FLUSH + FUNC_RESTORE + ret + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void aes_xcbc_mac_128_flush_vaes_avx512(AES_ARGS *args, +;; uint64_t len_in_bytes, +;; uint16_t valid_lane_mask); +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +MKGLOBAL(aes_xcbc_mac_128_flush_vaes_avx512,function,internal) +aes_xcbc_mac_128_flush_vaes_avx512: + FUNC_SAVE + CBC_ENC 9, MAC_TYPE_XCBC, FLUSH + FUNC_RESTORE ret mksection stack-noexec diff --git a/lib/avx512/aes_cbcs_dec_by16_vaes_avx512.asm b/lib/avx512_t2/aes_cbcs_dec_by16_vaes_avx512.asm similarity index 96% rename from lib/avx512/aes_cbcs_dec_by16_vaes_avx512.asm rename to lib/avx512_t2/aes_cbcs_dec_by16_vaes_avx512.asm index efea7a85d21b210bb7daae6eb3283a08d6c9d6ad..2f5adea422e39b8252de9cbf130a7e2fe9e9b3fb 100644 --- a/lib/avx512/aes_cbcs_dec_by16_vaes_avx512.asm +++ b/lib/avx512_t2/aes_cbcs_dec_by16_vaes_avx512.asm @@ -26,8 +26,8 @@ ;; %define CBCS -%include "avx512/aes_cbc_dec_by16_vaes_avx512.asm" -%include "include/cet.inc" +%include "avx512_t2/aes_cbc_dec_by16_vaes_avx512.asm" + %define len rax mksection .text @@ -37,7 +37,6 @@ mksection .text ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cbcs_1_9_dec_128_vaes_avx512,function,internal) aes_cbcs_1_9_dec_128_vaes_avx512: - endbranch64 %ifndef LINUX mov len, [rsp + 8*5] %else diff --git a/lib/avx512/aes_cbcs_enc_vaes_avx512.asm b/lib/avx512_t2/aes_cbcs_enc_vaes_avx512.asm similarity index 98% rename from lib/avx512/aes_cbcs_enc_vaes_avx512.asm rename to lib/avx512_t2/aes_cbcs_enc_vaes_avx512.asm index 82015d45fa92c27c4b1c97fbddd46b76ddcd4cf5..83a02d694bc75c38ea7eff7947f1703073a94061 100644 --- a/lib/avx512/aes_cbcs_enc_vaes_avx512.asm +++ b/lib/avx512_t2/aes_cbcs_enc_vaes_avx512.asm @@ -31,10 +31,6 @@ %include 
"include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" -struc STACK -_gpr_save: resq 1 -endstruc %define GPR_SAVE_AREA rsp + _gpr_save @@ -104,16 +100,13 @@ endstruc ;; Save registers states %macro FUNC_SAVE 0 - sub rsp, STACK_size - mov [GPR_SAVE_AREA + 8*0], rbp + push rbp %endmacro ;; Restore register states %macro FUNC_RESTORE 0 ;; XMMs are saved at a higher level - mov rbp, [GPR_SAVE_AREA + 8*0] - add rsp, STACK_size - vzeroupper + pop rbp %endmacro %macro LOAD_STORE_4x1 10 @@ -454,15 +447,9 @@ mksection .text ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cbcs_1_9_enc_128_vaes_avx512,function,internal) aes_cbcs_1_9_enc_128_vaes_avx512: - endbranch64 FUNC_SAVE CBCS_ENC 9, 160 FUNC_RESTORE - -%ifdef SAFE_DATA - clear_all_zmms_asm -%endif ;; SAFE_DATA - ret mksection stack-noexec diff --git a/lib/avx512/aes_cntr_api_by16_vaes_avx512.asm b/lib/avx512_t2/aes_cntr_api_by16_vaes_avx512.asm similarity index 96% rename from lib/avx512/aes_cntr_api_by16_vaes_avx512.asm rename to lib/avx512_t2/aes_cntr_api_by16_vaes_avx512.asm index f00625cbb5bc2d9ca81b82d130219a1c3303ef35..62df455b3b18f623a9a28be1973d4e2cbcc2ba1b 100644 --- a/lib/avx512/aes_cntr_api_by16_vaes_avx512.asm +++ b/lib/avx512_t2/aes_cntr_api_by16_vaes_avx512.asm @@ -27,14 +27,13 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%include "avx512/aes_cntr_by16_vaes_avx512.inc" +%include "avx512_t2/aes_cntr_by16_vaes_avx512.inc" ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;void aes_cntr_128_submit_vaes_avx512 (IMB_JOB *job) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_128_submit_vaes_avx512,function,internal) aes_cntr_128_submit_vaes_avx512: - endbranch64 FUNC_SAVE CNTR ;; arg1 - [in] job ;; arg2 - [in] NROUNDS @@ -49,7 +48,6 @@ aes_cntr_128_submit_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_192_submit_vaes_avx512,function,internal) aes_cntr_192_submit_vaes_avx512: - endbranch64 FUNC_SAVE CNTR ;; arg1 - [in] job ;; arg2 - [in] NROUNDS @@ -64,7 +62,6 @@ aes_cntr_192_submit_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_256_submit_vaes_avx512,function,internal) aes_cntr_256_submit_vaes_avx512: - endbranch64 FUNC_SAVE CNTR ;; arg1 - [in] job ;; arg2 - [in] NROUNDS diff --git a/lib/avx512/aes_cntr_bit_api_by16_vaes_avx512.asm b/lib/avx512_t2/aes_cntr_bit_api_by16_vaes_avx512.asm similarity index 96% rename from lib/avx512/aes_cntr_bit_api_by16_vaes_avx512.asm rename to lib/avx512_t2/aes_cntr_bit_api_by16_vaes_avx512.asm index a7b4770663533f7cb2457a2000aa00fa66b53de8..431a8119d094e62533ba171e216e2affe562c52d 100644 --- a/lib/avx512/aes_cntr_bit_api_by16_vaes_avx512.asm +++ b/lib/avx512_t2/aes_cntr_bit_api_by16_vaes_avx512.asm @@ -27,14 +27,13 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%include "avx512/aes_cntr_by16_vaes_avx512.inc" +%include "avx512_t2/aes_cntr_by16_vaes_avx512.inc" ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;void aes_cntr_bit_128_submit_vaes_avx512 (IMB_JOB *job) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_bit_128_submit_vaes_avx512,function,internal) aes_cntr_bit_128_submit_vaes_avx512: - endbranch64 FUNC_SAVE CNTR_BIT ;; arg1 - [in] job ;; arg2 - [in] NROUNDS @@ -49,7 +48,6 @@ aes_cntr_bit_128_submit_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_bit_192_submit_vaes_avx512,function,internal) aes_cntr_bit_192_submit_vaes_avx512: - endbranch64 FUNC_SAVE CNTR_BIT ;; arg1 - [in] job ;; arg2 - [in] NROUNDS @@ -64,7 +62,6 @@ aes_cntr_bit_192_submit_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_bit_256_submit_vaes_avx512,function,internal) aes_cntr_bit_256_submit_vaes_avx512: - endbranch64 FUNC_SAVE CNTR_BIT ;; arg1 - [in] job ;; arg2 - [in] NROUNDS diff --git a/lib/avx512/aes_cntr_by16_vaes_avx512.inc b/lib/avx512_t2/aes_cntr_by16_vaes_avx512.inc similarity index 99% rename from lib/avx512/aes_cntr_by16_vaes_avx512.inc rename to lib/avx512_t2/aes_cntr_by16_vaes_avx512.inc index 6e3fc34ab2914e6aa2bd994e6c9d156455efbd9f..8e859aaae1b9d894a50fcba6866b71c1e584965c 100644 --- a/lib/avx512/aes_cntr_by16_vaes_avx512.inc +++ b/lib/avx512_t2/aes_cntr_by16_vaes_avx512.inc @@ -35,11 +35,10 @@ %include "include/mb_mgr_datastruct.asm" %include "include/imb_job.asm" %include "include/memcpy.asm" -%include "include/cet.inc" %include "include/aes_common.asm" %include "include/const.inc" %include "include/clear_regs.asm" -%include "include/cet.inc" + mksection .rodata default rel diff --git a/lib/avx512/aes_cntr_ccm_api_by16_vaes_avx512.asm b/lib/avx512_t2/aes_cntr_ccm_api_by16_vaes_avx512.asm similarity index 96% rename from lib/avx512/aes_cntr_ccm_api_by16_vaes_avx512.asm rename to lib/avx512_t2/aes_cntr_ccm_api_by16_vaes_avx512.asm index 5f57046d0383a697e201e5ec9575dfec45addb13..6dca75720ee143795d45608c6d0fb8a67ed72ed4 100644 --- a/lib/avx512/aes_cntr_ccm_api_by16_vaes_avx512.asm +++ b/lib/avx512_t2/aes_cntr_ccm_api_by16_vaes_avx512.asm @@ -27,14 +27,13 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%include "avx512/aes_cntr_by16_vaes_avx512.inc" +%include "avx512_t2/aes_cntr_by16_vaes_avx512.inc" ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;IMB_JOB * aes_cntr_ccm_128_vaes_avx512(IMB_JOB *job) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_ccm_128_vaes_avx512,function,internal) aes_cntr_ccm_128_vaes_avx512: - endbranch64 FUNC_SAVE CNTR ;; arg1 - [in] job ;; arg2 - [in] NROUNDS @@ -49,7 +48,6 @@ aes_cntr_ccm_128_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_ccm_256_vaes_avx512,function,internal) aes_cntr_ccm_256_vaes_avx512: - endbranch64 FUNC_SAVE CNTR ;; arg1 - [in] job ;; arg2 - [in] NROUNDS diff --git a/lib/avx512/aes_cntr_pon_api_by16_vaes_avx512.asm b/lib/avx512_t2/aes_cntr_pon_api_by16_vaes_avx512.asm similarity index 96% rename from lib/avx512/aes_cntr_pon_api_by16_vaes_avx512.asm rename to lib/avx512_t2/aes_cntr_pon_api_by16_vaes_avx512.asm index fdc7596fb919e791969c6828d4d679643a54e3a2..74d59b4c6c32b4b4410595e044f7babc370f41d3 100644 --- a/lib/avx512/aes_cntr_pon_api_by16_vaes_avx512.asm +++ b/lib/avx512_t2/aes_cntr_pon_api_by16_vaes_avx512.asm @@ -28,14 +28,13 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%include "avx512/aes_cntr_by16_vaes_avx512.inc" +%include "avx512_t2/aes_cntr_by16_vaes_avx512.inc" ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;void aes_cntr_pon_enc_128_vaes_avx512 (void *src, void *dst, void *iv, void *keys, uint64_t length, uint32_t *bip) ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_pon_enc_128_vaes_avx512,function,internal) aes_cntr_pon_enc_128_vaes_avx512: - endbranch64 CNTR_PON_ENC_DEC ENCRYPT ret @@ -45,8 +44,6 @@ aes_cntr_pon_enc_128_vaes_avx512: ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; MKGLOBAL(aes_cntr_pon_dec_128_vaes_avx512,function,internal) aes_cntr_pon_dec_128_vaes_avx512: - endbranch64 - CNTR_PON_ENC_DEC DECRYPT ret diff --git a/lib/avx512/aes_docsis_dec_avx512.asm b/lib/avx512_t2/aes_docsis_dec_avx512.asm similarity index 99% rename from lib/avx512/aes_docsis_dec_avx512.asm rename to lib/avx512_t2/aes_docsis_dec_avx512.asm index 0afea8d3ca10bb7b479acc455ba98343925065b2..f1dab4132c1e6ba15d91e85295fd353ada815f30 100644 --- a/lib/avx512/aes_docsis_dec_avx512.asm +++ b/lib/avx512_t2/aes_docsis_dec_avx512.asm @@ -31,7 +31,7 @@ %include "include/os.asm" %include "include/clear_regs.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" + ;; In System V AMD64 ABI ;; callee saves: RBX, RBP, R12-R15 ;; Windows x64 ABI @@ -1063,7 +1063,6 @@ mksection .text align 64 MKGLOBAL(aes_docsis128_dec_crc32_avx512,function,internal) aes_docsis128_dec_crc32_avx512: - endbranch64 AES_DOCSIS_DEC_CRC32 9 ret @@ -1071,7 +1070,6 @@ aes_docsis128_dec_crc32_avx512: align 64 MKGLOBAL(aes_docsis256_dec_crc32_avx512,function,internal) aes_docsis256_dec_crc32_avx512: - endbranch64 AES_DOCSIS_DEC_CRC32 13 ret diff --git a/lib/avx512/aes_docsis_dec_vaes_avx512.asm b/lib/avx512_t2/aes_docsis_dec_vaes_avx512.asm similarity index 99% rename from lib/avx512/aes_docsis_dec_vaes_avx512.asm rename to lib/avx512_t2/aes_docsis_dec_vaes_avx512.asm index b8fd8ffed4d9cbf426fc1650be6e54356aedb73c..c03684aed734eb004e60c091a0502bbe930a1bee 100644 --- 
a/lib/avx512/aes_docsis_dec_vaes_avx512.asm +++ b/lib/avx512_t2/aes_docsis_dec_vaes_avx512.asm @@ -32,7 +32,7 @@ %include "include/clear_regs.asm" %include "include/aes_common.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" + default rel extern ethernet_fcs_avx512_local @@ -1510,7 +1510,6 @@ mksection .text align 64 MKGLOBAL(aes_docsis128_dec_crc32_vaes_avx512,function,internal) aes_docsis128_dec_crc32_vaes_avx512: - endbranch64 AES_DOCSIS_DEC_CRC32 9 ret @@ -1518,7 +1517,6 @@ aes_docsis128_dec_crc32_vaes_avx512: align 64 MKGLOBAL(aes_docsis256_dec_crc32_vaes_avx512,function,internal) aes_docsis256_dec_crc32_vaes_avx512: - endbranch64 AES_DOCSIS_DEC_CRC32 13 ret diff --git a/lib/avx512/aes_docsis_enc_avx512.asm b/lib/avx512_t2/aes_docsis_enc_avx512.asm similarity index 99% rename from lib/avx512/aes_docsis_enc_avx512.asm rename to lib/avx512_t2/aes_docsis_enc_avx512.asm index 6ab393db02c178dc90935cb6aaf124160360efb6..8a6a4e09097eeb5bf87757f95d5fec6a7ad00cd0 100644 --- a/lib/avx512/aes_docsis_enc_avx512.asm +++ b/lib/avx512_t2/aes_docsis_enc_avx512.asm @@ -36,7 +36,7 @@ %include "include/reg_sizes.asm" %include "include/const.inc" %include "include/clear_regs.asm" -%include "include/cet.inc" + %define APPEND(a,b) a %+ b %define CRC_LANE_STATE_TO_START 0x01 @@ -1389,7 +1389,6 @@ APPEND(%%_skip_clear_,I): align 64 MKGLOBAL(submit_job_aes_docsis128_enc_crc32_avx512,function,internal) submit_job_aes_docsis128_enc_crc32_avx512: - endbranch64 FUNC_ENTRY SUBMIT_FLUSH_DOCSIS_CRC32 arg1, arg2, \ @@ -1411,7 +1410,6 @@ submit_job_aes_docsis128_enc_crc32_avx512: align 64 MKGLOBAL(submit_job_aes_docsis256_enc_crc32_avx512,function,internal) submit_job_aes_docsis256_enc_crc32_avx512: - endbranch64 FUNC_ENTRY SUBMIT_FLUSH_DOCSIS_CRC32 arg1, arg2, \ @@ -1431,7 +1429,6 @@ submit_job_aes_docsis256_enc_crc32_avx512: align 64 MKGLOBAL(flush_job_aes_docsis128_enc_crc32_avx512,function,internal) flush_job_aes_docsis128_enc_crc32_avx512: - endbranch64 FUNC_ENTRY SUBMIT_FLUSH_DOCSIS_CRC32 arg1, arg2, \ @@ -1451,7 +1448,6 @@ flush_job_aes_docsis128_enc_crc32_avx512: align 64 MKGLOBAL(flush_job_aes_docsis256_enc_crc32_avx512,function,internal) flush_job_aes_docsis256_enc_crc32_avx512: - endbranch64 FUNC_ENTRY SUBMIT_FLUSH_DOCSIS_CRC32 arg1, arg2, \ diff --git a/lib/avx512/aes_docsis_enc_vaes_avx512.asm b/lib/avx512_t2/aes_docsis_enc_vaes_avx512.asm similarity index 99% rename from lib/avx512/aes_docsis_enc_vaes_avx512.asm rename to lib/avx512_t2/aes_docsis_enc_vaes_avx512.asm index 1e3e8ec3502acbf3c25aae5a590e1b5fb7d539c5..fa7adb88e13ca3e507cd4a3ac303d635a8f15f3d 100644 --- a/lib/avx512/aes_docsis_enc_vaes_avx512.asm +++ b/lib/avx512_t2/aes_docsis_enc_vaes_avx512.asm @@ -35,7 +35,7 @@ %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" + %define APPEND(a,b) a %+ b %define CRC_LANE_STATE_TO_START 0x01 @@ -1987,7 +1987,6 @@ align 32 align 64 MKGLOBAL(submit_job_aes_docsis128_enc_crc32_vaes_avx512,function,internal) submit_job_aes_docsis128_enc_crc32_vaes_avx512: - endbranch64 FUNC_ENTRY SUBMIT_FLUSH_DOCSIS_CRC32 arg1, arg2, \ @@ -2009,7 +2008,6 @@ submit_job_aes_docsis128_enc_crc32_vaes_avx512: align 64 MKGLOBAL(submit_job_aes_docsis256_enc_crc32_vaes_avx512,function,internal) submit_job_aes_docsis256_enc_crc32_vaes_avx512: - endbranch64 FUNC_ENTRY SUBMIT_FLUSH_DOCSIS_CRC32 arg1, arg2, \ @@ -2029,7 +2027,6 @@ submit_job_aes_docsis256_enc_crc32_vaes_avx512: align 64 
MKGLOBAL(flush_job_aes_docsis128_enc_crc32_vaes_avx512,function,internal) flush_job_aes_docsis128_enc_crc32_vaes_avx512: - endbranch64 FUNC_ENTRY SUBMIT_FLUSH_DOCSIS_CRC32 arg1, arg2, \ @@ -2049,7 +2046,6 @@ flush_job_aes_docsis128_enc_crc32_vaes_avx512: align 64 MKGLOBAL(flush_job_aes_docsis256_enc_crc32_vaes_avx512,function,internal) flush_job_aes_docsis256_enc_crc32_vaes_avx512: - endbranch64 FUNC_ENTRY SUBMIT_FLUSH_DOCSIS_CRC32 arg1, arg2, \ diff --git a/lib/avx512_t2/aes_ecb_vaes_avx512.asm b/lib/avx512_t2/aes_ecb_vaes_avx512.asm new file mode 100644 index 0000000000000000000000000000000000000000..a4287a302ff4f51d33b892f2bc75d733167c568e --- /dev/null +++ b/lib/avx512_t2/aes_ecb_vaes_avx512.asm @@ -0,0 +1,245 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +; routine to do AES ECB encrypt/decrypt on 16n bytes doing AES by 16 + +; YMM registers are clobbered. 
Saving/restoring must be done at a higher level + +; void aes_ecb_x_y_vaes_avx512(void *in, +; UINT128 keys[], +; void *out, +; UINT64 len_bytes); +; +; x = direction (enc/dec) +; y = key size (128/192/256) +; arg 1: IN: pointer to input (cipher text) +; arg 2: KEYS: pointer to keys +; arg 3: OUT: pointer to output (plain text) +; arg 4: LEN: length in bytes (multiple of 16) +; + +%include "include/os.asm" +%include "include/clear_regs.asm" +%include "include/aes_common.asm" + +%define AES_ECB_ENC_128 aes_ecb_enc_128_vaes_avx512 +%define AES_ECB_DEC_128 aes_ecb_dec_128_vaes_avx512 +%define AES_ECB_ENC_192 aes_ecb_enc_192_vaes_avx512 +%define AES_ECB_DEC_192 aes_ecb_dec_192_vaes_avx512 +%define AES_ECB_ENC_256 aes_ecb_enc_256_vaes_avx512 +%define AES_ECB_DEC_256 aes_ecb_dec_256_vaes_avx512 +%ifdef LINUX +%define IN rdi +%define KEYS rsi +%define OUT rdx +%define LEN rcx +%else +%define IN rcx +%define KEYS rdx +%define OUT r8 +%define LEN r9 +%endif +%define IDX rax +%define TMP r11 + +%define YKEY1 ymm1 +%define YDATA0 ymm2 +%define YDATA1 ymm3 +%define YDATA2 ymm4 +%define YDATA3 ymm5 +%define YDATA4 ymm6 +%define YDATA5 ymm7 +%define YDATA6 ymm8 +%define YDATA7 ymm9 + +mksection .text + +%macro AES_ECB 2 +%define %%NROUNDS %1 ; [in] Number of AES rounds, numerical value +%define %%DIR %2 ; [in] Direction (encrypt/decrypt) +%ifidn %%DIR, ENC +%define AES YMM_AESENC_ROUND_BLOCKS_0_16 +%else ; DIR = DEC +%define AES YMM_AESDEC_ROUND_BLOCKS_0_16 +%endif + + or LEN, LEN + jz %%done + + xor IDX, IDX + mov TMP, LEN + and TMP, 255 + jz %%main_loop + + ; branch to different code block based on remainder + cmp TMP, 8*16 + je %%initial_num_blocks_is_8 + jb %%initial_num_blocks_is_7_1 + cmp TMP, 12*16 + je %%initial_num_blocks_is_12 + jb %%initial_num_blocks_is_11_9 + ;; 15, 14 or 13 + cmp TMP, 14*16 + ja %%initial_num_blocks_is_15 + je %%initial_num_blocks_is_14 + jmp %%initial_num_blocks_is_13 +%%initial_num_blocks_is_11_9: + ;; 11, 10 or 9 + cmp TMP, 10*16 + ja %%initial_num_blocks_is_11 + je %%initial_num_blocks_is_10 + jmp %%initial_num_blocks_is_9 +%%initial_num_blocks_is_7_1: + cmp TMP, 4*16 + je %%initial_num_blocks_is_4 + jb %%initial_num_blocks_is_3_1 + ;; 7, 6 or 5 + cmp TMP, 6*16 + ja %%initial_num_blocks_is_7 + je %%initial_num_blocks_is_6 + jmp %%initial_num_blocks_is_5 +%%initial_num_blocks_is_3_1: + ;; 3, 2 or 1 + cmp TMP, 2*16 + ja %%initial_num_blocks_is_3 + je %%initial_num_blocks_is_2 + ;; fall through for `jmp %%initial_num_blocks_is_1` + +%assign num_blocks 1 +%rep 15 + + %%initial_num_blocks_is_ %+ num_blocks : +%assign %%I 0 + ; load initial blocks + YMM_LOAD_BLOCKS_0_16 num_blocks, IN, 0, YDATA0,\ + YDATA1, YDATA2, YDATA3, YDATA4, YDATA5,\ + YDATA6, YDATA7 + +; Perform AES encryption/decryption on initial blocks +%rep (%%NROUNDS + 1) ; 10/12/14 + vbroadcasti128 YKEY1, [KEYS + %%I*16] + AES YDATA0, YDATA1, YDATA2, YDATA3, YDATA4,\ + YDATA5, YDATA6, YDATA7, YKEY1, %%I, no_data,\ + no_data, no_data, no_data, no_data, no_data,\ + no_data, no_data, num_blocks, (%%NROUNDS - 1) +%assign %%I (%%I + 1) +%endrep + + ; store initial blocks + YMM_STORE_BLOCKS_0_16 num_blocks, OUT, 0, YDATA0, YDATA1,\ + YDATA2, YDATA3, YDATA4, YDATA5, YDATA6, YDATA7 + + add IDX, num_blocks*16 + cmp IDX, LEN + je %%done + +%assign num_blocks (num_blocks + 1) + jmp %%main_loop +%endrep + +align 16 +%%main_loop: + ; load the next 16 blocks into ymm registers + YMM_LOAD_BLOCKS_0_16 16, {IN + IDX}, 0, YDATA0, YDATA1,\ + YDATA2, YDATA3, YDATA4, YDATA5, YDATA6, YDATA7 + + ; Perform AES encryption/decryption on 
16 blocks +%assign %%ROUNDNO 0 ; current key number +%rep (%%NROUNDS + 1) ; 10/12/14 + vbroadcasti128 YKEY1, [KEYS + %%ROUNDNO*16] + + AES YDATA0, YDATA1, YDATA2, YDATA3, YDATA4, YDATA5,\ + YDATA6, YDATA7, YKEY1, %%ROUNDNO, no_data, no_data,\ + no_data, no_data, no_data, no_data, no_data, no_data,\ + 16, (%%NROUNDS - 1) + +%assign %%ROUNDNO (%%ROUNDNO + 1) +%endrep + + ; write 16 blocks to output + YMM_STORE_BLOCKS_0_16 16, (OUT + IDX), 0, YDATA0, YDATA1,\ + YDATA2, YDATA3, YDATA4, YDATA5, YDATA6, YDATA7 + + add IDX, 16*16 + cmp IDX, LEN + jne %%main_loop + +%%done: + +%ifdef SAFE_DATA + clear_all_zmms_asm +%else + vzeroupper +%endif +%endmacro + +;; +;; AES-ECB 128 functions +;; +align 16 +MKGLOBAL(AES_ECB_ENC_128,function,internal) +AES_ECB_ENC_128: + AES_ECB 10, ENC + ret + +align 16 +MKGLOBAL(AES_ECB_DEC_128,function,internal) +AES_ECB_DEC_128: + AES_ECB 10, DEC + ret + +;; +;; AES-ECB 192 functions +;; +align 16 +MKGLOBAL(AES_ECB_ENC_192,function,internal) +AES_ECB_ENC_192: + AES_ECB 12, ENC + ret + +align 16 +MKGLOBAL(AES_ECB_DEC_192,function,internal) +AES_ECB_DEC_192: + AES_ECB 12, DEC + ret + +;; +;; AES-ECB 256 functions +;; +align 16 +MKGLOBAL(AES_ECB_ENC_256,function,internal) +AES_ECB_ENC_256: + AES_ECB 14, ENC + ret + +align 16 +MKGLOBAL(AES_ECB_DEC_256,function,internal) +AES_ECB_DEC_256: + AES_ECB 14, DEC + ret + +mksection stack-noexec diff --git a/lib/avx512/crc16_x25_avx512.asm b/lib/avx512_t2/crc16_x25_avx512.asm similarity index 100% rename from lib/avx512/crc16_x25_avx512.asm rename to lib/avx512_t2/crc16_x25_avx512.asm diff --git a/lib/avx512/crc32_by16_vclmul_avx512.asm b/lib/avx512_t2/crc32_by16_vclmul_avx512.asm similarity index 99% rename from lib/avx512/crc32_by16_vclmul_avx512.asm rename to lib/avx512_t2/crc32_by16_vclmul_avx512.asm index 19af2e44ad75df191bdf8c6987908070d4424735..03e38cc34922facaff68f95e0b9e402afc45fcba 100644 --- a/lib/avx512/crc32_by16_vclmul_avx512.asm +++ b/lib/avx512_t2/crc32_by16_vclmul_avx512.asm @@ -40,6 +40,8 @@ %include "include/clear_regs.asm" %include "include/crc32.inc" %include "include/cet.inc" +%include "include/clear_regs.asm" + [bits 64] default rel @@ -280,6 +282,11 @@ crc32_by16_vclmul_avx512: vpextrd eax, xmm7, 1 .cleanup: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else + vzeroupper +%endif ret align 32 diff --git a/lib/avx512/crc32_fp_avx512.asm b/lib/avx512_t2/crc32_fp_avx512.asm similarity index 100% rename from lib/avx512/crc32_fp_avx512.asm rename to lib/avx512_t2/crc32_fp_avx512.asm diff --git a/lib/avx512/crc32_iuup_avx512.asm b/lib/avx512_t2/crc32_iuup_avx512.asm similarity index 100% rename from lib/avx512/crc32_iuup_avx512.asm rename to lib/avx512_t2/crc32_iuup_avx512.asm diff --git a/lib/avx512/crc32_lte_avx512.asm b/lib/avx512_t2/crc32_lte_avx512.asm similarity index 100% rename from lib/avx512/crc32_lte_avx512.asm rename to lib/avx512_t2/crc32_lte_avx512.asm diff --git a/lib/avx512/crc32_refl_by16_vclmul_avx512.asm b/lib/avx512_t2/crc32_refl_by16_vclmul_avx512.asm similarity index 98% rename from lib/avx512/crc32_refl_by16_vclmul_avx512.asm rename to lib/avx512_t2/crc32_refl_by16_vclmul_avx512.asm index 3000564df0805a4f0d3db26dc2395e611324c4cc..6b9a735b9825e986415bcbbf36e50707107d2b5e 100644 --- a/lib/avx512/crc32_refl_by16_vclmul_avx512.asm +++ b/lib/avx512_t2/crc32_refl_by16_vclmul_avx512.asm @@ -40,6 +40,8 @@ %include "include/clear_regs.asm" %include "include/crc32_refl.inc" %include "include/cet.inc" +%include "include/clear_regs.asm" + [bits 64] default rel @@ -270,6 +272,11 @@ 
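The new aes_ecb_vaes_avx512.asm above dispatches on the number of leftover blocks (LEN mod 256 bytes) before entering a 16-blocks-per-iteration main loop. A minimal C sketch of that dispatch logic follows; it is illustrative only, and aes_blocks() is a hypothetical stand-in for the YMM load / AES rounds / store macro sequence, not a library function.

```c
#include <stdint.h>

/* Stand-in for the YMM load / AES rounds / store sequence used by the
 * assembly routine; the body is intentionally out of scope for this sketch. */
static void aes_blocks(const uint8_t *in, uint8_t *out, const void *keys,
                       unsigned nrounds, unsigned nblocks)
{
        (void)in; (void)out; (void)keys; (void)nrounds; (void)nblocks;
}

/* Dispatch logic of the by-16 ECB routine: process the 1..15 block
 * remainder first, then full 16-block (256-byte) chunks in the main loop.
 * len_bytes is assumed to be a multiple of 16, as in the assembly. */
static void aes_ecb_by16(const uint8_t *in, const void *keys, uint8_t *out,
                         uint64_t len_bytes, unsigned nrounds)
{
        uint64_t idx = 0;
        const uint64_t initial = len_bytes & 255; /* remainder in bytes */

        if (len_bytes == 0)
                return;

        if (initial != 0) {
                aes_blocks(in, out, keys, nrounds, (unsigned)(initial / 16));
                idx = initial;
                if (idx == len_bytes)
                        return;
        }

        for (; idx != len_bytes; idx += 16 * 16) /* 16 blocks per iteration */
                aes_blocks(in + idx, out + idx, keys, nrounds, 16);
}
```

The assembly avoids a generic remainder loop by branching into one of 15 unrolled cases generated with %rep, but the control flow is equivalent to the sketch above.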
crc32_refl_by16_vclmul_avx512: vpextrd eax, xmm7, 2 .cleanup: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else + vzeroupper +%endif not eax ret diff --git a/lib/avx512/crc32_sctp_avx512.asm b/lib/avx512_t2/crc32_sctp_avx512.asm similarity index 100% rename from lib/avx512/crc32_sctp_avx512.asm rename to lib/avx512_t2/crc32_sctp_avx512.asm diff --git a/lib/avx512/crc32_wimax_avx512.asm b/lib/avx512_t2/crc32_wimax_avx512.asm similarity index 100% rename from lib/avx512/crc32_wimax_avx512.asm rename to lib/avx512_t2/crc32_wimax_avx512.asm diff --git a/lib/avx512/ethernet_fcs_avx512.asm b/lib/avx512_t2/ethernet_fcs_avx512.asm similarity index 100% rename from lib/avx512/ethernet_fcs_avx512.asm rename to lib/avx512_t2/ethernet_fcs_avx512.asm diff --git a/lib/avx512/gcm_api_vaes_avx512.inc b/lib/avx512_t2/gcm_api_vaes_avx512.inc similarity index 99% rename from lib/avx512/gcm_api_vaes_avx512.inc rename to lib/avx512_t2/gcm_api_vaes_avx512.inc index 53a33d5c09a20e922cdd52d858d633705652b1de..1b76667d6473572fd9e646c40f76fed147bbf32e 100644 --- a/lib/avx512/gcm_api_vaes_avx512.inc +++ b/lib/avx512_t2/gcm_api_vaes_avx512.inc @@ -27,7 +27,7 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%include "avx512/gcm_vaes_avx512.inc" +%include "avx512_t2/gcm_vaes_avx512.inc" %include "include/error.inc" %include "include/os.asm" diff --git a/lib/avx512/gcm_gmac_api_vaes_avx512.inc b/lib/avx512_t2/gcm_gmac_api_vaes_avx512.inc similarity index 99% rename from lib/avx512/gcm_gmac_api_vaes_avx512.inc rename to lib/avx512_t2/gcm_gmac_api_vaes_avx512.inc index 8b174d3a3dad3a861a36848b01b94f1a4ae6474f..8ca8df8d5f30387165ddf8d60ca75f824055b995 100644 --- a/lib/avx512/gcm_gmac_api_vaes_avx512.inc +++ b/lib/avx512_t2/gcm_gmac_api_vaes_avx512.inc @@ -27,7 +27,7 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%include "avx512/gcm_vaes_avx512.inc" +%include "avx512_t2/gcm_vaes_avx512.inc" %include "include/error.inc" %ifndef GCM_GMAC_API_VAES_AVX512_INC diff --git a/lib/avx512/gcm_sgl_api_vaes_avx512.inc b/lib/avx512_t2/gcm_sgl_api_vaes_avx512.inc similarity index 99% rename from lib/avx512/gcm_sgl_api_vaes_avx512.inc rename to lib/avx512_t2/gcm_sgl_api_vaes_avx512.inc index 20acc9ac3665c6ea444dcc29713e4884712ca4bc..1ecc7ba19f0adcab7d50be1ecfd148da9d145880 100644 --- a/lib/avx512/gcm_sgl_api_vaes_avx512.inc +++ b/lib/avx512_t2/gcm_sgl_api_vaes_avx512.inc @@ -27,7 +27,7 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%include "avx512/gcm_vaes_avx512.inc" +%include "avx512_t2/gcm_vaes_avx512.inc" %include "include/error.inc" %include "include/os.asm" diff --git a/lib/avx512/gcm_vaes_avx512.inc b/lib/avx512_t2/gcm_vaes_avx512.inc similarity index 96% rename from lib/avx512/gcm_vaes_avx512.inc rename to lib/avx512_t2/gcm_vaes_avx512.inc index a0797369fef0e95291b6fc0f32bd54e6afd240bf..e6241b32259e4196eaea8bf30abcb155984c13e9 100644 --- a/lib/avx512/gcm_vaes_avx512.inc +++ b/lib/avx512_t2/gcm_vaes_avx512.inc @@ -233,7 +233,7 @@ ;;; %%INPTR address is 64 byte aligned ;;; - there is an option to pass ready blocks through ZMM registers too. 
;;; 4 extra parameters need to passed in such case and 21st argument can be empty -%macro GHASH_16 21-25 +%macro GHASH_16 21-22 %define %%TYPE %1 ; [in] ghash type: start (xor hash), mid, end (same as mid; no reduction), ; end_reduce (end with reduction), start_reduce %define %%GH %2 ; [in/out] ZMM ghash sum: high 128-bits @@ -256,10 +256,7 @@ %define %%ZTMP7 %19 ; [clobbered] temporary ZMM %define %%ZTMP8 %20 ; [clobbered] temporary ZMM %define %%ZTMP9 %21 ; [clobbered] temporary ZMM -%define %%DAT0 %22 ; [in] ZMM with 4 blocks of input data (INPTR, INOFF, INDIS unused) -%define %%DAT1 %23 ; [in] ZMM with 4 blocks of input data (INPTR, INOFF, INDIS unused) -%define %%DAT2 %24 ; [in] ZMM with 4 blocks of input data (INPTR, INOFF, INDIS unused) -%define %%DAT3 %25 ; [in] ZMM with 4 blocks of input data (INPTR, INOFF, INDIS unused) +%define %%SHUFM %22 ; [in] ZMM with shuffle mask - provided only when input data needs shuffling %assign start_ghash 0 %assign do_reduction 0 @@ -278,14 +275,15 @@ %endif ;; ghash blocks 0-3 -%if %0 == 21 - vmovdqa64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS] +%if %0 == 22 + vmovdqu64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS] + vpshufb %%ZTMP9, %%ZTMP9, %%SHUFM %else -%xdefine %%ZTMP9 %%DAT0 + vmovdqa64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS] %endif %if start_ghash != 0 - vpxorq %%ZTMP9, %%HASH + vpxorq %%ZTMP9, %%ZTMP9, %%HASH %endif vmovdqu64 %%ZTMP8, [%%HKPTR + %%HKOFF + %%HKDIS] vpclmulqdq %%ZTMP0, %%ZTMP9, %%ZTMP8, 0x11 ; T0H = a1*b1 @@ -293,10 +291,11 @@ vpclmulqdq %%ZTMP2, %%ZTMP9, %%ZTMP8, 0x01 ; T0M1 = a1*b0 vpclmulqdq %%ZTMP3, %%ZTMP9, %%ZTMP8, 0x10 ; T0M2 = a0*b1 ;; ghash blocks 4-7 -%if %0 == 21 - vmovdqa64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS + 64] +%if %0 == 22 + vmovdqu64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS + 64] + vpshufb %%ZTMP9, %%ZTMP9, %%SHUFM %else -%xdefine %%ZTMP9 %%DAT1 + vmovdqa64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS + 64] %endif vmovdqu64 %%ZTMP8, [%%HKPTR + %%HKOFF + %%HKDIS + 64] vpclmulqdq %%ZTMP4, %%ZTMP9, %%ZTMP8, 0x11 ; T1H = a1*b1 @@ -316,10 +315,11 @@ vpternlogq %%GM, %%ZTMP3, %%ZTMP7, 0x96 ; GM += T0M2 + T1M1 %endif ;; ghash blocks 8-11 -%if %0 == 21 - vmovdqa64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS + 128] +%if %0 == 22 + vmovdqu64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS + 128] + vpshufb %%ZTMP9, %%ZTMP9, %%SHUFM %else -%xdefine %%ZTMP9 %%DAT2 + vmovdqa64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS + 128] %endif vmovdqu64 %%ZTMP8, [%%HKPTR + %%HKOFF + %%HKDIS + 128] vpclmulqdq %%ZTMP0, %%ZTMP9, %%ZTMP8, 0x11 ; T0H = a1*b1 @@ -327,10 +327,11 @@ vpclmulqdq %%ZTMP2, %%ZTMP9, %%ZTMP8, 0x01 ; T0M1 = a1*b0 vpclmulqdq %%ZTMP3, %%ZTMP9, %%ZTMP8, 0x10 ; T0M2 = a0*b1 ;; ghash blocks 12-15 -%if %0 == 21 - vmovdqa64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS + 192] +%if %0 == 22 + vmovdqu64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS + 192] + vpshufb %%ZTMP9, %%ZTMP9, %%SHUFM %else -%xdefine %%ZTMP9 %%DAT3 + vmovdqa64 %%ZTMP9, [%%INPTR + %%INOFF + %%INDIS + 192] %endif vmovdqu64 %%ZTMP8, [%%HKPTR + %%HKOFF + %%HKDIS + 192] vpclmulqdq %%ZTMP4, %%ZTMP9, %%ZTMP8, 0x11 ; T1H = a1*b1 @@ -744,139 +745,187 @@ mov %%T1, %%A_IN ; T1 = AAD mov %%T2, %%A_LEN ; T2 = aadLen - or %%T2, %%T2 - jz %%_CALC_AAD_done + cmp %%T2, (16*16) + jb %%_less_than_16x16 vmovdqa64 %%SHFMSK, [rel SHUF_MASK] %%_get_AAD_loop48x16: cmp %%T2, (48*16) - jl %%_exit_AAD_loop48x16 - - vmovdqu64 %%ZT1, [%%T1 + 64*0] ; Blocks 0-3 - vmovdqu64 %%ZT2, [%%T1 + 64*1] ; Blocks 4-7 - vmovdqu64 %%ZT3, [%%T1 + 64*2] ; Blocks 8-11 - vmovdqu64 %%ZT4, [%%T1 + 64*3] ; Blocks 12-15 - vpshufb %%ZT1, %%SHFMSK - vpshufb 
%%ZT2, %%SHFMSK - vpshufb %%ZT3, %%SHFMSK - vpshufb %%ZT4, %%SHFMSK + jb %%_exit_AAD_loop48x16 GHASH_16 start, %%ZT5, %%ZT6, %%ZT7, \ - NO_INPUT_PTR, NO_INPUT_PTR, NO_INPUT_PTR, \ + %%T1, (0*64), 0, \ %%GDATA_KEY, HashKey_48, 0, ZWORD(%%AAD_HASH), \ %%ZT0, %%ZT8, %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ - %%ZT14, %%ZT15, %%ZT16, NO_ZMM, \ - %%ZT1, %%ZT2, %%ZT3, %%ZT4 - - vmovdqu64 %%ZT1, [%%T1 + 16*16 + 64*0] ; Blocks 16-19 - vmovdqu64 %%ZT2, [%%T1 + 16*16 + 64*1] ; Blocks 20-23 - vmovdqu64 %%ZT3, [%%T1 + 16*16 + 64*2] ; Blocks 24-27 - vmovdqu64 %%ZT4, [%%T1 + 16*16 + 64*3] ; Blocks 28-31 - vpshufb %%ZT1, %%SHFMSK - vpshufb %%ZT2, %%SHFMSK - vpshufb %%ZT3, %%SHFMSK - vpshufb %%ZT4, %%SHFMSK + %%ZT14, %%ZT15, %%ZT16, %%ZT1, %%SHFMSK GHASH_16 mid, %%ZT5, %%ZT6, %%ZT7, \ - NO_INPUT_PTR, NO_INPUT_PTR, NO_INPUT_PTR, \ + %%T1, (4*64), 0, \ %%GDATA_KEY, HashKey_32, 0, NO_HASH_IN_OUT, \ %%ZT0, %%ZT8, %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ - %%ZT14, %%ZT15, %%ZT16, NO_ZMM, \ - %%ZT1, %%ZT2, %%ZT3, %%ZT4 - - vmovdqu64 %%ZT1, [%%T1 + 32*16 + 64*0] ; Blocks 32-35 - vmovdqu64 %%ZT2, [%%T1 + 32*16 + 64*1] ; Blocks 36-39 - vmovdqu64 %%ZT3, [%%T1 + 32*16 + 64*2] ; Blocks 40-43 - vmovdqu64 %%ZT4, [%%T1 + 32*16 + 64*3] ; Blocks 44-47 - vpshufb %%ZT1, %%SHFMSK - vpshufb %%ZT2, %%SHFMSK - vpshufb %%ZT3, %%SHFMSK - vpshufb %%ZT4, %%SHFMSK + %%ZT14, %%ZT15, %%ZT16, %%ZT1, %%SHFMSK GHASH_16 end_reduce, %%ZT5, %%ZT6, %%ZT7, \ - NO_INPUT_PTR, NO_INPUT_PTR, NO_INPUT_PTR, \ + %%T1, (8*64), 0, \ %%GDATA_KEY, HashKey_16, 0, ZWORD(%%AAD_HASH), \ %%ZT0, %%ZT8, %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ - %%ZT14, %%ZT15, %%ZT16, NO_ZMM, \ - %%ZT1, %%ZT2, %%ZT3, %%ZT4 + %%ZT14, %%ZT15, %%ZT16, %%ZT1, %%SHFMSK - sub %%T2, (48*16) + sub %%T2, (12*64) je %%_CALC_AAD_done - add %%T1, (48*16) + add %%T1, (12*64) jmp %%_get_AAD_loop48x16 %%_exit_AAD_loop48x16: ; Less than 48x16 bytes remaining + cmp %%T2, (16*16) + jb %%_less_than_16x16 + je %%_equal_16x16 cmp %%T2, (32*16) - jl %%_less_than_32x16 + jb %%_less_than_32x16 + je %%_equal_32x16 + + ;; calculate offset to hash key to start with + lea %%T3, [%%T2 + 15] + and %%T3, ~15 + neg %%T3 + add %%T3, HashKey_1 + 16 ; Get next 16 blocks - vmovdqu64 %%ZT1, [%%T1 + 64*0] - vmovdqu64 %%ZT2, [%%T1 + 64*1] - vmovdqu64 %%ZT3, [%%T1 + 64*2] - vmovdqu64 %%ZT4, [%%T1 + 64*3] - vpshufb %%ZT1, %%SHFMSK - vpshufb %%ZT2, %%SHFMSK - vpshufb %%ZT3, %%SHFMSK - vpshufb %%ZT4, %%SHFMSK + GHASH_16 start, %%ZT5, %%ZT6, %%ZT7, \ + %%T1, (0*64), 0, \ + %%GDATA_KEY, %%T3, 0, ZWORD(%%AAD_HASH), \ + %%ZT0, %%ZT8, %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ + %%ZT14, %%ZT15, %%ZT16, %%ZT1, %%SHFMSK + + GHASH_16 mid, %%ZT5, %%ZT6, %%ZT7, \ + %%T1, (4*64), 0, \ + %%GDATA_KEY, {%%T3 + 16*16}, 0, ZWORD(%%AAD_HASH), \ + %%ZT0, %%ZT8, %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ + %%ZT14, %%ZT15, %%ZT16, %%ZT1, %%SHFMSK + + sub %%T2, (32*16) + add %%T1, (32*16) + jmp %%_less_than_16x16_remain +%%_equal_32x16: GHASH_16 start, %%ZT5, %%ZT6, %%ZT7, \ - NO_INPUT_PTR, NO_INPUT_PTR, NO_INPUT_PTR, \ + %%T1, (0*64), 0, \ %%GDATA_KEY, HashKey_32, 0, ZWORD(%%AAD_HASH), \ %%ZT0, %%ZT8, %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ - %%ZT14, %%ZT15, %%ZT16, NO_ZMM, \ - %%ZT1, %%ZT2, %%ZT3, %%ZT4 - - vmovdqu64 %%ZT1, [%%T1 + 16*16 + 64*0] - vmovdqu64 %%ZT2, [%%T1 + 16*16 + 64*1] - vmovdqu64 %%ZT3, [%%T1 + 16*16 + 64*2] - vmovdqu64 %%ZT4, [%%T1 + 16*16 + 64*3] - vpshufb %%ZT1, %%SHFMSK - vpshufb %%ZT2, %%SHFMSK - vpshufb %%ZT3, %%SHFMSK - vpshufb %%ZT4, %%SHFMSK + %%ZT14, %%ZT15, %%ZT16, %%ZT1, %%SHFMSK GHASH_16 end_reduce, %%ZT5, %%ZT6, %%ZT7, \ - NO_INPUT_PTR, NO_INPUT_PTR, 
NO_INPUT_PTR, \ + %%T1, (4*64), 0, \ %%GDATA_KEY, HashKey_16, 0, ZWORD(%%AAD_HASH), \ %%ZT0, %%ZT8, %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ - %%ZT14, %%ZT15, %%ZT16, NO_ZMM, \ - %%ZT1, %%ZT2, %%ZT3, %%ZT4 - - sub %%T2, (32*16) - je %%_CALC_AAD_done - - add %%T1, (32*16) - jmp %%_less_than_16x16 + %%ZT14, %%ZT15, %%ZT16, %%ZT1, %%SHFMSK + jmp %%_CALC_AAD_done %%_less_than_32x16: - cmp %%T2, (16*16) - jl %%_less_than_16x16 - ; Get next 16 blocks - vmovdqu64 %%ZT1, [%%T1 + 64*0] - vmovdqu64 %%ZT2, [%%T1 + 64*1] - vmovdqu64 %%ZT3, [%%T1 + 64*2] - vmovdqu64 %%ZT4, [%%T1 + 64*3] - vpshufb %%ZT1, %%SHFMSK - vpshufb %%ZT2, %%SHFMSK - vpshufb %%ZT3, %%SHFMSK - vpshufb %%ZT4, %%SHFMSK + ;; calculate offset to hash key to start with + lea %%T3, [%%T2 + 15] + and %%T3, ~15 + neg %%T3 + add %%T3, HashKey_1 + 16 + + GHASH_16 start, %%ZT5, %%ZT6, %%ZT7, \ + %%T1, (0*64), 0, \ + %%GDATA_KEY, %%T3, 0, ZWORD(%%AAD_HASH), \ + %%ZT0, %%ZT8, %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ + %%ZT14, %%ZT15, %%ZT16, %%ZT1, %%SHFMSK + sub %%T2, (16*16) + add %%T1, (16*16) + jmp %%_less_than_16x16_remain +%%_equal_16x16: GHASH_16 start_reduce, %%ZT5, %%ZT6, %%ZT7, \ - NO_INPUT_PTR, NO_INPUT_PTR, NO_INPUT_PTR, \ + %%T1, (0*64), 0, \ %%GDATA_KEY, HashKey_16, 0, ZWORD(%%AAD_HASH), \ %%ZT0, %%ZT8, %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ - %%ZT14, %%ZT15, %%ZT16, NO_ZMM, \ - %%ZT1, %%ZT2, %%ZT3, %%ZT4 + %%ZT14, %%ZT15, %%ZT16, %%ZT1, %%SHFMSK + jmp %%_CALC_AAD_done - sub %%T2, (16*16) - je %%_CALC_AAD_done + ; Less than 16x16 bytes remaining +%%_less_than_16x16_remain: + ;; ZT5 (H), ZT6 (M), ZT7 (L) contain ghash sums + ;; prep mask source address + lea %%T3, [rel byte64_len_to_mask_table] + lea %%T3, [%%T3 + %%T2*8] - add %%T1, (16*16) + ;; calculate number of blocks to ghash (including partial bytes) + add DWORD(%%T2), 15 + shr DWORD(%%T2), 4 + jz %%_CALC_AAD_done ;; catch zero length + cmp DWORD(%%T2), 2 + jb %%_AAD_blocks_cont_1 + je %%_AAD_blocks_cont_2 + cmp DWORD(%%T2), 4 + jb %%_AAD_blocks_cont_3 + je %%_AAD_blocks_cont_4 + cmp DWORD(%%T2), 6 + jb %%_AAD_blocks_cont_5 + je %%_AAD_blocks_cont_6 + cmp DWORD(%%T2), 8 + jb %%_AAD_blocks_cont_7 + je %%_AAD_blocks_cont_8 + cmp DWORD(%%T2), 10 + jb %%_AAD_blocks_cont_9 + je %%_AAD_blocks_cont_10 + cmp DWORD(%%T2), 12 + jb %%_AAD_blocks_cont_11 + je %%_AAD_blocks_cont_12 + cmp DWORD(%%T2), 14 + jb %%_AAD_blocks_cont_13 + je %%_AAD_blocks_cont_14 + cmp DWORD(%%T2), 15 + je %%_AAD_blocks_cont_15 + ;; fall through for 16 blocks - ; Less than 16x16 bytes remaining + ;; The flow of each of these cases is identical: + ;; - load blocks plain text + ;; - shuffle loaded blocks + ;; - xor in current hash value into block 0 + ;; - perform up multiplications with ghash keys + ;; - jump to reduction code + +%assign I 16 + ;; generate all 16 cases using preprocessor +%rep 16 + +%%_AAD_blocks_cont_ %+ I: +%if I > 12 + sub %%T3, 12 * 16 * 8 +%elif I > 8 + sub %%T3, 8 * 16 * 8 +%elif I > 4 + sub %%T3, 4 * 16 * 8 +%endif + kmovq %%MASKREG, [%%T3] + + ZMM_LOAD_MASKED_BLOCKS_0_16 \ + I, %%T1, 0, \ + %%ZT14, %%ZT15, %%ZT16, %%ZT17, %%MASKREG + + ZMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 \ + I, vpshufb, \ + %%ZT14, %%ZT15, %%ZT16, %%ZT17, \ + %%ZT14, %%ZT15, %%ZT16, %%ZT17, \ + %%SHFMSK, %%SHFMSK, %%SHFMSK, %%SHFMSK + + GHASH_1_TO_16 %%GDATA_KEY, ZWORD(%%AAD_HASH), \ + %%ZT0, %%ZT1, %%ZT2, %%ZT3, %%ZT4, \ + %%ZT9, %%ZT10, %%ZT11, %%ZT12, \ + ZWORD(%%AAD_HASH), %%ZT14, %%ZT15, %%ZT16, %%ZT17, I, \ + %%ZT5, %%ZT6, %%ZT7 + jmp %%_CALC_AAD_done + +%assign I (I - 1) +%endrep + + ; Less than 16x16 bytes %%_less_than_16x16: ;; 
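The reworked CALC_AAD path above replaces the pre-loaded, pre-shuffled ZMM inputs with GHASH_16 calls that load and byte-swap the AAD themselves (via the new optional shuffle-mask argument), and it derives the starting hash-key offset from the remaining AAD length. A rough C restatement of that length arithmetic is given below; HashKey_1 stands for the structure offset of the lowest hash-key power as referenced in the diff, and the helper names are illustrative, not library API.

```c
#include <stdint.h>

/* Number of 16-byte blocks to GHASH, counting a trailing partial block
 * (mirrors "add len, 15; shr len, 4" in the diff). */
static inline uint32_t aad_blocks(uint64_t aad_len)
{
        return (uint32_t)((aad_len + 15) >> 4);
}

/* Offset into the hash-key power table to start from, so that once all
 * remaining blocks are hashed the final AAD block lines up with the lowest
 * key power (mirrors "lea T3,[T2+15]; and T3,~15; neg T3;
 * add T3, HashKey_1 + 16"). */
static inline int64_t hashkey_start_offset(int64_t hashkey_1_offset,
                                           uint64_t aad_len)
{
        const int64_t rounded = (int64_t)((aad_len + 15) & ~15ULL);

        return hashkey_1_offset + 16 - rounded;
}
```

Rounding the length up to whole 16-byte blocks ensures that a trailing partial block still consumes one hash-key power, which is also why the block count adds 15 before shifting right by 4.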
prep mask source address lea %%T3, [rel byte64_len_to_mask_table] @@ -885,6 +934,7 @@ ;; calculate number of blocks to ghash (including partial bytes) add DWORD(%%T2), 15 shr DWORD(%%T2), 4 + jz %%_CALC_AAD_done ;; catch zero length cmp DWORD(%%T2), 2 jb %%_AAD_blocks_1 je %%_AAD_blocks_2 @@ -922,6 +972,14 @@ %rep 16 %%_AAD_blocks_ %+ I: +%if I >= 3 + vmovdqa64 %%SHFMSK, [rel SHUF_MASK] +%elif I == 2 + vmovdqa64 YWORD(%%SHFMSK), [rel SHUF_MASK] +%elif I == 1 + vmovdqa64 XWORD(%%SHFMSK), [rel SHUF_MASK] +%endif + %if I > 12 sub %%T3, 12 * 16 * 8 %elif I > 8 diff --git a/lib/avx512/mb_mgr_aes128_cbc_enc_flush_avx512.asm b/lib/avx512_t2/mb_mgr_aes128_cbc_enc_flush_avx512.asm similarity index 82% rename from lib/avx512/mb_mgr_aes128_cbc_enc_flush_avx512.asm rename to lib/avx512_t2/mb_mgr_aes128_cbc_enc_flush_avx512.asm index 1c50bd3dbf8b34e89c9383709cb274447a903578..90bfd10407d0e3e02fb6f114e6542ce5c07c7772 100644 --- a/lib/avx512/mb_mgr_aes128_cbc_enc_flush_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes128_cbc_enc_flush_avx512.asm @@ -30,9 +30,10 @@ %include "include/mb_mgr_datastruct.asm" %include "include/constants.asm" %include "include/reg_sizes.asm" -%include "include/cet.inc" +%include "include/clear_regs.asm" + %ifndef AES_CBC_ENC_X16 -%define AES_CBC_ENC_X16 aes_cbc_enc_128_vaes_avx512 +%define AES_CBC_ENC_X16 aes_cbc_enc_128_flush_vaes_avx512 %define FLUSH_JOB_AES_ENC flush_job_aes128_enc_vaes_avx512 %define NUM_KEYS 11 %endif @@ -47,9 +48,11 @@ mksection .text %ifdef LINUX %define arg1 rdi %define arg2 rsi +%define arg3 rdx %else %define arg1 rcx %define arg2 rdx +%define arg3 r8 %endif %define state arg1 @@ -62,7 +65,6 @@ mksection .text %define unused_lanes rbx %define tmp1 rbx -%define good_lane rdx %define iv rdx %define tmp2 rax @@ -71,8 +73,8 @@ mksection .text %define tmp rbp %define idx rbp -%define tmp3 r8 -%define tmp4 r9 +%define tmp3 r9 +%define tmp4 r10 %endif ; copy IV's and round keys into NULL lanes @@ -153,7 +155,6 @@ endstruc ; arg 2 : job MKGLOBAL(FLUSH_JOB_AES_ENC,function,internal) FLUSH_JOB_AES_ENC: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -16 @@ -180,31 +181,22 @@ FLUSH_JOB_AES_ENC: vmovdqu64 zmm2, [state + _aes_job_in_lane + (8*PTR_SZ)] vpcmpq k1, zmm1, zmm0, 4 ; NEQ vpcmpq k2, zmm2, zmm0, 4 ; NEQ - kmovw DWORD(tmp), k1 - kmovw DWORD(tmp1), k2 - mov DWORD(tmp2), DWORD(tmp1) - shl DWORD(tmp2), 8 - or DWORD(tmp2), DWORD(tmp) ; mask of non-null jobs in tmp2 - not BYTE(tmp) - kmovw k4, DWORD(tmp) - not BYTE(tmp1) - kmovw k5, DWORD(tmp1) - mov DWORD(tmp), DWORD(tmp2) - not WORD(tmp) - kmovw k6, DWORD(tmp) ; mask of NULL jobs in k4, k5 and k6 - mov DWORD(tmp), DWORD(tmp2) + kshiftlw k2, k2, 8 + korw k6, k2, k1 + kmovw DWORD(arg3), k6 ; mask of non-null lanes in arg3 + knotw k6, k6 + kshiftrw k5, k6, 8 ; mask of NULL jobs in k4, k5 and k6 + xor tmp2, tmp2 - bsf WORD(tmp2), WORD(tmp) ; index of the 1st set bit in tmp2 + bsf WORD(tmp2), WORD(arg3) ; index of the 1st set bit in tmp2 - ;; copy good lane data into NULL lanes - mov tmp, [state + _aes_args_in + tmp2*8] - vpbroadcastq zmm1, tmp - vmovdqa64 [state + _aes_args_in + (0*PTR_SZ)]{k4}, zmm1 - vmovdqa64 [state + _aes_args_in + (8*PTR_SZ)]{k5}, zmm1 - ;; - out pointer + ;; copy good lane output pointer into NULL lanes in & out + ;; NOTE: NULL lanes not updated so any valid address can be used mov tmp, [state + _aes_args_out + tmp2*8] vpbroadcastq zmm1, tmp - vmovdqa64 [state + _aes_args_out + (0*PTR_SZ)]{k4}, zmm1 + vmovdqa64 [state + _aes_args_in + (0*PTR_SZ)]{k6}, zmm1 + vmovdqa64 [state + _aes_args_in + 
(8*PTR_SZ)]{k5}, zmm1 + vmovdqa64 [state + _aes_args_out + (0*PTR_SZ)]{k6}, zmm1 vmovdqa64 [state + _aes_args_out + (8*PTR_SZ)]{k5}, zmm1 ;; - set len to UINT16_MAX @@ -217,16 +209,10 @@ FLUSH_JOB_AES_ENC: ;; Find min length for lanes 0-7 vphminposuw xmm2, xmm0 - ;; scale up good lane idx before copying IV and keys - shl tmp2, 4 - ; extract min length of lanes 0-7 vpextrw DWORD(len2), xmm2, 0 ; min value vpextrw DWORD(idx), xmm2, 1 ; min index - ;; - copy IV and round keys to null lanes - COPY_IV_KEYS_TO_NULL_LANES tmp2, tmp1, tmp3, xmm4, xmm5, k6 - ;; Update lens and find min for lanes 8-15 vextracti128 xmm1, ymm0, 1 vphminposuw xmm2, xmm1 @@ -237,12 +223,16 @@ FLUSH_JOB_AES_ENC: add DWORD(idx), 8 ; but index +8 mov len2, tmp3 ; min len use_min: + or len2, len2 + je len_is_0 + vpbroadcastw ymm3, WORD(len2) vpsubw ymm0, ymm0, ymm3 vmovdqa [state + _aes_lens], ymm0 ; "state" and "args" are the same address, arg1 ; len is arg2 + ; valid lane mask is arg3 call AES_CBC_ENC_X16 ; state and idx are intact @@ -258,18 +248,25 @@ len_is_0: sub qword [state + _aes_lanes_in_use], 1 %ifdef SAFE_DATA - ; Set bit of lane of returned job - xor DWORD(tmp3), DWORD(tmp3) - bts DWORD(tmp3), DWORD(idx) - kmovw k1, DWORD(tmp3) - korw k6, k1, k6 - - ;; Clear IV and expanded keys of returned job and "NULL lanes" - ;; (k6 contains the mask of the jobs) - CLEAR_IV_KEYS_IN_NULL_LANES tmp1, xmm0, k6 + ;; Clear IV + vpxorq xmm0, xmm0 + shl idx, 4 ; multiply by 16 + + ;; Clear expanded keys +%assign round 0 +%rep NUM_KEYS + vmovdqa [state + _aesarg_key_tab + round * (16*16) + idx], xmm0 +%assign round (round + 1) +%endrep + %endif return: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else + vzeroupper +%endif ;; SAFE_DATA mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] diff --git a/lib/avx512/mb_mgr_aes128_cbc_enc_submit_avx512.asm b/lib/avx512_t2/mb_mgr_aes128_cbc_enc_submit_avx512.asm similarity index 98% rename from lib/avx512/mb_mgr_aes128_cbc_enc_submit_avx512.asm rename to lib/avx512_t2/mb_mgr_aes128_cbc_enc_submit_avx512.asm index 6b8b34653fe92a2a54b6bff59ef65f10ac0c255a..2acf75f7582c1694a2603dbc3eebd8b37dfc3768 100644 --- a/lib/avx512/mb_mgr_aes128_cbc_enc_submit_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes128_cbc_enc_submit_avx512.asm @@ -28,9 +28,10 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" +%include "include/clear_regs.asm" + %ifndef AES_CBC_ENC_X16 %define AES_CBC_ENC_X16 aes_cbc_enc_128_vaes_avx512 %define NUM_KEYS 11 @@ -139,7 +140,6 @@ endstruc ; arg 2 : job MKGLOBAL(SUBMIT_JOB_AES_ENC,function,internal) SUBMIT_JOB_AES_ENC: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -16 @@ -252,13 +252,14 @@ len_is_0: vmovdqa [state + _aesarg_key_tab + round * (16*16) + idx], xmm0 %assign round (round + 1) %endrep - %endif return: -%ifndef SAFE_DATA +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper -%endif +%endif ;; SAFE_DATA mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] diff --git a/lib/avx512/mb_mgr_aes128_cbcs_1_9_flush_avx512.asm b/lib/avx512_t2/mb_mgr_aes128_cbcs_1_9_flush_avx512.asm similarity index 98% rename from lib/avx512/mb_mgr_aes128_cbcs_1_9_flush_avx512.asm rename to lib/avx512_t2/mb_mgr_aes128_cbcs_1_9_flush_avx512.asm index 67f90f362a8fe624f99a81d598251eed7bed235a..f7bb94f6e81cf8d58ec6a3b3ec379d4056098329 100644 --- a/lib/avx512/mb_mgr_aes128_cbcs_1_9_flush_avx512.asm +++ 
b/lib/avx512_t2/mb_mgr_aes128_cbcs_1_9_flush_avx512.asm @@ -26,8 +26,8 @@ ;; %define CBCS -%include "avx512/mb_mgr_aes128_cbc_enc_flush_avx512.asm" -%include "include/cet.inc" +%include "avx512_t2/mb_mgr_aes128_cbc_enc_flush_avx512.asm" + %define AES_CBCS_ENC_X16 aes_cbcs_1_9_enc_128_vaes_avx512 %define FLUSH_JOB_AES_CBCS_ENC flush_job_aes128_cbcs_1_9_enc_vaes_avx512 @@ -39,7 +39,6 @@ extern AES_CBCS_ENC_X16 ; arg 2 : job MKGLOBAL(FLUSH_JOB_AES_CBCS_ENC,function,internal) FLUSH_JOB_AES_CBCS_ENC: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -16 @@ -198,6 +197,11 @@ len_is_0: %endif return: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else + vzeroupper +%endif ;; SAFE_DATA mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] diff --git a/lib/avx512/mb_mgr_aes128_cbcs_1_9_submit_avx512.asm b/lib/avx512_t2/mb_mgr_aes128_cbcs_1_9_submit_avx512.asm similarity index 98% rename from lib/avx512/mb_mgr_aes128_cbcs_1_9_submit_avx512.asm rename to lib/avx512_t2/mb_mgr_aes128_cbcs_1_9_submit_avx512.asm index ae6569ae24c7c8b74cc3d4b0d528235081e8b954..6497f60fc2cc96d8c4361234a85847023019a16f 100644 --- a/lib/avx512/mb_mgr_aes128_cbcs_1_9_submit_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes128_cbcs_1_9_submit_avx512.asm @@ -26,8 +26,7 @@ ;; %define CBCS -%include "avx512/mb_mgr_aes128_cbc_enc_submit_avx512.asm" -%include "include/cet.inc" +%include "avx512_t2/mb_mgr_aes128_cbc_enc_submit_avx512.asm" %define AES_CBCS_ENC_X16 aes_cbcs_1_9_enc_128_vaes_avx512 %define NUM_KEYS 11 @@ -41,7 +40,6 @@ extern AES_CBCS_ENC_X16 ; arg 2 : job MKGLOBAL(SUBMIT_JOB_AES_CBCS_ENC,function,internal) SUBMIT_JOB_AES_CBCS_ENC: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -16 @@ -197,6 +195,11 @@ len_is_0: %endif return: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else + vzeroupper +%endif ;; SAFE_DATA mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] diff --git a/lib/avx512/mb_mgr_aes128_ccm_auth_submit_flush_x16_vaes_avx512.asm b/lib/avx512_t2/mb_mgr_aes128_ccm_auth_submit_flush_x16_vaes_avx512.asm similarity index 94% rename from lib/avx512/mb_mgr_aes128_ccm_auth_submit_flush_x16_vaes_avx512.asm rename to lib/avx512_t2/mb_mgr_aes128_ccm_auth_submit_flush_x16_vaes_avx512.asm index 23532fadf65f96f0cc64c237014d95d0064cadff..bc5be4525cc34f83d9f6d38ea065a227f6d1e33b 100644 --- a/lib/avx512/mb_mgr_aes128_ccm_auth_submit_flush_x16_vaes_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes128_ccm_auth_submit_flush_x16_vaes_avx512.asm @@ -28,10 +28,10 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" %include "include/memcpy.asm" +%include "include/clear_regs.asm" %ifndef AES_CBC_MAC %define AES_CBC_MAC aes128_cbc_mac_vaes_avx512 @@ -495,7 +495,30 @@ endstruc COPY_IV_KEYS_TO_NULL_LANES tmp2, tmp4, tmp3, xmm4, xmm5, k6 ;; Find min length for lanes 0-7 - vphminposuw min_len_idx, XWORD(ccm_lens) + vphminposuw min_len_idx, XWORD(ccm_lens) + jmp %%_ccm_round + +%%_ccm_round_flush: + ;; find null lanes + ;; - vphminposuw already issued + ;; - lens updated + vpxorq zmm7, zmm7, zmm7 + vmovdqu64 zmm8, [state + _aes_ccm_job_in_lane + (0*PTR_SZ)] + vmovdqu64 zmm9, [state + _aes_ccm_job_in_lane + (8*PTR_SZ)] + vpcmpq k4, zmm8, zmm7, 0 ; EQ + vpcmpq k5, zmm9, zmm7, 0 ; EQ + kshiftlw k6, k5, 8 + korw k6, k6, k4 ; masks of NULL jobs in k4 (8), k5 (8) and k6 (16) + knotw k7, k6 ; mask of non-NULL jobs + kmovw DWORD(tmp), k7 + bsf DWORD(tmp2), DWORD(tmp) ; index of the 1st set bit in 
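The flush paths above now derive the NULL-lane bookkeeping directly in mask registers (VPCMPQ against zero, KSHIFTLW/KORW, KNOTW) and hand the mask of valid lanes to the dedicated *_flush kernels in the third argument. A small C sketch of the equivalent mask construction is shown below; it assumes a GCC/Clang-style __builtin_ctz and is not part of the library.

```c
#include <stdint.h>
#include <stddef.h>

/* Build a bitmask of lanes that currently hold a job and report the first
 * such lane (the BSF in the assembly). The mask of idle lanes is simply
 * the bitwise NOT of the returned value (KNOTW). */
static uint16_t used_lane_mask(void *const job_in_lane[16],
                               unsigned *first_used_lane)
{
        uint16_t used = 0;

        for (unsigned i = 0; i < 16; i++)
                if (job_in_lane[i] != NULL)
                        used |= (uint16_t)(1u << i);

        if (used != 0 && first_used_lane != NULL)
                *first_used_lane = (unsigned)__builtin_ctz(used);

        return used; /* passed to the _flush kernel as the valid-lane mask */
}
```

The first used lane found this way supplies the in/out pointers broadcast into the idle lanes, whose lengths are set to UINT16_MAX so the x16 kernel never selects them as the minimum.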
tmp + + ;; copy good lane data into NULL lanes + mov tmp, [state + _aes_ccm_args_in + tmp2*8] + vpbroadcastq zmm8, tmp + vmovdqa64 [state + _aes_ccm_args_in + (0*PTR_SZ)]{k4}, zmm8 + vmovdqa64 [state + _aes_ccm_args_in + (8*PTR_SZ)]{k5}, zmm8 + %endif ; end FLUSH %%_ccm_round: @@ -602,7 +625,9 @@ endstruc %endif ;; SAFE_DATA %%_return: -%ifndef SAFE_DATA +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper %endif mov rbx, [rsp + _gpr_save + 8*0] @@ -653,8 +678,11 @@ endstruc vpbroadcastw ccm_lens{k1}, WORD(tmp) vmovdqa64 [state + _aes_cmac_lens], ccm_lens vphminposuw min_len_idx, XWORD(ccm_lens) - +%ifidn %%SUBMIT_FLUSH, SUBMIT jmp %%_ccm_round +%else + jmp %%_ccm_round_flush +%endif %%_prepare_partial_block_to_auth: ; Check if partial block needs to be hashed @@ -685,8 +713,11 @@ endstruc %%_finish_partial_block_copy: vmovdqa [init_block_addr], xtmp0 mov [state + _aes_ccm_args_in + min_idx * 8], init_block_addr - +%ifidn %%SUBMIT_FLUSH, SUBMIT jmp %%_ccm_round +%else + jmp %%_ccm_round_flush +%endif %endmacro align 64 @@ -695,14 +726,12 @@ align 64 ; arg 2 : job MKGLOBAL(SUBMIT_JOB_AES_CCM_AUTH,function,internal) SUBMIT_JOB_AES_CCM_AUTH: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_CCM_AUTH_AVX SUBMIT ; IMB_JOB * flush_job_aes128/256_ccm_auth_vaes_avx512(MB_MGR_CCM_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_AES_CCM_AUTH,function,internal) FLUSH_JOB_AES_CCM_AUTH: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_CCM_AUTH_AVX FLUSH mksection stack-noexec diff --git a/lib/avx512/mb_mgr_aes128_cmac_submit_flush_x16_vaes_avx512.asm b/lib/avx512_t2/mb_mgr_aes128_cmac_submit_flush_x16_vaes_avx512.asm similarity index 82% rename from lib/avx512/mb_mgr_aes128_cmac_submit_flush_x16_vaes_avx512.asm rename to lib/avx512_t2/mb_mgr_aes128_cmac_submit_flush_x16_vaes_avx512.asm index 156b6c70864aeb6440dd93933d432d0fae4847fc..9a1acc8a84ea12f9e6fe5571f0aa01b64c5332de 100644 --- a/lib/avx512/mb_mgr_aes128_cmac_submit_flush_x16_vaes_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes128_cmac_submit_flush_x16_vaes_avx512.asm @@ -28,18 +28,20 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" +%include "include/clear_regs.asm" %ifndef AES_CBC_MAC %define AES_CBC_MAC aes128_cbc_mac_vaes_avx512 +%define AES_CBC_MAC_FLUSH aes128_cbc_mac_flush_vaes_avx512 %define SUBMIT_JOB_AES_CMAC_AUTH submit_job_aes128_cmac_auth_vaes_avx512 %define FLUSH_JOB_AES_CMAC_AUTH flush_job_aes128_cmac_auth_vaes_avx512 %define NUM_KEYS 11 %endif extern AES_CBC_MAC +extern AES_CBC_MAC_FLUSH mksection .rodata default rel @@ -51,9 +53,11 @@ mksection .text %ifdef LINUX %define arg1 rdi %define arg2 rsi +%define arg3 rdx %else %define arg1 rcx %define arg2 rdx +%define arg3 r8 %endif %define state arg1 @@ -148,71 +152,6 @@ endstruc %endif %endmacro -; copy IV's and round keys into NULL lanes -%macro COPY_IV_KEYS_TO_NULL_LANES 6 -%define %%IDX %1 ; [in] GP with good lane idx (scaled x16) -%define %%NULL_MASK %2 ; [clobbered] GP to store NULL lane mask -%define %%KEY_TAB %3 ; [clobbered] GP to store key table pointer -%define %%XTMP1 %4 ; [clobbered] temp XMM reg -%define %%XTMP2 %5 ; [clobbered] temp XMM reg -%define %%MASK_REG %6 ; [in] mask register - - vmovdqa64 %%XTMP1, [state + _aes_cmac_args_IV + %%IDX] - lea %%KEY_TAB, [state + _aes_cmac_args_key_tab] - kmovw DWORD(%%NULL_MASK), %%MASK_REG - -%assign j 0 ; outer loop to iterate through round keys -%rep 15 - vmovdqa64 %%XTMP2, [%%KEY_TAB + j + %%IDX] - 
-%assign k 0 ; inner loop to iterate through lanes -%rep 16 - bt %%NULL_MASK, k - jnc %%_skip_copy %+ j %+ _ %+ k - -%if j == 0 ;; copy IVs for each lane just once - vmovdqa64 [state + _aes_cmac_args_IV + (k*16)], %%XTMP1 -%endif - ;; copy key for each lane - vmovdqa64 [%%KEY_TAB + j + (k*16)], %%XTMP2 -%%_skip_copy %+ j %+ _ %+ k: -%assign k (k + 1) -%endrep - -%assign j (j + 256) -%endrep - -%endmacro - -; clear IVs, scratch buffers and round key's in NULL lanes -%macro CLEAR_IV_KEYS_SCRATCH_IN_NULL_LANES 3 -%define %%NULL_MASK %1 ; [clobbered] GP to store NULL lane mask -%define %%XTMP %2 ; [clobbered] temp XMM reg -%define %%MASK_REG %3 ; [in] mask register - - vpxorq %%XTMP, %%XTMP - kmovw DWORD(%%NULL_MASK), %%MASK_REG -%assign k 0 ; outer loop to iterate through lanes -%rep 16 - bt %%NULL_MASK, k - jnc %%_skip_clear %+ k - - ;; clean lane scratch and IV buffers - vmovdqa64 [state + _aes_cmac_scratch + (k*16)], %%XTMP - vmovdqa64 [state + _aes_cmac_args_IV + (k*16)], %%XTMP - -%assign j 0 ; inner loop to iterate through round keys -%rep NUM_KEYS - vmovdqa64 [state + _aes_cmac_args_key_tab + j + (k*16)], %%XTMP -%assign j (j + 256) - -%endrep -%%_skip_clear %+ k: -%assign k (k + 1) -%endrep - -%endmacro - ;;; =========================================================================== ;;; AES CMAC job submit & flush ;;; =========================================================================== @@ -366,21 +305,15 @@ endstruc vmovdqu64 zmm2, [state + _aes_cmac_job_in_lane + (8*PTR_SZ)] vpcmpq k1, zmm1, zmm0, 4 ; NEQ vpcmpq k2, zmm2, zmm0, 4 ; NEQ - kmovw DWORD(tmp), k1 - kmovw DWORD(tmp4), k2 - mov DWORD(tmp2), DWORD(tmp4) - shl DWORD(tmp2), 8 - or DWORD(tmp2), DWORD(tmp) ; mask of non-null jobs in tmp2 - not BYTE(tmp) - kmovw k4, DWORD(tmp) - not BYTE(tmp4) - kmovw k5, DWORD(tmp4) - mov DWORD(tmp), DWORD(tmp2) - not WORD(tmp) - kmovw k6, DWORD(tmp) ; mask of NULL jobs in k4, k5 and k6 - mov DWORD(tmp), DWORD(tmp2) + + kshiftlw k2, k2, 8 + korw k6, k2, k1 + kmovw DWORD(arg3), k6 ; mask of non-null lanes in arg3 + knotw k6, k6 + kmovw k4, k6 + kshiftrw k5, k4, 8 ; mask of NULL jobs in k4, k5 and k6 xor tmp2, tmp2 - bsf WORD(tmp2), WORD(tmp) ; index of the 1st set bit in tmp2 + bsf WORD(tmp2), WORD(arg3) ; index of the 1st set bit in tmp2 ;; copy good lane data into NULL lanes mov tmp, [state + _aes_cmac_args_in + tmp2*8] @@ -395,12 +328,6 @@ endstruc vmovdqu16 ymm0{k6}, ymm3 vmovdqa64 [state + _aes_cmac_lens], ymm0 - ;; scale up good lane idx before copying IV and keys - shl tmp2, 4 - - ;; - copy IV and round keys to null lanes - COPY_IV_KEYS_TO_NULL_LANES tmp2, tmp4, tmp3, xmm4, xmm5, k6 - ;; Find min length for lanes 0-7 vphminposuw xmm2, xmm0 @@ -428,7 +355,12 @@ endstruc ; "state" and "args" are the same address, arg1 ; len2 is arg2 +%ifidn %%SUBMIT_FLUSH, SUBMIT call AES_CBC_MAC +%else + ; valid lane mask is arg3 + call AES_CBC_MAC_FLUSH +%endif ; state and idx are intact vmovdqa ymm0, [state + _aes_cmac_lens] ; preload lens @@ -442,16 +374,9 @@ endstruc ; Set len to 16 mov tmp3, 16 -%ifndef LINUX - mov tmp2, rcx ; save rcx -%endif - mov rcx, idx - mov DWORD(tmp4), 1 - shl DWORD(tmp4), cl -%ifndef LINUX - mov rcx, tmp2 ; restore rcx -%endif - kmovq k1, tmp4 + xor tmp4, tmp4 + bts WORD(tmp4), WORD(idx) + kmovw k1, DWORD(tmp4) vpbroadcastw ymm1, WORD(tmp3) vmovdqu16 ymm0{k1}, ymm1 @@ -478,6 +403,11 @@ endstruc kshiftrw k5, k4, 8 ;; lanes 8-15 mask in k5 vmovdqa64 [state + _aes_cmac_args_in + (0*PTR_SZ)]{k4}, zmm1 vmovdqa64 [state + _aes_cmac_args_in + (8*PTR_SZ)]{k5}, zmm1 + + ;; 
reset valid lanes in arg3 + knotw k4, k6 + kmovw DWORD(arg3), k4 + %else ;; only update processed lane input pointer on submit mov [state + _aes_cmac_args_in + idx*8], m_last @@ -537,7 +467,6 @@ endstruc %ifdef SAFE_DATA vpxor xmm0, xmm0 -%ifidn %%SUBMIT_FLUSH, SUBMIT ;; Clear IV and scratch memory of returned job shl idx, 4 vmovdqa [state + _aes_cmac_scratch + idx], xmm0 @@ -550,24 +479,16 @@ endstruc %assign round (round + 1) %endrep -%else - ;; Clear keys and scratch memory of returned job and "NULL lanes" - xor DWORD(tmp2), DWORD(tmp2) - bts DWORD(tmp2), DWORD(idx) - kmovw k1, DWORD(tmp2) - korw k6, k1, k6 - - ;; Clear IVs, keys and scratch buffers of returned job and "NULL lanes" - ;; (k6 contains the mask of the jobs) - CLEAR_IV_KEYS_SCRATCH_IN_NULL_LANES tmp4, xmm0, k6 -%endif ;; SUBMIT +%endif ;; SAFE_DATA +%%_return: +%ifdef SAFE_DATA + clear_all_zmms_asm %else vzeroupper %endif ;; SAFE_DATA -%%_return: - mov rbx, [rsp + _gpr_save + 8*0] + mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] mov r13, [rsp + _gpr_save + 8*3] @@ -715,14 +636,12 @@ align 64 ; arg 2 : job MKGLOBAL(SUBMIT_JOB_AES_CMAC_AUTH,function,internal) SUBMIT_JOB_AES_CMAC_AUTH: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_CMAC_VAES_AVX512 SUBMIT ; IMB_JOB * flush_job_aes_cmac_auth_vaes_avx512(MB_MGR_CMAC_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_AES_CMAC_AUTH,function,internal) FLUSH_JOB_AES_CMAC_AUTH: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_CMAC_VAES_AVX512 FLUSH mksection stack-noexec diff --git a/lib/avx512/mb_mgr_aes128_xcbc_submit_flush_x16_vaes_avx512.asm b/lib/avx512_t2/mb_mgr_aes128_xcbc_submit_flush_x16_vaes_avx512.asm similarity index 81% rename from lib/avx512/mb_mgr_aes128_xcbc_submit_flush_x16_vaes_avx512.asm rename to lib/avx512_t2/mb_mgr_aes128_xcbc_submit_flush_x16_vaes_avx512.asm index daaa17cbbe0618d72a56eba2a4af26a3f1a6276c..ee900051ea17ae3dbf56122365700e940acd72d9 100644 --- a/lib/avx512/mb_mgr_aes128_xcbc_submit_flush_x16_vaes_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes128_xcbc_submit_flush_x16_vaes_avx512.asm @@ -28,19 +28,21 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/memcpy.asm" %include "include/const.inc" +%include "include/clear_regs.asm" %ifndef AES_XCBC_X16 %define AES_XCBC_X16 aes_xcbc_mac_128_vaes_avx512 +%define AES_XCBC_X16_FLUSH aes_xcbc_mac_128_flush_vaes_avx512 %define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_vaes_avx512 %define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_vaes_avx512 %define NUM_KEYS 11 %endif extern AES_XCBC_X16 +extern AES_XCBC_X16_FLUSH mksection .rodata default rel @@ -58,9 +60,11 @@ mksection .text %ifdef LINUX %define arg1 rdi %define arg2 rsi +%define arg3 rdx %else %define arg1 rcx %define arg2 rdx +%define arg3 r8 %endif %define state arg1 @@ -138,70 +142,6 @@ endstruc vextracti64x2 [%%COL + ROW*10], %%ZTMP, 2 %endmacro -; copy IV's and round keys into NULL lanes -%macro COPY_IV_KEYS_TO_NULL_LANES 6 -%define %%IDX %1 ; [in] GP with good lane idx (scaled x16) -%define %%NULL_MASK %2 ; [clobbered] GP to store NULL lane mask -%define %%KEY_TAB %3 ; [clobbered] GP to store key table pointer -%define %%XTMP1 %4 ; [clobbered] temp XMM reg -%define %%XTMP2 %5 ; [clobbered] temp XMM reg -%define %%MASK_REG %6 ; [in] mask register - - vmovdqa64 %%XTMP1, [state + _aes_xcbc_args_ICV + %%IDX] - lea %%KEY_TAB, [state + _aes_xcbc_args_key_tab] - kmovw DWORD(%%NULL_MASK), %%MASK_REG - 
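Under SAFE_DATA the submit/flush paths above clear the returned lane's IV, scratch area and expanded keys with a fixed-stride store loop over the key table. A rough C equivalent of that stride calculation is sketched below (illustrative only): the table holds NUM_KEYS rounds of 16 lanes x 16 bytes each, so consecutive rounds of one lane are 16*16 bytes apart.

```c
#include <string.h>
#include <stdint.h>

/* Zero one lane's expanded round keys in a round-major key table
 * (NUM_KEYS rounds x 16 lanes x 16 bytes), matching the "%rep NUM_KEYS"
 * store sequence in the diff. */
static void clear_lane_round_keys(uint8_t *key_tab, unsigned lane,
                                  unsigned num_keys)
{
        for (unsigned round = 0; round < num_keys; round++)
                memset(key_tab + round * (16 * 16) + lane * 16, 0, 16);
}
```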
-%assign j 0 ; outer loop to iterate through round keys -%rep NUM_KEYS - vmovdqa64 %%XTMP2, [%%KEY_TAB + j + %%IDX] - -%assign k 0 ; inner loop to iterate through lanes -%rep 16 - bt %%NULL_MASK, k - jnc %%_skip_copy %+ j %+ _ %+ k - -%if j == 0 ;; copy IVs for each lane just once - vmovdqa64 [state + _aes_xcbc_args_ICV + (k*16)], %%XTMP1 -%endif - ;; copy key for each lane - vmovdqa64 [%%KEY_TAB + j + (k*16)], %%XTMP2 -%%_skip_copy %+ j %+ _ %+ k: -%assign k (k + 1) -%endrep - -%assign j (j + 256) -%endrep - -%endmacro - -; clear final block buffers and round key's in NULL lanes -%macro CLEAR_KEYS_FINAL_BLK_IN_NULL_LANES 3 -%define %%NULL_MASK %1 ; [clobbered] GP to store NULL lane mask -%define %%YTMP %2 ; [clobbered] temp YMM reg -%define %%MASK_REG %3 ; [in] mask register - - vpxor %%YTMP, %%YTMP - kmovw DWORD(%%NULL_MASK), %%MASK_REG -%assign k 0 ; outer loop to iterate through lanes -%rep 16 - bt %%NULL_MASK, k - jnc %%_skip_clear %+ k - - ;; clear final blocks and ICV buffers - vmovdqa [state + _aes_xcbc_ldata + k * _XCBC_LANE_DATA_size + _xcbc_final_block], %%YTMP - vmovdqa [state + _aes_xcbc_args_ICV + k * 16], XWORD(%%YTMP) -%assign j 0 ; inner loop to iterate through round keys -%rep NUM_KEYS - vmovdqa [state + _aes_xcbc_args_key_tab + j + (k*16)], XWORD(%%YTMP) -%assign j (j + 256) - -%endrep -%%_skip_clear %+ k: -%assign k (k + 1) -%endrep - -%endmacro - ;;; =========================================================================== ;;; AES XCBC job submit & flush ;;; =========================================================================== @@ -304,10 +244,10 @@ endstruc %assign i (i - 1) %endrep kmovw k6, DWORD(tmp) - movzx tmp3, BYTE(tmp) - kmovw k4, DWORD(tmp3) - shr tmp, 8 - kmovw k5, DWORD(tmp) + kmovw k4, k6 + kshiftrw k5, k6, 8 + kmovw DWORD(arg3), k6 + not WORD(arg3) ; mask of non-null lanes in arg3 mov tmp, [state + _aes_xcbc_args_in + tmp2*8] vpbroadcastq zmm1, tmp @@ -321,12 +261,6 @@ endstruc vmovdqu16 ymm0{k6}, ymm3 vmovdqa64 [state + _aes_xcbc_lens], ymm0 - ;; scale up good lane idx before copying IV and keys - shl tmp2, 4 - - ;; - copy IV and round keys to null lanes - COPY_IV_KEYS_TO_NULL_LANES tmp2, tmp, tmp3, xmm4, xmm5, k6 - ;; Find min length for lanes 0-7 vphminposuw xmm2, xmm0 @@ -355,7 +289,12 @@ endstruc ; "state" and "args" are the same address, arg1 ; len is arg2 - call AES_XCBC_X16 +%ifidn %%SUBMIT_FLUSH, SUBMIT + call AES_XCBC_X16 +%else + ; valid lane mask is arg3 + call AES_XCBC_X16_FLUSH +%endif ; state and idx are intact %%_len_is_0: @@ -370,8 +309,9 @@ endstruc ;; Update lane len vmovdqa64 ymm0, [state + _aes_xcbc_lens] - SHIFT_GP 1, idx, tmp, tmp3, left - kmovq k1, tmp + xor tmp, tmp + bts WORD(tmp), WORD(idx) + kmovq k1, tmp mov tmp3, 16 vpbroadcastw ymm1, WORD(tmp3) @@ -399,6 +339,10 @@ endstruc kshiftrw k5, k4, 8 ;; lanes 8-15 mask in k5 vmovdqa64 [state + _aes_xcbc_args_in + (0*PTR_SZ)]{k4}, zmm1 vmovdqa64 [state + _aes_xcbc_args_in + (8*PTR_SZ)]{k5}, zmm1 + + ;; reset valid lanes in arg3 + knotw k4, k6 + kmovw DWORD(arg3), k4 %else ;; only update processed lane input pointer on submit mov [state + _aes_xcbc_args_in + 8*idx], tmp @@ -426,8 +370,7 @@ endstruc vmovdqu8 [icv]{k1}, xmm0 %ifdef SAFE_DATA - vpxor ymm0, ymm0 -%ifidn %%SUBMIT_FLUSH, SUBMIT + vpxor xmm0, xmm0, xmm0 ;; Clear final block (32 bytes) vmovdqa [lane_data + _xcbc_final_block], ymm0 @@ -437,23 +380,16 @@ endstruc vmovdqa [state + _aes_xcbc_args_key_tab + round * (16*16) + idx], xmm0 %assign round (round + 1) %endrep - -%else ;; FLUSH - ;; Clear keys and final blocks of 
returned job and "NULL lanes" - shr idx, 4 ;; divide by 16 to restore lane idx - xor DWORD(tmp), DWORD(tmp) - bts DWORD(tmp), DWORD(idx) - kmovw k1, DWORD(tmp) - korw k6, k1, k6 - ;; k6 contains the mask of the jobs - CLEAR_KEYS_FINAL_BLK_IN_NULL_LANES tmp, ymm0, k6 -%endif -%else - vzeroupper %endif %%_return: +%ifdef SAFE_DATA + clear_all_zmms_asm +%else + vzeroupper +%endif ;; SAFE_DATA + mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -514,14 +450,12 @@ align 64 ; arg 2 : job MKGLOBAL(SUBMIT_JOB_AES_XCBC,function,internal) SUBMIT_JOB_AES_XCBC: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_XCBC_VAES_AVX512 SUBMIT ; IMB_JOB * flush_job_aes_xcbc_vaes_avx512(MB_MGR_XCBC_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_AES_XCBC,function,internal) FLUSH_JOB_AES_XCBC: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_XCBC_VAES_AVX512 FLUSH mksection stack-noexec diff --git a/lib/avx512/mb_mgr_aes192_cbc_enc_flush_avx512.asm b/lib/avx512_t2/mb_mgr_aes192_cbc_enc_flush_avx512.asm similarity index 93% rename from lib/avx512/mb_mgr_aes192_cbc_enc_flush_avx512.asm rename to lib/avx512_t2/mb_mgr_aes192_cbc_enc_flush_avx512.asm index dd6d9e0a744748d40b134b9297a927240aa10a19..8a0416c940a4c9cef119feb9c6b9d759da51c914 100644 --- a/lib/avx512/mb_mgr_aes192_cbc_enc_flush_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes192_cbc_enc_flush_avx512.asm @@ -25,7 +25,7 @@ ;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;; -%define AES_CBC_ENC_X16 aes_cbc_enc_192_vaes_avx512 +%define AES_CBC_ENC_X16 aes_cbc_enc_192_flush_vaes_avx512 %define FLUSH_JOB_AES_ENC flush_job_aes192_enc_vaes_avx512 %define NUM_KEYS 13 -%include "avx512/mb_mgr_aes128_cbc_enc_flush_avx512.asm" +%include "avx512_t2/mb_mgr_aes128_cbc_enc_flush_avx512.asm" diff --git a/lib/avx512/mb_mgr_aes192_cbc_enc_submit_avx512.asm b/lib/avx512_t2/mb_mgr_aes192_cbc_enc_submit_avx512.asm similarity index 96% rename from lib/avx512/mb_mgr_aes192_cbc_enc_submit_avx512.asm rename to lib/avx512_t2/mb_mgr_aes192_cbc_enc_submit_avx512.asm index 8ec9458f47ab124c432c5d29f5dcdb43e00e448c..1bf43590b9c2ef67187d05db71876b13705cf187 100644 --- a/lib/avx512/mb_mgr_aes192_cbc_enc_submit_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes192_cbc_enc_submit_avx512.asm @@ -28,4 +28,4 @@ %define AES_CBC_ENC_X16 aes_cbc_enc_192_vaes_avx512 %define SUBMIT_JOB_AES_ENC submit_job_aes192_enc_vaes_avx512 %define NUM_KEYS 13 -%include "avx512/mb_mgr_aes128_cbc_enc_submit_avx512.asm" +%include "avx512_t2/mb_mgr_aes128_cbc_enc_submit_avx512.asm" diff --git a/lib/avx512/mb_mgr_aes256_cbc_enc_flush_avx512.asm b/lib/avx512_t2/mb_mgr_aes256_cbc_enc_flush_avx512.asm similarity index 93% rename from lib/avx512/mb_mgr_aes256_cbc_enc_flush_avx512.asm rename to lib/avx512_t2/mb_mgr_aes256_cbc_enc_flush_avx512.asm index 3a057bf71bdee66db1d4e6a166c1a8e5d8bec1e5..a3ec6b0cc48bbcc48092e973c793cc35c3123af1 100644 --- a/lib/avx512/mb_mgr_aes256_cbc_enc_flush_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes256_cbc_enc_flush_avx512.asm @@ -25,7 +25,7 @@ ;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;; -%define AES_CBC_ENC_X16 aes_cbc_enc_256_vaes_avx512 +%define AES_CBC_ENC_X16 aes_cbc_enc_256_flush_vaes_avx512 %define FLUSH_JOB_AES_ENC flush_job_aes256_enc_vaes_avx512 %define NUM_KEYS 15 -%include "avx512/mb_mgr_aes128_cbc_enc_flush_avx512.asm" +%include "avx512_t2/mb_mgr_aes128_cbc_enc_flush_avx512.asm" diff --git a/lib/avx512/mb_mgr_aes256_cbc_enc_submit_avx512.asm b/lib/avx512_t2/mb_mgr_aes256_cbc_enc_submit_avx512.asm similarity index 96% rename from lib/avx512/mb_mgr_aes256_cbc_enc_submit_avx512.asm rename to lib/avx512_t2/mb_mgr_aes256_cbc_enc_submit_avx512.asm index afb7f1d22f7d8495ba8484323f273552d0e09b93..3e001baf8c712e1bc46160b8a7f53238a76848f8 100644 --- a/lib/avx512/mb_mgr_aes256_cbc_enc_submit_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes256_cbc_enc_submit_avx512.asm @@ -28,4 +28,4 @@ %define AES_CBC_ENC_X16 aes_cbc_enc_256_vaes_avx512 %define SUBMIT_JOB_AES_ENC submit_job_aes256_enc_vaes_avx512 %define NUM_KEYS 15 -%include "avx512/mb_mgr_aes128_cbc_enc_submit_avx512.asm" +%include "avx512_t2/mb_mgr_aes128_cbc_enc_submit_avx512.asm" diff --git a/lib/avx512/mb_mgr_aes256_ccm_auth_submit_flush_x16_vaes_avx512.asm b/lib/avx512_t2/mb_mgr_aes256_ccm_auth_submit_flush_x16_vaes_avx512.asm similarity index 95% rename from lib/avx512/mb_mgr_aes256_ccm_auth_submit_flush_x16_vaes_avx512.asm rename to lib/avx512_t2/mb_mgr_aes256_ccm_auth_submit_flush_x16_vaes_avx512.asm index 8356e8540a91494cd780f751ad50d5a8c2d5ad79..6bfdf6ea539c50821c071a6a16400fdf0dc9dc96 100644 --- a/lib/avx512/mb_mgr_aes256_ccm_auth_submit_flush_x16_vaes_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes256_ccm_auth_submit_flush_x16_vaes_avx512.asm @@ -32,4 +32,4 @@ %define FLUSH_JOB_AES_CCM_AUTH flush_job_aes256_ccm_auth_vaes_avx512 %endif -%include "avx512/mb_mgr_aes128_ccm_auth_submit_flush_x16_vaes_avx512.asm" +%include "avx512_t2/mb_mgr_aes128_ccm_auth_submit_flush_x16_vaes_avx512.asm" diff --git a/lib/avx512/mb_mgr_aes256_cmac_submit_flush_x16_vaes_avx512.asm b/lib/avx512_t2/mb_mgr_aes256_cmac_submit_flush_x16_vaes_avx512.asm similarity index 93% rename from lib/avx512/mb_mgr_aes256_cmac_submit_flush_x16_vaes_avx512.asm rename to lib/avx512_t2/mb_mgr_aes256_cmac_submit_flush_x16_vaes_avx512.asm index 8a43c6c375eb598bb51e18cf3c8995a305859f24..d0224cf06849f73e278dd57230b717603a851506 100644 --- a/lib/avx512/mb_mgr_aes256_cmac_submit_flush_x16_vaes_avx512.asm +++ b/lib/avx512_t2/mb_mgr_aes256_cmac_submit_flush_x16_vaes_avx512.asm @@ -27,9 +27,10 @@ %ifndef AES_CBC_MAC %define AES_CBC_MAC aes256_cbc_mac_vaes_avx512 +%define AES_CBC_MAC_FLUSH aes256_cbc_mac_flush_vaes_avx512 %define SUBMIT_JOB_AES_CMAC_AUTH submit_job_aes256_cmac_auth_vaes_avx512 %define FLUSH_JOB_AES_CMAC_AUTH flush_job_aes256_cmac_auth_vaes_avx512 %define NUM_KEYS 15 %endif -%include "avx512/mb_mgr_aes128_cmac_submit_flush_x16_vaes_avx512.asm" +%include "avx512_t2/mb_mgr_aes128_cmac_submit_flush_x16_vaes_avx512.asm" diff --git a/lib/avx512_t2/mb_mgr_avx512_t2.c b/lib/avx512_t2/mb_mgr_avx512_t2.c new file mode 100644 index 0000000000000000000000000000000000000000..5447b79714ce7974fc9f46a3bf155d413e8ee1bb --- /dev/null +++ b/lib/avx512_t2/mb_mgr_avx512_t2.c @@ -0,0 +1,602 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the 
following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#include <stdint.h> +#include <string.h> +#include <stdio.h> + +#define AVX512 + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/kasumi_interface.h" +#include "include/zuc_internal.h" +#include "include/snow3g.h" +#include "include/gcm.h" +#include "include/chacha20_poly1305.h" +#include "include/snow3g_submit.h" + +#include "include/save_xmms.h" +#include "include/des.h" +#include "include/gcm.h" +#include "include/cpu_feature.h" +#include "include/noaesni.h" +#include "include/aesni_emu.h" +#include "include/error.h" + +#include "include/arch_avx_type1.h" /* AESNI */ +#include "include/arch_avx2_type1.h" /* MD5 */ +#include "include/arch_avx512_type1.h" +#include "include/arch_avx512_type2.h" + +#include "include/ooo_mgr_reset.h" + +#define SAVE_XMMS save_xmms_avx +#define RESTORE_XMMS restore_xmms_avx + +/* JOB API */ +#define SUBMIT_JOB submit_job_avx512_t2 +#define FLUSH_JOB flush_job_avx512_t2 +#define QUEUE_SIZE queue_size_avx512_t2 +#define SUBMIT_JOB_NOCHECK submit_job_nocheck_avx512_t2 +#define GET_NEXT_JOB get_next_job_avx512_t2 +#define GET_COMPLETED_JOB get_completed_job_avx512_t2 +#define SUBMIT_BURST submit_burst_avx512_t2 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_avx512_t2 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_avx512_t2 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_avx512_t2 +#define SUBMIT_HASH_BURST submit_hash_burst_avx512_t2 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_avx512_t2 +#define GET_NEXT_BURST get_next_burst_avx512_t2 +#define SUBMIT_BURST submit_burst_avx512_t2 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_avx512_t2 +#define FLUSH_BURST flush_burst_avx512_t2 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_avx512_t2 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_avx512_t2 +#define SUBMIT_HASH_BURST submit_hash_burst_avx512_t2 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_avx512_t2 + +/* Hash */ +#define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_AVX512_T2 +#define FLUSH_JOB_HASH FLUSH_JOB_HASH_AVX512_T2 + +/* Cipher encrypt / decrypt */ +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_AVX512_T2 +#define FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_AVX512_T2 +#define SUBMIT_JOB_CIPHER_DEC 
SUBMIT_JOB_CIPHER_DEC_AVX512_T2 + +/* AES-GCM */ +#define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_vaes_avx512 +#define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_vaes_avx512 +#define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_vaes_avx512 +#define AES_GCM_ENC_IV_192 aes_gcm_enc_var_iv_192_vaes_avx512 +#define AES_GCM_DEC_IV_256 aes_gcm_dec_var_iv_256_vaes_avx512 +#define AES_GCM_ENC_IV_256 aes_gcm_enc_var_iv_256_vaes_avx512 + +#define SUBMIT_JOB_AES_GCM_DEC vaes_submit_gcm_dec_avx512 +#define SUBMIT_JOB_AES_GCM_ENC vaes_submit_gcm_enc_avx512 + +/* AES-CBC */ +#define SUBMIT_JOB_AES_CBC_128_ENC submit_job_aes128_enc_vaes_avx512 +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_vaes_avx512 +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_vaes_avx512 + +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_vaes_avx512 +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_vaes_avx512 +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_vaes_avx512 + +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_vaes_avx512 +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_vaes_avx512 +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_vaes_avx512 + +#define AES_CBC_DEC_128 aes_cbc_dec_128_vaes_avx512 +#define AES_CBC_DEC_192 aes_cbc_dec_192_vaes_avx512 +#define AES_CBC_DEC_256 aes_cbc_dec_256_vaes_avx512 + +/* AES-CBCS */ +#define SUBMIT_JOB_AES128_CBCS_1_9_ENC submit_job_aes128_cbcs_1_9_enc_vaes_avx512 +#define FLUSH_JOB_AES128_CBCS_1_9_ENC flush_job_aes128_cbcs_1_9_enc_vaes_avx512 +#define SUBMIT_JOB_AES128_CBCS_1_9_DEC submit_job_aes128_cbcs_1_9_dec_vaes_avx512 +#define AES_CBCS_1_9_DEC_128 aes_cbcs_1_9_dec_128_vaes_avx512 + +/* AES-ECB */ +#define SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_avx512 +#define SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_avx512 +#define SUBMIT_JOB_AES_ECB_192_ENC submit_job_aes_ecb_192_enc_avx512 +#define SUBMIT_JOB_AES_ECB_192_DEC submit_job_aes_ecb_192_dec_avx512 +#define SUBMIT_JOB_AES_ECB_256_ENC submit_job_aes_ecb_256_enc_avx512 +#define SUBMIT_JOB_AES_ECB_256_DEC submit_job_aes_ecb_256_dec_avx512 + +#define AES_ECB_ENC_128 aes_ecb_enc_128_vaes_avx512 +#define AES_ECB_ENC_192 aes_ecb_enc_192_vaes_avx512 +#define AES_ECB_ENC_256 aes_ecb_enc_256_vaes_avx512 +#define AES_ECB_DEC_128 aes_ecb_dec_128_vaes_avx512 +#define AES_ECB_DEC_192 aes_ecb_dec_192_vaes_avx512 +#define AES_ECB_DEC_256 aes_ecb_dec_256_vaes_avx512 + +/* AES-CTR */ +#define SUBMIT_JOB_AES_CTR_128 aes_cntr_128_submit_vaes_avx512 +#define SUBMIT_JOB_AES_CTR_192 aes_cntr_192_submit_vaes_avx512 +#define SUBMIT_JOB_AES_CTR_256 aes_cntr_256_submit_vaes_avx512 +#define SUBMIT_JOB_AES_CTR_128_BIT aes_cntr_bit_128_submit_vaes_avx512 +#define SUBMIT_JOB_AES_CTR_192_BIT aes_cntr_bit_192_submit_vaes_avx512 +#define SUBMIT_JOB_AES_CTR_256_BIT aes_cntr_bit_256_submit_vaes_avx512 + +/* AES-CCM */ +#define AES_CNTR_CCM_128 aes_cntr_ccm_128_vaes_avx512 +#define AES_CNTR_CCM_256 aes_cntr_ccm_256_vaes_avx512 + +#define FLUSH_JOB_AES128_CCM_AUTH flush_job_aes128_ccm_auth_vaes_avx512 +#define SUBMIT_JOB_AES128_CCM_AUTH submit_job_aes128_ccm_auth_vaes_avx512 + +#define FLUSH_JOB_AES256_CCM_AUTH flush_job_aes256_ccm_auth_vaes_avx512 +#define SUBMIT_JOB_AES256_CCM_AUTH submit_job_aes256_ccm_auth_vaes_avx512 + +/* AES-CMAC */ +#define FLUSH_JOB_AES128_CMAC_AUTH flush_job_aes128_cmac_auth_vaes_avx512 +#define SUBMIT_JOB_AES128_CMAC_AUTH submit_job_aes128_cmac_auth_vaes_avx512 + +#define FLUSH_JOB_AES256_CMAC_AUTH flush_job_aes256_cmac_auth_vaes_avx512 +#define 
SUBMIT_JOB_AES256_CMAC_AUTH submit_job_aes256_cmac_auth_vaes_avx512 + +/* AES-CFB */ +#define AES_CFB_128_ONE aes_cfb_128_one_avx512 +#define AES_CFB_256_ONE aes_cfb_256_one_avx512 + +/* AES-XCBC */ +#define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_vaes_avx512 +#define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_vaes_avx512 + +/* PON */ +#define SUBMIT_JOB_PON_ENC submit_job_pon_enc_vaes_avx512 +#define SUBMIT_JOB_PON_DEC submit_job_pon_dec_vaes_avx512 +#define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_vaes_avx512 +#define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_vaes_avx512 + +/* SHA1/224/256/384/512 */ +#define SUBMIT_JOB_SHA1 submit_job_sha1_avx512 +#define FLUSH_JOB_SHA1 flush_job_sha1_avx512 +#define SUBMIT_JOB_SHA224 submit_job_sha224_avx512 +#define FLUSH_JOB_SHA224 flush_job_sha224_avx512 +#define SUBMIT_JOB_SHA256 submit_job_sha256_avx512 +#define FLUSH_JOB_SHA256 flush_job_sha256_avx512 +#define SUBMIT_JOB_SHA384 submit_job_sha384_avx512 +#define FLUSH_JOB_SHA384 flush_job_sha384_avx512 +#define SUBMIT_JOB_SHA512 submit_job_sha512_avx512 +#define FLUSH_JOB_SHA512 flush_job_sha512_avx512 + +/* HMAC-SHA1/224/256/384/512 */ +#define SUBMIT_JOB_HMAC submit_job_hmac_avx512 +#define FLUSH_JOB_HMAC flush_job_hmac_avx512 +#define SUBMIT_JOB_HMAC_SHA_224 submit_job_hmac_sha_224_avx512 +#define FLUSH_JOB_HMAC_SHA_224 flush_job_hmac_sha_224_avx512 +#define SUBMIT_JOB_HMAC_SHA_256 submit_job_hmac_sha_256_avx512 +#define FLUSH_JOB_HMAC_SHA_256 flush_job_hmac_sha_256_avx512 +#define SUBMIT_JOB_HMAC_SHA_384 submit_job_hmac_sha_384_avx512 +#define FLUSH_JOB_HMAC_SHA_384 flush_job_hmac_sha_384_avx512 +#define SUBMIT_JOB_HMAC_SHA_512 submit_job_hmac_sha_512_avx512 +#define FLUSH_JOB_HMAC_SHA_512 flush_job_hmac_sha_512_avx512 +#define SUBMIT_JOB_HMAC_MD5 submit_job_hmac_md5_avx2 +#define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_avx2 + +/* DES & 3DES */ +#define SUBMIT_JOB_DES_CBC_ENC submit_job_des_cbc_enc_avx512 +#define FLUSH_JOB_DES_CBC_ENC flush_job_des_cbc_enc_avx512 + +#define SUBMIT_JOB_DES_CBC_DEC submit_job_des_cbc_dec_avx512 +#define FLUSH_JOB_DES_CBC_DEC flush_job_des_cbc_dec_avx512 + +#define SUBMIT_JOB_3DES_CBC_ENC submit_job_3des_cbc_enc_avx512 +#define FLUSH_JOB_3DES_CBC_ENC flush_job_3des_cbc_enc_avx512 + +#define SUBMIT_JOB_3DES_CBC_DEC submit_job_3des_cbc_dec_avx512 +#define FLUSH_JOB_3DES_CBC_DEC flush_job_3des_cbc_dec_avx512 + +/* DES-DOCSIS */ +#define SUBMIT_JOB_DOCSIS_DES_ENC submit_job_docsis_des_enc_avx512 +#define FLUSH_JOB_DOCSIS_DES_ENC flush_job_docsis_des_enc_avx512 + +#define SUBMIT_JOB_DOCSIS_DES_DEC submit_job_docsis_des_dec_avx512 +#define FLUSH_JOB_DOCSIS_DES_DEC flush_job_docsis_des_dec_avx512 + +/* CHACHA20 & POLY1305 */ +#define SUBMIT_JOB_CHACHA20_ENC_DEC submit_job_chacha20_enc_dec_avx512 +#define SUBMIT_JOB_CHACHA20_POLY1305 aead_chacha20_poly1305_avx512 +#define SUBMIT_JOB_CHACHA20_POLY1305_SGL aead_chacha20_poly1305_sgl_avx512 +#define POLY1305_MAC poly1305_mac_fma_avx512 + +/* ZUC EEA3 & EIA3 */ +#define SUBMIT_JOB_ZUC_EEA3 submit_job_zuc_eea3_gfni_avx512 +#define FLUSH_JOB_ZUC_EEA3 flush_job_zuc_eea3_gfni_avx512 +#define SUBMIT_JOB_ZUC_EIA3 submit_job_zuc_eia3_gfni_avx512 +#define FLUSH_JOB_ZUC_EIA3 flush_job_zuc_eia3_gfni_avx512 +#define SUBMIT_JOB_ZUC256_EEA3 submit_job_zuc256_eea3_gfni_avx512 +#define FLUSH_JOB_ZUC256_EEA3 flush_job_zuc256_eea3_gfni_avx512 +#define SUBMIT_JOB_ZUC256_EIA3 submit_job_zuc256_eia3_gfni_avx512 +#define FLUSH_JOB_ZUC256_EIA3 flush_job_zuc256_eia3_gfni_avx512 + +/* SNOW-V */ +#define SUBMIT_JOB_SNOW_V snow_v_avx 
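
The defines above only map generic names onto the VAES/AVX512 implementations; the architecture-independent submit/flush logic that consumes them is pulled in through mb_mgr_code.h at the bottom of this file. For orientation, a minimal sketch of how an application reaches these entry points through the public job API follows (the IMB_* macros and IMB_JOB fields are taken from the public intel-ipsec-mb.h header; the snippet is illustrative only and not part of this patch):

/*
 * Minimal job API usage sketch: AES128-CBC encrypt of a single buffer.
 * alloc_mb_mgr()/init_mb_mgr_auto() select the best arch at runtime
 * (e.g. the avx512_t2 code paths on VAES-capable CPUs).
 */
#include <intel-ipsec-mb.h>
#include <string.h>

static int
cbc_encrypt_one(const void *enc_keys, const void *dec_keys, const void *iv,
                const void *src, void *dst, const uint64_t len)
{
        IMB_MGR *mgr = alloc_mb_mgr(0);
        IMB_JOB *job;

        if (mgr == NULL)
                return -1;

        init_mb_mgr_auto(mgr, NULL);

        job = IMB_GET_NEXT_JOB(mgr);
        memset(job, 0, sizeof(*job));
        job->cipher_direction = IMB_DIR_ENCRYPT;
        job->chain_order = IMB_ORDER_CIPHER_HASH;
        job->cipher_mode = IMB_CIPHER_CBC;
        job->hash_alg = IMB_AUTH_NULL;
        job->enc_keys = enc_keys;
        job->dec_keys = dec_keys;
        job->key_len_in_bytes = IMB_KEY_128_BYTES;
        job->iv = iv;
        job->iv_len_in_bytes = 16;
        job->src = src;
        job->dst = dst;
        job->msg_len_to_cipher_in_bytes = len;

        job = IMB_SUBMIT_JOB(mgr);
        /* multi-buffer: the job may sit in an OOO lane until flushed */
        while (job == NULL)
                job = IMB_FLUSH_JOB(mgr);

        const int ret = (job->status == IMB_STATUS_COMPLETED) ? 0 : -1;

        free_mb_mgr(mgr);
        return ret;
}
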
+#define SUBMIT_JOB_SNOW_V_AEAD snow_v_aead_init_avx + +/* SNOW3G UE2 & UIA2 */ +static IMB_JOB *submit_snow3g_uea2_job_vaes_avx512(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + if ((job->msg_len_to_cipher_in_bits & 7) || + (job->cipher_start_offset_in_bits & 7)) + return def_submit_snow3g_uea2_job(state, job); + + return submit_job_snow3g_uea2_vaes_avx512(snow3g_uea2_ooo, job); +} + +static IMB_JOB *flush_snow3g_uea2_job_vaes_avx512(IMB_MGR *state) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + return flush_job_snow3g_uea2_vaes_avx512(snow3g_uea2_ooo); +} + +#define SUBMIT_JOB_SNOW3G_UEA2 submit_snow3g_uea2_job_vaes_avx512 +#define FLUSH_JOB_SNOW3G_UEA2 flush_snow3g_uea2_job_vaes_avx512 + +#define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_vaes_avx512 +#define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_vaes_avx512 + +/* AES-DOCSIS */ +#define ETHERNET_FCS ethernet_fcs_avx_local + +__forceinline +IMB_JOB * +SUBMIT_JOB_DOCSIS_SEC_CRC_ENC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job, + const uint64_t key_size); +__forceinline +IMB_JOB * +FLUSH_JOB_DOCSIS_SEC_CRC_ENC(MB_MGR_DOCSIS_AES_OOO *state, + const uint64_t key_size); + +__forceinline +IMB_JOB * +SUBMIT_JOB_DOCSIS_SEC_CRC_DEC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job, + const uint64_t key_size); + +__forceinline +IMB_JOB * +SUBMIT_JOB_DOCSIS128_SEC_DEC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job); + +__forceinline +IMB_JOB * +SUBMIT_JOB_DOCSIS256_SEC_DEC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job); + +static IMB_JOB * +submit_job_docsis128_sec_crc_dec_vaes_avx512(MB_MGR_DOCSIS_AES_OOO *state, + IMB_JOB *job) +{ + (void) state; + + if (job->msg_len_to_hash_in_bytes == 0) { + if (job->msg_len_to_cipher_in_bytes == 0) { + /* NO cipher, NO CRC32 */ + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; + } + + /* Cipher, NO CRC32 */ + return SUBMIT_JOB_DOCSIS128_SEC_DEC(state, job); + } + + /* Cipher + CRC32 // CRC32 */ + aes_docsis128_dec_crc32_vaes_avx512(job); + + return job; +} + +static IMB_JOB * +submit_job_docsis256_sec_crc_dec_vaes_avx512(MB_MGR_DOCSIS_AES_OOO *state, + IMB_JOB *job) +{ + (void) state; + + if (job->msg_len_to_hash_in_bytes == 0) { + if (job->msg_len_to_cipher_in_bytes == 0) { + /* NO cipher, NO CRC32 */ + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; + } + + /* Cipher, NO CRC32 */ + return SUBMIT_JOB_DOCSIS256_SEC_DEC(state, job); + } + + /* Cipher + CRC32 // CRC32 */ + aes_docsis256_dec_crc32_vaes_avx512(job); + + return job; +} + +#define SUBMIT_JOB_DOCSIS128_SEC_CRC_ENC submit_job_aes_docsis128_enc_crc32_vaes_avx512 +#define SUBMIT_JOB_DOCSIS256_SEC_CRC_ENC submit_job_aes_docsis256_enc_crc32_vaes_avx512 +#define FLUSH_JOB_DOCSIS128_SEC_CRC_ENC flush_job_aes_docsis128_enc_crc32_vaes_avx512 +#define FLUSH_JOB_DOCSIS256_SEC_CRC_ENC flush_job_aes_docsis256_enc_crc32_vaes_avx512 +#define SUBMIT_JOB_DOCSIS128_SEC_CRC_DEC submit_job_docsis128_sec_crc_dec_vaes_avx512 +#define SUBMIT_JOB_DOCSIS256_SEC_CRC_DEC submit_job_docsis256_sec_crc_dec_vaes_avx512 + +/* ====================================================================== */ + +static void +reset_ooo_mgrs(IMB_MGR *state) +{ + /* Init AES out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_ooo, 16); + ooo_mgr_aes_reset(state->aes192_ooo, 16); + ooo_mgr_aes_reset(state->aes256_ooo, 16); + + /* DOCSIS SEC BPI (AES CBC + AES CFB for partial block) + * uses same settings as AES CBC. 
+ */ + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 16); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 16); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 16); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 16); + + /* DES, 3DES and DOCSIS DES (DES CBC + DES CFB for partial block) */ + ooo_mgr_des_reset(state->des_enc_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->des_dec_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->des3_enc_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->des3_dec_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->docsis_des_enc_ooo, AVX512_NUM_DES_LANES); + ooo_mgr_des_reset(state->docsis_des_dec_ooo, AVX512_NUM_DES_LANES); + + /* Init ZUC out-of-order fields */ + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 16); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 16); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 16); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 16); + + /* Init HMAC/SHA1 out-of-order fields */ + ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, AVX512_NUM_SHA1_LANES); + + /* Init HMAC/SHA224 out-of-order fields */ + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, + AVX512_NUM_SHA256_LANES); + + /* Init HMAC/SHA256 out-of-order fields */ + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, + AVX512_NUM_SHA256_LANES); + + /* Init HMAC/SHA384 out-of-order fields */ + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + AVX512_NUM_SHA512_LANES); + + /* Init HMAC/SHA512 out-of-order fields */ + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + AVX512_NUM_SHA512_LANES); + + /* Init HMAC/MD5 out-of-order fields */ + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, AVX2_NUM_MD5_LANES); + + /* Init AES/XCBC OOO fields */ + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 16); + + /* Init AES-CCM auth out-of-order fields */ + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 16); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 16); + + /* Init AES-CMAC auth out-of-order fields */ + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 16); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 16); + + /* Init AES CBC-S out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 12); + + /* Init SNOW3G out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 16); + ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 16); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, AVX512_NUM_SHA1_LANES); + + /* Init SHA224 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_224_ooo, AVX512_NUM_SHA256_LANES); + + /* Init SHA256 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, AVX512_NUM_SHA256_LANES); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, AVX512_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, AVX512_NUM_SHA512_LANES); +} + +IMB_DLL_LOCAL void +init_mb_mgr_avx512_t2_internal(IMB_MGR *state, const int reset_mgrs) +{ + /* Check if CPU flags needed for AVX512 interface are present */ + if ((state->features & IMB_CPUFLAGS_AVX512_T2) != + IMB_CPUFLAGS_AVX512_T2) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + + /* Set architecture for future checks */ + state->used_arch = (uint32_t) IMB_ARCH_AVX512; + + if (reset_mgrs) { + reset_ooo_mgrs(state); + + /* Init "in order" components */ + state->next_job = 0; + state->earliest_job = -1; + } + + /* set handlers */ + state->get_next_job = GET_NEXT_JOB; + state->submit_job = SUBMIT_JOB; + state->submit_job_nocheck = SUBMIT_JOB_NOCHECK; + 
state->get_completed_job = GET_COMPLETED_JOB; + state->flush_job = FLUSH_JOB; + state->queue_size = QUEUE_SIZE; + state->get_next_burst = GET_NEXT_BURST; + state->submit_burst = SUBMIT_BURST; + state->submit_burst_nocheck= SUBMIT_BURST_NOCHECK; + state->flush_burst = FLUSH_BURST; + state->submit_cipher_burst = SUBMIT_CIPHER_BURST; + state->submit_cipher_burst_nocheck = SUBMIT_CIPHER_BURST_NOCHECK; + state->submit_hash_burst = SUBMIT_HASH_BURST; + state->submit_hash_burst_nocheck = SUBMIT_HASH_BURST_NOCHECK; + + state->keyexp_128 = aes_keyexp_128_avx512; + state->keyexp_192 = aes_keyexp_192_avx512; + state->keyexp_256 = aes_keyexp_256_avx512; + + state->cmac_subkey_gen_128 = aes_cmac_subkey_gen_avx512; + state->cmac_subkey_gen_256 = aes_cmac_256_subkey_gen_avx512; + + state->xcbc_keyexp = aes_xcbc_expand_key_avx512; + + state->des_key_sched = des_key_schedule; + + state->sha1_one_block = sha1_one_block_avx512; + state->sha1 = sha1_avx512; + state->sha224_one_block = sha224_one_block_avx512; + state->sha224 = sha224_avx512; + state->sha256_one_block = sha256_one_block_avx512; + state->sha256 = sha256_avx512; + state->sha384_one_block = sha384_one_block_avx512; + state->sha384 = sha384_avx512; + state->sha512_one_block = sha512_one_block_avx512; + state->sha512 = sha512_avx512; + state->md5_one_block = md5_one_block_avx512; + + state->aes128_cfb_one = aes_cfb_128_one_avx512; + + state->eea3_1_buffer = zuc_eea3_1_buffer_avx512; + state->eea3_4_buffer = zuc_eea3_4_buffer_avx; + state->eia3_1_buffer = zuc_eia3_1_buffer_avx512; + state->eea3_n_buffer = zuc_eea3_n_buffer_gfni_avx512; + state->eia3_n_buffer = zuc_eia3_n_buffer_gfni_avx512; + + state->f8_1_buffer = kasumi_f8_1_buffer_avx; + state->f8_1_buffer_bit = kasumi_f8_1_buffer_bit_avx; + state->f8_2_buffer = kasumi_f8_2_buffer_avx; + state->f8_3_buffer = kasumi_f8_3_buffer_avx; + state->f8_4_buffer = kasumi_f8_4_buffer_avx; + state->f8_n_buffer = kasumi_f8_n_buffer_avx; + state->f9_1_buffer = kasumi_f9_1_buffer_avx; + state->f9_1_buffer_user = kasumi_f9_1_buffer_user_avx; + state->kasumi_init_f8_key_sched = kasumi_init_f8_key_sched_avx; + state->kasumi_init_f9_key_sched = kasumi_init_f9_key_sched_avx; + state->kasumi_key_sched_size = kasumi_key_sched_size_avx; + + state->snow3g_f8_1_buffer_bit = snow3g_f8_1_buffer_bit_avx512; + state->snow3g_f8_1_buffer = snow3g_f8_1_buffer_avx512; + state->snow3g_f8_2_buffer = snow3g_f8_2_buffer_avx512; + state->snow3g_f8_4_buffer = snow3g_f8_4_buffer_avx512; + state->snow3g_f8_8_buffer = snow3g_f8_8_buffer_avx512; + state->snow3g_f8_n_buffer = snow3g_f8_n_buffer_avx512; + state->snow3g_f8_8_buffer_multikey = snow3g_f8_8_buffer_multikey_avx512; + state->snow3g_f8_n_buffer_multikey = snow3g_f8_n_buffer_multikey_avx512; +#ifndef _WIN32 + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_vaes_avx512; +#else + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_avx512; +#endif + state->snow3g_init_key_sched = snow3g_init_key_sched_avx512; + state->snow3g_key_sched_size = snow3g_key_sched_size_avx512; + + state->hec_32 = hec_32_avx; + state->hec_64 = hec_64_avx; + + state->crc32_ethernet_fcs = ethernet_fcs_avx512; + state->crc16_x25 = crc16_x25_avx512; + state->crc32_sctp = crc32_sctp_avx512; + state->crc24_lte_a = crc24_lte_a_avx512; + state->crc24_lte_b = crc24_lte_b_avx512; + state->crc16_fp_data = crc16_fp_data_avx512; + state->crc11_fp_header = crc11_fp_header_avx512; + state->crc7_fp_header = crc7_fp_header_avx512; + state->crc10_iuup_data = crc10_iuup_data_avx512; + state->crc6_iuup_header = 
crc6_iuup_header_avx512; + state->crc32_wimax_ofdma_data = crc32_wimax_ofdma_data_avx512; + state->crc8_wimax_ofdma_hcs = crc8_wimax_ofdma_hcs_avx512; + + state->chacha20_poly1305_init = init_chacha20_poly1305_fma_avx512; + state->chacha20_poly1305_enc_update = + update_enc_chacha20_poly1305_fma_avx512; + state->chacha20_poly1305_dec_update = + update_dec_chacha20_poly1305_fma_avx512; + state->chacha20_poly1305_finalize = + finalize_chacha20_poly1305_fma_avx512; + + state->gcm128_enc = aes_gcm_enc_128_vaes_avx512; + state->gcm192_enc = aes_gcm_enc_192_vaes_avx512; + state->gcm256_enc = aes_gcm_enc_256_vaes_avx512; + state->gcm128_dec = aes_gcm_dec_128_vaes_avx512; + state->gcm192_dec = aes_gcm_dec_192_vaes_avx512; + state->gcm256_dec = aes_gcm_dec_256_vaes_avx512; + state->gcm128_init = aes_gcm_init_128_vaes_avx512; + state->gcm192_init = aes_gcm_init_192_vaes_avx512; + state->gcm256_init = aes_gcm_init_256_vaes_avx512; + state->gcm128_init_var_iv = aes_gcm_init_var_iv_128_vaes_avx512; + state->gcm192_init_var_iv = aes_gcm_init_var_iv_192_vaes_avx512; + state->gcm256_init_var_iv = aes_gcm_init_var_iv_256_vaes_avx512; + state->gcm128_enc_update = aes_gcm_enc_128_update_vaes_avx512; + state->gcm192_enc_update = aes_gcm_enc_192_update_vaes_avx512; + state->gcm256_enc_update = aes_gcm_enc_256_update_vaes_avx512; + state->gcm128_dec_update = aes_gcm_dec_128_update_vaes_avx512; + state->gcm192_dec_update = aes_gcm_dec_192_update_vaes_avx512; + state->gcm256_dec_update = aes_gcm_dec_256_update_vaes_avx512; + state->gcm128_enc_finalize = aes_gcm_enc_128_finalize_vaes_avx512; + state->gcm192_enc_finalize = aes_gcm_enc_192_finalize_vaes_avx512; + state->gcm256_enc_finalize = aes_gcm_enc_256_finalize_vaes_avx512; + state->gcm128_dec_finalize = aes_gcm_dec_128_finalize_vaes_avx512; + state->gcm192_dec_finalize = aes_gcm_dec_192_finalize_vaes_avx512; + state->gcm256_dec_finalize = aes_gcm_dec_256_finalize_vaes_avx512; + state->gcm128_precomp = aes_gcm_precomp_128_vaes_avx512; + state->gcm192_precomp = aes_gcm_precomp_192_vaes_avx512; + state->gcm256_precomp = aes_gcm_precomp_256_vaes_avx512; + state->gcm128_pre = aes_gcm_pre_128_vaes_avx512; + state->gcm192_pre = aes_gcm_pre_192_vaes_avx512; + state->gcm256_pre = aes_gcm_pre_256_vaes_avx512; + + state->ghash = ghash_vaes_avx512; + state->ghash_pre = ghash_pre_vaes_avx512; + + state->gmac128_init = imb_aes_gmac_init_128_vaes_avx512; + state->gmac192_init = imb_aes_gmac_init_192_vaes_avx512; + state->gmac256_init = imb_aes_gmac_init_256_vaes_avx512; + state->gmac128_update = imb_aes_gmac_update_128_vaes_avx512; + state->gmac192_update = imb_aes_gmac_update_192_vaes_avx512; + state->gmac256_update = imb_aes_gmac_update_256_vaes_avx512; + state->gmac128_finalize = imb_aes_gmac_finalize_128_vaes_avx512; + state->gmac192_finalize = imb_aes_gmac_finalize_192_vaes_avx512; + state->gmac256_finalize = imb_aes_gmac_finalize_256_vaes_avx512; +} + +#include "mb_mgr_code.h" diff --git a/lib/avx512/mb_mgr_snow3g_uea2_submit_flush_vaes_avx512.asm b/lib/avx512_t2/mb_mgr_snow3g_uea2_submit_flush_vaes_avx512.asm similarity index 99% rename from lib/avx512/mb_mgr_snow3g_uea2_submit_flush_vaes_avx512.asm rename to lib/avx512_t2/mb_mgr_snow3g_uea2_submit_flush_vaes_avx512.asm index 94f3c10d0af9351cdf1926b66ca56bd612f65a24..380446c54725a817d17164ba0c944cb490898836 100644 --- a/lib/avx512/mb_mgr_snow3g_uea2_submit_flush_vaes_avx512.asm +++ b/lib/avx512_t2/mb_mgr_snow3g_uea2_submit_flush_vaes_avx512.asm @@ -32,8 +32,7 @@ %include "include/os.asm" %include 
"include/clear_regs.asm" -%include "include/cet.inc" -%include "avx512/snow3g_uea2_by16_vaes_avx512.asm" +%include "avx512_t2/snow3g_uea2_by16_vaes_avx512.asm" %ifndef SUBMIT_JOB_SNOW3G_UEA2 %define SUBMIT_JOB_SNOW3G_UEA2_GEN2 submit_job_snow3g_uea2_vaes_avx512 @@ -380,14 +379,13 @@ mksection .text ;; arg 2 : job MKGLOBAL(SUBMIT_JOB_SNOW3G_UEA2_GEN2,function,internal) SUBMIT_JOB_SNOW3G_UEA2_GEN2: - endbranch64 SNOW3G_FUNC_START SUBMIT_FLUSH_JOB_SNOW3G_UEA2 submit, tmp_gp1, tmp_gp2, tmp_gp3, tmp_gp4, tmp_gp5, tmp_gp6, tmp_gp7, tmp_gp8, tmp_gp9, tmp_gp10, tmp_gp11, avx512_gen2 SNOW3G_FUNC_END ret + MKGLOBAL(SUBMIT_JOB_SNOW3G_UEA2,function,internal) SUBMIT_JOB_SNOW3G_UEA2: - endbranch64 SNOW3G_FUNC_START SUBMIT_FLUSH_JOB_SNOW3G_UEA2 submit, tmp_gp1, tmp_gp2, tmp_gp3, tmp_gp4, tmp_gp5, tmp_gp6, tmp_gp7, tmp_gp8, tmp_gp9, tmp_gp10, tmp_gp11, avx512_gen1 SNOW3G_FUNC_END @@ -397,16 +395,16 @@ SUBMIT_JOB_SNOW3G_UEA2: ;; arg 1 : state MKGLOBAL(FLUSH_JOB_SNOW3G_UEA2_GEN2,function,internal) FLUSH_JOB_SNOW3G_UEA2_GEN2: - endbranch64 SNOW3G_FUNC_START SUBMIT_FLUSH_JOB_SNOW3G_UEA2 flush, tmp_gp1, tmp_gp2, tmp_gp3, tmp_gp4, tmp_gp5, tmp_gp6, tmp_gp7, tmp_gp8, tmp_gp9, tmp_gp10, tmp_gp11, avx512_gen2 SNOW3G_FUNC_END ret + MKGLOBAL(FLUSH_JOB_SNOW3G_UEA2,function,internal) FLUSH_JOB_SNOW3G_UEA2: - endbranch64 SNOW3G_FUNC_START SUBMIT_FLUSH_JOB_SNOW3G_UEA2 flush, tmp_gp1, tmp_gp2, tmp_gp3, tmp_gp4, tmp_gp5, tmp_gp6, tmp_gp7, tmp_gp8, tmp_gp9, tmp_gp10, tmp_gp11, avx512_gen1 SNOW3G_FUNC_END ret + mksection stack-noexec diff --git a/lib/avx512/mb_mgr_snow3g_uia2_submit_flush_vaes_avx512.asm b/lib/avx512_t2/mb_mgr_snow3g_uia2_submit_flush_vaes_avx512.asm similarity index 98% rename from lib/avx512/mb_mgr_snow3g_uia2_submit_flush_vaes_avx512.asm rename to lib/avx512_t2/mb_mgr_snow3g_uia2_submit_flush_vaes_avx512.asm index 34932ea68b5bed5d86656a0de90fde6bbc8a4c51..c10985b1e49f26f5004d668861a1f0446085227c 100644 --- a/lib/avx512/mb_mgr_snow3g_uia2_submit_flush_vaes_avx512.asm +++ b/lib/avx512_t2/mb_mgr_snow3g_uia2_submit_flush_vaes_avx512.asm @@ -29,11 +29,10 @@ %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" %include "include/constants.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" %include "include/clear_regs.asm" -%include "avx512/snow3g_uea2_by16_vaes_avx512.asm" +%include "avx512_t2/snow3g_uea2_by16_vaes_avx512.asm" %ifndef SUBMIT_JOB_SNOW3G_UIA2 %define SUBMIT_JOB_SNOW3G_UIA2_GEN2 submit_job_snow3g_uia2_vaes_avx512 @@ -269,24 +268,20 @@ mksection .text ; arg 2 : job MKGLOBAL(SUBMIT_JOB_SNOW3G_UIA2_GEN2,function,internal) SUBMIT_JOB_SNOW3G_UIA2_GEN2: - endbranch64 SUBMIT_FLUSH_JOB_SNOW3G_UIA2 submit, avx512_gen2 MKGLOBAL(SUBMIT_JOB_SNOW3G_UIA2,function,internal) SUBMIT_JOB_SNOW3G_UIA2: - endbranch64 SUBMIT_FLUSH_JOB_SNOW3G_UIA2 submit, avx512_gen1 ; JOB* FLUSH_JOB_SNOW3G_UIA2(MB_MGR_SNOW3G_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_SNOW3G_UIA2_GEN2,function,internal) FLUSH_JOB_SNOW3G_UIA2_GEN2: - endbranch64 SUBMIT_FLUSH_JOB_SNOW3G_UIA2 flush, avx512_gen2 MKGLOBAL(FLUSH_JOB_SNOW3G_UIA2,function,internal) FLUSH_JOB_SNOW3G_UIA2: - endbranch64 SUBMIT_FLUSH_JOB_SNOW3G_UIA2 flush, avx512_gen1 mksection stack-noexec diff --git a/lib/avx512/mb_mgr_zuc_submit_flush_gfni_avx512.asm b/lib/avx512_t2/mb_mgr_zuc_submit_flush_gfni_avx512.asm similarity index 88% rename from lib/avx512/mb_mgr_zuc_submit_flush_gfni_avx512.asm rename to lib/avx512_t2/mb_mgr_zuc_submit_flush_gfni_avx512.asm index 
788219cbadba32eb2f13cd34aadc127d8c2fb41b..76649d36557185118e4c31e5901416a5caa5149e 100644 --- a/lib/avx512/mb_mgr_zuc_submit_flush_gfni_avx512.asm +++ b/lib/avx512_t2/mb_mgr_zuc_submit_flush_gfni_avx512.asm @@ -35,13 +35,16 @@ %define ZUC128_INIT_16 asm_ZucInitialization_16_gfni_avx512 %define ZUC_CIPHER asm_ZucCipher_16_gfni_avx512 %define ZUC256_INIT_16 asm_Zuc256Initialization_16_gfni_avx512 -%define ZUC_KEYGEN4B_16 asm_ZucGenKeystream4B_16_gfni_avx512 %define ZUC_REMAINDER_16 asm_Eia3RemainderAVX512_16_VPCLMUL %define ZUC256_REMAINDER_16 asm_Eia3_256_RemainderAVX512_16_VPCLMUL +%define ZUC_KEYGEN_SKIP16_16 asm_ZucGenKeystream_16_skip16_gfni_avx512 +%define ZUC_KEYGEN64B_SKIP16_16 asm_ZucGenKeystream64B_16_skip16_gfni_avx512 %define ZUC_KEYGEN_SKIP8_16 asm_ZucGenKeystream_16_skip8_gfni_avx512 %define ZUC_KEYGEN64B_SKIP8_16 asm_ZucGenKeystream64B_16_skip8_gfni_avx512 +%define ZUC_KEYGEN_SKIP4_16 asm_ZucGenKeystream_16_skip4_gfni_avx512 +%define ZUC_KEYGEN64B_SKIP4_16 asm_ZucGenKeystream64B_16_skip4_gfni_avx512 %define ZUC_KEYGEN_16 asm_ZucGenKeystream_16_gfni_avx512 %define ZUC_KEYGEN64B_16 asm_ZucGenKeystream64B_16_gfni_avx512 %define ZUC_ROUND64B asm_Eia3Round64B_16_VPCLMUL %define ZUC_EIA3_N64B asm_Eia3_Nx64B_AVX512_16_VPCLMUL -%include "avx512/mb_mgr_zuc_submit_flush_avx512.asm" +%include "avx512_t1/mb_mgr_zuc_submit_flush_avx512.asm" diff --git a/lib/avx512/poly_fma_avx512.asm b/lib/avx512_t2/poly_fma_avx512.asm similarity index 99% rename from lib/avx512/poly_fma_avx512.asm rename to lib/avx512_t2/poly_fma_avx512.asm index a1efe40fef894d627ba7a1e29be71504cfd15fa5..0eec89f38b1d7e6203800db41f29ef3160cb24d4 100644 --- a/lib/avx512/poly_fma_avx512.asm +++ b/lib/avx512_t2/poly_fma_avx512.asm @@ -32,6 +32,10 @@ %include "include/clear_regs.asm" %include "include/cet.inc" +;; Enforce EVEX encoding for AVX512 capable systems +%xdefine vpmadd52luq {evex}vpmadd52luq +%xdefine vpmadd52huq {evex}vpmadd52huq + [bits 64] default rel @@ -162,6 +166,7 @@ dw 0, 0x1, 0x5, 0x15, 0x55, 0x57, 0x5f, 0x7f, 0xff struc STACKFRAME _r_save: resz 6 ; Memory to save limbs of powers of R _gpr_save: resq 8 ; Memory to save GP registers +_xmm_save: reso 10 ; Memory to save XMM registers _rsp_save: resq 1 ; Memory to save RSP endstruc @@ -1108,7 +1113,6 @@ mksection .text and %%T0, 0xffffffffffffff00 ; multiple of 256 bytes %%_poly1305_blocks_loop: - endbranch64 cmp %%T0, POLY1305_BLOCK_SIZE*16 jbe %%_poly1305_blocks_loop_end @@ -1206,8 +1210,6 @@ mksection .text and %%LEN, (POLY1305_BLOCK_SIZE*16 - 1) ; Get remaining lengths (LEN < 256 bytes) %%_less_than_256: - endbranch64 - cmp %%LEN, POLY1305_BLOCK_SIZE*8 jb %%_less_than_128 @@ -1373,8 +1375,6 @@ APPEND(%%_shuffle_blocks_, i): %endrep %%_end_shuffle: - endbranch64 - ; zmm13-zmm15 contain the 8 blocks of message plus the previous accumulator ; zmm22-24 contain the 3x44-bit limbs of the powers of R ; zmm25-26 contain the 3x44-bit limbs of the powers of R' (5*4*R) @@ -1449,8 +1449,6 @@ APPEND(%%_shuffle_blocks_, i): vmovdqa64 [rsp + _r_save + 64*5], zmm0 %endif - vzeroupper - %%_final_loop: cmp %%LEN, POLY1305_BLOCK_SIZE jb %%_poly1305_blocks_partial @@ -1641,6 +1639,13 @@ APPEND(%%_shuffle_blocks_, i): %ifndef LINUX mov [rsp + _gpr_save + 8*6], rsi mov [rsp + _gpr_save + 8*7], rdi +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _xmm_save + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov [rsp + _rsp_save], rax @@ -1651,6 +1656,13 @@ APPEND(%%_shuffle_blocks_, i): ;; Restores registers and removes the stack frame ;; 
============================================================================= %macro FUNC_EXIT 0 +%ifdef SAFE_DATA + clear_scratch_gps_asm + clear_all_zmms_asm +%else + vzeroupper +%endif ;; SAFE_DATA + mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -1660,13 +1672,16 @@ APPEND(%%_shuffle_blocks_, i): %ifndef LINUX mov rsi, [rsp + _gpr_save + 8*6] mov rdi, [rsp + _gpr_save + 8*7] +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _xmm_save + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov rsp, [rsp + _rsp_save] -%ifdef SAFE_DATA - clear_scratch_gps_asm -%endif ;; SAFE_DATA - %endmacro ;; ============================================================================= @@ -1777,7 +1792,7 @@ poly1305_aead_complete_fma_avx512: ;; clear Poly key %ifdef SAFE_DATA - vpxorq ymm0, ymm0 + vpxorq xmm0, xmm0 vmovdqu64 [arg2], ymm0 %endif @@ -1792,7 +1807,6 @@ poly1305_aead_complete_fma_avx512: align 32 MKGLOBAL(poly1305_mac_fma_avx512,function,internal) poly1305_mac_fma_avx512: - endbranch64 FUNC_ENTRY %ifndef LINUX diff --git a/lib/avx512/pon_vaes_avx512.asm b/lib/avx512_t2/pon_vaes_avx512.asm similarity index 98% rename from lib/avx512/pon_vaes_avx512.asm rename to lib/avx512_t2/pon_vaes_avx512.asm index 91babf8567f2006054bc3d33203c98ce46e248ea..1f712f15b6429329a35349dd1a5bc76e8e67f86c 100644 --- a/lib/avx512/pon_vaes_avx512.asm +++ b/lib/avx512_t2/pon_vaes_avx512.asm @@ -31,7 +31,6 @@ %include "include/os.asm" %include "include/memcpy.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" extern aes_cntr_pon_enc_128_vaes_avx512 extern aes_cntr_pon_dec_128_vaes_avx512 @@ -201,7 +200,7 @@ endstruc %endmacro %macro AES128_CTR_PON_ENC 1 -%define %%CIPH %1 ; [in] cipher "CTR" or "NO_CTR" +%define %%CIPHER %1 ; [in] cipher "CTR" or "NO_CTR" sub rsp, STACKFRAME_size @@ -261,7 +260,7 @@ endstruc ; get output buffer mov dst, [job + _dst] -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ; Encrypt buffer and calculate BIP in the same function mov arg2, dst @@ -326,7 +325,7 @@ endstruc vpxord xmm1, xmm0 vmovq bip, xmm1 -%endif ; CIPH = CTR +%endif ; CIPHER = CTR mov tmp_1, [job + _auth_tag_output] mov [tmp_1], DWORD(bip) @@ -352,7 +351,7 @@ endstruc %endmacro %macro AES128_CTR_PON_DEC 1 -%define %%CIPH %1 ; [in] cipher "CTR" or "NO_CTR" +%define %%CIPHER %1 ; [in] cipher "CTR" or "NO_CTR" sub rsp, STACKFRAME_size @@ -385,7 +384,7 @@ endstruc ; Save job pointer mov [rsp + _job_save], job -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; Decrypt message and calculate BIP in same function mov arg2, [job + _dst] mov arg3, [job + _iv] @@ -411,7 +410,7 @@ endstruc %ifndef LINUX add rsp, 8*6 %endif -%else ; %%CIPH == CTR +%else ; %%CIPHER == CTR ; Calculate BIP (XOR message) vmovq xmm1, bip @@ -450,7 +449,7 @@ endstruc vmovd DWORD(bip), xmm1 -%endif ; CIPH == CTR +%endif ; CIPHER == CTR cmp bytes_to_crc, 4 jle %%_skip_crc @@ -505,7 +504,6 @@ endstruc align 64 MKGLOBAL(submit_job_pon_enc_vaes_avx512,function,internal) submit_job_pon_enc_vaes_avx512: - endbranch64 AES128_CTR_PON_ENC CTR ret @@ -513,7 +511,6 @@ submit_job_pon_enc_vaes_avx512: align 64 MKGLOBAL(submit_job_pon_dec_vaes_avx512,function,internal) submit_job_pon_dec_vaes_avx512: - endbranch64 AES128_CTR_PON_DEC CTR ret @@ -521,7 +518,6 @@ submit_job_pon_dec_vaes_avx512: align 64 MKGLOBAL(submit_job_pon_enc_no_ctr_vaes_avx512,function,internal) submit_job_pon_enc_no_ctr_vaes_avx512: - endbranch64 AES128_CTR_PON_ENC NO_CTR ret @@ -529,7 +525,6 @@ 
submit_job_pon_enc_no_ctr_vaes_avx512: align 64 MKGLOBAL(submit_job_pon_dec_no_ctr_vaes_avx512,function,internal) submit_job_pon_dec_no_ctr_vaes_avx512: - endbranch64 AES128_CTR_PON_DEC NO_CTR ret diff --git a/lib/avx512/snow3g_uea2_by16_vaes_avx512.asm b/lib/avx512_t2/snow3g_uea2_by16_vaes_avx512.asm similarity index 99% rename from lib/avx512/snow3g_uea2_by16_vaes_avx512.asm rename to lib/avx512_t2/snow3g_uea2_by16_vaes_avx512.asm index ee406950d4f38bc6fa64a37e2705e4f9e7c0289a..d295d9abaa2cc942a06e5726f3f8a873df520f13 100644 --- a/lib/avx512/snow3g_uea2_by16_vaes_avx512.asm +++ b/lib/avx512_t2/snow3g_uea2_by16_vaes_avx512.asm @@ -332,7 +332,9 @@ endstruc ;; Restores register contents and removes the stack frame ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %macro SNOW3G_FUNC_END 0 -%ifndef SAFE_DATA +%ifdef SAFE_DATA + clear_all_zmms_asm +%else vzeroupper %endif mov rbx, [rsp + _gpr_save + 8 * 0] diff --git a/lib/avx512/snow3g_uia2_by32_vaes_avx512.asm b/lib/avx512_t2/snow3g_uia2_by32_vaes_avx512.asm similarity index 100% rename from lib/avx512/snow3g_uia2_by32_vaes_avx512.asm rename to lib/avx512_t2/snow3g_uia2_by32_vaes_avx512.asm diff --git a/lib/avx512/zuc_x16_vaes_avx512.asm b/lib/avx512_t2/zuc_x16_vaes_avx512.asm similarity index 88% rename from lib/avx512/zuc_x16_vaes_avx512.asm rename to lib/avx512_t2/zuc_x16_vaes_avx512.asm index a7efd55e6e4f3b698705ea3f4d066714faff8249..42446d4f3547e1471c282f3136a4780a8be8320d 100644 --- a/lib/avx512/zuc_x16_vaes_avx512.asm +++ b/lib/avx512_t2/zuc_x16_vaes_avx512.asm @@ -35,11 +35,13 @@ %define ZUC256_REMAINDER_16 asm_Eia3_256_RemainderAVX512_16_VPCLMUL %define ZUC_KEYGEN64B_16 asm_ZucGenKeystream64B_16_gfni_avx512 %define ZUC_KEYGEN8B_16 asm_ZucGenKeystream8B_16_gfni_avx512 -%define ZUC_KEYGEN4B_16 asm_ZucGenKeystream4B_16_gfni_avx512 %define ZUC_KEYGEN_16 asm_ZucGenKeystream_16_gfni_avx512 +%define ZUC_KEYGEN64B_SKIP16_16 asm_ZucGenKeystream64B_16_skip16_gfni_avx512 %define ZUC_KEYGEN64B_SKIP8_16 asm_ZucGenKeystream64B_16_skip8_gfni_avx512 -%define ZUC_KEYGEN8B_SKIP8_16 asm_ZucGenKeystream8B_16_skip8_gfni_avx512 +%define ZUC_KEYGEN64B_SKIP4_16 asm_ZucGenKeystream64B_16_skip4_gfni_avx512 +%define ZUC_KEYGEN_SKIP16_16 asm_ZucGenKeystream_16_skip16_gfni_avx512 %define ZUC_KEYGEN_SKIP8_16 asm_ZucGenKeystream_16_skip8_gfni_avx512 +%define ZUC_KEYGEN_SKIP4_16 asm_ZucGenKeystream_16_skip4_gfni_avx512 %define ZUC_ROUND64B_16 asm_Eia3Round64B_16_VPCLMUL %define ZUC_EIA3_N64B asm_Eia3_Nx64B_AVX512_16_VPCLMUL -%include "avx512/zuc_x16_avx512.asm" +%include "avx512_t1/zuc_x16_avx512.asm" diff --git a/lib/avx_t1/README b/lib/avx_t1/README new file mode 100644 index 0000000000000000000000000000000000000000..7302263d1e7e53b05b9b01bedeaadbfb09c2e7af --- /dev/null +++ b/lib/avx_t1/README @@ -0,0 +1,3 @@ +AVX: +- SSE TYPE1 +- AVX diff --git a/lib/avx/aes128_cbc_dec_by8_avx.asm b/lib/avx_t1/aes128_cbc_dec_by8_avx.asm similarity index 99% rename from lib/avx/aes128_cbc_dec_by8_avx.asm rename to lib/avx_t1/aes128_cbc_dec_by8_avx.asm index 978e9326d70eac2dda2bb21a37f5a4210e751c5f..c50d7898d30fdafd16268601b4a074fe72686ef6 100644 --- a/lib/avx/aes128_cbc_dec_by8_avx.asm +++ b/lib/avx_t1/aes128_cbc_dec_by8_avx.asm @@ -215,7 +215,6 @@ mksection .text MKGLOBAL(AES_CBC_DEC_128_X8,function,internal) AES_CBC_DEC_128_X8: - %ifndef LINUX mov num_bytes, [rsp + 8*5] %else diff --git a/lib/avx/aes128_cbc_enc_x8_avx.asm b/lib/avx_t1/aes128_cbc_enc_x8_avx.asm similarity index 100% rename from lib/avx/aes128_cbc_enc_x8_avx.asm rename to 
lib/avx_t1/aes128_cbc_enc_x8_avx.asm diff --git a/lib/avx/aes128_cbc_mac_x8_avx.asm b/lib/avx_t1/aes128_cbc_mac_x8_avx.asm similarity index 97% rename from lib/avx/aes128_cbc_mac_x8_avx.asm rename to lib/avx_t1/aes128_cbc_mac_x8_avx.asm index ea6ffb8e0c6b4fa92b36493d4ee3b4181ff5cbc1..870da05ee711f1c20361302df1b4d763628c40f3 100644 --- a/lib/avx/aes128_cbc_mac_x8_avx.asm +++ b/lib/avx_t1/aes128_cbc_mac_x8_avx.asm @@ -34,4 +34,4 @@ %define ARG_KEYS _aesarg_keys %define ARG_IV _aesarg_IV -%include "avx/aes128_cbc_enc_x8_avx.asm" +%include "avx_t1/aes128_cbc_enc_x8_avx.asm" diff --git a/lib/avx/aes128_cbcs_1_9_dec_by8_avx.asm b/lib/avx_t1/aes128_cbcs_1_9_dec_by8_avx.asm similarity index 97% rename from lib/avx/aes128_cbcs_1_9_dec_by8_avx.asm rename to lib/avx_t1/aes128_cbcs_1_9_dec_by8_avx.asm index d9e6a3c1eef98c46dc758931936be71302d87522..b0982c2f61048a64f3139072cb2938ce477e66f7 100644 --- a/lib/avx/aes128_cbcs_1_9_dec_by8_avx.asm +++ b/lib/avx_t1/aes128_cbcs_1_9_dec_by8_avx.asm @@ -33,4 +33,4 @@ %define CBCS %endif -%include "avx/aes128_cbc_dec_by8_avx.asm" +%include "avx_t1/aes128_cbc_dec_by8_avx.asm" diff --git a/lib/avx/aes128_cbcs_1_9_enc_x8_avx.asm b/lib/avx_t1/aes128_cbcs_1_9_enc_x8_avx.asm similarity index 98% rename from lib/avx/aes128_cbcs_1_9_enc_x8_avx.asm rename to lib/avx_t1/aes128_cbcs_1_9_enc_x8_avx.asm index a2d891b9adbee466483ede329c15a3596d7767c4..d3051f2fbad7e97832998e88047e234e2f8ffe0c 100644 --- a/lib/avx/aes128_cbcs_1_9_enc_x8_avx.asm +++ b/lib/avx_t1/aes128_cbcs_1_9_enc_x8_avx.asm @@ -49,4 +49,4 @@ %define ARG_KEYS _aesarg_keys %define ARG_IV _aesarg_IV -%include "avx/aes128_cbc_enc_x8_avx.asm" +%include "avx_t1/aes128_cbc_enc_x8_avx.asm" diff --git a/lib/avx/aes128_cntr_by8_avx.asm b/lib/avx_t1/aes128_cntr_by8_avx.asm similarity index 100% rename from lib/avx/aes128_cntr_by8_avx.asm rename to lib/avx_t1/aes128_cntr_by8_avx.asm diff --git a/lib/avx/aes128_cntr_ccm_by8_avx.asm b/lib/avx_t1/aes128_cntr_ccm_by8_avx.asm similarity index 97% rename from lib/avx/aes128_cntr_ccm_by8_avx.asm rename to lib/avx_t1/aes128_cntr_ccm_by8_avx.asm index 485f59235992d3a078956c41b606965ca52ac4d0..37bb556c354d2802ffb40351e41d1d78debe6c0e 100644 --- a/lib/avx/aes128_cntr_ccm_by8_avx.asm +++ b/lib/avx_t1/aes128_cntr_ccm_by8_avx.asm @@ -29,4 +29,4 @@ %ifndef AES_CNTR_CCM_128 %define AES_CNTR_CCM_128 aes_cntr_ccm_128_avx %endif -%include "avx/aes128_cntr_by8_avx.asm" +%include "avx_t1/aes128_cntr_by8_avx.asm" diff --git a/lib/avx_t1/aes128_ecb_by8_avx.asm b/lib/avx_t1/aes128_ecb_by8_avx.asm new file mode 100644 index 0000000000000000000000000000000000000000..65cd39726fe454d004d0d13d2584798b71e9bf34 --- /dev/null +++ b/lib/avx_t1/aes128_ecb_by8_avx.asm @@ -0,0 +1,165 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. 
+;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +; routine to do AES ECB 128 encrypt/decrypt on 16n bytes doing AES by 8 + +%include "include/os.asm" +%include "include/clear_regs.asm" +%include "include/aes_common.asm" + +%ifdef LINUX +%define IN rdi +%define KEYS rsi +%define OUT rdx +%define LEN rcx +%else +%define IN rcx +%define KEYS rdx +%define OUT r8 +%define LEN r9 +%endif +%define IDX rax +%define TMP r11 +%define XDATA0 xmm0 +%define XDATA1 xmm1 +%define XDATA2 xmm2 +%define XDATA3 xmm3 +%define XDATA4 xmm4 +%define XDATA5 xmm5 +%define XDATA6 xmm6 +%define XDATA7 xmm7 +%define XKEY1 xmm8 + +%ifndef AES_ECB_NROUNDS +%define AES_ECB_NROUNDS 10 +%endif + +%if AES_ECB_NROUNDS == 10 +%define KEYSIZE 128 +%elif AES_ECB_NROUNDS == 12 +%define KEYSIZE 192 +%else +%define KEYSIZE 256 +%endif + +%define AES_ECB_ENC aes_ecb_enc_ %+ KEYSIZE %+ _avx +%define AES_ECB_DEC aes_ecb_dec_ %+ KEYSIZE %+ _avx + +%macro AES_ECB 1 +%define %%DIR %1 ; [in] Direction (ENC/DIR) +%ifidn %%DIR, ENC +%define AES XMM_AESENC_ROUND_BLOCKS_AVX_0_8 +%else ; DIR = DEC +%define AES XMM_AESDEC_ROUND_BLOCKS_AVX_0_8 +%endif + or LEN, LEN + jz %%done + xor IDX, IDX + mov TMP, LEN + and TMP, 127 ; number of initial bytes (0 to 7 AES blocks) + jz %%main_loop + ; branch to different code block based on remainder + cmp TMP, 4*16 + je %%initial_num_blocks_is_4 + jb %%initial_num_blocks_is_3_1 + cmp TMP, 6*16 + je %%initial_num_blocks_is_6 + jb %%initial_num_blocks_is_5 + ja %%initial_num_blocks_is_7 +%%initial_num_blocks_is_3_1: + ;; 3, 2 or 1 + cmp TMP, 2*16 + ja %%initial_num_blocks_is_3 + je %%initial_num_blocks_is_2 + ;; fall through for `jmp %%initial_num_blocks_is_1` +%assign num_blocks 1 +%rep 7 +%%initial_num_blocks_is_ %+ num_blocks : + ; load initial blocks + XMM_LOAD_BLOCKS_AVX_0_8 num_blocks, IN, 0, XDATA0,\ + XDATA1, XDATA2, XDATA3, XDATA4, XDATA5,\ + XDATA6, XDATA7 +%assign %%I 0 +; Perform AES encryption/decryption on initial blocks +%rep (AES_ECB_NROUNDS + 1) ; 10/12/14 + vmovdqu XKEY1, [KEYS + %%I*16] + AES XDATA0, XDATA1, XDATA2, XDATA3, XDATA4,\ + XDATA5, XDATA6, XDATA7, XKEY1, %%I, no_data,\ + no_data, no_data, no_data, no_data, no_data,\ + no_data, no_data, num_blocks, (AES_ECB_NROUNDS - 1) +%assign %%I (%%I + 1) +%endrep + ; store initial blocks + XMM_STORE_BLOCKS_AVX_0_8 num_blocks, OUT, 0, XDATA0, XDATA1,\ + XDATA2, XDATA3, XDATA4, XDATA5, XDATA6, XDATA7 + add IDX, num_blocks*16 + cmp IDX, LEN + je %%done +%assign num_blocks (num_blocks + 1) + jmp %%main_loop +%endrep +align 16 +%%main_loop: + ; load next 8 blocks + XMM_LOAD_BLOCKS_AVX_0_8 8, {IN + IDX}, 0, XDATA0,\ + XDATA1, XDATA2, XDATA3, XDATA4, XDATA5,\ + XDATA6, XDATA7 +%assign %%I 0 +; Perform AES encryption/decryption on 8 blocks +%rep (AES_ECB_NROUNDS + 1) ; 10/12/14 + vmovdqu XKEY1, [KEYS + 
%%I*16] + AES XDATA0, XDATA1, XDATA2, XDATA3, XDATA4,\ + XDATA5, XDATA6, XDATA7, XKEY1, %%I, no_data,\ + no_data, no_data, no_data, no_data, no_data,\ + no_data, no_data, 8, (AES_ECB_NROUNDS - 1) +%assign %%I (%%I + 1) +%endrep + ; store 8 blocks + XMM_STORE_BLOCKS_AVX_0_8 8, {OUT + IDX}, 0, XDATA0, XDATA1,\ + XDATA2, XDATA3, XDATA4, XDATA5, XDATA6, XDATA7 + add IDX, 8*16 + cmp IDX, LEN + jne %%main_loop +%%done: +%ifdef SAFE_DATA + clear_all_xmms_avx_asm +%endif +%endmacro + +mksection .text +align 16 +MKGLOBAL(AES_ECB_ENC,function,internal) +AES_ECB_ENC: + AES_ECB ENC + ret +align 16 +MKGLOBAL(AES_ECB_DEC,function,internal) +AES_ECB_DEC: + AES_ECB DEC + ret + +mksection stack-noexec diff --git a/lib/avx/aes128_gcm_by8_avx.asm b/lib/avx_t1/aes128_gcm_by8_avx.asm similarity index 98% rename from lib/avx/aes128_gcm_by8_avx.asm rename to lib/avx_t1/aes128_gcm_by8_avx.asm index 2aa61e7819046c500c3b541cd89b38e50fedafb9..dba11298c7feee02dac5b2c997952549b5b83557 100644 --- a/lib/avx/aes128_gcm_by8_avx.asm +++ b/lib/avx_t1/aes128_gcm_by8_avx.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM128_MODE 1 -%include "avx/gcm_avx_gen2.asm" +%include "avx_t1/gcm_avx_gen2.asm" diff --git a/lib/avx/aes128_xcbc_mac_x8_avx.asm b/lib/avx_t1/aes128_xcbc_mac_x8_avx.asm similarity index 97% rename from lib/avx/aes128_xcbc_mac_x8_avx.asm rename to lib/avx_t1/aes128_xcbc_mac_x8_avx.asm index facf70ae02c1d462a0b4bf5c486a51bd12b1b5bf..9d9ec41417b8d3c5aed489c38b3086d13ded07e5 100644 --- a/lib/avx/aes128_xcbc_mac_x8_avx.asm +++ b/lib/avx_t1/aes128_xcbc_mac_x8_avx.asm @@ -34,4 +34,4 @@ %define ARG_KEYS _aesxcbcarg_keys %define ARG_IV _aesxcbcarg_ICV -%include "avx/aes128_cbc_enc_x8_avx.asm" +%include "avx_t1/aes128_cbc_enc_x8_avx.asm" diff --git a/lib/avx/aes192_cbc_dec_by8_avx.asm b/lib/avx_t1/aes192_cbc_dec_by8_avx.asm similarity index 100% rename from lib/avx/aes192_cbc_dec_by8_avx.asm rename to lib/avx_t1/aes192_cbc_dec_by8_avx.asm diff --git a/lib/avx/aes192_cbc_enc_x8_avx.asm b/lib/avx_t1/aes192_cbc_enc_x8_avx.asm similarity index 100% rename from lib/avx/aes192_cbc_enc_x8_avx.asm rename to lib/avx_t1/aes192_cbc_enc_x8_avx.asm diff --git a/lib/avx/aes192_cntr_by8_avx.asm b/lib/avx_t1/aes192_cntr_by8_avx.asm similarity index 100% rename from lib/avx/aes192_cntr_by8_avx.asm rename to lib/avx_t1/aes192_cntr_by8_avx.asm diff --git a/lib/avx_t1/aes192_ecb_by8_avx.asm b/lib/avx_t1/aes192_ecb_by8_avx.asm new file mode 100644 index 0000000000000000000000000000000000000000..b7402bade15c25b98bad8c5875c11b5403e19052 --- /dev/null +++ b/lib/avx_t1/aes192_ecb_by8_avx.asm @@ -0,0 +1,33 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. 
+;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +; routine to do AES ECB 192 encrypt/decrypt on 16n bytes doing AES by 8 + +%define AES_ECB_NROUNDS 12 + +%include "include/os.asm" +%include "avx_t1/aes128_ecb_by8_avx.asm" diff --git a/lib/avx/aes192_gcm_by8_avx.asm b/lib/avx_t1/aes192_gcm_by8_avx.asm similarity index 98% rename from lib/avx/aes192_gcm_by8_avx.asm rename to lib/avx_t1/aes192_gcm_by8_avx.asm index 8592e82b556bf3efa3267905881d0ef48113b96c..e072d850bd4f9c46c4750b848b279ce6478ced03 100644 --- a/lib/avx/aes192_gcm_by8_avx.asm +++ b/lib/avx_t1/aes192_gcm_by8_avx.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM192_MODE 1 -%include "avx/gcm_avx_gen2.asm" +%include "avx_t1/gcm_avx_gen2.asm" diff --git a/lib/avx/aes256_cbc_dec_by8_avx.asm b/lib/avx_t1/aes256_cbc_dec_by8_avx.asm similarity index 99% rename from lib/avx/aes256_cbc_dec_by8_avx.asm rename to lib/avx_t1/aes256_cbc_dec_by8_avx.asm index 44ac3c6ce222dadddd9b6f104340c274a34d01e4..36aa543875a38369316dadb39bc6994b42398124 100644 --- a/lib/avx/aes256_cbc_dec_by8_avx.asm +++ b/lib/avx_t1/aes256_cbc_dec_by8_avx.asm @@ -246,7 +246,6 @@ mksection .text ;; aes_cbc_dec_256_avx(void *in, void *IV, void *keys, void *out, UINT64 num_bytes) MKGLOBAL(aes_cbc_dec_256_avx,function,internal) aes_cbc_dec_256_avx: - %ifndef LINUX mov num_bytes, [rsp + 8*5] %endif diff --git a/lib/avx/aes256_cbc_enc_x8_avx.asm b/lib/avx_t1/aes256_cbc_enc_x8_avx.asm similarity index 100% rename from lib/avx/aes256_cbc_enc_x8_avx.asm rename to lib/avx_t1/aes256_cbc_enc_x8_avx.asm diff --git a/lib/avx/aes256_cbc_mac_x8_avx.asm b/lib/avx_t1/aes256_cbc_mac_x8_avx.asm similarity index 97% rename from lib/avx/aes256_cbc_mac_x8_avx.asm rename to lib/avx_t1/aes256_cbc_mac_x8_avx.asm index 45674e34c886a6edab0498f7ce65fb30d8ea6b55..842eea9dfea0974e56dfad29b19c5149bd596b9a 100644 --- a/lib/avx/aes256_cbc_mac_x8_avx.asm +++ b/lib/avx_t1/aes256_cbc_mac_x8_avx.asm @@ -28,4 +28,4 @@ ;;; Routine to compute CBC-MAC. It is based on 256 bit CBC AES encrypt code. 
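
The reuse works because CBC-MAC is simply CBC encryption in which only the final cipher block is kept as the tag (with a zero IV in the textbook definition, the running MAC value standing in for the IV between blocks). A compact C sketch of that relationship, with aes256_enc_block() standing in for a single-block AES-256 primitive (the name is illustrative, not a library symbol):

/* Conceptual CBC-MAC over a whole number of 16-byte blocks:
 * C_i = E_K(C_{i-1} ^ P_i) with C_0 = 0, tag = C_n. */
#include <stdint.h>
#include <string.h>

void aes256_enc_block(const void *expanded_keys,
                      const uint8_t in[16], uint8_t out[16]);

static void
cbc_mac_256(const void *expanded_keys, const uint8_t *msg,
            const uint64_t num_blocks, uint8_t tag[16])
{
        uint8_t mac[16] = {0};          /* zero IV / running MAC value */

        for (uint64_t i = 0; i < num_blocks; i++) {
                /* XOR the next plaintext block into the running value */
                for (int j = 0; j < 16; j++)
                        mac[j] ^= msg[i * 16 + j];
                /* encrypt in place; the last output becomes the tag */
                aes256_enc_block(expanded_keys, mac, mac);
        }
        memcpy(tag, mac, 16);
}
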
%define CBC_MAC 1 -%include "avx/aes256_cbc_enc_x8_avx.asm" +%include "avx_t1/aes256_cbc_enc_x8_avx.asm" diff --git a/lib/avx/aes256_cntr_by8_avx.asm b/lib/avx_t1/aes256_cntr_by8_avx.asm similarity index 100% rename from lib/avx/aes256_cntr_by8_avx.asm rename to lib/avx_t1/aes256_cntr_by8_avx.asm diff --git a/lib/avx/aes256_cntr_ccm_by8_avx.asm b/lib/avx_t1/aes256_cntr_ccm_by8_avx.asm similarity index 97% rename from lib/avx/aes256_cntr_ccm_by8_avx.asm rename to lib/avx_t1/aes256_cntr_ccm_by8_avx.asm index 9279757a45e71e5d7c65d3cd7f0a4d2031173f61..d51bab557d090c03848a158c87c2fb6411e86875 100644 --- a/lib/avx/aes256_cntr_ccm_by8_avx.asm +++ b/lib/avx_t1/aes256_cntr_ccm_by8_avx.asm @@ -29,4 +29,4 @@ %ifndef AES_CNTR_CCM_256 %define AES_CNTR_CCM_256 aes_cntr_ccm_256_avx %endif -%include "avx/aes256_cntr_by8_avx.asm" +%include "avx_t1/aes256_cntr_by8_avx.asm" diff --git a/lib/avx_t1/aes256_ecb_by8_avx.asm b/lib/avx_t1/aes256_ecb_by8_avx.asm new file mode 100644 index 0000000000000000000000000000000000000000..514ffa89725e6ee4f4e1f58565b176aae2dec5dd --- /dev/null +++ b/lib/avx_t1/aes256_ecb_by8_avx.asm @@ -0,0 +1,33 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +; routine to do AES ECB 256 encrypt/decrypt on 16n bytes doing AES by 8 + +%define AES_ECB_NROUNDS 14 + +%include "include/os.asm" +%include "avx_t1/aes128_ecb_by8_avx.asm" diff --git a/lib/avx/aes256_gcm_by8_avx.asm b/lib/avx_t1/aes256_gcm_by8_avx.asm similarity index 98% rename from lib/avx/aes256_gcm_by8_avx.asm rename to lib/avx_t1/aes256_gcm_by8_avx.asm index 3584c9b41a62cae547d46f7cbb71384d1cdd0e6a..9d5146e99af6cebc17c27ed9073e7f51d4076f85 100644 --- a/lib/avx/aes256_gcm_by8_avx.asm +++ b/lib/avx_t1/aes256_gcm_by8_avx.asm @@ -27,4 +27,4 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM256_MODE 1 -%include "avx/gcm_avx_gen2.asm" +%include "avx_t1/gcm_avx_gen2.asm" diff --git a/lib/avx/aes_cfb_avx.asm b/lib/avx_t1/aes_cfb_avx.asm similarity index 100% rename from lib/avx/aes_cfb_avx.asm rename to lib/avx_t1/aes_cfb_avx.asm diff --git a/lib/avx/chacha20_avx.asm b/lib/avx_t1/chacha20_avx.asm similarity index 98% rename from lib/avx/chacha20_avx.asm rename to lib/avx_t1/chacha20_avx.asm index 73c4b51aab6404c905e858e4cf5c735a63e663ff..ef0bacffe1d7e3f614bf82a40dfeb1cb87a11a5a 100644 --- a/lib/avx/chacha20_avx.asm +++ b/lib/avx_t1/chacha20_avx.asm @@ -85,6 +85,7 @@ dq 0x0ffffffc0fffffff, 0x0ffffffc0ffffffc struc STACK _STATE: reso 16 ; Space to store first 4 states _XMM_SAVE: reso 2 ; Space to store up to 2 temporary XMM registers +_XMM_WIN_SAVE: reso 10 ; Space to store up to 10 XMM registers _GP_SAVE: resq 7 ; Space to store up to 7 GP registers _RSP_SAVE: resq 1 ; Space to store rsp pointer endstruc @@ -619,6 +620,15 @@ submit_job_chacha20_enc_dec_avx: mov rax, rsp sub rsp, STACK_SIZE and rsp, -16 +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep +%endif mov [rsp + _RSP_SAVE], rax ; save RSP xor off, off @@ -1140,6 +1150,15 @@ no_partial_block: vmovdqa [rsp + _XMM_SAVE + 16], xmm0 %endif +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep +%endif mov rsp, [rsp + _RSP_SAVE] exit: @@ -1188,6 +1207,13 @@ chacha20_enc_dec_ks_avx: mov [rsp + _GP_SAVE + 40], rbp %ifndef LINUX mov [rsp + _GP_SAVE + 48], rdi +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov [rsp + _RSP_SAVE], rax ; save RSP @@ -1703,6 +1729,13 @@ exit_ks: mov rbp, [rsp + _GP_SAVE + 40] %ifndef LINUX mov rdi, [rsp + _GP_SAVE + 48] +%assign i 0 +%assign j 6 +%rep 10 + vmovdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov rsp, [rsp + _RSP_SAVE]; restore RSP @@ -1712,6 +1745,15 @@ exit_ks: align 32 MKGLOBAL(poly1305_key_gen_avx,function,internal) poly1305_key_gen_avx: +%ifndef LINUX + mov rax, rsp + sub rsp, 3*16 + 8 + and rsp, -16 + vmovdqa [rsp], xmm6 + vmovdqa [rsp + 16], xmm7 + vmovdqa [rsp + 16*2], xmm8 + mov [rsp + 16*3], rax +%endif ;; prepare chacha state from IV, key vmovdqa xmm0, [rel constants] vmovdqu xmm1, [arg1] ; Load key bytes 0-15 @@ -1732,6 +1774,12 @@ poly1305_key_gen_avx: %ifdef SAFE_DATA clear_all_xmms_avx_asm +%endif +%ifndef LINUX + vmovdqa xmm6, [rsp] + vmovdqa xmm7, [rsp + 16] + vmovdqa xmm8, [rsp + 16*2] + mov rsp, [rsp + 16*3] %endif ret diff --git a/lib/avx/crc16_x25_avx.asm b/lib/avx_t1/crc16_x25_avx.asm similarity index 100% rename from lib/avx/crc16_x25_avx.asm rename to lib/avx_t1/crc16_x25_avx.asm diff --git a/lib/avx/crc32_by8_avx.asm b/lib/avx_t1/crc32_by8_avx.asm similarity index 99% rename from lib/avx/crc32_by8_avx.asm rename to lib/avx_t1/crc32_by8_avx.asm index e479784f36381f75c73ed9b011b015403e5c6af5..2403b0fa4fa69116ddfeb152c10b548fb2b306a4 100644 --- a/lib/avx/crc32_by8_avx.asm +++ b/lib/avx_t1/crc32_by8_avx.asm @@ -39,6 +39,7 @@ %include "include/memcpy.asm" %include "include/reg_sizes.asm" %include "include/crc32.inc" +%include "include/clear_regs.asm" [bits 64] default rel @@ -299,6 +300,9 @@ crc32_by8_avx: vpextrd eax, xmm7, 1 .cleanup: 
+%ifdef SAFE_DATA + clear_all_xmms_avx_asm +%endif ret align 32 diff --git a/lib/avx/crc32_fp_avx.asm b/lib/avx_t1/crc32_fp_avx.asm similarity index 100% rename from lib/avx/crc32_fp_avx.asm rename to lib/avx_t1/crc32_fp_avx.asm diff --git a/lib/avx/crc32_iuup_avx.asm b/lib/avx_t1/crc32_iuup_avx.asm similarity index 100% rename from lib/avx/crc32_iuup_avx.asm rename to lib/avx_t1/crc32_iuup_avx.asm diff --git a/lib/avx/crc32_lte_avx.asm b/lib/avx_t1/crc32_lte_avx.asm similarity index 100% rename from lib/avx/crc32_lte_avx.asm rename to lib/avx_t1/crc32_lte_avx.asm diff --git a/lib/avx/crc32_refl_by8_avx.asm b/lib/avx_t1/crc32_refl_by8_avx.asm similarity index 99% rename from lib/avx/crc32_refl_by8_avx.asm rename to lib/avx_t1/crc32_refl_by8_avx.asm index e6701f3871a1690cebc2d852481b9a5497340cbe..bbbd4a0ae5c5c8fa8c03935e831aa0154cc9f606 100644 --- a/lib/avx/crc32_refl_by8_avx.asm +++ b/lib/avx_t1/crc32_refl_by8_avx.asm @@ -39,6 +39,7 @@ %include "include/memcpy.asm" %include "include/reg_sizes.asm" %include "include/crc32_refl.inc" +%include "include/clear_regs.asm" [bits 64] default rel @@ -286,6 +287,9 @@ crc32_refl_by8_avx: vpextrd eax, xmm7, 2 .cleanup: +%ifdef SAFE_DATA + clear_all_xmms_avx_asm +%endif not eax ret diff --git a/lib/avx/crc32_sctp_avx.asm b/lib/avx_t1/crc32_sctp_avx.asm similarity index 100% rename from lib/avx/crc32_sctp_avx.asm rename to lib/avx_t1/crc32_sctp_avx.asm diff --git a/lib/avx/crc32_wimax_avx.asm b/lib/avx_t1/crc32_wimax_avx.asm similarity index 100% rename from lib/avx/crc32_wimax_avx.asm rename to lib/avx_t1/crc32_wimax_avx.asm diff --git a/lib/avx/ethernet_fcs_avx.asm b/lib/avx_t1/ethernet_fcs_avx.asm similarity index 100% rename from lib/avx/ethernet_fcs_avx.asm rename to lib/avx_t1/ethernet_fcs_avx.asm diff --git a/lib/avx/gcm_avx_gen2.asm b/lib/avx_t1/gcm_avx_gen2.asm similarity index 99% rename from lib/avx/gcm_avx_gen2.asm rename to lib/avx_t1/gcm_avx_gen2.asm index a79c89d2145c23b1fa3c74787a62a867c09f55b9..db90c7b60bdb31714b29478e718136d1eeea858d 100644 --- a/lib/avx/gcm_avx_gen2.asm +++ b/lib/avx_t1/gcm_avx_gen2.asm @@ -528,7 +528,7 @@ mksection .text %ifidn %%ENC_DEC, DEC vmovdqa xmm3, xmm1 - vpxor xmm9, xmm1 ; Cyphertext XOR E(K, Yn) + vpxor xmm9, xmm1 ; Ciphertext XOR E(K, Yn) mov r15, %%PLAIN_CYPH_LEN add r15, r13 diff --git a/lib/avx/kasumi_avx.c b/lib/avx_t1/kasumi_avx.c similarity index 100% rename from lib/avx/kasumi_avx.c rename to lib/avx_t1/kasumi_avx.c diff --git a/lib/avx/mb_mgr_aes128_cbc_enc_flush_avx.asm b/lib/avx_t1/mb_mgr_aes128_cbc_enc_flush_avx.asm similarity index 99% rename from lib/avx/mb_mgr_aes128_cbc_enc_flush_avx.asm rename to lib/avx_t1/mb_mgr_aes128_cbc_enc_flush_avx.asm index 994ea9dfffbb62b64ad24b80040547d8bfcbf21f..c0a68a4f759b625b44e567ec64d21313410e9ce7 100644 --- a/lib/avx/mb_mgr_aes128_cbc_enc_flush_avx.asm +++ b/lib/avx_t1/mb_mgr_aes128_cbc_enc_flush_avx.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %ifndef AES_CBC_ENC_X8 @@ -116,7 +115,6 @@ endstruc ; arg 2 : job MKGLOBAL(FLUSH_JOB_AES_ENC,function,internal) FLUSH_JOB_AES_ENC: - mov rax, rsp sub rsp, STACK_size and rsp, -16 diff --git a/lib/avx/mb_mgr_aes128_cbc_enc_submit_avx.asm b/lib/avx_t1/mb_mgr_aes128_cbc_enc_submit_avx.asm similarity index 99% rename from lib/avx/mb_mgr_aes128_cbc_enc_submit_avx.asm rename to lib/avx_t1/mb_mgr_aes128_cbc_enc_submit_avx.asm index 
db9cb4e2bef15a50c9e130b620259319713ab876..e44dadebfa4e65ec3a3b165a3d33cf6a12f44027 100644 --- a/lib/avx/mb_mgr_aes128_cbc_enc_submit_avx.asm +++ b/lib/avx_t1/mb_mgr_aes128_cbc_enc_submit_avx.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" @@ -89,7 +88,6 @@ endstruc ; arg 2 : job MKGLOBAL(SUBMIT_JOB_AES_ENC,function,internal) SUBMIT_JOB_AES_ENC: - mov rax, rsp sub rsp, STACK_size and rsp, -16 diff --git a/lib/avx/mb_mgr_aes128_cbcs_1_9_flush_avx.asm b/lib/avx_t1/mb_mgr_aes128_cbcs_1_9_flush_avx.asm similarity index 99% rename from lib/avx/mb_mgr_aes128_cbcs_1_9_flush_avx.asm rename to lib/avx_t1/mb_mgr_aes128_cbcs_1_9_flush_avx.asm index 905d9215f0d9b111de82a52206d559f2e5786f82..85148570caf68bcfe6a5f5c13f41ca8101330159 100644 --- a/lib/avx/mb_mgr_aes128_cbcs_1_9_flush_avx.asm +++ b/lib/avx_t1/mb_mgr_aes128_cbcs_1_9_flush_avx.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %define NUM_LANES 8 diff --git a/lib/avx/mb_mgr_aes128_cbcs_1_9_submit_avx.asm b/lib/avx_t1/mb_mgr_aes128_cbcs_1_9_submit_avx.asm similarity index 99% rename from lib/avx/mb_mgr_aes128_cbcs_1_9_submit_avx.asm rename to lib/avx_t1/mb_mgr_aes128_cbcs_1_9_submit_avx.asm index 4f34da3020dddcb0aa9ba9b7856bbad17d00f313..a18c5dd34a300dcc474f8eb37975fb306ca5d096 100644 --- a/lib/avx/mb_mgr_aes128_cbcs_1_9_submit_avx.asm +++ b/lib/avx_t1/mb_mgr_aes128_cbcs_1_9_submit_avx.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" diff --git a/lib/avx/mb_mgr_aes128_ccm_auth_submit_flush_x8_avx.asm b/lib/avx_t1/mb_mgr_aes128_ccm_auth_submit_flush_x8_avx.asm similarity index 95% rename from lib/avx/mb_mgr_aes128_ccm_auth_submit_flush_x8_avx.asm rename to lib/avx_t1/mb_mgr_aes128_ccm_auth_submit_flush_x8_avx.asm index 7f9e26271ca74b67c8199153bcc6102e1b9126ff..fc82044ebc10fe9d55a685c196442b38ace51585 100644 --- a/lib/avx/mb_mgr_aes128_ccm_auth_submit_flush_x8_avx.asm +++ b/lib/avx_t1/mb_mgr_aes128_ccm_auth_submit_flush_x8_avx.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" %include "include/memcpy.asm" @@ -387,6 +386,23 @@ APPEND(skip_,I): vmovdqa [state + _aes_ccm_lens], ccm_lens ;; Find min length vphminposuw min_len_idx, ccm_lens + jmp %%_ccm_round + +%%_ccm_round_flush: + ;; This is identical to the above block but optimized for + ;; a repeat flush operation when keys etc. 
are already set + ;; - vphminposuw was already executed + ;; - good_lane is already known + ;; - copy input pointer from good lane to empty lanes + mov tmp2, [state + _aes_ccm_args_in + good_lane*8] +%assign I 0 +%rep 8 + cmp qword [state + _aes_ccm_job_in_lane + I*8], 0 + jne APPEND(skip2_,I) + mov [state + _aes_ccm_args_in + I*8], tmp2 +APPEND(skip2_,I): +%assign I (I+1) +%endrep %endif ; end FLUSH @@ -525,12 +541,17 @@ APPEND(skip_clear_,I): ; Reset NULL lane lens to UINT16_MAX %ifidn %%SUBMIT_FLUSH, FLUSH SET_NULL_JOB_LENS_TO_MAX ccm_lens, xtmp0, xtmp1, xtmp2, xtmp3, xtmp4 + mov good_lane, min_idx %endif XVPINSRW ccm_lens, xtmp0, tmp2, min_idx, tmp, scale_x16 vphminposuw min_len_idx, ccm_lens vmovdqa [state + _aes_ccm_lens], ccm_lens +%ifidn %%SUBMIT_FLUSH, FLUSH + jmp %%_ccm_round_flush +%else jmp %%_ccm_round +%endif %%_prepare_partial_block_to_auth: ; Check if partial block needs to be hashed @@ -560,7 +581,12 @@ APPEND(skip_clear_,I): vmovdqa [init_block_addr], xtmp0 mov [state + _aes_ccm_args_in + min_idx * 8], init_block_addr +%ifidn %%SUBMIT_FLUSH, FLUSH + mov good_lane, min_idx + jmp %%_ccm_round_flush +%else jmp %%_ccm_round +%endif %endmacro align 64 diff --git a/lib/avx/mb_mgr_aes128_cmac_submit_flush_x8_avx.asm b/lib/avx_t1/mb_mgr_aes128_cmac_submit_flush_x8_avx.asm similarity index 96% rename from lib/avx/mb_mgr_aes128_cmac_submit_flush_x8_avx.asm rename to lib/avx_t1/mb_mgr_aes128_cmac_submit_flush_x8_avx.asm index 3f862e9defd3164a05fb52be1af4dfcbde90f5ca..f3a8bdb49172994af271ba23f3fa004fe6e7b4a2 100644 --- a/lib/avx/mb_mgr_aes128_cmac_submit_flush_x8_avx.asm +++ b/lib/avx_t1/mb_mgr_aes128_cmac_submit_flush_x8_avx.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/memcpy.asm" %include "include/const.inc" @@ -293,6 +292,22 @@ APPEND(skip_,I): %endrep ;; Find min length vphminposuw xmm1, xmm0 + jmp %%_cmac_round + +%%_cmac_round_flush: + ;; - good lane already known + ;; - copy good_lane input pointer to empty lanes + ;; - lens updated and vphminposuw executed + mov tmp2, [state + _aes_cmac_args_in + good_lane*8] + xor tmp3, tmp3 +%assign I 0 +%rep 8 + cmp qword [state + _aes_cmac_job_in_lane + I*8], tmp3 + jne APPEND(skip2_,I) + mov [state + _aes_cmac_args_in + I*8], tmp2 +APPEND(skip2_,I): +%assign I (I+1) +%endrep %endif ; end FLUSH @@ -351,7 +366,12 @@ APPEND(skip_,I): lea m_last, [state + _aes_cmac_scratch + tmp3] mov [state + _aes_cmac_args_in + idx*8], m_last +%ifidn %%SUBMIT_FLUSH, SUBMIT jmp %%_cmac_round +%else + mov good_lane, idx + jmp %%_cmac_round_flush +%endif %%_copy_complete_digest: ; Job complete, copy digest to AT output diff --git a/lib/avx/mb_mgr_aes128_xcbc_flush_x8_avx.asm b/lib/avx_t1/mb_mgr_aes128_xcbc_flush_x8_avx.asm similarity index 99% rename from lib/avx/mb_mgr_aes128_xcbc_flush_x8_avx.asm rename to lib/avx_t1/mb_mgr_aes128_xcbc_flush_x8_avx.asm index 28e5452bac605fedb8c8eb16ed74c5ed6415ddab..57f10267fd67db85cec7cbd25ba1b40bc0187075 100644 --- a/lib/avx/mb_mgr_aes128_xcbc_flush_x8_avx.asm +++ b/lib/avx_t1/mb_mgr_aes128_xcbc_flush_x8_avx.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %ifndef AES_XCBC_X8 diff --git a/lib/avx/mb_mgr_aes128_xcbc_submit_x8_avx.asm b/lib/avx_t1/mb_mgr_aes128_xcbc_submit_x8_avx.asm similarity index 99% rename from 
lib/avx/mb_mgr_aes128_xcbc_submit_x8_avx.asm rename to lib/avx_t1/mb_mgr_aes128_xcbc_submit_x8_avx.asm index fe8a3e1d6264521ddacd409e3df7367f3d4ab78d..525d4d24926631e468073c2e9f85c6cc53164562 100644 --- a/lib/avx/mb_mgr_aes128_xcbc_submit_x8_avx.asm +++ b/lib/avx_t1/mb_mgr_aes128_xcbc_submit_x8_avx.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/memcpy.asm" diff --git a/lib/avx/mb_mgr_aes192_cbc_enc_flush_avx.asm b/lib/avx_t1/mb_mgr_aes192_cbc_enc_flush_avx.asm similarity index 96% rename from lib/avx/mb_mgr_aes192_cbc_enc_flush_avx.asm rename to lib/avx_t1/mb_mgr_aes192_cbc_enc_flush_avx.asm index 70d092f1f506808a60d5d01fb273624d0bd9343c..13797d301382fb58e6cd1a928495e352de7fe644 100644 --- a/lib/avx/mb_mgr_aes192_cbc_enc_flush_avx.asm +++ b/lib/avx_t1/mb_mgr_aes192_cbc_enc_flush_avx.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X8 aes_cbc_enc_192_x8 %define FLUSH_JOB_AES_ENC flush_job_aes192_enc_avx -%include "avx/mb_mgr_aes128_cbc_enc_flush_avx.asm" +%include "avx_t1/mb_mgr_aes128_cbc_enc_flush_avx.asm" diff --git a/lib/avx/mb_mgr_aes192_cbc_enc_submit_avx.asm b/lib/avx_t1/mb_mgr_aes192_cbc_enc_submit_avx.asm similarity index 96% rename from lib/avx/mb_mgr_aes192_cbc_enc_submit_avx.asm rename to lib/avx_t1/mb_mgr_aes192_cbc_enc_submit_avx.asm index 9089b116656ecabb8ac52059887899c0442f0bef..1dd9ce6cf74e1e082c89617abd53628b1dcb23fb 100644 --- a/lib/avx/mb_mgr_aes192_cbc_enc_submit_avx.asm +++ b/lib/avx_t1/mb_mgr_aes192_cbc_enc_submit_avx.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X8 aes_cbc_enc_192_x8 %define SUBMIT_JOB_AES_ENC submit_job_aes192_enc_avx -%include "avx/mb_mgr_aes128_cbc_enc_submit_avx.asm" +%include "avx_t1/mb_mgr_aes128_cbc_enc_submit_avx.asm" diff --git a/lib/avx/mb_mgr_aes256_cbc_enc_flush_avx.asm b/lib/avx_t1/mb_mgr_aes256_cbc_enc_flush_avx.asm similarity index 96% rename from lib/avx/mb_mgr_aes256_cbc_enc_flush_avx.asm rename to lib/avx_t1/mb_mgr_aes256_cbc_enc_flush_avx.asm index 68c108ba43a97119db13a7d6ec98b08c95690b1a..75e8504214f2f4a38268c109f1f46d0b58ac8064 100644 --- a/lib/avx/mb_mgr_aes256_cbc_enc_flush_avx.asm +++ b/lib/avx_t1/mb_mgr_aes256_cbc_enc_flush_avx.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X8 aes_cbc_enc_256_x8 %define FLUSH_JOB_AES_ENC flush_job_aes256_enc_avx -%include "avx/mb_mgr_aes128_cbc_enc_flush_avx.asm" +%include "avx_t1/mb_mgr_aes128_cbc_enc_flush_avx.asm" diff --git a/lib/avx/mb_mgr_aes256_cbc_enc_submit_avx.asm b/lib/avx_t1/mb_mgr_aes256_cbc_enc_submit_avx.asm similarity index 96% rename from lib/avx/mb_mgr_aes256_cbc_enc_submit_avx.asm rename to lib/avx_t1/mb_mgr_aes256_cbc_enc_submit_avx.asm index c6a26a8c317745f77ffa4c1bdb4c4d0f939d4423..8aa20d9dd6adbf812125923366f3e9325c288222 100644 --- a/lib/avx/mb_mgr_aes256_cbc_enc_submit_avx.asm +++ b/lib/avx_t1/mb_mgr_aes256_cbc_enc_submit_avx.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X8 aes_cbc_enc_256_x8 %define SUBMIT_JOB_AES_ENC submit_job_aes256_enc_avx -%include "avx/mb_mgr_aes128_cbc_enc_submit_avx.asm" +%include "avx_t1/mb_mgr_aes128_cbc_enc_submit_avx.asm" diff --git a/lib/avx/mb_mgr_aes256_ccm_auth_submit_flush_x8_avx.asm b/lib/avx_t1/mb_mgr_aes256_ccm_auth_submit_flush_x8_avx.asm similarity index 96% rename from lib/avx/mb_mgr_aes256_ccm_auth_submit_flush_x8_avx.asm rename to lib/avx_t1/mb_mgr_aes256_ccm_auth_submit_flush_x8_avx.asm index e30638627d23d57f95f0ce364cce1eac26f6a260..186f319f5e745a6d41096d951305ea728a3545fd 100644 --- 
a/lib/avx/mb_mgr_aes256_ccm_auth_submit_flush_x8_avx.asm +++ b/lib/avx_t1/mb_mgr_aes256_ccm_auth_submit_flush_x8_avx.asm @@ -32,4 +32,4 @@ %define FLUSH_JOB_AES_CCM_AUTH flush_job_aes256_ccm_auth_avx %endif -%include "avx/mb_mgr_aes128_ccm_auth_submit_flush_x8_avx.asm" +%include "avx_t1/mb_mgr_aes128_ccm_auth_submit_flush_x8_avx.asm" diff --git a/lib/avx/mb_mgr_aes256_cmac_submit_flush_x8_avx.asm b/lib/avx_t1/mb_mgr_aes256_cmac_submit_flush_x8_avx.asm similarity index 96% rename from lib/avx/mb_mgr_aes256_cmac_submit_flush_x8_avx.asm rename to lib/avx_t1/mb_mgr_aes256_cmac_submit_flush_x8_avx.asm index 29f826efc3a227845bbb786a4dbf1c9269fbd181..62aa60ef4297193bcd9a21fe12cc3dde4f2eb0b7 100644 --- a/lib/avx/mb_mgr_aes256_cmac_submit_flush_x8_avx.asm +++ b/lib/avx_t1/mb_mgr_aes256_cmac_submit_flush_x8_avx.asm @@ -29,4 +29,4 @@ %define SUBMIT_JOB_AES_CMAC_AUTH submit_job_aes256_cmac_auth_avx %define FLUSH_JOB_AES_CMAC_AUTH flush_job_aes256_cmac_auth_avx -%include "avx/mb_mgr_aes128_cmac_submit_flush_x8_avx.asm" +%include "avx_t1/mb_mgr_aes128_cmac_submit_flush_x8_avx.asm" diff --git a/lib/avx_t1/mb_mgr_avx.c b/lib/avx_t1/mb_mgr_avx.c new file mode 100644 index 0000000000000000000000000000000000000000..94ed6caaada7a7508e57cb39331326e4d0e2906c --- /dev/null +++ b/lib/avx_t1/mb_mgr_avx.c @@ -0,0 +1,100 @@ +/******************************************************************************* + Copyright (c) 2012-2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
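The AES-CCM and AES-CMAC out-of-order managers earlier in this diff gain a %%_ccm_round_flush / %%_cmac_round_flush path: on a repeat flush the good lane is already known, so empty lanes simply reuse its input pointer instead of re-scanning lengths (vphminposuw has already run). A C-level sketch of that lane-filling step, using a hypothetical structure layout in place of the assembly OOO manager fields:

```c
#include <stddef.h>

#define NUM_LANES 8

/* Hypothetical layout mirroring _aes_ccm_args_in / _aes_ccm_job_in_lane. */
struct ooo_mgr_sketch {
        const void *args_in[NUM_LANES];  /* per-lane input pointers      */
        void *job_in_lane[NUM_LANES];    /* NULL means the lane is empty */
};

/* Copy the good lane's input pointer into every empty lane, as the
 * repeat-flush path does with an unrolled compare-and-move sequence. */
static void fill_empty_lanes(struct ooo_mgr_sketch *m, const unsigned good_lane)
{
        const void *src = m->args_in[good_lane];
        unsigned i;

        for (i = 0; i < NUM_LANES; i++)
                if (m->job_in_lane[i] == NULL)
                        m->args_in[i] = src;
}
```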
+*******************************************************************************/ + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/cpu_feature.h" +#include "include/aesni_emu.h" +#include "include/error.h" +#include "include/arch_x86_64.h" + +IMB_DLL_LOCAL void +init_mb_mgr_avx_internal(IMB_MGR *state, const int reset_mgrs) +{ +#ifdef SAFE_PARAM + if (state == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); + return; + } +#endif + + if (!(state->features & IMB_FEATURE_AESNI)) { + fallback_no_aesni(state, 1); + return; + } + + /* reset error status */ + imb_set_errno(state, 0); + + state->features = cpu_feature_adjust(state->flags, + cpu_feature_detect()); + + if ((state->features & IMB_CPUFLAGS_AVX_T2) == + IMB_CPUFLAGS_AVX_T2) + init_mb_mgr_avx_t2_internal(state, reset_mgrs); + else + init_mb_mgr_avx_t1_internal(state, reset_mgrs); +} + +void +init_mb_mgr_avx(IMB_MGR *state) +{ + init_mb_mgr_avx_internal(state, 1); + + if (!self_test(state)) + imb_set_errno(state, IMB_ERR_SELFTEST); +} + +IMB_JOB *submit_job_avx(IMB_MGR *state) +{ + return IMB_SUBMIT_JOB(state); +} + +IMB_JOB *flush_job_avx(IMB_MGR *state) +{ + return IMB_FLUSH_JOB(state); +} + +uint32_t queue_size_avx(IMB_MGR *state) +{ + return IMB_QUEUE_SIZE(state); +} + +IMB_JOB *submit_job_nocheck_avx(IMB_MGR *state) +{ + return IMB_SUBMIT_JOB_NOCHECK(state); +} + +IMB_JOB *get_next_job_avx(IMB_MGR *state) +{ + return IMB_GET_NEXT_JOB(state); +} + +IMB_JOB *get_completed_job_avx(IMB_MGR *state) +{ + return IMB_GET_COMPLETED_JOB(state); +} diff --git a/lib/avx_t1/mb_mgr_avx_t1.c b/lib/avx_t1/mb_mgr_avx_t1.c new file mode 100644 index 0000000000000000000000000000000000000000..2ed82c0fccf3d7a86cffbc7ba2da9081e0129d77 --- /dev/null +++ b/lib/avx_t1/mb_mgr_avx_t1.c @@ -0,0 +1,482 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
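The new mb_mgr_avx.c above turns init_mb_mgr_avx() into a thin dispatcher: it adjusts and detects CPU features, hands off to the AVX T1 or T2 internal init, then runs the self test. A hedged usage sketch from the application side, assuming the library's public allocation and error APIs (alloc_mb_mgr, free_mb_mgr, imb_get_errno, imb_get_strerror):

```c
#include <stdio.h>
#include "ipsec-mb.h"

int main(void)
{
        IMB_MGR *mgr = alloc_mb_mgr(0);

        if (mgr == NULL)
                return 1;

        /* Selects the AVX T1 or T2 code paths internally and self-tests. */
        init_mb_mgr_avx(mgr);
        if (imb_get_errno(mgr) != 0) {
                printf("init failed: %s\n",
                       imb_get_strerror(imb_get_errno(mgr)));
                free_mb_mgr(mgr);
                return 1;
        }

        /* ... IMB_GET_NEXT_JOB() / IMB_SUBMIT_JOB() / IMB_FLUSH_JOB() ... */

        free_mb_mgr(mgr);
        return 0;
}
```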
+*******************************************************************************/ + +#include +#include +#include + +#define AVX + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/kasumi_interface.h" +#include "include/zuc_internal.h" +#include "include/snow3g.h" +#include "include/snow3g_submit.h" +#include "include/gcm.h" +#include "include/chacha20_poly1305.h" +#include "include/save_xmms.h" +#include "include/des.h" +#include "include/cpu_feature.h" +#include "include/aesni_emu.h" +#include "include/error.h" +#include "include/arch_sse_type1.h" /* snow3g */ +#include "include/arch_avx_type1.h" +#include "include/ooo_mgr_reset.h" + +#define SAVE_XMMS save_xmms_avx +#define RESTORE_XMMS restore_xmms_avx + +/* JOB API */ +#define SUBMIT_JOB submit_job_avx_t1 +#define FLUSH_JOB flush_job_avx_t1 +#define QUEUE_SIZE queue_size_avx_t1 +#define SUBMIT_JOB_NOCHECK submit_job_nocheck_avx_t1 +#define GET_NEXT_JOB get_next_job_avx_t1 +#define GET_COMPLETED_JOB get_completed_job_avx_t1 +#define GET_NEXT_BURST get_next_burst_avx_t1 +#define SUBMIT_BURST submit_burst_avx_t1 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_avx_t1 +#define FLUSH_BURST flush_burst_avx_t1 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_avx_t1 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_avx_t1 +#define SUBMIT_HASH_BURST submit_hash_burst_avx_t1 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_avx_t1 + +/* Hash */ +#define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_AVX +#define FLUSH_JOB_HASH FLUSH_JOB_HASH_AVX + +/* Cipher encrypt / decrypt */ +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_AVX +#define FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_AVX +#define SUBMIT_JOB_CIPHER_DEC SUBMIT_JOB_CIPHER_DEC_AVX + +/* AES-GCM */ +#define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_avx_gen2 +#define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_avx_gen2 +#define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_avx_gen2 +#define AES_GCM_ENC_IV_192 aes_gcm_enc_var_iv_192_avx_gen2 +#define AES_GCM_DEC_IV_256 aes_gcm_dec_var_iv_256_avx_gen2 +#define AES_GCM_ENC_IV_256 aes_gcm_enc_var_iv_256_avx_gen2 + +#define SUBMIT_JOB_AES_GCM_DEC submit_job_aes_gcm_dec_avx +#define SUBMIT_JOB_AES_GCM_ENC submit_job_aes_gcm_enc_avx + +/* AES-CBC */ +#define SUBMIT_JOB_AES_CBC_128_ENC submit_job_aes128_enc_avx +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_avx +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_avx + +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_avx +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_avx +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_avx + +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_avx +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_avx +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_avx + +#define AES_CBC_DEC_128 aes_cbc_dec_128_avx +#define AES_CBC_DEC_192 aes_cbc_dec_192_avx +#define AES_CBC_DEC_256 aes_cbc_dec_256_avx + +#define SUBMIT_JOB_AES128_DEC submit_job_aes128_dec_avx +#define SUBMIT_JOB_AES192_DEC submit_job_aes192_dec_avx +#define SUBMIT_JOB_AES256_DEC submit_job_aes256_dec_avx + +/* AES-CBCS */ +#define SUBMIT_JOB_AES128_CBCS_1_9_ENC submit_job_aes128_cbcs_1_9_enc_avx +#define FLUSH_JOB_AES128_CBCS_1_9_ENC flush_job_aes128_cbcs_1_9_enc_avx +#define SUBMIT_JOB_AES128_CBCS_1_9_DEC submit_job_aes128_cbcs_1_9_dec_avx +#define AES_CBCS_1_9_DEC_128 aes_cbcs_1_9_dec_128_avx + +/* AES-ECB */ +#define SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_avx +#define 
SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_avx +#define SUBMIT_JOB_AES_ECB_192_ENC submit_job_aes_ecb_192_enc_avx +#define SUBMIT_JOB_AES_ECB_192_DEC submit_job_aes_ecb_192_dec_avx +#define SUBMIT_JOB_AES_ECB_256_ENC submit_job_aes_ecb_256_enc_avx +#define SUBMIT_JOB_AES_ECB_256_DEC submit_job_aes_ecb_256_dec_avx + +#define AES_ECB_ENC_128 aes_ecb_enc_128_avx +#define AES_ECB_ENC_192 aes_ecb_enc_192_avx +#define AES_ECB_ENC_256 aes_ecb_enc_256_avx +#define AES_ECB_DEC_128 aes_ecb_dec_128_avx +#define AES_ECB_DEC_192 aes_ecb_dec_192_avx +#define AES_ECB_DEC_256 aes_ecb_dec_256_avx + +/* AES-CTR */ +#define AES_CTR_128 aes_cntr_128_avx +#define AES_CTR_192 aes_cntr_192_avx +#define AES_CTR_256 aes_cntr_256_avx +#define AES_CTR_128_BIT aes_cntr_bit_128_avx +#define AES_CTR_192_BIT aes_cntr_bit_192_avx +#define AES_CTR_256_BIT aes_cntr_bit_256_avx + +/* AES-CCM */ +#define AES_CNTR_CCM_128 aes_cntr_ccm_128_avx +#define AES_CNTR_CCM_256 aes_cntr_ccm_256_avx + +#define FLUSH_JOB_AES128_CCM_AUTH flush_job_aes128_ccm_auth_avx +#define SUBMIT_JOB_AES128_CCM_AUTH submit_job_aes128_ccm_auth_avx + +#define FLUSH_JOB_AES256_CCM_AUTH flush_job_aes256_ccm_auth_avx +#define SUBMIT_JOB_AES256_CCM_AUTH submit_job_aes256_ccm_auth_avx + +/* AES-CMAC */ +#define FLUSH_JOB_AES128_CMAC_AUTH flush_job_aes128_cmac_auth_avx +#define SUBMIT_JOB_AES128_CMAC_AUTH submit_job_aes128_cmac_auth_avx + +#define FLUSH_JOB_AES256_CMAC_AUTH flush_job_aes256_cmac_auth_avx +#define SUBMIT_JOB_AES256_CMAC_AUTH submit_job_aes256_cmac_auth_avx + +/* AES-CFB */ +#define AES_CFB_128_ONE aes_cfb_128_one_avx +#define AES_CFB_256_ONE aes_cfb_256_one_avx + +/* AES-XCBC */ +#define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_avx +#define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_avx + +/* PON */ +#define SUBMIT_JOB_PON_ENC submit_job_pon_enc_avx +#define SUBMIT_JOB_PON_DEC submit_job_pon_dec_avx +#define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_avx +#define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_avx + +/* SHA1/224/256/384/512 */ +#define SUBMIT_JOB_SHA1 submit_job_sha1_avx +#define FLUSH_JOB_SHA1 flush_job_sha1_avx +#define SUBMIT_JOB_SHA224 submit_job_sha224_avx +#define FLUSH_JOB_SHA224 flush_job_sha224_avx +#define SUBMIT_JOB_SHA256 submit_job_sha256_avx +#define FLUSH_JOB_SHA256 flush_job_sha256_avx +#define SUBMIT_JOB_SHA384 submit_job_sha384_avx +#define FLUSH_JOB_SHA384 flush_job_sha384_avx +#define SUBMIT_JOB_SHA512 submit_job_sha512_avx +#define FLUSH_JOB_SHA512 flush_job_sha512_avx + +/* HMAC-SHA1/224/256/384/512 */ +#define SUBMIT_JOB_HMAC submit_job_hmac_avx +#define FLUSH_JOB_HMAC flush_job_hmac_avx +#define SUBMIT_JOB_HMAC_SHA_224 submit_job_hmac_sha_224_avx +#define FLUSH_JOB_HMAC_SHA_224 flush_job_hmac_sha_224_avx +#define SUBMIT_JOB_HMAC_SHA_256 submit_job_hmac_sha_256_avx +#define FLUSH_JOB_HMAC_SHA_256 flush_job_hmac_sha_256_avx +#define SUBMIT_JOB_HMAC_SHA_384 submit_job_hmac_sha_384_avx +#define FLUSH_JOB_HMAC_SHA_384 flush_job_hmac_sha_384_avx +#define SUBMIT_JOB_HMAC_SHA_512 submit_job_hmac_sha_512_avx +#define FLUSH_JOB_HMAC_SHA_512 flush_job_hmac_sha_512_avx +#define SUBMIT_JOB_HMAC_MD5 submit_job_hmac_md5_avx +#define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_avx + +/* CHACHA20 & POLY1305 */ +#define SUBMIT_JOB_CHACHA20_ENC_DEC submit_job_chacha20_enc_dec_avx +#define SUBMIT_JOB_CHACHA20_POLY1305 aead_chacha20_poly1305_avx +#define SUBMIT_JOB_CHACHA20_POLY1305_SGL aead_chacha20_poly1305_sgl_avx +#define POLY1305_MAC poly1305_mac_scalar + +/* ZUC EEA3 & EIA3 */ +#define SUBMIT_JOB_ZUC_EEA3 
submit_job_zuc_eea3_avx +#define FLUSH_JOB_ZUC_EEA3 flush_job_zuc_eea3_avx +#define SUBMIT_JOB_ZUC_EIA3 submit_job_zuc_eia3_avx +#define FLUSH_JOB_ZUC_EIA3 flush_job_zuc_eia3_avx +#define SUBMIT_JOB_ZUC256_EEA3 submit_job_zuc256_eea3_avx +#define FLUSH_JOB_ZUC256_EEA3 flush_job_zuc256_eea3_avx +#define SUBMIT_JOB_ZUC256_EIA3 submit_job_zuc256_eia3_avx +#define FLUSH_JOB_ZUC256_EIA3 flush_job_zuc256_eia3_avx + +/* SNOW-V */ +#define SUBMIT_JOB_SNOW_V snow_v_avx +#define SUBMIT_JOB_SNOW_V_AEAD snow_v_aead_init_avx + +/* SNOW3G UE2 & UIA2 */ +static IMB_JOB * +submit_snow3g_uea2_job_avx_t1(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + if ((job->msg_len_to_cipher_in_bits & 7) || + (job->cipher_start_offset_in_bits & 7)) + return def_submit_snow3g_uea2_job(state, job); + + return submit_job_snow3g_uea2_sse(snow3g_uea2_ooo, job); +} + +static IMB_JOB * +flush_snow3g_uea2_job_avx_t1(IMB_MGR *state) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + return flush_job_snow3g_uea2_sse(snow3g_uea2_ooo); +} + +#define SUBMIT_JOB_SNOW3G_UEA2 submit_snow3g_uea2_job_avx_t1 +#define FLUSH_JOB_SNOW3G_UEA2 flush_snow3g_uea2_job_avx_t1 + +#define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_sse +#define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_sse + +/* AES-DOCSIS */ +#define ETHERNET_FCS ethernet_fcs_avx_local + +static void reset_ooo_mgrs(IMB_MGR *state) +{ + /* Init AES out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_ooo, 8); + ooo_mgr_aes_reset(state->aes192_ooo, 8); + ooo_mgr_aes_reset(state->aes256_ooo, 8); + + /* DOCSIS SEC BPI (AES CBC + AES CFB for partial block) + * uses same settings as AES CBC. + */ + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 8); + + /* Init ZUC out-of-order fields */ + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 4); + + /* Init HMAC/SHA1 out-of-order fields */ + ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, AVX_NUM_SHA1_LANES); + + /* Init HMAC/SHA224 out-of-order fields */ + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, + AVX_NUM_SHA256_LANES); + + /* Init HMAC/SHA256 out-of-order fields */ + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, + AVX_NUM_SHA256_LANES); + + /* Init HMAC/SHA384 out-of-order fields */ + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + AVX_NUM_SHA512_LANES); + + /* Init HMAC/SHA512 out-of-order fields */ + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + AVX_NUM_SHA512_LANES); + + /* Init HMAC/MD5 out-of-order fields */ + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, AVX_NUM_MD5_LANES); + + /* Init AES/XCBC OOO fields */ + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 8); + + /* Init AES-CCM auth out-of-order fields */ + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 8); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 8); + + /* Init AES-CMAC auth out-of-order fields */ + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 8); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 8); + + /* Init AES CBC-S out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 8); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, AVX_NUM_SHA1_LANES); + + /* Init SHA224 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_224_ooo, 
AVX_NUM_SHA256_LANES); + + /* Init SHA256 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, AVX_NUM_SHA256_LANES); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, AVX_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, AVX_NUM_SHA512_LANES); + + /* Init SNOW3G-UEA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 4); + + /* Init SNOW3G-UIA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 4); +} + +IMB_DLL_LOCAL void +init_mb_mgr_avx_t1_internal(IMB_MGR *state, const int reset_mgrs) +{ + /* Check if CPU flags needed for AVX interface are present */ + if ((state->features & IMB_CPUFLAGS_AVX) != IMB_CPUFLAGS_AVX) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + + /* Set architecture for future checks */ + state->used_arch = (uint32_t) IMB_ARCH_AVX; + + if (reset_mgrs) { + reset_ooo_mgrs(state); + + /* Init "in order" components */ + state->next_job = 0; + state->earliest_job = -1; + } + + /* set handlers */ + state->get_next_job = GET_NEXT_JOB; + state->submit_job = SUBMIT_JOB; + state->submit_job_nocheck = SUBMIT_JOB_NOCHECK; + state->get_completed_job = GET_COMPLETED_JOB; + state->flush_job = FLUSH_JOB; + state->queue_size = QUEUE_SIZE; + state->get_next_burst = GET_NEXT_BURST; + state->submit_burst = SUBMIT_BURST; + state->submit_burst_nocheck= SUBMIT_BURST_NOCHECK; + state->flush_burst = FLUSH_BURST; + state->submit_cipher_burst = SUBMIT_CIPHER_BURST; + state->submit_cipher_burst_nocheck = SUBMIT_CIPHER_BURST_NOCHECK; + state->submit_hash_burst = SUBMIT_HASH_BURST; + state->submit_hash_burst_nocheck = SUBMIT_HASH_BURST_NOCHECK; + + state->keyexp_128 = aes_keyexp_128_avx; + state->keyexp_192 = aes_keyexp_192_avx; + state->keyexp_256 = aes_keyexp_256_avx; + + state->cmac_subkey_gen_128 = aes_cmac_subkey_gen_avx; + state->cmac_subkey_gen_256 = aes_cmac_256_subkey_gen_avx; + + state->xcbc_keyexp = aes_xcbc_expand_key_avx; + state->des_key_sched = des_key_schedule; + + state->sha1_one_block = sha1_one_block_avx; + state->sha1 = sha1_avx; + state->sha224_one_block = sha224_one_block_avx; + state->sha224 = sha224_avx; + state->sha256_one_block = sha256_one_block_avx; + state->sha256 = sha256_avx; + state->sha384_one_block = sha384_one_block_avx; + state->sha384 = sha384_avx; + state->sha512_one_block = sha512_one_block_avx; + state->sha512 = sha512_avx; + state->md5_one_block = md5_one_block_avx; + + state->aes128_cfb_one = aes_cfb_128_one_avx; + + state->eea3_1_buffer = zuc_eea3_1_buffer_avx; + state->eea3_4_buffer = zuc_eea3_4_buffer_avx; + state->eea3_n_buffer = zuc_eea3_n_buffer_avx; + state->eia3_1_buffer = zuc_eia3_1_buffer_avx; + state->eia3_n_buffer = zuc_eia3_n_buffer_avx; + + state->f8_1_buffer = kasumi_f8_1_buffer_avx; + state->f8_1_buffer_bit = kasumi_f8_1_buffer_bit_avx; + state->f8_2_buffer = kasumi_f8_2_buffer_avx; + state->f8_3_buffer = kasumi_f8_3_buffer_avx; + state->f8_4_buffer = kasumi_f8_4_buffer_avx; + state->f8_n_buffer = kasumi_f8_n_buffer_avx; + state->f9_1_buffer = kasumi_f9_1_buffer_avx; + state->f9_1_buffer_user = kasumi_f9_1_buffer_user_avx; + state->kasumi_init_f8_key_sched = kasumi_init_f8_key_sched_avx; + state->kasumi_init_f9_key_sched = kasumi_init_f9_key_sched_avx; + state->kasumi_key_sched_size = kasumi_key_sched_size_avx; + + state->snow3g_f8_1_buffer_bit = snow3g_f8_1_buffer_bit_avx; + state->snow3g_f8_1_buffer = snow3g_f8_1_buffer_avx; + state->snow3g_f8_2_buffer = 
snow3g_f8_2_buffer_avx; + state->snow3g_f8_4_buffer = snow3g_f8_4_buffer_avx; + state->snow3g_f8_8_buffer = snow3g_f8_8_buffer_avx; + state->snow3g_f8_n_buffer = snow3g_f8_n_buffer_avx; + state->snow3g_f8_8_buffer_multikey = snow3g_f8_8_buffer_multikey_avx; + state->snow3g_f8_n_buffer_multikey = snow3g_f8_n_buffer_multikey_avx; + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_avx; + state->snow3g_init_key_sched = snow3g_init_key_sched_avx; + state->snow3g_key_sched_size = snow3g_key_sched_size_avx; + + state->hec_32 = hec_32_avx; + state->hec_64 = hec_64_avx; + + state->crc32_ethernet_fcs = ethernet_fcs_avx; + state->crc16_x25 = crc16_x25_avx; + state->crc32_sctp = crc32_sctp_avx; + state->crc24_lte_a = crc24_lte_a_avx; + state->crc24_lte_b = crc24_lte_b_avx; + state->crc16_fp_data = crc16_fp_data_avx; + state->crc11_fp_header = crc11_fp_header_avx; + state->crc7_fp_header = crc7_fp_header_avx; + state->crc10_iuup_data = crc10_iuup_data_avx; + state->crc6_iuup_header = crc6_iuup_header_avx; + state->crc32_wimax_ofdma_data = crc32_wimax_ofdma_data_avx; + state->crc8_wimax_ofdma_hcs = crc8_wimax_ofdma_hcs_avx; + + state->chacha20_poly1305_init = init_chacha20_poly1305_avx; + state->chacha20_poly1305_enc_update = update_enc_chacha20_poly1305_avx; + state->chacha20_poly1305_dec_update = update_dec_chacha20_poly1305_avx; + state->chacha20_poly1305_finalize = finalize_chacha20_poly1305_avx; + + state->gcm128_enc = aes_gcm_enc_128_avx_gen2; + state->gcm192_enc = aes_gcm_enc_192_avx_gen2; + state->gcm256_enc = aes_gcm_enc_256_avx_gen2; + state->gcm128_dec = aes_gcm_dec_128_avx_gen2; + state->gcm192_dec = aes_gcm_dec_192_avx_gen2; + state->gcm256_dec = aes_gcm_dec_256_avx_gen2; + state->gcm128_init = aes_gcm_init_128_avx_gen2; + state->gcm192_init = aes_gcm_init_192_avx_gen2; + state->gcm256_init = aes_gcm_init_256_avx_gen2; + state->gcm128_init_var_iv = aes_gcm_init_var_iv_128_avx_gen2; + state->gcm192_init_var_iv = aes_gcm_init_var_iv_192_avx_gen2; + state->gcm256_init_var_iv = aes_gcm_init_var_iv_256_avx_gen2; + state->gcm128_enc_update = aes_gcm_enc_128_update_avx_gen2; + state->gcm192_enc_update = aes_gcm_enc_192_update_avx_gen2; + state->gcm256_enc_update = aes_gcm_enc_256_update_avx_gen2; + state->gcm128_dec_update = aes_gcm_dec_128_update_avx_gen2; + state->gcm192_dec_update = aes_gcm_dec_192_update_avx_gen2; + state->gcm256_dec_update = aes_gcm_dec_256_update_avx_gen2; + state->gcm128_enc_finalize = aes_gcm_enc_128_finalize_avx_gen2; + state->gcm192_enc_finalize = aes_gcm_enc_192_finalize_avx_gen2; + state->gcm256_enc_finalize = aes_gcm_enc_256_finalize_avx_gen2; + state->gcm128_dec_finalize = aes_gcm_dec_128_finalize_avx_gen2; + state->gcm192_dec_finalize = aes_gcm_dec_192_finalize_avx_gen2; + state->gcm256_dec_finalize = aes_gcm_dec_256_finalize_avx_gen2; + state->gcm128_precomp = aes_gcm_precomp_128_avx_gen2; + state->gcm192_precomp = aes_gcm_precomp_192_avx_gen2; + state->gcm256_precomp = aes_gcm_precomp_256_avx_gen2; + state->gcm128_pre = aes_gcm_pre_128_avx_gen2; + state->gcm192_pre = aes_gcm_pre_192_avx_gen2; + state->gcm256_pre = aes_gcm_pre_256_avx_gen2; + + state->ghash = ghash_avx_gen2; + state->ghash_pre = ghash_pre_avx_gen2; + + state->gmac128_init = imb_aes_gmac_init_128_avx_gen2; + state->gmac192_init = imb_aes_gmac_init_192_avx_gen2; + state->gmac256_init = imb_aes_gmac_init_256_avx_gen2; + state->gmac128_update = imb_aes_gmac_update_128_avx_gen2; + state->gmac192_update = imb_aes_gmac_update_192_avx_gen2; + state->gmac256_update = imb_aes_gmac_update_256_avx_gen2; + 
state->gmac128_finalize = imb_aes_gmac_finalize_128_avx_gen2; + state->gmac192_finalize = imb_aes_gmac_finalize_192_avx_gen2; + state->gmac256_finalize = imb_aes_gmac_finalize_256_avx_gen2; +} + +#include "mb_mgr_code.h" diff --git a/lib/avx/mb_mgr_hmac_md5_flush_avx.asm b/lib/avx_t1/mb_mgr_hmac_md5_flush_avx.asm similarity index 100% rename from lib/avx/mb_mgr_hmac_md5_flush_avx.asm rename to lib/avx_t1/mb_mgr_hmac_md5_flush_avx.asm diff --git a/lib/avx/mb_mgr_hmac_md5_submit_avx.asm b/lib/avx_t1/mb_mgr_hmac_md5_submit_avx.asm similarity index 100% rename from lib/avx/mb_mgr_hmac_md5_submit_avx.asm rename to lib/avx_t1/mb_mgr_hmac_md5_submit_avx.asm diff --git a/lib/avx/mb_mgr_hmac_sha1_flush_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha1_flush_avx.asm similarity index 100% rename from lib/avx/mb_mgr_hmac_sha1_flush_avx.asm rename to lib/avx_t1/mb_mgr_hmac_sha1_flush_avx.asm diff --git a/lib/avx/mb_mgr_hmac_sha1_submit_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha1_submit_avx.asm similarity index 100% rename from lib/avx/mb_mgr_hmac_sha1_submit_avx.asm rename to lib/avx_t1/mb_mgr_hmac_sha1_submit_avx.asm diff --git a/lib/avx/mb_mgr_hmac_sha224_flush_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha224_flush_avx.asm similarity index 96% rename from lib/avx/mb_mgr_hmac_sha224_flush_avx.asm rename to lib/avx_t1/mb_mgr_hmac_sha224_flush_avx.asm index 2f130535d8e6ef9a865d7871744fbb8d1b5ef193..1efdcea168061a0c97b8f3b402964ea77b1c1741 100644 --- a/lib/avx/mb_mgr_hmac_sha224_flush_avx.asm +++ b/lib/avx_t1/mb_mgr_hmac_sha224_flush_avx.asm @@ -28,4 +28,4 @@ %define FUNC flush_job_hmac_sha_224_avx %define SHA224 -%include "avx/mb_mgr_hmac_sha256_flush_avx.asm" +%include "avx_t1/mb_mgr_hmac_sha256_flush_avx.asm" diff --git a/lib/avx/mb_mgr_hmac_sha224_submit_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha224_submit_avx.asm similarity index 96% rename from lib/avx/mb_mgr_hmac_sha224_submit_avx.asm rename to lib/avx_t1/mb_mgr_hmac_sha224_submit_avx.asm index 2895957aa4f8a49b61c4133961eea719e62e5c3e..d26a0ab641c1d0bd908f0e9303dee47355f72585 100644 --- a/lib/avx/mb_mgr_hmac_sha224_submit_avx.asm +++ b/lib/avx_t1/mb_mgr_hmac_sha224_submit_avx.asm @@ -28,4 +28,4 @@ %define FUNC submit_job_hmac_sha_224_avx %define SHA224 -%include "avx/mb_mgr_hmac_sha256_submit_avx.asm" +%include "avx_t1/mb_mgr_hmac_sha256_submit_avx.asm" diff --git a/lib/avx/mb_mgr_hmac_sha256_flush_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha256_flush_avx.asm similarity index 100% rename from lib/avx/mb_mgr_hmac_sha256_flush_avx.asm rename to lib/avx_t1/mb_mgr_hmac_sha256_flush_avx.asm diff --git a/lib/avx/mb_mgr_hmac_sha256_submit_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha256_submit_avx.asm similarity index 100% rename from lib/avx/mb_mgr_hmac_sha256_submit_avx.asm rename to lib/avx_t1/mb_mgr_hmac_sha256_submit_avx.asm diff --git a/lib/avx/mb_mgr_hmac_sha384_flush_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha384_flush_avx.asm similarity index 97% rename from lib/avx/mb_mgr_hmac_sha384_flush_avx.asm rename to lib/avx_t1/mb_mgr_hmac_sha384_flush_avx.asm index cdf2e83645677d1e59aa10fca6c45e68e3269540..9ee3c12431efded713bea3d4f4180f1de0408e2e 100644 --- a/lib/avx/mb_mgr_hmac_sha384_flush_avx.asm +++ b/lib/avx_t1/mb_mgr_hmac_sha384_flush_avx.asm @@ -28,4 +28,4 @@ %define FUNC flush_job_hmac_sha_384_avx %define SHA_X_DIGEST_SIZE 384 -%include "avx/mb_mgr_hmac_sha512_flush_avx.asm" +%include "avx_t1/mb_mgr_hmac_sha512_flush_avx.asm" diff --git a/lib/avx/mb_mgr_hmac_sha384_submit_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha384_submit_avx.asm similarity index 96% rename from lib/avx/mb_mgr_hmac_sha384_submit_avx.asm 
rename to lib/avx_t1/mb_mgr_hmac_sha384_submit_avx.asm index fa8dc5b1bddce2701f9c39bd2adab301e04a2d5c..70a90ba8290d8226793037e65f571f49c298247b 100644 --- a/lib/avx/mb_mgr_hmac_sha384_submit_avx.asm +++ b/lib/avx_t1/mb_mgr_hmac_sha384_submit_avx.asm @@ -28,4 +28,4 @@ %define FUNC submit_job_hmac_sha_384_avx %define SHA_X_DIGEST_SIZE 384 -%include "avx/mb_mgr_hmac_sha512_submit_avx.asm" +%include "avx_t1/mb_mgr_hmac_sha512_submit_avx.asm" diff --git a/lib/avx/mb_mgr_hmac_sha512_flush_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha512_flush_avx.asm similarity index 100% rename from lib/avx/mb_mgr_hmac_sha512_flush_avx.asm rename to lib/avx_t1/mb_mgr_hmac_sha512_flush_avx.asm diff --git a/lib/avx/mb_mgr_hmac_sha512_submit_avx.asm b/lib/avx_t1/mb_mgr_hmac_sha512_submit_avx.asm similarity index 100% rename from lib/avx/mb_mgr_hmac_sha512_submit_avx.asm rename to lib/avx_t1/mb_mgr_hmac_sha512_submit_avx.asm diff --git a/lib/avx/mb_mgr_zuc_submit_flush_avx.asm b/lib/avx_t1/mb_mgr_zuc_submit_flush_avx.asm similarity index 91% rename from lib/avx/mb_mgr_zuc_submit_flush_avx.asm rename to lib/avx_t1/mb_mgr_zuc_submit_flush_avx.asm index 9a519d1887ea55d139389be27bc55b1810ff14c0..92fb7208040bc3a16e6ef0eb44d5a813298c117c 100644 --- a/lib/avx/mb_mgr_zuc_submit_flush_avx.asm +++ b/lib/avx_t1/mb_mgr_zuc_submit_flush_avx.asm @@ -31,6 +31,7 @@ %include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" +%include "include/clear_regs.asm" %define SUBMIT_JOB_ZUC128_EEA3 submit_job_zuc_eea3_avx %define FLUSH_JOB_ZUC128_EEA3 flush_job_zuc_eea3_avx @@ -104,13 +105,15 @@ extern asm_ZucCipher_4_avx %define arg4 rcx %define arg5 r8 %define arg6 r9 +%define arg7 qword [rsp] %else %define arg1 rcx %define arg2 rdx %define arg3 r8 %define arg4 r9 -%define arg5 [rsp + 32] -%define arg6 [rsp + 40] +%define arg5 qword [rsp + 32] +%define arg6 qword [rsp + 40] +%define arg7 qword [rsp + 48] %endif %define state arg1 @@ -255,12 +258,14 @@ mksection .text ; Read and write next byte mov al, [tmp + 16] mov [state + _zuc_args_IV + lane + 16], al - ; Read next 6 bytes and write as 8 bytes - movzx DWORD(tmp2), word [tmp + 17] - mov DWORD(tmp3), [tmp + 19] - shl tmp2, 32 - or tmp2, tmp3 + ; Read last 8 bytes and keep only the last 6 bytes + mov tmp2, [tmp + 15] + mov tmp3, 0x0000ffffffffffff + bswap tmp2 + and tmp2, tmp3 ; last 6 bytes of IV + ; Expand 6 bytes to 8 bytes and write out EXPAND_FROM_6_TO_8_BYTES tmp2, tmp, tmp3 + bswap tmp2 mov [state + _zuc_args_IV + lane + 17], tmp2 jmp %%_iv_read @@ -323,29 +328,20 @@ mksection .text %assign I (I + 1) %endrep - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 24 bytes for 3 parameters - sub rsp, 24 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_args_keys] lea arg2, [r12 + _zuc_args_IV] lea arg3, [r12 + _zuc_state] -%if %%KEY_SIZE == 256 - ;; Setting "tag size" to 2 in case of ciphering - ;; (dummy size, just for constant selecion at Initialization) - mov arg4, 2 -%endif %if %%KEY_SIZE == 128 call asm_ZucInitialization_4_avx %else + mov arg5, 0 ; Tag size = 0, arg4 not used call asm_Zuc256Initialization_4_avx %endif -%ifndef LINUX - add rsp, 24 -%endif + RESTORE_STACK_SPACE 5 cmp byte [r12 + _zuc_init_not_done], 0x0f ; Init done for all lanes je %%skip_submit_restoring_state @@ -385,11 +381,8 @@ mksection .text %endif mov byte [r12 + _zuc_init_not_done], 0 ; Init done for all lanes - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 40 -%endif + 
RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_state] lea arg2, [r12 + _zuc_args_in] lea arg3, [r12 + _zuc_args_out] @@ -398,9 +391,7 @@ mksection .text call asm_ZucCipher_4_avx -%ifndef LINUX - add rsp, 40 -%endif + RESTORE_STACK_SPACE 5 mov state, [rsp + _gpr_save + 8*8] mov job, [rsp + _gpr_save + 8*9] @@ -423,7 +414,9 @@ mksection .text %endif %%return_submit_eea3: - +%ifdef SAFE_DATA + clear_all_xmms_avx_asm +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -537,29 +530,21 @@ APPEND(%%skip_eea3_,I): %assign I (I + 1) %endrep - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 24 bytes for 3 parameters - sub rsp, 24 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_args_keys] lea arg2, [r12 + _zuc_args_IV] lea arg3, [r12 + _zuc_state] -%if %%KEY_SIZE == 256 - ;; Setting "tag size" to 2 in case of ciphering - ;; (dummy size, just for constant selecion at Initialization) - mov arg4, 2 -%endif %if %%KEY_SIZE == 128 call asm_ZucInitialization_4_avx %else + mov arg5, 0 ; Tag size = 0, arg4 not used call asm_Zuc256Initialization_4_avx %endif -%ifndef LINUX - add rsp, 24 -%endif + RESTORE_STACK_SPACE 5 + cmp word [r12 + _zuc_init_not_done], 0x0f ; Init done for all lanes je %%skip_flush_restoring_state @@ -617,11 +602,8 @@ APPEND3(%%skip_eea3_copy_,I,J): vmovdqa [r12 + _zuc_state + 16*I], xmm1 ; Save new state %assign I (I+1) %endrep - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 40 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_state] lea arg2, [r12 + _zuc_args_in] lea arg3, [r12 + _zuc_args_out] @@ -630,9 +612,8 @@ APPEND3(%%skip_eea3_copy_,I,J): call asm_ZucCipher_4_avx -%ifndef LINUX - add rsp, 40 -%endif + RESTORE_STACK_SPACE 5 + mov state, [rsp + _gpr_save + 8*8] ; Clear ZUC state of the lane that is returned and NULL lanes @@ -665,7 +646,9 @@ APPEND3(%%skip_eea3_copy_,I,J): SHIFT_GP 1, idx, tmp3, tmp4, left or [state + _zuc_unused_lane_bitmask], BYTE(tmp3) %%return_flush_eea3: - +%ifdef SAFE_DATA + clear_all_xmms_avx_asm +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -711,8 +694,9 @@ MKGLOBAL(FLUSH_JOB_ZUC256_EEA3,function,internal) FLUSH_JOB_ZUC256_EEA3: FLUSH_JOB_ZUC_EEA3 256 -%macro SUBMIT_JOB_ZUC_EIA3 1 +%macro SUBMIT_JOB_ZUC_EIA3 2 %define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) +%define %%TAG_SIZE %2 ; [constant] Tag size (4, 8 or 16) ; idx needs to be in rbp %define len rbp @@ -765,12 +749,14 @@ FLUSH_JOB_ZUC256_EEA3: ; Read and write next byte mov al, [tmp + 16] mov [state + _zuc_args_IV + lane + 16], al - ; Read next 6 bytes and write as 8 bytes - movzx DWORD(tmp2), word [tmp + 17] - mov DWORD(tmp3), [tmp + 19] - shl tmp2, 32 - or tmp2, tmp3 + ; Read last 8 bytes and keep only the last 6 bytes + mov tmp2, [tmp + 15] + mov tmp3, 0x0000ffffffffffff + bswap tmp2 + and tmp2, tmp3 ; last 6 bytes of IV + ; Expand 6 bytes to 8 bytes and write out EXPAND_FROM_6_TO_8_BYTES tmp2, tmp, tmp3 + bswap tmp2 mov [state + _zuc_args_IV + lane + 17], tmp2 jmp %%_iv_read @@ -817,11 +803,12 @@ FLUSH_JOB_ZUC256_EEA3: ; to pass parameter to next function mov r11, state - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 48 bytes for 6 parameters (already aligned to 16 bytes) - sub rsp, 48 +%if %%KEY_SIZE == 128 + RESERVE_STACK_SPACE 6 +%else ; %%KEY_SIZE == 256 + RESERVE_STACK_SPACE 7 %endif + lea arg1, [r11 
+ _zuc_args_keys] lea arg2, [r11 + _zuc_args_IV] lea arg3, [r11 + _zuc_args_in] @@ -835,6 +822,9 @@ FLUSH_JOB_ZUC256_EEA3: lea r12, [r11 + _zuc_job_in_lane] mov arg6, r12 %endif +%if %%KEY_SIZE == 256 + mov arg7, %%TAG_SIZE +%endif %if %%KEY_SIZE == 128 call zuc_eia3_4_buffer_job_avx @@ -842,8 +832,10 @@ FLUSH_JOB_ZUC256_EEA3: call zuc256_eia3_4_buffer_job_avx %endif -%ifndef LINUX - add rsp, 48 +%if %%KEY_SIZE == 128 + RESTORE_STACK_SPACE 6 +%else ;; %%KEY_SIZE == 256 + RESTORE_STACK_SPACE 7 %endif mov state, [rsp + _gpr_save + 8*8] mov job, [rsp + _gpr_save + 8*9] @@ -864,7 +856,9 @@ FLUSH_JOB_ZUC256_EEA3: mov [state + _zuc_unused_lanes], unused_lanes %%return_submit_eia3: - +%ifdef SAFE_DATA + clear_all_xmms_avx_asm +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -877,15 +871,18 @@ FLUSH_JOB_ZUC256_EEA3: %endif mov rsp, [rsp + _rsp_save] ; original SP - ret + jmp %%exit_submit_eia3 %%return_null_submit_eia3: xor job_rax, job_rax jmp %%return_submit_eia3 + +%%exit_submit_eia3: %endmacro -%macro FLUSH_JOB_ZUC_EIA3 1 +%macro FLUSH_JOB_ZUC_EIA3 2 %define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) +%define %%TAG_SIZE %2 ; [constant] Tag size (4, 8 or 16) %define unused_lanes rbx %define tmp1 rbx @@ -971,9 +968,10 @@ APPEND(%%skip_eia3_,I): ; to pass parameter to next function mov r11, state -%ifndef LINUX - ;; 48 bytes for 6 parameters (already aligned to 16 bytes) - sub rsp, 48 +%if %%KEY_SIZE == 128 + RESERVE_STACK_SPACE 6 +%else ; %%KEY_SIZE == 256 + RESERVE_STACK_SPACE 7 %endif lea arg1, [r11 + _zuc_args_keys] lea arg2, [r11 + _zuc_args_IV] @@ -988,6 +986,9 @@ APPEND(%%skip_eia3_,I): lea r12, [r11 + _zuc_job_in_lane] mov arg6, r12 %endif +%if %%KEY_SIZE == 256 + mov arg7, %%TAG_SIZE +%endif %if %%KEY_SIZE == 128 call zuc_eia3_4_buffer_job_avx @@ -995,8 +996,10 @@ APPEND(%%skip_eia3_,I): call zuc256_eia3_4_buffer_job_avx %endif -%ifndef LINUX - add rsp, 48 +%if %%KEY_SIZE == 128 + RESTORE_STACK_SPACE 6 +%else ;; %%KEY_SIZE == 256 + RESTORE_STACK_SPACE 7 %endif mov tmp5, [rsp + _null_len_save] @@ -1018,7 +1021,9 @@ APPEND(%%skip_eia3_,I): mov [state + _zuc_unused_lanes], unused_lanes %%return_flush_eia3: - +%ifdef SAFE_DATA + clear_all_xmms_avx_asm +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -1031,11 +1036,13 @@ APPEND(%%skip_eia3_,I): %endif mov rsp, [rsp + _rsp_save] ; original SP - ret + jmp %%exit_flush_eia3 %%return_null_flush_eia3: xor job_rax, job_rax jmp %%return_flush_eia3 + +%%exit_flush_eia3: %endmacro ; JOB* SUBMIT_JOB_ZUC128_EIA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) @@ -1043,25 +1050,60 @@ APPEND(%%skip_eia3_,I): ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC128_EIA3,function,internal) SUBMIT_JOB_ZUC128_EIA3: - SUBMIT_JOB_ZUC_EIA3 128 + SUBMIT_JOB_ZUC_EIA3 128, 4 + ret -; JOB* SUBMIT_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) +; JOB* SUBMIT_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job, +; const uint64_t tag_sz) ; arg 1 : state ; arg 2 : job +; arg 3 : tag size (4, 8 or 16 bytes) MKGLOBAL(SUBMIT_JOB_ZUC256_EIA3,function,internal) SUBMIT_JOB_ZUC256_EIA3: - SUBMIT_JOB_ZUC_EIA3 256 + cmp arg3, 8 + je submit_tag_8B + jb submit_tag_4B + + ; Fall-through for 16-byte tag +submit_tag_16B: + SUBMIT_JOB_ZUC_EIA3 256, 16 + ret +submit_tag_8B: + SUBMIT_JOB_ZUC_EIA3 256, 8 + ret +submit_tag_4B: + SUBMIT_JOB_ZUC_EIA3 256, 4 + ret ; JOB* FLUSH_JOB_ZUC128_EIA3(MB_MGR_ZUC_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_ZUC128_EIA3,function,internal) 
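SUBMIT_JOB_ZUC256_EIA3 and FLUSH_JOB_ZUC256_EIA3 in this hunk now take the tag size as an extra argument and branch to a macro instantiation for 4, 8 or 16 bytes. A C analogue of that dispatch, with hypothetical per-tag-size helpers standing in for the macro expansions:

```c
#include <stdint.h>
#include "ipsec-mb.h"
#include "include/ipsec_ooo_mgr.h"      /* MB_MGR_ZUC_OOO */

/* Hypothetical stand-ins for the three SUBMIT_JOB_ZUC_EIA3 256 instantiations. */
IMB_JOB *submit_zuc256_eia3_tag4(MB_MGR_ZUC_OOO *state, IMB_JOB *job);
IMB_JOB *submit_zuc256_eia3_tag8(MB_MGR_ZUC_OOO *state, IMB_JOB *job);
IMB_JOB *submit_zuc256_eia3_tag16(MB_MGR_ZUC_OOO *state, IMB_JOB *job);

/* Mirrors the assembly: cmp tag_sz, 8 / je -> 8 B, jb -> 4 B, else 16 B. */
static IMB_JOB *
submit_job_zuc256_eia3_sketch(MB_MGR_ZUC_OOO *state, IMB_JOB *job,
                              const uint64_t tag_sz)
{
        if (tag_sz == 8)
                return submit_zuc256_eia3_tag8(state, job);
        if (tag_sz < 8)
                return submit_zuc256_eia3_tag4(state, job);
        return submit_zuc256_eia3_tag16(state, job);
}
```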
FLUSH_JOB_ZUC128_EIA3: - FLUSH_JOB_ZUC_EIA3 128 + FLUSH_JOB_ZUC_EIA3 128, 4 + ret -; JOB* FLUSH_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state) +; JOB* FLUSH_JOB_ZUC256_EIA3(MB_MGR_ZUC_OOO *state, +; const uint64_t tag_sz) ; arg 1 : state +; arg 2 : tag size (4, 8 or 16 bytes) MKGLOBAL(FLUSH_JOB_ZUC256_EIA3,function,internal) FLUSH_JOB_ZUC256_EIA3: - FLUSH_JOB_ZUC_EIA3 256 + endbranch64 + cmp arg2, 8 + je flush_tag_8B + jb flush_tag_4B + + ; Fall-through for 16-byte tag +flush_tag_16B: + FLUSH_JOB_ZUC_EIA3 256, 16 + ret + +flush_tag_8B: + FLUSH_JOB_ZUC_EIA3 256, 8 + ret + +flush_tag_4B: + FLUSH_JOB_ZUC_EIA3 256, 4 + ret mksection stack-noexec diff --git a/lib/avx/md5_x4x2_avx.asm b/lib/avx_t1/md5_x4x2_avx.asm similarity index 100% rename from lib/avx/md5_x4x2_avx.asm rename to lib/avx_t1/md5_x4x2_avx.asm diff --git a/lib/avx/memcpy_avx.asm b/lib/avx_t1/memcpy_avx.asm similarity index 100% rename from lib/avx/memcpy_avx.asm rename to lib/avx_t1/memcpy_avx.asm diff --git a/lib/avx/pon_by8_avx.asm b/lib/avx_t1/pon_by8_avx.asm similarity index 96% rename from lib/avx/pon_by8_avx.asm rename to lib/avx_t1/pon_by8_avx.asm index fe6dbf323c9be2a0008358f0d26f1dd007b74aba..74f2f27d80a3f4c60b6226883aa5f353bf435e90 100644 --- a/lib/avx/pon_by8_avx.asm +++ b/lib/avx_t1/pon_by8_avx.asm @@ -247,7 +247,7 @@ mksection .text ;;; ============================================================================ ;;; PON stitched algorithm round on a single AES block (16 bytes): -;;; AES-CTR (optional, depending on %%CIPH) +;;; AES-CTR (optional, depending on %%CIPHER) ;;; - prepares counter block ;;; - encrypts counter block ;;; - loads text @@ -273,10 +273,10 @@ mksection .text %define %%TXMM2 %11 ; [clobbered] XMM temporary %define %%CRC_TYPE %12 ; [in] "first_crc" or "next_crc" or "no_crc" %define %%DIR %13 ; [in] "ENC" or "DEC" -%define %%CIPH %14 ; [in] "CTR" or "NO_CTR" +%define %%CIPHER %14 ; [in] "CTR" or "NO_CTR" %define %%CTR_CHECK %15 ; [in/out] GP with 64bit counter (to identify overflow) -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; prepare counter blocks for encryption vpshufb %%TXMM0, %%CTR, [rel byteswap_const] ;; perform 1 increment on whole 128 bits @@ -302,23 +302,23 @@ mksection .text vmovdqu %%TXMM1, [%%INP] %endif -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; AES rounds AES_ENC_ROUNDS %%KP, %%N_ROUNDS, %%TXMM0 ;; xor plaintext/ciphertext against encrypted counter blocks vpxor %%TXMM0, %%TXMM0, %%TXMM1 -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR ;; register copy is needed as no_load/no_store options need it vmovdqa %%TXMM0, %%TXMM1 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR %ifnidn %%CRC_TYPE, no_crc %ifidn %%CRC_TYPE, next_crc ;; Finish split CRC_MUL() operation vpxor %%XCRC_IN_OUT, %%XCRC_IN_OUT, %%TXMM2 %endif -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; CRC calculation for ENCRYPTION/DECRYPTION ;; - always XOR against plaintext block %ifidn %%DIR, ENC @@ -326,32 +326,32 @@ mksection .text %else vpxor %%XCRC_IN_OUT, %%XCRC_IN_OUT, %%TXMM0 %endif ; DECRYPT -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR ;; CRC calculation for NO CIPHER option vpxor %%XCRC_IN_OUT, %%XCRC_IN_OUT, %%TXMM1 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR %endif ;; CRC_TYPE != NO_CRC ;; store the result in the output buffer %ifnidn %%OUTP, no_store -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR vmovdqu [%%OUTP], %%TXMM0 -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR vmovdqu [%%OUTP], %%TXMM1 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR %endif ;; update BIP value - always use cipher text for BIP %ifnidn %%XBIP_IN_OUT, 
no_bip -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR %ifidn %%DIR, ENC vpxor %%XBIP_IN_OUT, %%XBIP_IN_OUT, %%TXMM0 %else vpxor %%XBIP_IN_OUT, %%XBIP_IN_OUT, %%TXMM1 %endif ; DECRYPT -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR vpxor %%XBIP_IN_OUT, %%XBIP_IN_OUT, %%TXMM1 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR %endif ;; !NO_BIP ;; increment in/out pointers @@ -365,7 +365,7 @@ mksection .text ;;; ============================================================================ ;;; PON stitched algorithm round on a single AES block (16 bytes): -;;; AES-CTR (optional, depending on %%CIPH) +;;; AES-CTR (optional, depending on %%CIPHER) ;;; - prepares counter block ;;; - encrypts counter block ;;; - loads text @@ -399,7 +399,7 @@ mksection .text %define %%T10 %19 ; [clobbered] XMM temporary %define %%CRC_TYPE %20 ; [in] "first_crc" or "next_crc" or "no_crc" %define %%DIR %21 ; [in] "ENC" or "DEC" -%define %%CIPH %22 ; [in] "CTR" or "NO_CTR" +%define %%CIPHER %22 ; [in] "CTR" or "NO_CTR" %define %%CTR_CHECK %23 ; [in/out] GP with 64bit counter (to identify overflow) %define %%CTR1 %%T3 @@ -412,7 +412,7 @@ mksection .text %define %%TXT3 %%T9 %define %%TXT4 %%T10 -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; prepare counter blocks for encryption vmovdqa %%T0, [rel ddq_add_1] vmovdqa %%T2, [rel byteswap_const] @@ -493,7 +493,7 @@ mksection .text vmovdqu %%TXT3, [%%INP + 32] vmovdqu %%TXT4, [%%INP + 48] -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR AES_ENC_ROUNDS_4 %%KP, %%N_ROUNDS, %%CTR1, %%CTR2, %%CTR3, %%CTR4, %%T0 ;; xor plaintext/ciphertext against encrypted counter blocks @@ -501,13 +501,13 @@ mksection .text vpxor %%CTR2, %%CTR2, %%TXT2 vpxor %%CTR3, %%CTR3, %%TXT3 vpxor %%CTR4, %%CTR4, %%TXT4 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR %ifidn %%CRC_TYPE, next_crc ;; Finish split CRC_MUL() operation vpxor %%XCRC_IN_OUT, %%XCRC_IN_OUT, %%T2 %endif -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR %ifidn %%DIR, ENC ;; CRC calculation for ENCRYPTION (blocks 1 & 2) ;; - XOR CRC against plaintext block @@ -527,7 +527,7 @@ mksection .text CRC_CLMUL %%XCRC_IN_OUT, %%XCRC_MUL, %%T2 %endif ; DECRYPT -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR ;; CRC calculation for NO CIPHER option (blocks 1 & 2) ;; - XOR CRC against plaintext block vpxor %%XCRC_IN_OUT, %%XCRC_IN_OUT, %%TXT1 @@ -536,23 +536,23 @@ mksection .text vpxor %%XCRC_IN_OUT, %%XCRC_IN_OUT, %%TXT2 CRC_CLMUL %%XCRC_IN_OUT, %%XCRC_MUL, %%T2 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR ;; store ciphertext/plaintext -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR vmovdqu [%%OUTP], %%CTR1 vmovdqu [%%OUTP + 16], %%CTR2 vmovdqu [%%OUTP + 32], %%CTR3 vmovdqu [%%OUTP + 48], %%CTR4 -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR vmovdqu [%%OUTP], %%TXT1 vmovdqu [%%OUTP + 16], %%TXT2 vmovdqu [%%OUTP + 32], %%TXT3 vmovdqu [%%OUTP + 48], %%TXT4 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR ;; update BIP value -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; - always use ciphertext for BIP %ifidn %%DIR, ENC vpxor %%T0, %%CTR1, %%CTR2 @@ -561,10 +561,10 @@ mksection .text vpxor %%T0, %%TXT1, %%TXT2 vpxor %%T1, %%TXT3, %%TXT4 %endif ; DECRYPT -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR vpxor %%T0, %%TXT1, %%TXT2 vpxor %%T1, %%TXT3, %%TXT4 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR vpxor %%XBIP_IN_OUT, %%XBIP_IN_OUT, %%T0 vpxor %%XBIP_IN_OUT, %%XBIP_IN_OUT, %%T1 @@ -572,7 +572,7 @@ mksection .text add %%INP, 64 add %%OUTP, 64 -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR %ifidn %%DIR, ENC ;; CRC calculation for ENCRYPTION (blocks 3 & 4) ;; - XOR CRC against 
plaintext block @@ -588,14 +588,14 @@ mksection .text CRC_CLMUL %%XCRC_IN_OUT, %%XCRC_MUL, %%T2 vpxor %%XCRC_IN_OUT, %%XCRC_IN_OUT, %%CTR4 %endif ; DECRYPT -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR ;; CRC calculation for NO CIPHER option (blocks 3 & 4) ;; - XOR CRC against plaintext block vpxor %%XCRC_IN_OUT, %%XCRC_IN_OUT, %%TXT3 CRC_CLMUL %%XCRC_IN_OUT, %%XCRC_MUL, %%T2 vpxor %%XCRC_IN_OUT, %%XCRC_IN_OUT, %%TXT4 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR %endmacro ; DO_PON_4 @@ -604,7 +604,7 @@ mksection .text %macro CIPHER_BIP_REST 14 %define %%NUM_BYTES %1 ; [in/clobbered] number of bytes to cipher %define %%DIR %2 ; [in] "ENC" or "DEC" -%define %%CIPH %3 ; [in] "CTR" or "NO_CTR" +%define %%CIPHER %3 ; [in] "CTR" or "NO_CTR" %define %%PTR_IN %4 ; [in/clobbered] GPR pointer to input buffer %define %%PTR_OUT %5 ; [in/clobbered] GPR pointer to output buffer %define %%PTR_KEYS %6 ; [in] GPR pointer to expanded keys @@ -623,7 +623,7 @@ mksection .text jb %%_partial_block_left DO_PON %%PTR_KEYS, NUM_AES_ROUNDS, %%XCTR_IN_OUT, %%PTR_IN, %%PTR_OUT, %%XBIP_IN_OUT, \ - no_crc, no_crc, %%XMMT1, %%XMMT2, %%XMMT3, no_crc, %%DIR, %%CIPH, %%CTR_CHECK + no_crc, no_crc, %%XMMT1, %%XMMT2, %%XMMT3, no_crc, %%DIR, %%CIPHER, %%CTR_CHECK sub %%NUM_BYTES, 16 jz %%_bip_done jmp %%_cipher_last_blocks @@ -635,7 +635,7 @@ mksection .text ;; XMMT2 = data in ;; XMMT1 = data out DO_PON %%PTR_KEYS, NUM_AES_ROUNDS, %%XCTR_IN_OUT, no_load, no_store, no_bip, \ - no_crc, no_crc, %%XMMT1, %%XMMT2, %%XMMT3, no_crc, %%DIR, %%CIPH, %%CTR_CHECK + no_crc, no_crc, %%XMMT1, %%XMMT2, %%XMMT3, no_crc, %%DIR, %%CIPHER, %%CTR_CHECK ;; bip update for partial block (mask out bytes outside the message) lea %%GPT1, [rel mask_out_top_bytes + 16] @@ -863,8 +863,8 @@ mksection .text ;;; - calls other macros and directly uses registers ;;; defined at the top of the file %macro AES128_CTR_PON 2 -%define %%DIR %1 ; [in] direction "ENC" or "DEC" -%define %%CIPH %2 ; [in] cipher "CTR" or "NO_CTR" +%define %%DIR %1 ; [in] direction "ENC" or "DEC" +%define %%CIPHER %2 ; [in] cipher "CTR" or "NO_CTR" push r12 push r13 @@ -921,7 +921,7 @@ mksection .text %%_crc_not_zero: sub bytes_to_crc, 4 ; subtract size of the CRC itself -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; - read 16 bytes of IV ;; - convert to little endian format ;; - save least significant 8 bytes in GP register for overflow check @@ -938,7 +938,7 @@ mksection .text ;; get output buffer mov p_out, [job + _dst] -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; get key pointers mov p_keys, [job + _enc_keys] %endif @@ -950,7 +950,7 @@ mksection .text vmovdqa xcrckey, [rel rk1] ; rk1 and rk2 in xcrckey ;; get number of bytes to cipher -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR mov num_bytes, [job + _msg_len_to_cipher_in_bytes] %else ;; Message length to cipher is 0 @@ -967,7 +967,7 @@ mksection .text %ifidn %%DIR, DEC ;; decrypt the buffer first mov tmp, num_bytes - CIPHER_BIP_REST tmp, %%DIR, %%CIPH, p_in, p_out, p_keys, xbip, \ + CIPHER_BIP_REST tmp, %%DIR, %%CIPHER, p_in, p_out, p_keys, xbip, \ xcounter, xtmp1, xtmp2, xtmp3, ctr_check, tmp2, tmp3 ;; correct in/out pointers - go back to start of the buffers @@ -1042,7 +1042,7 @@ mksection .text DO_PON_4 p_keys, NUM_AES_ROUNDS, xcounter, p_in, p_out, xbip, \ xcrc, xcrckey, xtmp1, xtmp2, xtmp3, xtmp4, xtmp5, xtmp6, \ xtmp7, xtmp8, xtmp9, xtmp10, xtmp11, first_crc, %%DIR, \ - %%CIPH, ctr_check + %%CIPHER, ctr_check sub num_bytes, 64 sub bytes_to_crc, 64 %ifidn %%DIR, ENC @@ -1057,7 +1057,7 @@ mksection .text DO_PON_4 p_keys, 
NUM_AES_ROUNDS, xcounter, p_in, p_out, xbip, \ xcrc, xcrckey, xtmp1, xtmp2, xtmp3, xtmp4, xtmp5, xtmp6, \ xtmp7, xtmp8, xtmp9, xtmp10, xtmp11, next_crc, %%DIR, \ - %%CIPH, ctr_check + %%CIPHER, ctr_check sub num_bytes, 64 sub bytes_to_crc, 64 %ifidn %%DIR, ENC @@ -1068,7 +1068,7 @@ mksection .text %%_crc_below_64_bytes: DO_PON p_keys, NUM_AES_ROUNDS, xcounter, p_in, p_out, xbip, \ xcrc, xcrckey, xtmp1, xtmp2, xtmp3, first_crc, %%DIR, \ - %%CIPH, ctr_check + %%CIPHER, ctr_check sub num_bytes, 16 sub bytes_to_crc, 16 @@ -1078,7 +1078,7 @@ mksection .text jb %%_exit_loop DO_PON p_keys, NUM_AES_ROUNDS, xcounter, p_in, p_out, xbip, \ xcrc, xcrckey, xtmp1, xtmp2, xtmp3, next_crc, %%DIR, \ - %%CIPH, ctr_check + %%CIPHER, ctr_check sub num_bytes, 16 sub bytes_to_crc, 16 %ifidn %%DIR, ENC @@ -1092,7 +1092,7 @@ mksection .text ;; decrypt rest of the message including CRC and optional padding mov tmp, num_bytes - CIPHER_BIP_REST tmp, %%DIR, %%CIPH, p_in, p_out, p_keys, xbip, \ + CIPHER_BIP_REST tmp, %%DIR, %%CIPHER, p_in, p_out, p_keys, xbip, \ xcounter, xtmp1, xtmp2, xtmp3, ctr_check, tmp2, tmp3 mov tmp, num_bytes ; correct in/out pointers - to point before cipher & BIP @@ -1157,7 +1157,7 @@ mksection .text or DWORD(decrypt_not_done), DWORD(decrypt_not_done) jnz %%_do_not_cipher_the_rest %endif - CIPHER_BIP_REST num_bytes, %%DIR, %%CIPH, p_in, p_out, p_keys, xbip, \ + CIPHER_BIP_REST num_bytes, %%DIR, %%CIPHER, p_in, p_out, p_keys, xbip, \ xcounter, xtmp1, xtmp2, xtmp3, ctr_check, tmp2, tmp3 %%_do_not_cipher_the_rest: diff --git a/lib/avx/sha1_one_block_avx.asm b/lib/avx_t1/sha1_one_block_avx.asm similarity index 100% rename from lib/avx/sha1_one_block_avx.asm rename to lib/avx_t1/sha1_one_block_avx.asm diff --git a/lib/avx/sha1_x4_avx.asm b/lib/avx_t1/sha1_x4_avx.asm similarity index 79% rename from lib/avx/sha1_x4_avx.asm rename to lib/avx_t1/sha1_x4_avx.asm index 70e6d8d0371f749fe7025853967a72390d0373ef..b67ef36019681aee17e85fc90a7af7648f3aef49 100644 --- a/lib/avx/sha1_x4_avx.asm +++ b/lib/avx_t1/sha1_x4_avx.asm @@ -75,6 +75,12 @@ mksection .text ; r0 = {d2 c2 b2 a2} ; r3 = {d3 c3 b3 a3} ; +%define XMM_STORAGE 16*10 +%define GP_STORAGE 8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + %macro TRANSPOSE 6 %define %%r0 %1 %define %%r1 %2 @@ -224,7 +230,7 @@ mksection .text ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;; FRAMESZ must be an odd multiple of 8 -%define FRAMESZ 16*16 + 8 +%define FRAMESZ 16*16 + 16*10 + 8 %define VMOVPS vmovdqu @@ -288,6 +294,55 @@ mksection .text %xdefine W14 TMP_ %endm +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 ; align rsp to 16 bytes +%ifndef LINUX + vmovdqa [rsp + 0*16], xmm6 + vmovdqa [rsp + 1*16], xmm7 + vmovdqa [rsp + 2*16], xmm8 + vmovdqa [rsp + 3*16], xmm9 + vmovdqa [rsp + 4*16], xmm10 + vmovdqa [rsp + 5*16], xmm11 + vmovdqa [rsp + 6*16], xmm12 + vmovdqa [rsp + 7*16], xmm13 + vmovdqa [rsp + 8*16], xmm14 + vmovdqa [rsp + 9*16], xmm15 +%endif + mov [rsp + GP_OFFSET], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 +%ifndef LINUX + vmovdqa xmm6, [rsp + 0*16] + vmovdqa xmm7, [rsp + 1*16] + vmovdqa xmm8, [rsp + 2*16] + vmovdqa xmm9, [rsp + 3*16] + vmovdqa xmm10, [rsp + 4*16] + vmovdqa xmm11, [rsp + 5*16] + vmovdqa xmm12, [rsp + 6*16] + vmovdqa xmm13, [rsp + 7*16] + vmovdqa xmm14, [rsp + 8*16] + vmovdqa xmm15, [rsp + 9*16] + +%ifdef SAFE_DATA + vpxor xmm5, xmm5, xmm5 + vmovdqa [rsp + 0*16], xmm5 + vmovdqa [rsp + 1*16], xmm5 + vmovdqa [rsp + 2*16], xmm5 + vmovdqa 
[rsp + 3*16], xmm5 + vmovdqa [rsp + 4*16], xmm5 + vmovdqa [rsp + 5*16], xmm5 + vmovdqa [rsp + 6*16], xmm5 + vmovdqa [rsp + 7*16], xmm5 + vmovdqa [rsp + 8*16], xmm5 + vmovdqa [rsp + 9*16], xmm5 +%endif +%endif + mov rsp, [rsp + GP_OFFSET] ;; rsp pointer +%endmacro + align 32 ; XMM registers are clobbered. Saving/restoring must be done at a higher level @@ -300,6 +355,19 @@ sha1_mult_avx: sub rsp, FRAMESZ +%ifndef LINUX + vmovdqa [rsp + 16*16 + 0*16], xmm6 + vmovdqa [rsp + 16*16 + 1*16], xmm7 + vmovdqa [rsp + 16*16 + 2*16], xmm8 + vmovdqa [rsp + 16*16 + 3*16], xmm9 + vmovdqa [rsp + 16*16 + 4*16], xmm10 + vmovdqa [rsp + 16*16 + 5*16], xmm11 + vmovdqa [rsp + 16*16 + 6*16], xmm12 + vmovdqa [rsp + 16*16 + 7*16], xmm13 + vmovdqa [rsp + 16*16 + 8*16], xmm14 + vmovdqa [rsp + 16*16 + 9*16], xmm15 +%endif + ;; Initialize digests vmovdqa A, [arg1 + 0*SHA1_DIGEST_ROW_SIZE] vmovdqa B, [arg1 + 1*SHA1_DIGEST_ROW_SIZE] @@ -426,8 +494,36 @@ lloop: %endrep %endif - add rsp, FRAMESZ +%ifndef LINUX + vmovdqa xmm6, [rsp + 16*16 + 0*16] + vmovdqa xmm7, [rsp + 16*16 + 1*16] + vmovdqa xmm8, [rsp + 16*16 + 2*16] + vmovdqa xmm9, [rsp + 16*16 + 3*16] + vmovdqa xmm10, [rsp + 16*16 + 4*16] + vmovdqa xmm11, [rsp + 16*16 + 5*16] + vmovdqa xmm12, [rsp + 16*16 + 6*16] + vmovdqa xmm13, [rsp + 16*16 + 7*16] + vmovdqa xmm14, [rsp + 16*16 + 8*16] + vmovdqa xmm15, [rsp + 16*16 + 9*16] + +%ifdef SAFE_DATA + ; xmm0 already 0 +%assign i 0 +%rep 10 + vmovdqa [rsp + 16*16 + i*16], xmm0 +%assign i (i+1) +%endrep +%endif +%endif + add rsp, FRAMESZ + ret +; void call_sha1_mult_avx_from_c(SHA1_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha1_mult_avx_from_c,function,internal) +call_sha1_mult_avx_from_c: + FUNC_SAVE + call sha1_mult_avx + FUNC_RESTORE ret mksection stack-noexec diff --git a/lib/avx/sha224_one_block_avx.asm b/lib/avx_t1/sha224_one_block_avx.asm similarity index 97% rename from lib/avx/sha224_one_block_avx.asm rename to lib/avx_t1/sha224_one_block_avx.asm index 19213a7014da7ded6fb3c400d0fe2f3bd2001949..f80b557989328cdf97b024dbd58b0f3a471eb422 100644 --- a/lib/avx/sha224_one_block_avx.asm +++ b/lib/avx_t1/sha224_one_block_avx.asm @@ -30,4 +30,4 @@ %define FUNC sha224_block_avx -%include "avx/sha256_one_block_avx.asm" +%include "avx_t1/sha256_one_block_avx.asm" diff --git a/lib/avx/sha256_mult_avx.asm b/lib/avx_t1/sha256_mult_avx.asm similarity index 83% rename from lib/avx/sha256_mult_avx.asm rename to lib/avx_t1/sha256_mult_avx.asm index b6094fcbc136a0ee8693642acfff7ea7d552b12a..598d4cdae518946fd6668e7fc6bfe461b8d11542 100644 --- a/lib/avx/sha256_mult_avx.asm +++ b/lib/avx_t1/sha256_mult_avx.asm @@ -252,6 +252,72 @@ PSHUFFLE_BYTE_FLIP_MASK: ;ddq 0x0c0d0e0f08090a0b0405060700010203 mksection .text +%define XMM_STORAGE 10*16 +%define GP_STORAGE 6*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 ; align rsp to 16 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 +%ifndef LINUX + mov [rsp + 3*8], rsi + mov [rsp + 4*8], rdi + vmovdqa [rsp + 3*16], xmm6 + vmovdqa [rsp + 4*16], xmm7 + vmovdqa [rsp + 5*16], xmm8 + vmovdqa [rsp + 6*16], xmm9 + vmovdqa [rsp + 7*16], xmm10 + vmovdqa [rsp + 8*16], xmm11 + vmovdqa [rsp + 9*16], xmm12 + vmovdqa [rsp + 10*16], xmm13 + vmovdqa [rsp + 11*16], xmm14 + vmovdqa [rsp + 12*16], xmm15 +%endif ; LINUX + mov [rsp + 5*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] +%ifndef 
LINUX + mov rsi, [rsp + 3*8] + mov rdi, [rsp + 4*8] + vmovdqa xmm6, [rsp + 3*16] + vmovdqa xmm7, [rsp + 4*16] + vmovdqa xmm8, [rsp + 5*16] + vmovdqa xmm9, [rsp + 6*16] + vmovdqa xmm10, [rsp + 7*16] + vmovdqa xmm11, [rsp + 8*16] + vmovdqa xmm12, [rsp + 9*16] + vmovdqa xmm13, [rsp + 10*16] + vmovdqa xmm14, [rsp + 11*16] + vmovdqa xmm15, [rsp + 12*16] + +%ifdef SAFE_DATA + vpxor xmm5, xmm5, xmm5 + vmovdqa xmm5, [rsp + 3*16] + vmovdqa xmm5, [rsp + 4*16] + vmovdqa xmm5, [rsp + 5*16] + vmovdqa xmm5, [rsp + 6*16] + vmovdqa xmm5, [rsp + 7*16] + vmovdqa xmm5, [rsp + 8*16] + vmovdqa xmm5, [rsp + 9*16] + vmovdqa xmm5, [rsp + 10*16] + vmovdqa xmm5, [rsp + 11*16] + vmovdqa xmm5, [rsp + 12*16] +%endif +%endif ; LINUX + mov rsp, [rsp + 5*8] ;; rsp pointer +%endmacro + ;; SHA256_ARGS: ;; UINT128 digest[8]; // transposed digests ;; UINT8 *data_ptr[4]; @@ -384,4 +450,12 @@ Lrounds_16_xx: ; outer calling routine restores XMM and other GP registers ret +; void call_sha_256_mult_avx_from_c(SHA256_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha_256_mult_avx_from_c,function,internal) +call_sha_256_mult_avx_from_c: + FUNC_SAVE + call sha_256_mult_avx + FUNC_RESTORE + ret + mksection stack-noexec diff --git a/lib/avx/sha256_one_block_avx.asm b/lib/avx_t1/sha256_one_block_avx.asm similarity index 99% rename from lib/avx/sha256_one_block_avx.asm rename to lib/avx_t1/sha256_one_block_avx.asm index a1ad45005426f11aa2cd89f1c6297d808aec2240..c50cd0a4b1777f4ea16c93c5789d837048e1165d 100644 --- a/lib/avx/sha256_one_block_avx.asm +++ b/lib/avx_t1/sha256_one_block_avx.asm @@ -32,6 +32,9 @@ mksection .rodata default rel + +%ifndef FUNC +MKGLOBAL(K256,data,internal) align 64 K256: dd 0x428a2f98,0x71374491,0xb5c0fbcf,0xe9b5dba5 @@ -50,7 +53,11 @@ K256: dd 0x391c0cb3,0x4ed8aa4a,0x5b9cca4f,0x682e6ff3 dd 0x748f82ee,0x78a5636f,0x84c87814,0x8cc70208 dd 0x90befffa,0xa4506ceb,0xbef9a3f7,0xc67178f2 +%else +extern K256 +%endif +align 16 PSHUFFLE_BYTE_FLIP_MASK: ;ddq 0x0c0d0e0f08090a0b0405060700010203 dq 0x0405060700010203, 0x0c0d0e0f08090a0b diff --git a/lib/avx/sha384_one_block_avx.asm b/lib/avx_t1/sha384_one_block_avx.asm similarity index 97% rename from lib/avx/sha384_one_block_avx.asm rename to lib/avx_t1/sha384_one_block_avx.asm index b023030878a19c41203b394e0950fdcddef9d022..fc9bf7359aeba31a486d764cdb55570b43e7f9d6 100644 --- a/lib/avx/sha384_one_block_avx.asm +++ b/lib/avx_t1/sha384_one_block_avx.asm @@ -30,4 +30,4 @@ %define FUNC sha384_block_avx -%include "avx/sha512_one_block_avx.asm" +%include "avx_t1/sha512_one_block_avx.asm" diff --git a/lib/avx/sha512_one_block_avx.asm b/lib/avx_t1/sha512_one_block_avx.asm similarity index 100% rename from lib/avx/sha512_one_block_avx.asm rename to lib/avx_t1/sha512_one_block_avx.asm diff --git a/lib/avx/sha512_x2_avx.asm b/lib/avx_t1/sha512_x2_avx.asm similarity index 82% rename from lib/avx/sha512_x2_avx.asm rename to lib/avx_t1/sha512_x2_avx.asm index 34503e882d9a96cb470c0aaf821d70b91b54de40..865008db39b018a5133018151ef4fb032da7a329 100644 --- a/lib/avx/sha512_x2_avx.asm +++ b/lib/avx_t1/sha512_x2_avx.asm @@ -241,6 +241,72 @@ endstruc %endm +%define XMM_STORAGE 10*16 +%define GP_STORAGE 6*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 ; align rsp to 16 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 +%ifndef LINUX + mov [rsp + 3*8], rsi + mov [rsp + 4*8], rdi + vmovdqa [rsp + 3*16], xmm6 + vmovdqa [rsp + 4*16], xmm7 + vmovdqa [rsp + 
5*16], xmm8 + vmovdqa [rsp + 6*16], xmm9 + vmovdqa [rsp + 7*16], xmm10 + vmovdqa [rsp + 8*16], xmm11 + vmovdqa [rsp + 9*16], xmm12 + vmovdqa [rsp + 10*16], xmm13 + vmovdqa [rsp + 11*16], xmm14 + vmovdqa [rsp + 12*16], xmm15 +%endif ; LINUX + mov [rsp + 5*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] +%ifndef LINUX + mov rsi, [rsp + 3*8] + mov rdi, [rsp + 4*8] + vmovdqa xmm6, [rsp + 3*16] + vmovdqa xmm7, [rsp + 4*16] + vmovdqa xmm8, [rsp + 5*16] + vmovdqa xmm9, [rsp + 6*16] + vmovdqa xmm10, [rsp + 7*16] + vmovdqa xmm11, [rsp + 8*16] + vmovdqa xmm12, [rsp + 9*16] + vmovdqa xmm13, [rsp + 10*16] + vmovdqa xmm14, [rsp + 11*16] + vmovdqa xmm15, [rsp + 12*16] + +%ifdef SAFE_DATA + vpxor xmm5, xmm5, xmm5 + vmovdqa xmm5, [rsp + 3*16] + vmovdqa xmm5, [rsp + 4*16] + vmovdqa xmm5, [rsp + 5*16] + vmovdqa xmm5, [rsp + 6*16] + vmovdqa xmm5, [rsp + 7*16] + vmovdqa xmm5, [rsp + 8*16] + vmovdqa xmm5, [rsp + 9*16] + vmovdqa xmm5, [rsp + 10*16] + vmovdqa xmm5, [rsp + 11*16] + vmovdqa xmm5, [rsp + 12*16] +%endif +%endif ; LINUX + mov rsp, [rsp + 5*8] ;; rsp pointer +%endmacro + ;; SHA512_ARGS: ;; UINT128 digest[8]; // transposed digests ;; UINT8 *data_ptr[2]; @@ -369,4 +435,12 @@ Lrounds_16_xx: ; outer calling routine restores XMM and other GP registers ret +; void call_sha512_x2_avx_from_c(SHA512_ARGS *args, UINT64 size_in_blocks); +MKGLOBAL(call_sha512_x2_avx_from_c,function,internal) +call_sha512_x2_avx_from_c: + FUNC_SAVE + call sha512_x2_avx + FUNC_RESTORE + ret + mksection stack-noexec diff --git a/lib/avx/sha_avx.c b/lib/avx_t1/sha_avx.c similarity index 100% rename from lib/avx/sha_avx.c rename to lib/avx_t1/sha_avx.c diff --git a/lib/avx_t1/sha_mb_avx.c b/lib/avx_t1/sha_mb_avx.c new file mode 100644 index 0000000000000000000000000000000000000000..b8026e7da11b0fd27e0075e2d862142cce48858b --- /dev/null +++ b/lib/avx_t1/sha_mb_avx.c @@ -0,0 +1,140 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include "include/sha_mb_mgr.h" +#include "include/arch_avx_type1.h" + +IMB_JOB *submit_job_sha384_avx(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); +IMB_JOB *flush_job_sha384_avx(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); + +IMB_JOB *submit_job_sha512_avx(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); +IMB_JOB *flush_job_sha512_avx(MB_MGR_SHA_512_OOO *state, IMB_JOB *job); + +/* ========================================================================== */ +/* + * SHA1 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha1_avx(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 4, 1, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_mult_avx_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha1_avx(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 4, 0, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_mult_avx_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA224 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha224_avx(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 4, 1, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha_256_mult_avx_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha224_avx(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 4, 0, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha_256_mult_avx_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA256 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha256_avx(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 4, 1, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + call_sha_256_mult_avx_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha256_avx(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 4, 0, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + call_sha_256_mult_avx_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA384 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha384_avx(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 2, 1, 384, + IMB_SHA_512_BLOCK_SIZE, SHA384_PAD_SIZE, + call_sha512_x2_avx_from_c); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha384_avx(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 2, 0, 384, + IMB_SHA_512_BLOCK_SIZE, SHA384_PAD_SIZE, + call_sha512_x2_avx_from_c); +} + +/* ========================================================================== */ +/* + * SHA512 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha512_avx(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 2, 1, 512, + IMB_SHA_512_BLOCK_SIZE, SHA512_PAD_SIZE, + call_sha512_x2_avx_from_c); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha512_avx(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 2, 0, 512, + IMB_SHA_512_BLOCK_SIZE, SHA512_PAD_SIZE, + call_sha512_x2_avx_from_c); +} diff --git a/lib/avx/snow3g_avx.c b/lib/avx_t1/snow3g_avx.c similarity index 100% rename from lib/avx/snow3g_avx.c rename to lib/avx_t1/snow3g_avx.c diff --git a/lib/avx/snow3g_uia2_by4_avx.asm b/lib/avx_t1/snow3g_uia2_by4_avx.asm similarity index 100% rename from 
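The wrappers above plug the AVX SHA code paths into the generic submit_flush_job_sha_* helpers, parameterized by lane count, submit/flush flag, digest variant, block size and pad size. A minimal caller-side sketch, assuming the public job API declared in intel-ipsec-mb.h (IMB_MGR, IMB_GET_NEXT_JOB, IMB_SUBMIT_JOB, IMB_FLUSH_JOB, IMB_AUTH_SHA_256), could look like this; the AVX manager routes such plain SHA jobs to these multi-buffer paths.

```c
#include <intel-ipsec-mb.h>
#include <stdio.h>
#include <string.h>

int main(void)
{
        uint8_t msg[] = "abc";
        uint8_t digest[32];
        IMB_MGR *mgr = alloc_mb_mgr(0);
        IMB_JOB *job;

        if (mgr == NULL)
                return 1;
        init_mb_mgr_avx(mgr);

        job = IMB_GET_NEXT_JOB(mgr);
        job->hash_alg = IMB_AUTH_SHA_256;
        job->cipher_mode = IMB_CIPHER_NULL;
        job->chain_order = IMB_ORDER_HASH_CIPHER;
        job->src = msg;
        job->hash_start_src_offset_in_bytes = 0;
        job->msg_len_to_hash_in_bytes = strlen((const char *) msg);
        job->auth_tag_output = digest;
        job->auth_tag_output_len_in_bytes = sizeof(digest);

        job = IMB_SUBMIT_JOB(mgr);
        while (job == NULL)
                job = IMB_FLUSH_JOB(mgr);   /* force completion of queued lanes */

        printf("completed = %d\n", job->status == IMB_STATUS_COMPLETED);
        free_mb_mgr(mgr);
        return 0;
}
```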
lib/avx/snow3g_uia2_by4_avx.asm rename to lib/avx_t1/snow3g_uia2_by4_avx.asm diff --git a/lib/avx/snow_v_avx.asm b/lib/avx_t1/snow_v_avx.asm similarity index 100% rename from lib/avx/snow_v_avx.asm rename to lib/avx_t1/snow_v_avx.asm diff --git a/lib/avx/zuc_top_avx.c b/lib/avx_t1/zuc_top_avx.c old mode 100755 new mode 100644 similarity index 95% rename from lib/avx/zuc_top_avx.c rename to lib/avx_t1/zuc_top_avx.c index b6a591a9a00eb5ea1b09cafadb4a85e85e761696..5694a1089eaa1cbb33b27a5df5588763c1b35df7 --- a/lib/avx/zuc_top_avx.c +++ b/lib/avx_t1/zuc_top_avx.c @@ -528,6 +528,8 @@ void _zuc_eia3_1_buffer_avx(const void *pKey, uint32_t T = 0; const uint8_t *pIn8 = (const uint8_t *) pBufferIn; + memset(keyStream, 0, sizeof(keyStream)); + asm_ZucInitialization_avx(pKey, pIv, &(zucState)); asm_ZucGenKeystream16B_avx(pZuc, &zucState); @@ -540,7 +542,7 @@ void _zuc_eia3_1_buffer_avx(const void *pKey, asm_ZucGenKeystream8B_avx(&keyStream[4], &zucState); else asm_ZucGenKeystream16B_avx(&keyStream[4], &zucState); - asm_Eia3Round16B_avx(&T, keyStream, pIn8); + asm_Eia3Round16B_avx(&T, keyStream, pIn8, 4); pIn8 = &pIn8[KEYSTR_ROUND_LEN]; } @@ -550,7 +552,7 @@ void _zuc_eia3_1_buffer_avx(const void *pKey, */ if (remainingBits > (2 * 32)) asm_ZucGenKeystream8B_avx(&keyStream[4], &zucState); - asm_Eia3Remainder_avx(&T, &keyStream[0], pIn8, remainingBits); + asm_Eia3Remainder_avx(&T, &keyStream[0], pIn8, remainingBits, 128, 4); *pMacI = T; @@ -584,7 +586,9 @@ void _zuc_eia3_4_buffer_avx(const void * const pKey[NUM_AVX_BUFS], DECLARE_ALIGNED(uint32_t *pKeyStrArr[NUM_AVX_BUFS], 16) = {NULL}; unsigned int allCommonBits; + memset(keyStr, 0, sizeof(keyStr)); memset(T, 0, sizeof(T)); + /* Check if all lengths are equal */ if ((lengthInBits[0] == lengthInBits[1]) && (lengthInBits[0] == lengthInBits[2]) && @@ -628,16 +632,13 @@ void _zuc_eia3_4_buffer_avx(const void * const pKey[NUM_AVX_BUFS], else asm_ZucGenKeystream16B_4_avx(&state, pKeyStrArr); for (i = 0; i < NUM_AVX_BUFS; i++) { - asm_Eia3Round16B_avx(&T[i], keyStr[i], pIn8[i]); + asm_Eia3Round16B_avx(&T[i], keyStr[i], pIn8[i], 4); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } } /* Process each packet separately for the remaining bits */ for (i = 0; i < NUM_AVX_BUFS; i++) { - const uint32_t N = lengthInBits[i] + (2 * ZUC_WORD_BITS); - uint32_t L = ((N + 31) / ZUC_WORD_BITS) - - numKeyStr*(keyStreamLengthInBits / 32); uint32_t remainBits = lengthInBits[i] - numKeyStr*keyStreamLengthInBits; uint32_t *keyStr32 = (uint32_t *) keyStr[i]; @@ -669,7 +670,6 @@ void _zuc_eia3_4_buffer_avx(const void * const pKey[NUM_AVX_BUFS], while (remainBits >= keyStreamLengthInBits) { remainBits -= keyStreamLengthInBits; - L -= (keyStreamLengthInBits / 32); /* Generate the next key stream 8 bytes or 16 bytes */ if (!remainBits) @@ -678,7 +678,7 @@ void _zuc_eia3_4_buffer_avx(const void * const pKey[NUM_AVX_BUFS], else asm_ZucGenKeystream16B_avx(&keyStr32[4], &singlePktState); - asm_Eia3Round16B_avx(&T[i], keyStr32, pIn8[i]); + asm_Eia3Round16B_avx(&T[i], keyStr32, pIn8[i], 4); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } @@ -691,7 +691,8 @@ void _zuc_eia3_4_buffer_avx(const void * const pKey[NUM_AVX_BUFS], asm_ZucGenKeystream8B_avx(&keyStr32[4], &singlePktState); - asm_Eia3Remainder_avx(&T[i], keyStr32, pIn8[i], remainBits); + asm_Eia3Remainder_avx(&T[i], keyStr32, pIn8[i], remainBits, + 128, 4); /* save the final MAC-I result */ *(pMacI[i]) = T[i]; } @@ -778,6 +779,8 @@ void zuc_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], DECLARE_ALIGNED(uint32_t *pKeyStrArr[NUM_AVX_BUFS], 
16) = {NULL}; unsigned int allCommonBits; + memset(keyStr, 0, sizeof(keyStr)); + /* Check if all lengths are equal */ if ((lengthInBits[0] == lengthInBits[1]) && (lengthInBits[0] == lengthInBits[2]) && @@ -822,7 +825,7 @@ void zuc_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], for (i = 0; i < NUM_AVX_BUFS; i++) { if (job_in_lane[i] == NULL) continue; - asm_Eia3Round16B_avx(&T[i], keyStr[i], pIn8[i]); + asm_Eia3Round16B_avx(&T[i], keyStr[i], pIn8[i], 4); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } } @@ -832,9 +835,6 @@ void zuc_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], if (job_in_lane[i] == NULL) continue; - const uint32_t N = lengthInBits[i] + (2 * ZUC_WORD_BITS); - uint32_t L = ((N + 31) / ZUC_WORD_BITS) - - numKeyStr*(keyStreamLengthInBits / 32); uint32_t remainBits = lengthInBits[i] - numKeyStr*keyStreamLengthInBits; uint32_t *keyStr32 = (uint32_t *) keyStr[i]; @@ -866,7 +866,6 @@ void zuc_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], while (remainBits >= keyStreamLengthInBits) { remainBits -= keyStreamLengthInBits; - L -= (keyStreamLengthInBits / 32); /* Generate the next key stream 8 bytes or 16 bytes */ if (!remainBits) @@ -875,7 +874,7 @@ void zuc_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], else asm_ZucGenKeystream16B_avx(&keyStr32[4], &singlePktState); - asm_Eia3Round16B_avx(&T[i], keyStr32, pIn8[i]); + asm_Eia3Round16B_avx(&T[i], keyStr32, pIn8[i], 4); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } @@ -887,7 +886,8 @@ void zuc_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], asm_ZucGenKeystream8B_avx(&keyStr32[4], &singlePktState); - asm_Eia3Remainder_avx(&T[i], keyStr32, pIn8[i], remainBits); + asm_Eia3Remainder_avx(&T[i], keyStr32, pIn8[i], remainBits, + 128, 4); /* save the final MAC-I result */ *(pMacI[i]) = T[i]; } @@ -904,9 +904,10 @@ void zuc_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], void zuc256_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], const uint8_t *ivs, const void * const pBufferIn[NUM_AVX_BUFS], - uint32_t *pMacI[NUM_AVX_BUFS], + void *pMacI[NUM_AVX_BUFS], const uint16_t lengthInBits[NUM_AVX_BUFS], - const void * const job_in_lane[NUM_AVX_BUFS]) + const void * const job_in_lane[NUM_AVX_BUFS], + const uint64_t tag_size) { unsigned int i; DECLARE_ALIGNED(ZucState4_t state, 64); @@ -917,11 +918,13 @@ void zuc256_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], const uint8_t *pIn8[NUM_AVX_BUFS] = {NULL}; uint32_t remainCommonBits; uint32_t numKeyStr = 0; - uint8_t T[NUM_AVX_BUFS*4] = {0}; + DECLARE_ALIGNED(uint8_t T[NUM_AVX_BUFS*16], 16) = {0}; const uint32_t keyStreamLengthInBits = KEYSTR_ROUND_LEN * 8; DECLARE_ALIGNED(uint32_t *pKeyStrArr[NUM_AVX_BUFS], 16) = {NULL}; unsigned int allCommonBits; + memset(keyStr, 0, sizeof(keyStr)); + /* Check if all lengths are equal */ if ((lengthInBits[0] == lengthInBits[1]) && (lengthInBits[0] == lengthInBits[2]) && @@ -945,14 +948,7 @@ void zuc256_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], keys.pKeys[i] = pKey[i]; } - /* TODO: Handle 8 and 16-byte digest cases */ - asm_Zuc256Initialization_4_avx(&keys, ivs, &state, 4); - - asm_ZucGenKeystream4B_4_avx(&state, pKeyStrArr); - - /* Initialize the tag with the first 4 bytes of the keystream */ - for (i = 0; i < NUM_AVX_BUFS; i++) - memcpy(&T[i], pKeyStrArr[i], 4); + asm_Zuc256Initialization_4_avx(&keys, ivs, &state, T, tag_size); /* Generate 16 bytes at a time */ asm_ZucGenKeystream16B_4_avx(&state, pKeyStrArr); @@ -965,25 +961,31 @@ void 
zuc256_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], while (remainCommonBits >= keyStreamLengthInBits) { remainCommonBits -= keyStreamLengthInBits; numKeyStr++; - /* Generate the next key stream 4 bytes or 16 bytes */ - if (!remainCommonBits && allCommonBits) - asm_ZucGenKeystream4B_4_avx(&state, pKeyStrArr); - else + /* Generate the next key stream 4/8 bytes or 16 bytes */ + if (!remainCommonBits && allCommonBits) { + if (tag_size == 4) + asm_ZucGenKeystream4B_4_avx(&state, pKeyStrArr); + else if (tag_size == 8) + asm_ZucGenKeystream8B_4_avx(&state, pKeyStrArr); + else + asm_ZucGenKeystream16B_4_avx(&state, + pKeyStrArr); + } else asm_ZucGenKeystream16B_4_avx(&state, pKeyStrArr); for (i = 0; i < NUM_AVX_BUFS; i++) { - uint32_t *tag = (uint32_t *) &T[i*4]; + void *tag = (void *) &T[i*tag_size]; if (job_in_lane[i] == NULL) continue; - asm_Eia3Round16B_avx(tag, keyStr[i], pIn8[i]); + asm_Eia3Round16B_avx(tag, keyStr[i], pIn8[i], tag_size); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } } /* Process each packet separately for the remaining bits */ for (i = 0; i < NUM_AVX_BUFS; i++) { - uint32_t *tag = (uint32_t *) &T[i*4]; + void *tag = (void *) &T[i*tag_size]; if (job_in_lane[i] == NULL) continue; @@ -991,11 +993,15 @@ void zuc256_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], uint32_t remainBits = lengthInBits[i] - numKeyStr*keyStreamLengthInBits; uint32_t *keyStr32 = (uint32_t *) keyStr[i]; + const uint32_t N = remainBits + ((uint32_t) tag_size << 3); + uint32_t L = ((N + 31) / ZUC_WORD_BITS); - /* If remaining bits are more than 4 bytes, we need to generate - * at least 4B more of keystream, so we need to copy - * the zuc state to single packet state first */ - if (remainBits > 32) { + /* 4 KS words are generated already */ + L = (L > 4) ? 
(L - 4) : 0; + + /* Copy the ZUC state to single packet state, + * if more KS is needed */ + if (L > 0) { singlePktState.lfsrState[0] = state.lfsrState[0][i]; singlePktState.lfsrState[1] = state.lfsrState[1][i]; singlePktState.lfsrState[2] = state.lfsrState[2][i]; @@ -1020,28 +1026,29 @@ void zuc256_eia3_4_buffer_job_avx(const void * const pKey[NUM_AVX_BUFS], while (remainBits >= keyStreamLengthInBits) { remainBits -= keyStreamLengthInBits; - /* Generate the next key stream 4 bytes or 16 bytes */ - if (!remainBits) - asm_ZucGenKeystream_avx(&keyStr32[4], - &singlePktState, 1); - else + /* Generate the next key stream (16 bytes max) */ + if (L > 3) { asm_ZucGenKeystream16B_avx(&keyStr32[4], &singlePktState); - asm_Eia3Round16B_avx(tag, keyStr32, pIn8[i]); + L -= 4; + } else { + asm_ZucGenKeystream_avx(&keyStr32[4], + &singlePktState, L); + L = 0; + } + asm_Eia3Round16B_avx(tag, keyStr32, pIn8[i], tag_size); pIn8[i] = &pIn8[i][KEYSTR_ROUND_LEN]; } - /* - * If remaining bits has more than 1 ZUC WORD (double words), - * keystream needs to have another ZUC WORD (4B) - */ - if (remainBits > (32)) + /* Generate final keystream if needed */ + if (L > 0) asm_ZucGenKeystream_avx(&keyStr32[4], - &singlePktState, 1); + &singlePktState, L); - asm_Eia3Remainder_avx(tag, keyStr32, pIn8[i], remainBits); + asm_Eia3Remainder_avx(tag, keyStr32, pIn8[i], remainBits, + 256, tag_size); /* save the final MAC-I result */ - memcpy(pMacI[i], tag, 4); + memcpy(pMacI[i], tag, tag_size); } #ifdef SAFE_DATA diff --git a/lib/avx_t1/zuc_x4_avx.asm b/lib/avx_t1/zuc_x4_avx.asm new file mode 100644 index 0000000000000000000000000000000000000000..574e54ead004e5e2b08cf2169b366453c8939424 --- /dev/null +++ b/lib/avx_t1/zuc_x4_avx.asm @@ -0,0 +1,2061 @@ +;; +;; Copyright (c) 2009-2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
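The keystream accounting introduced above replaces the fixed 4-byte lookahead with a computation of how many additional 32-bit keystream words are still required for the remaining message bits plus the tag, given that 4 words are already buffered. A standalone sketch of the same arithmetic, mirroring the N and L expressions in the change:

```c
#include <stdint.h>

#define ZUC_WORD_BITS 32

/* Extra 32-bit keystream words needed to authenticate remain_bits of
 * message with a tag_size-byte tag, when 4 words are already buffered. */
static uint32_t extra_ks_words(uint32_t remain_bits, uint32_t tag_size)
{
        const uint32_t n = remain_bits + (tag_size << 3);
        uint32_t l = (n + 31) / ZUC_WORD_BITS;

        return (l > 4) ? (l - 4) : 0;
}
```

For example, 200 remaining bits with a 16-byte tag gives N = 328 and L = 11, so 7 extra words are generated beyond the 4 already available; 40 remaining bits with a 4-byte tag gives L = 3, so no extra keystream is needed and the single-packet state copy is skipped.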
+;; + +%include "include/os.asm" +%include "include/reg_sizes.asm" +%include "include/zuc_sbox.inc" +%include "include/memcpy.asm" +%include "include/mb_mgr_datastruct.asm" +%include "include/cet.inc" +%include "include/const.inc" + +%ifdef LINUX +%define arg1 rdi +%define arg2 rsi +%define arg3 rdx +%define arg4 rcx +%define arg5 r8 +%define arg6 r9 +%else +%define arg1 rcx +%define arg2 rdx +%define arg3 r8 +%define arg4 r9 +%define arg5 qword [rsp + 40] +%define arg6 qword [rsp + 48] +%endif + +%define APPEND(a,b) a %+ b + +mksection .rodata +default rel + +align 16 +Ek_d: +dd 0x0044D700, 0x0026BC00, 0x00626B00, 0x00135E00, +dd 0x00578900, 0x0035E200, 0x00713500, 0x0009AF00 +dd 0x004D7800, 0x002F1300, 0x006BC400, 0x001AF100, +dd 0x005E2600, 0x003C4D00, 0x00789A00, 0x0047AC00 + +; Constants to be used to initialize the LFSR registers +; The tables contain four different sets of constants: +; 0-63 bytes: Encryption +; 64-127 bytes: Authentication with tag size = 4 +; 128-191 bytes: Authentication with tag size = 8 +; 192-255 bytes: Authentication with tag size = 16 +align 16 +EK256_d64: +dd 0x00220000, 0x002F0000, 0x00240000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 16 +EK256_EIA3_4: +dd 0x00220000, 0x002F0000, 0x00250000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 16 +EK256_EIA3_8: +dd 0x00230000, 0x002F0000, 0x00240000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 16 +EK256_EIA3_16: +dd 0x00230000, 0x002F0000, 0x00250000, 0x002A0000, +dd 0x006D0000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00400000, 0x00400000, 0x00400000, +dd 0x00400000, 0x00520000, 0x00100000, 0x00300000 + +align 16 +shuf_mask_key: +dd 0x00FFFFFF, 0x01FFFFFF, 0x02FFFFFF, 0x03FFFFFF, +dd 0x04FFFFFF, 0x05FFFFFF, 0x06FFFFFF, 0x07FFFFFF, +dd 0x08FFFFFF, 0x09FFFFFF, 0x0AFFFFFF, 0x0BFFFFFF, +dd 0x0CFFFFFF, 0x0DFFFFFF, 0x0EFFFFFF, 0x0FFFFFFF, + +align 16 +shuf_mask_iv: +dd 0xFFFFFF00, 0xFFFFFF01, 0xFFFFFF02, 0xFFFFFF03, +dd 0xFFFFFF04, 0xFFFFFF05, 0xFFFFFF06, 0xFFFFFF07, +dd 0xFFFFFF08, 0xFFFFFF09, 0xFFFFFF0A, 0xFFFFFF0B, +dd 0xFFFFFF0C, 0xFFFFFF0D, 0xFFFFFF0E, 0xFFFFFF0F, + +align 16 +shuf_mask_iv_17_19: +db 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0x02, 0xFF + +align 16 +clear_iv_mask: +db 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00, 0x00, 0x00, 0x3F, 0x00 + +align 16 +shuf_mask_iv_20_23: +db 0xFF, 0xFF, 0x00, 0xFF, 0xFF, 0xFF, 0x01, 0xFF, 0xFF, 0xFF, 0x02, 0xFF, 0xFF, 0xFF, 0x03, 0xFF + +align 16 +mask31: +dd 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF + +align 16 +bit_reverse_table_l: +db 0x00, 0x08, 0x04, 0x0c, 0x02, 0x0a, 0x06, 0x0e, 0x01, 0x09, 0x05, 0x0d, 0x03, 0x0b, 0x07, 0x0f + +align 16 +bit_reverse_table_h: +db 0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0 + +align 16 +bit_reverse_and_table: +db 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f, 0x0f + +align 16 +swap_mask: +db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 +db 0x0b, 0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c + +align 16 +S0_S1_shuf: +db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 
0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E + +align 16 +rev_S0_S1_shuf: +db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 + +align 16 +rot8_mod32: +db 0x03, 0x00, 0x01, 0x02, 0x07, 0x04, 0x05, 0x06, +db 0x0B, 0x08, 0x09, 0x0A, 0x0F, 0x0C, 0x0D, 0x0E + +align 16 +rot16_mod32: +db 0x02, 0x03, 0x00, 0x01, 0x06, 0x07, 0x04, 0x05, +db 0x0A, 0x0B, 0x08, 0x09, 0x0E, 0x0F, 0x0C, 0x0D + +align 16 +rot24_mod32: +db 0x01, 0x02, 0x03, 0x00, 0x05, 0x06, 0x07, 0x04, +db 0x09, 0x0A, 0x0B, 0x08, 0x0D, 0x0E, 0x0F, 0x0C + +align 16 +broadcast_word: +db 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01 +db 0x00, 0x01, 0x00, 0x01, 0x00, 0x01, 0x00, 0x01 + +align 16 +all_threes: +dw 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003, 0x0003 + +align 16 +all_fffcs: +dw 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc, 0xfffc + +align 16 +all_0fs: +dw 0x000f, 0x000f, 0x000f, 0x000f, 0x000f, 0x000f, 0x000f, 0x000f + +align 16 +all_10s: +dw 0x0010, 0x0010, 0x0010, 0x0010, 0x0010, 0x0010, 0x0010, 0x0010 + +align 16 +bit_mask_table: +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x80 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc0 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe0 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf0 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xf8 +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfc +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe + +align 16 +shuf_mask_dw0_0_dw1_0: +db 0x00, 0x01, 0x02, 0x03, 0xff, 0xff, 0xff, 0xff +db 0x04, 0x05, 0x06, 0x07, 0xff, 0xff, 0xff, 0xff + +align 16 +shuf_mask_dw2_0_dw3_0: +db 0x08, 0x09, 0x0a, 0x0b, 0xff, 0xff, 0xff, 0xff +db 0x0c, 0x0d, 0x0e, 0x0f, 0xff, 0xff, 0xff, 0xff + +align 16 +bits_32_63: +dd 0x00000000, 0xffffffff, 0x00000000, 0x00000000 + +align 16 +shuf_mask_0_0_dw1_0: +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff +db 0x04, 0x05, 0x06, 0x07, 0xff, 0xff, 0xff, 0xff + +align 16 +shuf_mask_0_0_0_dw1: +db 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff +db 0xff, 0xff, 0xff, 0xff, 0x04, 0x05, 0x06, 0x07 + +; Stack frame for ZucCipher function +struc STACK +_rsp_save: resq 1 ; Space for rsp pointer +_gpr_save: resq 2 ; Space for GP registers +_rem_bytes_save resq 1 ; Space for number of remaining bytes +endstruc + +mksection .text +align 64 + +%define OFS_R1 (16*16) +%define OFS_R2 (OFS_R1 + 16) +%define OFS_X0 (OFS_R2 + 16) +%define OFS_X1 (OFS_X0 + 16) +%define OFS_X2 (OFS_X1 + 16) + +%ifidn __OUTPUT_FORMAT__, win64 + %define XMM_STORAGE 16*10 + %define GP_STORAGE 8*8 +%else + %define XMM_STORAGE 0 + %define GP_STORAGE 6*8 +%endif + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 + +%ifidn __OUTPUT_FORMAT__, win64 + ; xmm6:xmm15 need to be maintained for Windows + vmovdqa [rsp + 0*16], xmm6 + vmovdqa [rsp + 1*16], xmm7 + vmovdqa [rsp + 2*16], xmm8 + vmovdqa [rsp + 3*16], xmm9 + vmovdqa [rsp + 4*16], xmm10 + vmovdqa [rsp + 5*16], xmm11 + vmovdqa [rsp + 6*16], xmm12 + vmovdqa [rsp + 7*16], xmm13 + vmovdqa [rsp + 8*16], xmm14 + 
vmovdqa [rsp + 9*16], xmm15 + mov [rsp + GP_OFFSET + 48], rdi + mov [rsp + GP_OFFSET + 56], rsi +%endif + mov [rsp + GP_OFFSET], r12 + mov [rsp + GP_OFFSET + 8], r13 + mov [rsp + GP_OFFSET + 16], r14 + mov [rsp + GP_OFFSET + 24], r15 + mov [rsp + GP_OFFSET + 32], rbx + mov [rsp + GP_OFFSET + 40], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + +%ifidn __OUTPUT_FORMAT__, win64 + vmovdqa xmm6, [rsp + 0*16] + vmovdqa xmm7, [rsp + 1*16] + vmovdqa xmm8, [rsp + 2*16] + vmovdqa xmm9, [rsp + 3*16] + vmovdqa xmm10, [rsp + 4*16] + vmovdqa xmm11, [rsp + 5*16] + vmovdqa xmm12, [rsp + 6*16] + vmovdqa xmm13, [rsp + 7*16] + vmovdqa xmm14, [rsp + 8*16] + vmovdqa xmm15, [rsp + 9*16] + mov rdi, [rsp + GP_OFFSET + 48] + mov rsi, [rsp + GP_OFFSET + 56] +%endif + mov r12, [rsp + GP_OFFSET] + mov r13, [rsp + GP_OFFSET + 8] + mov r14, [rsp + GP_OFFSET + 16] + mov r15, [rsp + GP_OFFSET + 24] + mov rbx, [rsp + GP_OFFSET + 32] + mov rsp, [rsp + GP_OFFSET + 40] +%endmacro + +%macro TRANSPOSE4_U32 6 +%define %%r0 %1 +%define %%r1 %2 +%define %%r2 %3 +%define %%r3 %4 +%define %%t0 %5 +%define %%t1 %6 + + vshufps %%t0, %%r0, %%r1, 0x44 ; t0 = {b1 b0 a1 a0} + vshufps %%r0, %%r0, %%r1, 0xEE ; r0 = {b3 b2 a3 a2} + vshufps %%t1, %%r2, %%r3, 0x44 ; t1 = {d1 d0 c1 c0} + vshufps %%r2, %%r2, %%r3, 0xEE ; r2 = {d3 d2 c3 c2} + + vshufps %%r1, %%t0, %%t1, 0xDD ; r1 = {d1 c1 b1 a1} + vshufps %%r3, %%r0, %%r2, 0xDD ; r3 = {d3 c3 b3 a3} + vshufps %%r2, %%r0, %%r2, 0x88 ; r2 = {d2 c2 b2 a2} + vshufps %%r0, %%t0, %%t1, 0x88 ; r0 = {d0 c0 b0 a0} +%endmacro + +; +; Calculates X0-X3 from LFSR registers +; +%macro BITS_REORG4 12-13 +%define %%STATE %1 ; [in] ZUC state +%define %%ROUND_NUM %2 ; [in] Round number +%define %%LFSR_0 %3 ; [clobbered] LFSR_0 +%define %%LFSR_2 %4 ; [clobbered] LFSR_2 +%define %%LFSR_5 %5 ; [clobbered] LFSR_5 +%define %%LFSR_7 %6 ; [clobbered] LFSR_7 +%define %%LFSR_9 %7 ; [clobbered] LFSR_9 +%define %%LFSR_11 %8 ; [clobbered] LFSR_11 +%define %%LFSR_14 %9 ; [clobbered] LFSR_14 +%define %%LFSR_15 %10 ; [clobbered] LFSR_15 +%define %%XTMP1 %11 ; [clobbered] Temporary XMM register +%define %%XTMP2 %12 ; [clobbered] Temporary XMM register +%define %%X3 %13 ; [out] XMM register containing X3 of all lanes (only for work mode) + vmovdqa %%LFSR_15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_14, [%%STATE + ((14 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_11, [%%STATE + ((11 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_9, [%%STATE + (( 9 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_7, [%%STATE + (( 7 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_5, [%%STATE + (( 5 + %%ROUND_NUM) % 16)*16] +%if (%0 == 13) ;Only needed when generating X3 (for "working" mode) + vmovdqa %%LFSR_2, [%%STATE + (( 2 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*16] +%endif + + vpxor %%XTMP1, %%XTMP1 + vpslld %%LFSR_15, 1 + vpblendw %%XTMP2, %%LFSR_14, %%XTMP1, 0xAA + vpblendw %%LFSR_15, %%LFSR_15, %%XTMP2, 0x55 + + vmovdqa [%%STATE + OFS_X0], %%LFSR_15 ; BRC_X0 + vpslld %%LFSR_11, 16 + vpsrld %%LFSR_9, 15 + vpor %%LFSR_11, %%LFSR_9 + vmovdqa [%%STATE + OFS_X1], %%LFSR_11 ; BRC_X1 + vpslld %%LFSR_7, 16 + vpsrld %%LFSR_5, 15 + vpor %%LFSR_7, %%LFSR_5 + vmovdqa [%%STATE + OFS_X2], %%LFSR_7 ; BRC_X2 +%if (%0 == 13) + vpslld %%LFSR_2, 16 + vpsrld %%LFSR_0, 15 + vpor %%X3, %%LFSR_2, %%LFSR_0 +%endif +%endmacro + +; +; Rotate dwords by N_BITS +; +%macro ROT_MOD32 4 +%define %%OUT %1 ; [out] XMM register +%define %%IN %2 ; [in] XMM register +%define %%XTMP %3 ; [clobbered] XMM register +%define %%N_BITS %4 
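BITS_REORG4 above assembles the X0-X3 words from 16-bit halves of selected 31-bit LFSR cells, four lanes at a time, using shifts and word blends. A scalar equivalent for a single lane, following the ZUC specification, is sketched below for reference.

```c
#include <stdint.h>

/* s[] holds the 16 31-bit LFSR cells of one lane. */
static void bits_reorg(const uint32_t s[16],
                       uint32_t *x0, uint32_t *x1,
                       uint32_t *x2, uint32_t *x3)
{
        *x0 = ((s[15] & 0x7FFF8000u) << 1) | (s[14] & 0xFFFFu);
        *x1 = ((s[11] & 0xFFFFu) << 16) | (s[9] >> 15);
        *x2 = ((s[7]  & 0xFFFFu) << 16) | (s[5] >> 15);
        *x3 = ((s[2]  & 0xFFFFu) << 16) | (s[0] >> 15);
}
```

X3 is only formed in the 13-argument variant of the macro, i.e. in working mode, where it is XORed with W to produce a keystream word.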
; [constant] Number of bits + +%if (%%N_BITS == 8) + vpshufb %%OUT, %%IN, [rel rot8_mod32] +%elif (%%N_BITS == 16) + vpshufb %%OUT, %%IN, [rel rot16_mod32] +%elif (%%N_BITS == 24) + vpshufb %%OUT, %%IN, [rel rot24_mod32] +%else + vpslld %%OUT, %%IN, %%N_BITS + vpsrld %%XTMP, %%IN, (32 - %%N_BITS) + vpor %%OUT, %%XTMP +%endif +%endmacro + +; +; Updates R1-R2, using X0-X3 and generates W (if needed) +; +%macro NONLIN_FUN4 8-9 +%define %%STATE %1 ; [in] ZUC state +%define %%XTMP1 %2 ; [clobbered] Temporary XMM register +%define %%XTMP2 %3 ; [clobbered] Temporary XMM register +%define %%XTMP3 %4 ; [clobbered] Temporary XMM register +%define %%XTMP4 %5 ; [clobbered] Temporary XMM register +%define %%XTMP5 %6 ; [clobbered] Temporary XMM register +%define %%XTMP6 %7 ; [clobbered] Temporary XMM register +%define %%XTMP7 %8 ; [clobbered] Temporary XMM register +%define %%W %9 ; [out] ZMM register to contain W for all lanes + +%if (%0 == 9) + vmovdqa %%W, [%%STATE + OFS_X0] + vpxor %%W, [%%STATE + OFS_R1] + vpaddd %%W, [%%STATE + OFS_R2] ; W = (BRC_X0 ^ F_R1) + F_R2 +%endif + + vmovdqa %%XTMP1, [%%STATE + OFS_R1] + vmovdqa %%XTMP2, [%%STATE + OFS_R2] + vpaddd %%XTMP1, [%%STATE + OFS_X1] ; W1 = F_R1 + BRC_X1 + vpxor %%XTMP2, [%%STATE + OFS_X2] ; W2 = F_R2 ^ BRC_X2 + + vpslld %%XTMP3, %%XTMP1, 16 + vpsrld %%XTMP4, %%XTMP1, 16 + vpslld %%XTMP5, %%XTMP2, 16 + vpsrld %%XTMP6, %%XTMP2, 16 + vpor %%XTMP1, %%XTMP3, %%XTMP6 + vpor %%XTMP2, %%XTMP4, %%XTMP5 + + ROT_MOD32 %%XTMP3, %%XTMP1, %%XTMP7, 2 + ROT_MOD32 %%XTMP4, %%XTMP1, %%XTMP7, 10 + ROT_MOD32 %%XTMP5, %%XTMP1, %%XTMP7, 18 + ROT_MOD32 %%XTMP6, %%XTMP1, %%XTMP7, 24 + vpxor %%XTMP1, %%XTMP3 + vpxor %%XTMP1, %%XTMP4 + vpxor %%XTMP1, %%XTMP5 + vpxor %%XTMP1, %%XTMP6 ; XMM1 = U = L1(P) + + ROT_MOD32 %%XTMP3, %%XTMP2, %%XTMP7, 8 + ROT_MOD32 %%XTMP4, %%XTMP2, %%XTMP7, 14 + ROT_MOD32 %%XTMP5, %%XTMP2, %%XTMP7, 22 + ROT_MOD32 %%XTMP6, %%XTMP2, %%XTMP7, 30 + vpxor %%XTMP2, %%XTMP3 + vpxor %%XTMP2, %%XTMP4 + vpxor %%XTMP2, %%XTMP5 + vpxor %%XTMP2, %%XTMP6 ; XMM2 = V = L2(Q) + + ; Shuffle U and V to have all S0 lookups in XMM1 and all S1 lookups in XMM2 + + ; Compress all S0 and S1 input values in each register + vpshufb %%XTMP1, [rel S0_S1_shuf] ; S0: Bytes 0-7, S1: Bytes 8-15 + vpshufb %%XTMP2, [rel S0_S1_shuf] ; S0: Bytes 0-7, S1: Bytes 8-15 + + vshufpd %%XTMP3, %%XTMP1, %%XTMP2, 0x0 ; All S0 input values + vshufpd %%XTMP4, %%XTMP2, %%XTMP1, 0x3 ; All S1 input values + + ; Compute S0 and S1 values + S0_comput_AVX %%XTMP3, %%XTMP1, %%XTMP2 + S1_comput_AVX %%XTMP4, %%XTMP1, %%XTMP2, %%XTMP5 + + ; Need to shuffle back %%XTMP1 & %%XTMP2 before storing output + ; (revert what was done before S0 and S1 computations) + vshufpd %%XTMP1, %%XTMP3, %%XTMP4, 0x2 + vshufpd %%XTMP2, %%XTMP3, %%XTMP4, 0x1 + + vpshufb %%XTMP1, [rel rev_S0_S1_shuf] + vpshufb %%XTMP2, [rel rev_S0_S1_shuf] + + vmovdqa [%%STATE + OFS_R1], %%XTMP1 + vmovdqa [%%STATE + OFS_R2], %%XTMP2 +%endmacro + +; +; Stores 16 bytes of keystream for 4 lanes +; +%macro STORE16B_KSTR4 8 +%define %%DATA16B_L0 %1 ; [in] 16 bytes of keystream for lane 0 +%define %%DATA16B_L1 %2 ; [in] 16 bytes of keystream for lane 1 +%define %%DATA16B_L2 %3 ; [in] 16 bytes of keystream for lane 2 +%define %%DATA16B_L3 %4 ; [in] 16 bytes of keystream for lane 3 +%define %%KS_PTR0 %5 ; [in] Pointer to keystream for lane 0 +%define %%KS_PTR1 %6 ; [in] Pointer to keystream for lane 1 +%define %%KS_PTR2 %7 ; [in] Pointer to keystream for lane 2 +%define %%KS_PTR3 %8 ; [in] Pointer to keystream for lane 3 + + vmovdqa [%%KS_PTR0], 
%%DATA16B_L0 + vmovdqa [%%KS_PTR1], %%DATA16B_L1 + vmovdqa [%%KS_PTR2], %%DATA16B_L2 + vmovdqa [%%KS_PTR3], %%DATA16B_L3 +%endmacro + +; +; Stores 4 bytes of keystream for 4 lanes +; +%macro STORE4B_KSTR4 6 +%define %%DATA4B_L03 %1 ; [in] 4 bytes of keystream for lanes 0-3 +%define %%KS_PTR0 %2 ; [in] Pointer to keystream for lane 0 +%define %%KS_PTR1 %3 ; [in] Pointer to keystream for lane 1 +%define %%KS_PTR2 %4 ; [in] Pointer to keystream for lane 2 +%define %%KS_PTR3 %5 ; [in] Pointer to keystream for lane 3 +%define %%OFFSET %6 ; [in] Offset into keystream + + vmovd [%%KS_PTR0 + %%OFFSET], %%DATA4B_L03 + vpextrd [%%KS_PTR1 + %%OFFSET], %%DATA4B_L03, 1 + vpextrd [%%KS_PTR2 + %%OFFSET], %%DATA4B_L03, 2 + vpextrd [%%KS_PTR3 + %%OFFSET], %%DATA4B_L03, 3 +%endmacro + +; +; Add two 32-bit args and reduce mod (2^31-1) +; +%macro ADD_MOD31 4 +%define %%IN_OUT %1 ; [in/out] XMM register with first input and output +%define %%IN2 %2 ; [in] XMM register with second input +%define %%XTMP %3 ; [clobbered] Temporary XMM register +%define %%MASK31 %4 ; [in] XMM register containing 0x7FFFFFFF's in all dwords + vpaddd %%IN_OUT, %%IN2 + vpsrld %%XTMP, %%IN_OUT, 31 + vpand %%IN_OUT, %%MASK31 + vpaddd %%IN_OUT, %%XTMP +%endmacro + +; +; Rotate (mult by pow of 2) 32-bit arg and reduce mod (2^31-1) +; +%macro ROT_MOD31 4 +%define %%IN_OUT %1 ; [in/out] XMM register with input and output +%define %%XTMP %2 ; [clobbered] Temporary XMM register +%define %%MASK31 %3 ; [in] XMM register containing 0x7FFFFFFF's in all dwords +%define %%N_BITS %4 ; [immediate] Number of bits to rotate for each dword + + vpslld %%XTMP, %%IN_OUT, %%N_BITS + vpsrld %%IN_OUT, (31 - %%N_BITS) + + vpor %%IN_OUT, %%XTMP + vpand %%IN_OUT, %%MASK31 +%endmacro + +; +; Update LFSR registers, calculating S_16 +; +; S_16 = [ 2^15*S_15 + 2^17*S_13 + 2^21*S_10 + 2^20*S_4 + (1 + 2^8)*S_0 ] mod (2^31 - 1) +; If init mode, add W to the calculation above. 
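ADD_MOD31 and ROT_MOD31 implement addition and multiplication by powers of two modulo 2^31 - 1, and LFSR_UPDT4 combines them into the S_16 feedback quoted in its header. A scalar sketch of the same arithmetic, assuming all inputs are below 2^31 (the result 2^31 - 1 is left unreduced, as in the vector code):

```c
#include <stdint.h>

/* Add two values modulo (2^31 - 1); inputs are < 2^31. */
static uint32_t add_mod31(uint32_t a, uint32_t b)
{
        const uint32_t s = a + b;             /* fits in 32 bits */
        return (s & 0x7FFFFFFFu) + (s >> 31); /* fold the carry back in */
}

/* Multiply by 2^k modulo (2^31 - 1): rotate left by k within 31 bits. */
static uint32_t rot_mod31(uint32_t a, unsigned k)   /* 1 <= k <= 30 */
{
        return ((a << k) | (a >> (31 - k))) & 0x7FFFFFFFu;
}

/* S_16 = (2^15*S_15 + 2^17*S_13 + 2^21*S_10 + 2^20*S_4 +
 *         (1 + 2^8)*S_0) mod (2^31 - 1), plus W in init rounds. */
static uint32_t lfsr_feedback(const uint32_t s[16], uint32_t w, int init)
{
        uint32_t f = init ? w : s[0];

        if (init)
                f = add_mod31(f, s[0]);     /* the "1 * S_0" term */
        f = add_mod31(f, rot_mod31(s[0], 8));
        f = add_mod31(f, rot_mod31(s[4], 20));
        f = add_mod31(f, rot_mod31(s[10], 21));
        f = add_mod31(f, rot_mod31(s[13], 17));
        f = add_mod31(f, rot_mod31(s[15], 15));
        return f;
}
```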
+; S_16 -> S_15 for next round +; +%macro LFSR_UPDT4 11 +%define %%STATE %1 ; [in] ZUC state +%define %%ROUND_NUM %2 ; [in] Round number +%define %%LFSR_0 %3 ; [clobbered] LFSR_0 (XMM) +%define %%LFSR_4 %4 ; [clobbered] LFSR_4 (XMM) +%define %%LFSR_10 %5 ; [clobbered] LFSR_10 (XMM) +%define %%LFSR_13 %6 ; [clobbered] LFSR_13 (XMM) +%define %%LFSR_15 %7 ; [clobbered] LFSR_15 (XMM) +%define %%XTMP %8 ; [clobbered] Temporary XMM register +%define %%MASK_31 %9 ; [in] Mask_31 +%define %%W %10 ; [in/clobbered] In init mode, contains W for all 4 lanes +%define %%MODE %11 ; [constant] "init" / "work" mode + + vmovdqa %%LFSR_0, [%%STATE + (( 0 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_4, [%%STATE + (( 4 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_10, [%%STATE + ((10 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_13, [%%STATE + ((13 + %%ROUND_NUM) % 16)*16] + vmovdqa %%LFSR_15, [%%STATE + ((15 + %%ROUND_NUM) % 16)*16] + + ; Calculate LFSR feedback (S_16) + + ; In Init mode, W is added to the S_16 calculation +%ifidn %%MODE, init + ADD_MOD31 %%W, %%LFSR_0, %%XTMP, %%MASK_31 +%else + vmovdqa %%W, %%LFSR_0 +%endif + ROT_MOD31 %%LFSR_0, %%XTMP, %%MASK_31, 8 + ADD_MOD31 %%W, %%LFSR_0, %%XTMP, %%MASK_31 + ROT_MOD31 %%LFSR_4, %%XTMP, %%MASK_31, 20 + ADD_MOD31 %%W, %%LFSR_4, %%XTMP, %%MASK_31 + ROT_MOD31 %%LFSR_10, %%XTMP, %%MASK_31, 21 + ADD_MOD31 %%W, %%LFSR_10, %%XTMP, %%MASK_31 + ROT_MOD31 %%LFSR_13, %%XTMP, %%MASK_31, 17 + ADD_MOD31 %%W, %%LFSR_13, %%XTMP, %%MASK_31 + ROT_MOD31 %%LFSR_15, %%XTMP, %%MASK_31, 15 + ADD_MOD31 %%W, %%LFSR_15, %%XTMP, %%MASK_31 + + ; Store LFSR_S16 + vmovdqa [%%STATE + (( 0 + %%ROUND_NUM) % 16)*16], %%W +%endmacro + +; This macro reorder the LFSR registers +; after N rounds (1 <= N <= 15), since the registers +; are shifted every round +; +; The macro clobbers XMM0-15 +; +%macro REORDER_LFSR 2 +%define %%STATE %1 ; [in] Pointer to LFSR state +%define %%NUM_ROUNDS %2 ; [immediate] Number of key generation rounds + +%if %%NUM_ROUNDS != 16 +%assign %%i 0 +%rep 16 + vmovdqa APPEND(xmm,%%i), [%%STATE + 16*%%i] +%assign %%i (%%i+1) +%endrep + +%assign %%i 0 +%assign %%j %%NUM_ROUNDS +%rep 16 + vmovdqa [%%STATE + 16*%%i], APPEND(xmm,%%j) +%assign %%i (%%i+1) +%assign %%j ((%%j+1) % 16) +%endrep +%endif ;; %%NUM_ROUNDS != 16 + +%endmacro + +; +; Initialize LFSR registers for a single lane, for ZUC-128 +; +; This macro initializes 4 LFSR registers at a time. +; so it needs to be called four times. +; +; From spec, s_i (LFSR) registers need to be loaded as follows: +; +; For 0 <= i <= 15, let s_i= k_i || d_i || iv_i. +; Where k_i is each byte of the key, d_i is a 15-bit constant +; and iv_i is each byte of the IV. 
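In scalar terms, the loading rule quoted above packs each 31-bit cell as the key byte in the top 8 bits, the 15-bit constant d_i in the middle and the IV byte in the bottom 8 bits; the Ek_d table in this file stores the constants already shifted into the middle position. A minimal sketch, taking the unshifted 15-bit constants:

```c
#include <stdint.h>

/* ZUC-128 LFSR loading: s_i = k_i || d_i || iv_i (8 + 15 + 8 = 31 bits). */
static void init_lfsr_128(const uint8_t key[16], const uint8_t iv[16],
                          const uint16_t d[16], uint32_t s[16])
{
        unsigned i;

        for (i = 0; i < 16; i++)
                s[i] = ((uint32_t) key[i] << 23) |
                       ((uint32_t) d[i] << 8) |
                       (uint32_t) iv[i];
}
```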
+; +%macro INIT_LFSR_128 7 +%define %%KEY %1 ;; [in] XMM register containing 16-byte key +%define %%IV %2 ;; [in] XMM register containing 16-byte IV +%define %%SHUF_KEY %3 ;; [in] Shuffle key mask +%define %%SHUF_IV %4 ;; [in] Shuffle key mask +%define %%EKD_MASK %5 ;; [in] Shuffle key mask +%define %%LFSR %6 ;; [out] XMM register to contain initialized LFSR regs +%define %%XTMP %7 ;; [clobbered] XMM temporary register + + vpshufb %%LFSR, %%KEY, %%SHUF_KEY + vpsrld %%LFSR, 1 + vpshufb %%XTMP, %%IV, %%SHUF_IV + vpor %%LFSR, %%XTMP + vpor %%LFSR, %%EKD_MASK + +%endmacro + +; +; Initialize LFSR registers for a single lane, for ZUC-256 +; +%macro INIT_LFSR_256 9 +%define %%KEY %1 ;; [in] Key pointer +%define %%IV %2 ;; [in] IV pointer +%define %%LFSR0_3 %3 ;; [out] XMM register to contain initialized LFSR regs 0-3 +%define %%LFSR4_7 %4 ;; [out] XMM register to contain initialized LFSR regs 4-7 +%define %%LFSR8_11 %5 ;; [out] XMM register to contain initialized LFSR regs 8-11 +%define %%LFSR12_15 %6 ;; [out] XMM register to contain initialized LFSR regs 12-15 +%define %%XTMP %7 ;; [clobbered] XMM temporary register +%define %%TMP %8 ;; [clobbered] GP temporary register +%define %%TAG_SIZE %9 ;; [in] Tag size (0, 4, 8 or 16 bytes) + +%if %%TAG_SIZE == 0 +%define %%CONSTANTS rel EK256_d64 +%elif %%TAG_SIZE == 4 +%define %%CONSTANTS rel EK256_EIA3_4 +%elif %%TAG_SIZE == 8 +%define %%CONSTANTS rel EK256_EIA3_8 +%elif %%TAG_SIZE == 16 +%define %%CONSTANTS rel EK256_EIA3_16 +%endif + ; s0 - s3 + vpxor %%LFSR0_3, %%LFSR0_3 + vpinsrb %%LFSR0_3, [%%KEY], 3 ; s0 + vpinsrb %%LFSR0_3, [%%KEY + 1], 7 ; s1 + vpinsrb %%LFSR0_3, [%%KEY + 2], 11 ; s2 + vpinsrb %%LFSR0_3, [%%KEY + 3], 15 ; s3 + + vpsrld %%LFSR0_3, 1 + + vpor %%LFSR0_3, [%%CONSTANTS] ; s0 - s3 + + vpinsrb %%LFSR0_3, [%%KEY + 21], 1 ; s0 + vpinsrb %%LFSR0_3, [%%KEY + 16], 0 ; s0 + + vpinsrb %%LFSR0_3, [%%KEY + 22], 5 ; s1 + vpinsrb %%LFSR0_3, [%%KEY + 17], 4 ; s1 + + vpinsrb %%LFSR0_3, [%%KEY + 23], 9 ; s2 + vpinsrb %%LFSR0_3, [%%KEY + 18], 8 ; s2 + + vpinsrb %%LFSR0_3, [%%KEY + 24], 13 ; s3 + vpinsrb %%LFSR0_3, [%%KEY + 19], 12 ; s3 + + ; s4 - s7 + vpxor %%LFSR4_7, %%LFSR4_7 + vpinsrb %%LFSR4_7, [%%KEY + 4], 3 ; s4 + vpinsrb %%LFSR4_7, [%%IV], 7 ; s5 + vpinsrb %%LFSR4_7, [%%IV + 1], 11 ; s6 + vpinsrb %%LFSR4_7, [%%IV + 10], 15 ; s7 + + vpsrld %%LFSR4_7, 1 + + vpinsrb %%LFSR4_7, [%%KEY + 25], 1 ; s4 + vpinsrb %%LFSR4_7, [%%KEY + 20], 0 ; s4 + + vpinsrb %%LFSR4_7, [%%KEY + 5], 5 ; s5 + vpinsrb %%LFSR4_7, [%%KEY + 26], 4 ; s5 + + vpinsrb %%LFSR4_7, [%%KEY + 6], 9 ; s6 + vpinsrb %%LFSR4_7, [%%KEY + 27], 8 ; s6 + + vpinsrb %%LFSR4_7, [%%KEY + 7], 13 ; s7 + vpinsrb %%LFSR4_7, [%%IV + 2], 12 ; s7 + + vpor %%LFSR4_7, [%%CONSTANTS + 16] ; s4 - s7 + + vmovd %%XTMP, [%%IV + 17] + vpshufb %%XTMP, [rel shuf_mask_iv_17_19] + vpand %%XTMP, [rel clear_iv_mask] + + vpor %%LFSR4_7, %%XTMP + + ; s8 - s11 + vpxor %%LFSR8_11, %%LFSR8_11 + vpinsrb %%LFSR8_11, [%%KEY + 8], 3 ; s8 + vpinsrb %%LFSR8_11, [%%KEY + 9], 7 ; s9 + vpinsrb %%LFSR8_11, [%%IV + 5], 11 ; s10 + vpinsrb %%LFSR8_11, [%%KEY + 11], 15 ; s11 + + vpsrld %%LFSR8_11, 1 + + vpinsrb %%LFSR8_11, [%%IV + 3], 1 ; s8 + vpinsrb %%LFSR8_11, [%%IV + 11], 0 ; s8 + + vpinsrb %%LFSR8_11, [%%IV + 12], 5 ; s9 + vpinsrb %%LFSR8_11, [%%IV + 4], 4 ; s9 + + vpinsrb %%LFSR8_11, [%%KEY + 10], 9 ; s10 + vpinsrb %%LFSR8_11, [%%KEY + 28], 8 ; s10 + + vpinsrb %%LFSR8_11, [%%IV + 6], 13 ; s11 + vpinsrb %%LFSR8_11, [%%IV + 13], 12 ; s11 + + vpor %%LFSR8_11, [%%CONSTANTS + 32] ; s8 - s11 + + vmovd %%XTMP, [%%IV + 20] + vpshufb 
%%XTMP, [rel shuf_mask_iv_20_23] + vpand %%XTMP, [rel clear_iv_mask] + + vpor %%LFSR8_11, %%XTMP + + ; s12 - s15 + vpxor %%LFSR12_15, %%LFSR12_15 + vpinsrb %%LFSR12_15, [%%KEY + 12], 3 ; s12 + vpinsrb %%LFSR12_15, [%%KEY + 13], 7 ; s13 + vpinsrb %%LFSR12_15, [%%KEY + 14], 11 ; s14 + vpinsrb %%LFSR12_15, [%%KEY + 15], 15 ; s15 + + vpsrld %%LFSR12_15, 1 + + vpinsrb %%LFSR12_15, [%%IV + 7], 1 ; s12 + vpinsrb %%LFSR12_15, [%%IV + 14], 0 ; s12 + + vpinsrb %%LFSR12_15, [%%IV + 15], 5 ; s13 + vpinsrb %%LFSR12_15, [%%IV + 8], 4 ; s13 + + vpinsrb %%LFSR12_15, [%%IV + 16], 9 ; s14 + vpinsrb %%LFSR12_15, [%%IV + 9], 8 ; s14 + + vpinsrb %%LFSR12_15, [%%KEY + 30], 13 ; s15 + vpinsrb %%LFSR12_15, [%%KEY + 29], 12 ; s15 + + vpor %%LFSR12_15, [%%CONSTANTS + 48] ; s12 - s15 + + movzx DWORD(%%TMP), byte [%%IV + 24] + and DWORD(%%TMP), 0x0000003f + shl DWORD(%%TMP), 16 + vmovd %%XTMP, DWORD(%%TMP) + + movzx DWORD(%%TMP), byte [%%KEY + 31] + shl DWORD(%%TMP), 12 + and DWORD(%%TMP), 0x000f0000 ; high nibble of K_31 + vpinsrd %%XTMP, DWORD(%%TMP), 2 + + movzx DWORD(%%TMP), byte [%%KEY + 31] + shl DWORD(%%TMP), 16 + and DWORD(%%TMP), 0x000f0000 ; low nibble of K_31 + vpinsrd %%XTMP, DWORD(%%TMP), 3 + + vpor %%LFSR12_15, %%XTMP +%endmacro + +%macro ZUC_INIT_4 2-3 +%define %%KEY_SIZE %1 ; [constant] Key size (128 or 256) +%define %%TAG_SIZE %2 ; [in] Tag size (0 (for cipher), 4, 8 or 16) +%define %%TAGS %3 ; [in] Array of temporary tags + +%define pKe arg1 +%define pIv arg2 +%define pState arg3 + +%define %%XTMP1 xmm0 +%define %%XTMP2 xmm1 +%define %%XTMP3 xmm2 +%define %%XTMP4 xmm3 +%define %%XTMP5 xmm4 +%define %%XTMP6 xmm5 +%define %%XTMP7 xmm6 +%define %%XTMP8 xmm7 +%define %%XTMP9 xmm8 +%define %%XTMP10 xmm9 +%define %%XTMP11 xmm10 +%define %%XTMP12 xmm11 +%define %%XTMP13 xmm12 +%define %%XTMP14 xmm13 +%define %%XTMP15 xmm14 +%define %%XTMP16 xmm15 + +%define %%W %%XTMP10 +%define %%KSTR1 %%XTMP12 +%define %%KSTR2 %%XTMP13 +%define %%KSTR3 %%XTMP14 +%define %%KSTR4 %%XTMP15 +%define %%MASK_31 %%XTMP16 + + FUNC_SAVE + + ; Zero out R1-R2 + vpxor %%XTMP1, %%XTMP1 + vmovdqa [pState + OFS_R1], %%XTMP1 + vmovdqa [pState + OFS_R2], %%XTMP1 + +%if %%KEY_SIZE == 128 + + ;; Load key and IVs +%assign %%OFF 0 +%assign %%I 1 +%assign %%J 5 +%rep 4 + mov r15, [pKe + %%OFF] + vmovdqu APPEND(%%XTMP, %%I), [r15] + ; Read 16 bytes of IV + vmovdqa APPEND(%%XTMP, %%J), [pIv + %%OFF*4] +%assign %%OFF (%%OFF + 8) +%assign %%I (%%I + 1) +%assign %%J (%%J + 1) +%endrep + + ;;; Initialize all LFSR registers in four steps: + ;;; first, registers 0-3, then registers 4-7, 8-11, 12-15 +%assign %%OFF 0 +%rep 4 + ; Set read-only registers for shuffle masks for key, IV and Ek_d for 8 registers + vmovdqa %%XTMP13, [rel shuf_mask_key + %%OFF] + vmovdqa %%XTMP14, [rel shuf_mask_iv + %%OFF] + vmovdqa %%XTMP15, [rel Ek_d + %%OFF] + + ; Set 4xLFSR registers for all packets +%assign %%IDX 9 +%assign %%I 1 +%assign %%J 5 +%rep 4 + INIT_LFSR_128 APPEND(%%XTMP,%%I), APPEND(%%XTMP,%%J), %%XTMP13, %%XTMP14, \ + %%XTMP15, APPEND(%%XTMP, %%IDX), %%XTMP16 +%assign %%IDX (%%IDX + 1) +%assign %%I (%%I + 1) +%assign %%J (%%J + 1) +%endrep + + ; Store 4xLFSR registers in memory (reordering first, + ; so all SX registers are together) + TRANSPOSE4_U32 %%XTMP9, %%XTMP10, %%XTMP11, %%XTMP12, %%XTMP13, %%XTMP14 + + vmovdqa [pState + 4*%%OFF], %%XTMP9 + vmovdqa [pState + 4*%%OFF + 16], %%XTMP10 + vmovdqa [pState + 4*%%OFF + 16*2], %%XTMP11 + vmovdqa [pState + 4*%%OFF + 16*3], %%XTMP12 + +%assign %%OFF (%%OFF + 16) +%endrep + +%else ;; %%KEY_SIZE == 256 + + ;;; 
Initialize all LFSR registers +%assign %%OFF 0 +%rep 4 + ;; Load key and IV for each packet + mov r15, [pKe + %%OFF] + lea r10, [pIv + %%OFF*4] + + ; Initialize S0-15 for each packet + INIT_LFSR_256 r15, r10, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, r11, %%TAG_SIZE + + vmovdqa [pState + 2*%%OFF], %%XTMP1 + vmovdqa [pState + 2*%%OFF + 64], %%XTMP2 + vmovdqa [pState + 2*%%OFF + 64*2], %%XTMP3 + vmovdqa [pState + 2*%%OFF + 64*3], %%XTMP4 +%assign %%OFF (%%OFF + 8) +%endrep + + ; Read, transpose and store, so all S_X from the 4 packets are + ; in the same register +%assign %%OFF 0 +%rep 4 + vmovdqa %%XTMP1, [pState + %%OFF] + vmovdqa %%XTMP2, [pState + %%OFF + 16] + vmovdqa %%XTMP3, [pState + %%OFF + 16*2] + vmovdqa %%XTMP4, [pState + %%OFF + 16*3] + + TRANSPOSE4_U32 %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6 + + vmovdqa [pState + %%OFF], %%XTMP1 + vmovdqa [pState + %%OFF + 16], %%XTMP2 + vmovdqa [pState + %%OFF + 16*2], %%XTMP3 + vmovdqa [pState + %%OFF + 16*3], %%XTMP4 + +%assign %%OFF (%%OFF + 64) +%endrep +%endif ;; %%KEY_SIZE == 256 + + ; Load read-only registers + vmovdqa %%MASK_31, [rel mask31] + + ; Shift LFSR 32-times, update state variables +%assign %%N 0 +%rep 32 + BITS_REORG4 pState, %%N, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, \ + %%XTMP6, %%XTMP7, %%XTMP8, %%XTMP9, %%XTMP10 + NONLIN_FUN4 pState, %%XTMP1, %%XTMP2, %%XTMP3, \ + %%XTMP4, %%XTMP5, %%XTMP6, %%XTMP7, %%W + vpsrld %%W, 1 ; Shift out LSB of W + LFSR_UPDT4 pState, %%N, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6, \ + %%MASK_31, %%W, init ; W used in LFSR update +%assign %%N %%N+1 +%endrep + + ; And once more, initial round from keygen phase = 33 times + BITS_REORG4 pState, 0, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, \ + %%XTMP6, %%XTMP7, %%XTMP8, %%XTMP9, %%XTMP10 + NONLIN_FUN4 pState, %%XTMP1, %%XTMP2, %%XTMP3, \ + %%XTMP4, %%XTMP5, %%XTMP6, %%XTMP7, %%W + LFSR_UPDT4 pState, 0, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6, \ + %%MASK_31, %%XTMP8, work + + ; Generate extra 4, 8 or 16 bytes of KS for initial tags +%if %%TAG_SIZE == 4 +%define %%NUM_ROUNDS 1 +%elif %%TAG_SIZE == 8 +%define %%NUM_ROUNDS 2 +%elif %%TAG_SIZE == 16 +%define %%NUM_ROUNDS 4 +%else +%define %%NUM_ROUNDS 0 +%endif + +%assign %%N 1 +%rep %%NUM_ROUNDS + BITS_REORG4 pState, %%N, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, \ + %%XTMP6, %%XTMP7, %%XTMP8, %%XTMP9, %%XTMP10, APPEND(%%KSTR, %%N) + NONLIN_FUN4 pState, %%XTMP1, %%XTMP2, %%XTMP3, \ + %%XTMP4, %%XTMP5, %%XTMP6, %%XTMP7, %%W + ; OFS_X3 XOR W and store in stack + vpxor APPEND(%%KSTR, %%N), %%W + LFSR_UPDT4 pState, %%N, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6, \ + %%MASK_31, %%XTMP8, work +%assign %%N %%N+1 +%endrep + +%if %%TAG_SIZE == 4 + vmovdqu [%%TAGS], %%KSTR1 + REORDER_LFSR pState, 1 +%elif %%TAG_SIZE == 8 + ; Transpose the keystream and store the 8 bytes per buffer consecutively, + ; being the initial tag for each buffer + vpunpckldq %%XTMP1, %%KSTR1, %%KSTR2 + vpunpckhdq %%XTMP2, %%KSTR1, %%KSTR2 + vmovdqu [%%TAGS], %%XTMP1 + vmovdqu [%%TAGS + 16], %%XTMP2 + REORDER_LFSR pState, 2 +%elif %%TAG_SIZE == 16 + ; Transpose the keystream and store the 16 bytes per buffer consecutively, + ; being the initial tag for each buffer + TRANSPOSE4_U32 %%KSTR1, %%KSTR2, %%KSTR3, %%KSTR4, %%XTMP5, %%XTMP6 + vmovdqu [%%TAGS], %%KSTR1 + vmovdqu [%%TAGS + 16], %%KSTR2 + vmovdqu [%%TAGS + 16*2], %%KSTR3 + vmovdqu [%%TAGS + 16*3], %%KSTR4 + REORDER_LFSR pState, 4 +%endif + FUNC_RESTORE +%endmacro + +MKGLOBAL(asm_ZucInitialization_4_avx,function,internal) 
+asm_ZucInitialization_4_avx: + ZUC_INIT_4 128, 0 + + ret + +MKGLOBAL(asm_Zuc256Initialization_4_avx,function,internal) +asm_Zuc256Initialization_4_avx: +%define tags arg4 +%define tag_sz arg5 + + cmp tag_sz, 0 + je init_for_cipher + + cmp tag_sz, 8 + je init_for_auth_tag_8B + jb init_for_auth_tag_4B + + ; Fall-through for tag size = 16 bytes +init_for_auth_tag_16B: + ZUC_INIT_4 256, 16, tags + ret + +init_for_auth_tag_8B: + ZUC_INIT_4 256, 8, tags + ret + +init_for_auth_tag_4B: + ZUC_INIT_4 256, 4, tags + ret + +init_for_cipher: + ZUC_INIT_4 256, 0 + ret + +; +; Generate N*4 bytes of keystream +; for 4 buffers (where N is number of rounds) +; +%macro KEYGEN_4_AVX 1 +%define %%NUM_ROUNDS %1 ; [in] Number of 4-byte rounds + +%define pState arg1 +%define pKS arg2 + +%define %%XTMP1 xmm0 +%define %%XTMP2 xmm1 +%define %%XTMP3 xmm2 +%define %%XTMP4 xmm3 +%define %%XTMP5 xmm4 +%define %%XTMP6 xmm5 +%define %%XTMP7 xmm6 +%define %%XTMP8 xmm7 +%define %%XTMP9 xmm8 +%define %%XTMP10 xmm9 +%define %%XTMP11 xmm10 +%define %%XTMP12 xmm11 +%define %%XTMP13 xmm12 +%define %%XTMP14 xmm13 +%define %%XTMP15 xmm14 +%define %%XTMP16 xmm15 + +%define %%W %%XTMP11 +%define %%KSTR1 %%XTMP12 +%define %%KSTR2 %%XTMP13 +%define %%KSTR3 %%XTMP14 +%define %%KSTR4 %%XTMP15 +%define %%MASK_31 %%XTMP16 + + FUNC_SAVE + + ; Load read-only registers + vmovdqa %%MASK_31, [rel mask31] + + + ; Generate N*4B of keystream in N rounds +%assign %%N 1 +%rep %%NUM_ROUNDS + BITS_REORG4 pState, %%N, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, \ + %%XTMP6, %%XTMP7, %%XTMP8, %%XTMP9, %%XTMP10, APPEND(%%KSTR, %%N) + NONLIN_FUN4 pState, %%XTMP1, %%XTMP2, %%XTMP3, \ + %%XTMP4, %%XTMP5, %%XTMP6, %%XTMP7, %%W + ; OFS_X3 XOR W and store in stack + vpxor APPEND(%%KSTR, %%N), %%W + LFSR_UPDT4 pState, %%N, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6, \ + %%MASK_31, %%XTMP8, work +%assign %%N (%%N + 1) +%endrep + + ; Read keystream pointers and store the keystreams + mov r12, [pKS] + mov r13, [pKS + 8] + mov r14, [pKS + 16] + mov r15, [pKS + 24] +%if (%%NUM_ROUNDS == 4) + TRANSPOSE4_U32 %%KSTR1, %%KSTR2, %%KSTR3, %%KSTR4, %%XTMP5, %%XTMP6 + STORE16B_KSTR4 %%KSTR1, %%KSTR2, %%KSTR3, %%KSTR4, r12, r13, r14, r15 +%else ;; NUM_ROUNDS != 4 +%assign %%IDX 1 +%assign %%OFFSET 0 +%rep %%NUM_ROUNDS + STORE4B_KSTR4 APPEND(%%KSTR, %%IDX), r12, r13, r14, r15, %%OFFSET +%assign %%IDX (%%IDX + 1) +%assign %%OFFSET (%%OFFSET + 4) +%endrep +%endif ;; NUM_ROUNDS == 4 + + ;; Reorder memory for LFSR registers, as not all 16 rounds + ;; will be completed + REORDER_LFSR pState, %%NUM_ROUNDS + + FUNC_RESTORE + +%endmacro + +; +;; void asm_ZucGenKeystream16B_4_avx(state4_t *pSta, u32* pKeyStr[4]); +;; +;; WIN64 +;; RCX - pSta +;; RDX - pKeyStr +;; +;; LIN64 +;; RDI - pSta +;; RSI - pKeyStr +;; +MKGLOBAL(asm_ZucGenKeystream16B_4_avx,function,internal) +asm_ZucGenKeystream16B_4_avx: + + KEYGEN_4_AVX 4 + + ret + +; +;; void asm_ZucGenKeystream8B_4_avx(state4_t *pSta, u32* pKeyStr[4]); +;; +;; WIN64 +;; RCX - pSta +;; RDX - pKeyStr +;; +;; LIN64 +;; RDI - pSta +;; RSI - pKeyStr +;; +MKGLOBAL(asm_ZucGenKeystream8B_4_avx,function,internal) +asm_ZucGenKeystream8B_4_avx: + + KEYGEN_4_AVX 2 + + ret + +; +;; void asm_ZucGenKeystream4B_4_avx(state4_t *pSta, u32* pKeyStr[4]); +;; +;; WIN64 +;; RCX - pSta +;; RDX - pKeyStr +;; +;; LIN64 +;; RDI - pSta +;; RSI - pKeyStr +;; +MKGLOBAL(asm_ZucGenKeystream4B_4_avx,function,internal) +asm_ZucGenKeystream4B_4_avx: + + KEYGEN_4_AVX 1 + + ret + +;; +;; Encrypt N*4B bytes on all 4 buffers +;; where N is number of rounds (up to 
4) +;; In final call, an array of final bytes is read +;; from memory and only these final bytes are of +;; plaintext are read and XOR'ed. +;; +%macro CIPHERNx4B_4 4 +%define %%NROUNDS %1 +%define %%INITIAL_ROUND %2 +%define %%OFFSET %3 +%define %%LAST_CALL %4 + +%ifdef LINUX +%define %%TMP1 r8 +%define %%TMP2 r9 +%else +%define %%TMP1 rdi +%define %%TMP2 rsi +%endif + +%define %%XTMP1 xmm0 +%define %%XTMP2 xmm1 +%define %%XTMP3 xmm2 +%define %%XTMP4 xmm3 +%define %%XTMP5 xmm4 +%define %%XTMP6 xmm5 +%define %%XTMP7 xmm6 +%define %%XTMP8 xmm7 +%define %%XTMP9 xmm8 +%define %%XTMP10 xmm9 +%define %%XTMP11 xmm10 +%define %%XTMP12 xmm11 +%define %%XTMP13 xmm12 +%define %%XTMP14 xmm13 +%define %%XTMP15 xmm14 +%define %%XTMP16 xmm15 + +%define %%W %%XTMP10 +%define %%KSTR1 %%XTMP12 +%define %%KSTR2 %%XTMP13 +%define %%KSTR3 %%XTMP14 +%define %%KSTR4 %%XTMP15 +%define %%MASK_31 %%XTMP16 + + ; Load read-only registers + vmovdqa %%MASK_31, [rel mask31] + + ; Generate N*4B of keystream in N rounds +%assign %%N 1 +%assign %%round (%%INITIAL_ROUND + %%N) +%rep %%NROUNDS + BITS_REORG4 pState, %%round, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, \ + %%XTMP6, %%XTMP7, %%XTMP8, %%XTMP9, %%XTMP10, APPEND(%%KSTR, %%N) + NONLIN_FUN4 pState, %%XTMP1, %%XTMP2, %%XTMP3, \ + %%XTMP4, %%XTMP5, %%XTMP6, %%XTMP7, %%W + ; OFS_X3 XOR W and store in stack + vpxor APPEND(%%KSTR, %%N), %%W + LFSR_UPDT4 pState, %%round, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, %%XTMP5, %%XTMP6, \ + %%MASK_31, %%XTMP8, work +%assign %%N (%%N + 1) +%assign %%round (%%round + 1) +%endrep + + TRANSPOSE4_U32 %%KSTR1, %%KSTR2, %%KSTR3, %%KSTR4, %%XTMP5, %%XTMP6 + + vmovdqa %%XTMP16, [rel swap_mask] + + ;; XOR Input buffer with keystream in rounds of 16B + mov r12, [pIn] + mov r13, [pIn + 8] + mov r14, [pIn + 16] + mov r15, [pIn + 24] +%if (%%LAST_CALL == 4) + ;; Save GP registers + mov [rsp + _gpr_save], %%TMP1 + mov [rsp + _gpr_save + 8], %%TMP2 + + ;; Read in r10 the word containing the number of final bytes to read for each lane + movzx r10d, word [rsp + _rem_bytes_save] + simd_load_avx_16_1 %%XTMP5, r12 + %%OFFSET, r10 + movzx r10d, word [rsp + _rem_bytes_save + 2] + simd_load_avx_16_1 %%XTMP6, r13 + %%OFFSET, r10 + movzx r10d, word [rsp + _rem_bytes_save + 4] + simd_load_avx_16_1 %%XTMP7, r14 + %%OFFSET, r10 + movzx r10d, word [rsp + _rem_bytes_save + 6] + simd_load_avx_16_1 %%XTMP8, r15 + %%OFFSET, r10 +%else + vmovdqu %%XTMP5, [r12 + %%OFFSET] + vmovdqu %%XTMP6, [r13 + %%OFFSET] + vmovdqu %%XTMP7, [r14 + %%OFFSET] + vmovdqu %%XTMP8, [r15 + %%OFFSET] +%endif + + vpshufb %%KSTR1, %%XTMP16 + vpshufb %%KSTR2, %%XTMP16 + vpshufb %%KSTR3, %%XTMP16 + vpshufb %%KSTR4, %%XTMP16 + + vpxor %%KSTR1, %%XTMP5 + vpxor %%KSTR2, %%XTMP6 + vpxor %%KSTR3, %%XTMP7 + vpxor %%KSTR4, %%XTMP8 + + mov r12, [pOut] + mov r13, [pOut + 8] + mov r14, [pOut + 16] + mov r15, [pOut + 24] + +%if (%%LAST_CALL == 1) + movzx r10d, word [rsp + _rem_bytes_save] + simd_store_avx r12, %%KSTR1, r10, %%TMP1, %%TMP2, %%OFFSET + movzx r10d, word [rsp + _rem_bytes_save + 2] + simd_store_avx r13, %%KSTR2, r10, %%TMP1, %%TMP2, %%OFFSET + movzx r10d, word [rsp + _rem_bytes_save + 4] + simd_store_avx r14, %%KSTR3, r10, %%TMP1, %%TMP2, %%OFFSET + movzx r10d, word [rsp + _rem_bytes_save + 6] + simd_store_avx r15, %%KSTR4, r10, %%TMP1, %%TMP2, %%OFFSET + + ; Restore registers + mov %%TMP1, [rsp + _gpr_save] + mov %%TMP2, [rsp + _gpr_save + 8] +%else + vmovdqu [r12 + %%OFFSET], %%KSTR1 + vmovdqu [r13 + %%OFFSET], %%KSTR2 + vmovdqu [r14 + %%OFFSET], %%KSTR3 + vmovdqu [r15 + %%OFFSET], %%KSTR4 
+%endif +%endmacro + +;; +;; void asm_ZucCipher_4_avx(state16_t *pSta, u64 *pIn[4], +;; u64 *pOut[4], u16 *length[4], u64 min_length); +;; +;; WIN64 +;; RCX - pSta +;; RDX - pIn +;; R8 - pOut +;; R9 - lengths +;; rsp + 40 - min_length +;; +;; LIN64 +;; RDI - pSta +;; RSI - pIn +;; RDX - pOut +;; RCX - lengths +;; R8 - min_length +;; +MKGLOBAL(asm_ZucCipher_4_avx,function,internal) +asm_ZucCipher_4_avx: + +%define pState arg1 +%define pIn arg2 +%define pOut arg3 +%define lengths arg4 + +%ifdef LINUX + %define nrounds r8 +%else + %define nrounds rdi +%endif + +%define min_length r10 +%define buf_idx r11 + + mov min_length, arg5 + + or min_length, min_length + jz exit_cipher + + FUNC_SAVE + + ;; Convert all lengths from UINT16_MAX (indicating that lane is not valid) to min length + vmovd xmm0, DWORD(min_length) + vpshufb xmm0, [rel broadcast_word] + vmovq xmm1, [lengths] + vpcmpeqw xmm2, xmm2 ;; Get all ff's in XMM register + vpcmpeqw xmm3, xmm1, xmm2 ;; Mask with FFFF in NULL jobs + + vpand xmm4, xmm3, xmm0 ;; Length of valid job in all NULL jobs + vpxor xmm2, xmm3 ;; Mask with 0000 in NULL jobs + vpand xmm1, xmm2 ;; Zero out lengths of NULL jobs + vpor xmm1, xmm4 ;; XMM1 contain updated lengths + + ; Round up to nearest multiple of 4 bytes + vpaddw xmm0, [rel all_threes] + vpand xmm0, [rel all_fffcs] + + ; Calculate remaining bytes to encrypt after function call + vpsubw xmm2, xmm1, xmm0 + vpxor xmm3, xmm3 + vpcmpgtw xmm4, xmm2, xmm3 ;; Mask with FFFF in lengths > 0 + ; Set to zero the lengths of the lanes which are going to be completed (lengths < 0) + vpand xmm2, xmm4 + vmovq [lengths], xmm2 ; Update in memory the final updated lengths + + ; Calculate number of bytes to encrypt after rounds of 16 bytes (up to 15 bytes), + ; for each lane, and store it in stack to be used in the last round + vpsubw xmm1, xmm2 ; Bytes to encrypt in all lanes + vpand xmm1, [rel all_0fs] ; Number of final bytes (up to 15 bytes) for each lane + vpcmpeqw xmm2, xmm1, xmm3 ;; Mask with FFFF in lengths == 0 + vpand xmm2, [rel all_10s] ;; 16 in positions where lengths was 0 + vpor xmm1, xmm2 ;; Number of final bytes (up to 16 bytes) for each lane + + ; Allocate stack frame to store keystreams (16*4 bytes), number of final bytes (8 bytes), + ; space for rsp (8 bytes) and 2 GP registers (16 bytes) that will be clobbered later + mov rax, rsp + sub rsp, STACK_size + and rsp, -16 + xor buf_idx, buf_idx + vmovq [rsp + _rem_bytes_save], xmm1 + mov [rsp + _rsp_save], rax + +loop_cipher64: + cmp min_length, 64 + jl exit_loop_cipher64 + +%assign round_off 0 +%rep 4 + CIPHERNx4B_4 4, round_off, buf_idx, 0 + + add buf_idx, 16 + sub min_length, 16 +%assign round_off (round_off + 4) +%endrep + jmp loop_cipher64 +exit_loop_cipher64: + + ; Check if there are more bytes left to encrypt + mov r15, min_length + add r15, 3 + shr r15, 2 ;; number of rounds left (round up length to nearest multiple of 4B) + jz exit_final_rounds + + cmp r15, 8 + je _num_final_rounds_is_8 + jb _final_rounds_is_1_7 + + ; Final blocks 9-16 + cmp r15, 12 + je _num_final_rounds_is_12 + ja _final_rounds_is_13_16 + + ; Final blocks 9-11 + cmp r15, 10 + je _num_final_rounds_is_10 + jb _num_final_rounds_is_9 + ja _num_final_rounds_is_11 + +_final_rounds_is_13_16: + cmp r15, 16 + je _num_final_rounds_is_16 + cmp r15, 14 + je _num_final_rounds_is_14 + jb _num_final_rounds_is_13 + ja _num_final_rounds_is_15 + +_final_rounds_is_1_7: + cmp r15, 4 + je _num_final_rounds_is_4 + jl _final_rounds_is_1_3 + + ; Final blocks 5-7 + cmp r15, 6 + je _num_final_rounds_is_6 + jb 
_num_final_rounds_is_5 + ja _num_final_rounds_is_7 + +_final_rounds_is_1_3: + cmp r15, 2 + je _num_final_rounds_is_2 + ja _num_final_rounds_is_3 + + ; Perform encryption of last bytes (<= 63 bytes) and reorder LFSR registers +%assign I 1 +%rep 4 +APPEND(_num_final_rounds_is_,I): + CIPHERNx4B_4 I, 0, buf_idx, 1 + REORDER_LFSR pState, I + add buf_idx, (I*4) + jmp exit_final_rounds +%assign I (I + 1) +%endrep + +%assign I 5 +%rep 4 +APPEND(_num_final_rounds_is_,I): + CIPHERNx4B_4 4, 0, buf_idx, 0 + add buf_idx, 16 + CIPHERNx4B_4 (I-4), 4, buf_idx, 1 + add buf_idx, ((I-4)*4) + REORDER_LFSR pState, I + jmp exit_final_rounds +%assign I (I + 1) +%endrep + +%assign I 9 +%rep 4 +APPEND(_num_final_rounds_is_,I): + CIPHERNx4B_4 4, 0, buf_idx, 0 + add buf_idx, 16 + CIPHERNx4B_4 4, 4, buf_idx, 0 + add buf_idx, 16 + CIPHERNx4B_4 (I-8), 8, buf_idx, 1 + add buf_idx, ((I-8)*4) + REORDER_LFSR pState, I + jmp exit_final_rounds +%assign I (I + 1) +%endrep + +%assign I 13 +%rep 4 +APPEND(_num_final_rounds_is_,I): + CIPHERNx4B_4 4, 0, buf_idx, 0 + add buf_idx, 16 + CIPHERNx4B_4 4, 4, buf_idx, 0 + add buf_idx, 16 + CIPHERNx4B_4 4, 8, buf_idx, 0 + add buf_idx, 16 + CIPHERNx4B_4 (I-12), 12, buf_idx, 1 + add buf_idx, ((I-12)*4) + REORDER_LFSR pState, I + jmp exit_final_rounds +%assign I (I + 1) +%endrep + +exit_final_rounds: + ;; update in/out pointers + vmovq xmm0, buf_idx + vpshufd xmm0, xmm0, 0x44 + vpaddq xmm1, xmm0, [pIn] + vpaddq xmm2, xmm0, [pIn + 16] + vmovdqa [pIn], xmm1 + vmovdqa [pIn + 16], xmm2 + vpaddq xmm1, xmm0, [pOut] + vpaddq xmm2, xmm0, [pOut + 16] + vmovdqa [pOut], xmm1 + vmovdqa [pOut + 16], xmm2 + + ; Restore rsp + mov rsp, [rsp + _rsp_save] + + FUNC_RESTORE + +exit_cipher: + + ret + +; +; Processes 16 bytes of data and updates the digest +; +%macro DIGEST_16_BYTES 17 +%define %%KS %1 ; [in] Pointer to 24-byte keystream +%define %%BIT_REV_L %2 ; [in] Bit reverse low table (XMM) +%define %%BIT_REV_H %3 ; [in] Bit reverse high table (XMM) +%define %%BIT_REV_AND %4 ; [in] Bit reverse and table (XMM) +%define %%XDIGEST %5 ; [in/out] Temporary digest (XMM) +%define %%XDATA %6 ; [in/clobbered] Input data (16 bytes) / Temporary XMM register +%define %%XTMP2 %7 ; [clobbered] Temporary XMM register +%define %%XTMP3 %8 ; [clobbered] Temporary XMM register +%define %%XTMP4 %9 ; [clobbered] Temporary XMM register +%define %%XTMP5 %10 ; [clobbered] Temporary XMM register +%define %%XTMP6 %11 ; [clobbered] Temporary XMM register +%define %%KS_L %12 ; [clobbered] Temporary XMM register +%define %%KS_M1 %13 ; [clobbered] Temporary XMM register +%define %%KS_M2 %14 ; [clobbered] Temporary XMM register +%define %%KS_H %15 ; [clobbered] Temporary XMM register +%define %%OFF %16 ; [in] Offset into KS +%define %%TAG_SZ %17 ; [in] Tag size (4, 8 or 16) + +%define %%XTMP1 %%XDATA + + vpand %%XTMP2, %%XDATA, %%BIT_REV_AND + + vpandn %%XTMP3, %%BIT_REV_AND, %%XDATA + vpsrld %%XTMP3, 4 + + vpshufb %%XTMP4, %%BIT_REV_H, %%XTMP2 + vpshufb %%XTMP1, %%BIT_REV_L, %%XTMP3 + vpor %%XTMP4, %%XTMP4, %%XTMP1 ;; %%XTMP4 - bit reverse data bytes + + ;; ZUC authentication part + ;; - 4x32 data bits + ;; - set up KS + vpshufd %%KS_L, [%%KS + %%OFF + (0*4)], 0x61 ; KS bits [63:32 31:0 95:64 63:32] + vpshufd %%KS_M1, [%%KS + %%OFF + (2*4)], 0x61 ; KS bits [127:96 95:64 159:128 127:96] +%if %%TAG_SZ != 4 ;; TAG_SZ == 8 or 16 + vpshufd %%KS_M2, [%%KS + %%OFF + (4*4)], 0x61 ; KS bits [191:160 159:128 223:192 191:160] +%if %%TAG_SZ == 16 + vpshufd %%KS_H, [%%KS + %%OFF + (4*4)], 0xBB ; KS bits [255:224 223:192 255:224 223:192] +%endif 
+%endif ;; TAG_SZ != 4 + + ;; - set up DATA + ; Data bytes [31:0 0s 63:32 0s] + vpshufb %%XTMP1, %%XTMP4, [rel shuf_mask_dw0_0_dw1_0] + + ; Data bytes [95:64 0s 127:96 0s] + vpshufb %%XTMP2, %%XTMP4, [rel shuf_mask_dw2_0_dw3_0] + + ;; - clmul + ;; - xor the results from 4 32-bit words together + vpclmulqdq %%XTMP3, %%XTMP1, %%KS_L, 0x00 + vpclmulqdq %%XTMP4, %%XTMP1, %%KS_L, 0x11 + vpclmulqdq %%XTMP5, %%XTMP2, %%KS_M1, 0x00 + vpclmulqdq %%XTMP6, %%XTMP2, %%KS_M1, 0x11 + + vpxor %%XTMP3, %%XTMP3, %%XTMP4 + vpxor %%XTMP5, %%XTMP5, %%XTMP6 + vpxor %%XTMP3, %%XTMP3, %%XTMP5 +%if %%TAG_SZ == 4 + vpxor %%XDIGEST, %%XDIGEST, %%XTMP3 +%endif ; %%TAG_SZ == 4 +%if %%TAG_SZ >= 8 + ; Move previous result to low 32 bits and XOR with previous digest + vmovq %%XTMP3, %%XTMP3 ; Clear top 64 bits + vpsrldq %%XTMP3, %%XTMP3, 4 + vpxor %%XDIGEST, %%XDIGEST, %%XTMP3 + + vpclmulqdq %%XTMP3, %%XTMP1, %%KS_L, 0x10 + vpclmulqdq %%XTMP4, %%XTMP1, %%KS_M1, 0x01 + vpclmulqdq %%XTMP5, %%XTMP2, %%KS_M1, 0x10 + vpclmulqdq %%XTMP6, %%XTMP2, %%KS_M2, 0x01 + + ; XOR all the products and keep only 32-63 bits + vpxor %%XTMP3, %%XTMP3, %%XTMP4 + vpxor %%XTMP5, %%XTMP5, %%XTMP6 + vpxor %%XTMP3, %%XTMP3, %%XTMP5 + vpand %%XTMP3, %%XTMP3, [rel bits_32_63] + + ; XOR with bits 32-63 of previous digest + vpxor %%XDIGEST, %%XDIGEST, %%XTMP3 + +%if %%TAG_SZ == 16 + ; Prepare data and calculate bits 95-64 of tag + vpclmulqdq %%XTMP3, %%XTMP1, %%KS_M1, 0x00 + vpclmulqdq %%XTMP4, %%XTMP1, %%KS_M1, 0x11 + vpclmulqdq %%XTMP5, %%XTMP2, %%KS_M2, 0x00 + vpclmulqdq %%XTMP6, %%XTMP2, %%KS_M2, 0x11 + + ; XOR all the products and move bits 63-32 to bits 95-64 + vpxor %%XTMP3, %%XTMP4 + vpxor %%XTMP5, %%XTMP6 + vpxor %%XTMP3, %%XTMP5 + + vpshufb %%XTMP3, %%XTMP3, [rel shuf_mask_0_0_dw1_0] + + ; XOR with previous bits 64-95 of previous digest + vpxor %%XDIGEST, %%XDIGEST, %%XTMP3 + + ; Prepare data and calculate bits 127-96 of tag + vpclmulqdq %%XTMP3, %%XTMP1, %%KS_M1, 0x10 + vpclmulqdq %%XTMP4, %%XTMP1, %%KS_M2, 0x01 + vpclmulqdq %%XTMP5, %%XTMP2, %%KS_M2, 0x10 + vpclmulqdq %%XTMP6, %%XTMP2, %%KS_H, 0x01 + + ; XOR all the products and move bits 63-32 to bits 127-96 + vpxor %%XTMP3, %%XTMP3, %%XTMP4 + vpxor %%XTMP5, %%XTMP5, %%XTMP6 + vpxor %%XTMP3, %%XTMP3, %%XTMP5 + vpshufb %%XTMP3, %%XTMP3, [rel shuf_mask_0_0_0_dw1] + + ; XOR with lower 96 bits, to construct 128 bits of tag + vpxor %%XDIGEST, %%XDIGEST, %%XTMP3 + +%endif ; %%TAG_SZ == 16 +%endif ; %%TAG_SZ >= 8 +%endmacro + +%macro REMAINDER 25 +%define %%T %1 ; [in] Pointer to authentication tag +%define %%KS %2 ; [in/clobbered] Pointer to 32-byte keystream +%define %%DATA %3 ; [in/clobbered] Pointer to input data +%define %%N_BITS %4 ; [in/clobbered] Number of bits to digest +%define %%N_BYTES %5 ; [clobbered] Number of bytes to digest +%define %%TMP1 %6 ; [clobbered] Temporary GP register +%define %%TMP2 %7 ; [clobbered] Temporary GP register +%define %%TMP3 %8 ; [clobbered] Temporary GP register +%define %%TMP4 %9 ; [clobbered] Temporary GP register +%define %%BIT_REV_L %10 ; [in] Bit reverse low table (XMM) +%define %%BIT_REV_H %11 ; [in] Bit reverse high table (XMM) +%define %%BIT_REV_AND %12 ; [in] Bit reverse and table (XMM) +%define %%XDIGEST %13 ; [clobbered] Temporary digest (XMM) +%define %%XTMP1 %14 ; [clobbered] Temporary XMM register +%define %%XTMP2 %15 ; [clobbered] Temporary XMM register +%define %%XTMP3 %16 ; [clobbered] Temporary XMM register +%define %%XTMP4 %17 ; [clobbered] Temporary XMM register +%define %%XTMP5 %18 ; [clobbered] Temporary XMM register +%define %%XTMP6 
%19 ; [clobbered] Temporary XMM register +%define %%KS_L %20 ; [clobbered] Temporary XMM register +%define %%KS_M1 %21 ; [clobbered] Temporary XMM register +%define %%KS_M2 %22 ; [clobbered] Temporary XMM register +%define %%KS_H %23 ; [clobbered] Temporary XMM register +%define %%KEY_SZ %24 ; [in] Key size (128 or 256) +%define %%TAG_SZ %25 ; [in] Key size (4, 8 or 16) + + FUNC_SAVE + + vpxor %%XDIGEST, %%XDIGEST + + ; Length between 1 and 255 bits + test %%N_BITS, 128 + jz %%Eia3RoundsAVX_dq_end + + ;; read up to 16 bytes of data and reverse bits + vmovdqu %%XTMP1, [%%DATA] + DIGEST_16_BYTES %%KS, %%BIT_REV_L, %%BIT_REV_H, %%BIT_REV_AND, \ + %%XDIGEST, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, \ + %%XTMP5, %%XTMP6, %%KS_L, %%KS_M1, %%KS_M2, %%KS_H, \ + 0, %%TAG_SZ + + add %%DATA, 16 + add %%KS, 16 + sub %%N_BITS, 128 +%%Eia3RoundsAVX_dq_end: + + or %%N_BITS, %%N_BITS + jz %%Eia3RoundsAVX_end + + ; Get number of bytes + lea %%N_BYTES, [%%N_BITS + 7] + shr %%N_BYTES, 3 + + ;; read up to 16 bytes of data, zero bits not needed if partial byte and bit-reverse + simd_load_avx_16_1 %%XTMP1, %%DATA, %%N_BYTES + ; check if there is a partial byte (less than 8 bits in last byte) + mov %%TMP1, %%N_BITS + and %%TMP1, 0x7 + shl %%TMP1, 4 + lea %%TMP2, [rel bit_mask_table] + add %%TMP2, %%TMP1 + + ; Get mask to clear last bits + vmovdqa %%XTMP2, [%%TMP2] + + ; Shift left 16-N bytes to have the last byte always at the end of the XMM register + ; to apply mask, then restore by shifting right same amount of bytes + mov %%TMP2, 16 + sub %%TMP2, %%N_BYTES + XVPSLLB %%XTMP1, %%TMP2, %%XTMP3, %%TMP1 + vpand %%XTMP1, %%XTMP2 + XVPSRLB %%XTMP1, %%TMP2, %%XTMP3, %%TMP1 + + DIGEST_16_BYTES %%KS, %%BIT_REV_L, %%BIT_REV_H, %%BIT_REV_AND, \ + %%XDIGEST, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, \ + %%XTMP5, %%XTMP6, %%KS_L, %%KS_M1, %%KS_M2, %%KS_H, \ + 0, %%TAG_SZ + +%%Eia3RoundsAVX_end: + +%if %%TAG_SZ == 4 +%define %%TAG DWORD(%%TMP1) + ;; - update T + mov %%TAG, [%%T] + vmovq %%TMP2, %%XDIGEST + shr %%TMP2, 32 + xor %%TAG, DWORD(%%TMP2) + + ;; XOR with keyStr[n_bits] (Z_length, from spec) + + ; Read keyStr[N_BITS / 32] + mov %%TMP2, %%N_BITS + shr %%TMP2, 5 + mov %%TMP3, [%%KS + %%TMP2*4] + + ; Rotate left by N_BITS % 32 + mov %%TMP2, rcx ; Save RCX + mov rcx, %%N_BITS + and rcx, 0x1F + rol %%TMP3, cl + mov rcx, %%TMP2 ; Restore RCX + + ; XOR with previous digest calculation + xor %%TAG, DWORD(%%TMP3) + +%if %%KEY_SZ == 128 + ;; XOR with keyStr[L-1] + + ; Read keyStr[L - 1] (last double word of keyStr) + mov %%TMP2, %%N_BITS + add %%TMP2, (31 + 64 - 32) ; (32 is subtracted here to get L - 1) + shr %%TMP2, 5 ; L - 1 + ; XOR with previous digest calculation + xor %%TAG, [%%KS + %%TMP2 * 4] + +%endif + bswap %%TAG + mov [%%T], %%TAG +%else ; %%TAG_SZ == 8 or 16 +%define %%TAG %%TMP1 + ;; Update lower 64 bits of T + vmovq %%TAG, %%XDIGEST + xor %%TAG, [%%T] + + ;; XOR with keyStr[n_bits] (Z_length, from spec) + + ; Read keyStr[N_BITS / 32] + mov %%TMP2, %%N_BITS + shr %%TMP2, 5 + mov %%TMP3, [%%KS + %%TMP2*4] + mov %%TMP4, [%%KS + %%TMP2*4 + 4] + + ; Rotate left by N_BITS % 32 + mov %%TMP2, rcx ; Save RCX + mov rcx, %%N_BITS + and rcx, 0x1F + rol %%TMP3, cl + rol %%TMP4, cl + mov rcx, %%TMP2 ; Restore RCX + + shl %%TMP4, 32 + mov DWORD(%%TMP3), DWORD(%%TMP3) ; Clear top 32 bits + or %%TMP4, %%TMP3 + + ; XOR with previous digest calculation + xor %%TAG, %%TMP4 + + ; Byte swap both dwords of the digest before writing out + bswap %%TAG + ror %%TAG, 32 + mov [%%T], %%TAG +%if %%TAG_SZ == 16 + ;; Update higher 64 bits of T + 
vpextrq %%TAG, %%XDIGEST, 1 + xor %%TAG, [%%T + 8] + + ;; XOR with keyStr[n_bits] (Z_length, from spec) + + ; Read keyStr[N_BITS / 32] + mov %%TMP2, %%N_BITS + shr %%TMP2, 5 + mov %%TMP3, [%%KS + %%TMP2*4 + 4*2] + mov %%TMP4, [%%KS + %%TMP2*4 + 4*3] + + ; Rotate left by N_BITS % 32 + mov %%TMP2, rcx ; Save RCX + mov rcx, %%N_BITS + and rcx, 0x1F + rol %%TMP3, cl + rol %%TMP4, cl + mov rcx, %%TMP2 ; Restore RCX + + shl %%TMP4, 32 + mov DWORD(%%TMP3), DWORD(%%TMP3) ; Clear top 32 bits + or %%TMP4, %%TMP3 + + ; XOR with previous digest calculation + xor %%TAG, %%TMP4 + + ; Byte swap both dwords of the digest before writing out + bswap %%TAG + ror %%TAG, 32 + mov [%%T + 8], %%TAG +%endif ; %%TAG_SZ == 16 +%endif ; %%TAG_SZ == 4 + + FUNC_RESTORE + +%endmacro + +;; +;; extern void asm_Eia3Remainder_avx(void *T, const void *ks, +;; const void *data, const uint64_t n_bits, +;; const uint64_t key_size, +;; const uint64_t tag_size); +;; +;; Returns authentication update value to be XOR'ed with current authentication tag +;; +;; @param [in] T (digest pointer) +;; @param [in] KS (key stream pointer) +;; @param [in] DATA (data pointer) +;; @param [in] N_BITS (number of bits to digest) +;; @param [in] KEY_SZ (Key size: 128 or 256 bits) +;; @param [in] TAG_SZ (Tag size: 4, 8 or 16 bytes) +;; +align 64 +MKGLOBAL(asm_Eia3Remainder_avx,function,internal) +asm_Eia3Remainder_avx: + +%define T arg1 +%define KS arg2 +%define DATA arg3 +%define N_BITS arg4 +%define KEY_SZ arg5 +%define TAG_SZ arg6 + + vmovdqa xmm0, [rel bit_reverse_table_l] + vmovdqa xmm1, [rel bit_reverse_table_h] + vmovdqa xmm2, [rel bit_reverse_and_table] + + cmp KEY_SZ, 128 + je remainder_key_sz_128 + + cmp TAG_SZ, 8 + je remainder_tag_sz_8 + ja remainder_tag_sz_16 + + ; Key size = 256 + ; Fall-through for tag size = 4 bytes +remainder_tag_sz_4: + REMAINDER T, KS, DATA, N_BITS, r11, r12, r13, r14, r15, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 256, 4 + ret + +remainder_tag_sz_8: + REMAINDER T, KS, DATA, N_BITS, r11, r12, r13, r14, r15, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 256, 8 + ret + +remainder_tag_sz_16: + REMAINDER T, KS, DATA, N_BITS, r11, r12, r13, r14, r15, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 256, 16 + ret + +remainder_key_sz_128: + REMAINDER T, KS, DATA, N_BITS, r11, r12, r13, r14, r15, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 128, 4 + ret + +%macro EIA3_ROUND 20 +%define %%T %1 ; [in] Pointer to authentication tag +%define %%KS %2 ; [in/clobbered] Pointer to 32-byte keystream +%define %%DATA %3 ; [in/clobbered] Pointer to input data +%define %%TMP %4 ; [clobbered] Temporary GP register +%define %%BIT_REV_L %5 ; [in] Bit reverse low table (XMM) +%define %%BIT_REV_H %6 ; [in] Bit reverse high table (XMM) +%define %%BIT_REV_AND %7 ; [in] Bit reverse and table (XMM) +%define %%XDIGEST %8 ; [clobbered] Temporary digest (XMM) +%define %%XTMP1 %9 ; [clobbered] Temporary XMM register +%define %%XTMP2 %10 ; [clobbered] Temporary XMM register +%define %%XTMP3 %11 ; [clobbered] Temporary XMM register +%define %%XTMP4 %12 ; [clobbered] Temporary XMM register +%define %%XTMP5 %13 ; [clobbered] Temporary XMM register +%define %%XTMP6 %14 ; [clobbered] Temporary XMM register +%define %%KS_L %15 ; [clobbered] Temporary XMM register +%define %%KS_M1 %16 ; [clobbered] Temporary XMM register +%define %%KS_M2 %17 ; [clobbered] 
Temporary XMM register +%define %%KS_H %18 ; [clobbered] Temporary XMM register +%define %%NUM_16B_ROUNDS %19 ; [in] Number of 16-byte rounds +%define %%TAG_SZ %20 ; [constant] Tag size (4, 8 or 16 bytes) + + vpxor %%XDIGEST, %%XDIGEST + +%assign %%OFF 0 +%rep %%NUM_16B_ROUNDS + vmovdqu %%XTMP1, [%%DATA + %%OFF] + + DIGEST_16_BYTES %%KS, %%BIT_REV_L, %%BIT_REV_H, %%BIT_REV_AND, \ + %%XDIGEST, %%XTMP1, %%XTMP2, %%XTMP3, %%XTMP4, \ + %%XTMP5, %%XTMP6, %%KS_L, %%KS_M1, %%KS_M2, %%KS_H, \ + %%OFF, %%TAG_SZ + +%assign %%OFF (%%OFF + 16) +%endrep + +%if %%TAG_SZ == 4 + ;; - update T + vmovq %%TMP, %%XDIGEST + shr %%TMP, 32 + xor [%%T], DWORD(%%TMP) +%elif %%TAG_SZ == 8 + ;; - update T + vmovq %%TMP, %%XDIGEST + xor [%%T], %%TMP +%else ;; %%TAG_SZ == 16 + vpxor %%XDIGEST, [%%T] + vmovdqu [%%T], %%XDIGEST +%endif + +%endmacro + +;; +;;extern void asm_Eia3Round32B_avx(void *T, const void *KS, const void *DATA, +;; const uint64_t tag_sz) +;; +;; Updates authentication tag T based on keystream KS and DATA. +;; - it processes 32 bytes of DATA +;; - reads data in 16 byte chunks and bit reverses them +;; - reads and re-arranges KS +;; - employs clmul for the XOR & ROL part +;; - copies top 32 bytes of KS to bottom (for the next round) +;; +;; @param [in] T (digest pointer) +;; @param [in] KS (key stream pointer) +;; @param [in] DATA (data pointer) +;; @param [in] TAG_SZ (Tag size: 4, 8 or 16 bytes) +;; +align 64 +MKGLOBAL(asm_Eia3Round32B_avx,function,internal) +asm_Eia3Round32B_avx: + +%define T arg1 +%define KS arg2 +%define DATA arg3 +%define TAG_SZ arg4 + + FUNC_SAVE + + vmovdqa xmm0, [bit_reverse_table_l] + vmovdqa xmm1, [bit_reverse_table_h] + vmovdqa xmm2, [bit_reverse_and_table] + + cmp TAG_SZ, 8 + je round32B_tag_8B + ja round32B_tag_16B + + ; Fall-through for 4 bytes +round32B_tag_4B: + EIA3_ROUND T, KS, DATA, r11, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 2, 4 + + jmp end_round32B + +round32B_tag_8B: + EIA3_ROUND T, KS, DATA, r11, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 2, 8 + + jmp end_round32B + +round32B_tag_16B: + EIA3_ROUND T, KS, DATA, r11, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 2, 16 +end_round32B: + + ;; Copy last 32 bytes of KS to the front + vmovdqa xmm0, [KS + 32] + vmovdqa xmm1, [KS + 48] + vmovdqa [KS], xmm0 + vmovdqa [KS + 16], xmm1 + + FUNC_RESTORE + + ret + +;; +;;extern void asm_Eia3Round16B_avx(void *T, const void *KS, const void *DATA, +;; const uint64_t tag_sz) +;; +;; Updates authentication tag T based on keystream KS and DATA. 
+;; - it processes 16 bytes of DATA +;; - reads data in 16 byte chunks and bit reverses them +;; - reads and re-arranges KS +;; - employs clmul for the XOR & ROL part +;; - copies top 16 bytes of KS to bottom (for the next round) +;; +;; @param [in] T (digest pointer) +;; @param [in] KS (key stream pointer) +;; @param [in] DATA (data pointer) +;; @param [in] TAG_SZ (Tag size: 4, 8 or 16 bytes) +;; +align 64 +MKGLOBAL(asm_Eia3Round16B_avx,function,internal) +asm_Eia3Round16B_avx: + +%define T arg1 +%define KS arg2 +%define DATA arg3 +%define TAG_SZ arg4 + + FUNC_SAVE + + vmovdqa xmm0, [bit_reverse_table_l] + vmovdqa xmm1, [bit_reverse_table_h] + vmovdqa xmm2, [bit_reverse_and_table] + + cmp TAG_SZ, 8 + je round16B_tag_8B + ja round16B_tag_16B + + ; Fall-through for 4 bytes +round16B_tag_4B: + EIA3_ROUND T, KS, DATA, r11, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 1, 4 + + jmp end_round16B + +round16B_tag_8B: + EIA3_ROUND T, KS, DATA, r11, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 1, 8 + + jmp end_round16B + +round16B_tag_16B: + EIA3_ROUND T, KS, DATA, r11, \ + xmm0, xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, 1, 16 +end_round16B: + ;; Copy last 16 bytes of KS to the front + vmovdqa xmm0, [KS + 16] + vmovdqa [KS], xmm0 + + FUNC_RESTORE + + ret + +;---------------------------------------------------------------------------------------- +;---------------------------------------------------------------------------------------- + +mksection stack-noexec diff --git a/lib/avx_t2/mb_mgr_avx_t2.c b/lib/avx_t2/mb_mgr_avx_t2.c new file mode 100644 index 0000000000000000000000000000000000000000..0e631f577ac188e71497b57b01a3fb2ed5baa397 --- /dev/null +++ b/lib/avx_t2/mb_mgr_avx_t2.c @@ -0,0 +1,483 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include +#include +#include + +#define AVX + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/kasumi_interface.h" +#include "include/zuc_internal.h" +#include "include/snow3g.h" +#include "include/snow3g_submit.h" +#include "include/gcm.h" +#include "include/chacha20_poly1305.h" +#include "include/save_xmms.h" +#include "include/des.h" +#include "include/cpu_feature.h" +#include "include/aesni_emu.h" +#include "include/error.h" + +#include "include/arch_sse_type1.h" /* snow3g */ +#include "include/arch_sse_type2.h" /* shani */ +#include "include/arch_avx_type1.h" + +#include "include/ooo_mgr_reset.h" + +#define SAVE_XMMS save_xmms_avx +#define RESTORE_XMMS restore_xmms_avx + +/* JOB API */ +#define SUBMIT_JOB submit_job_avx_t2 +#define FLUSH_JOB flush_job_avx_t2 +#define QUEUE_SIZE queue_size_avx_t2 +#define SUBMIT_JOB_NOCHECK submit_job_nocheck_avx_t2 +#define GET_NEXT_JOB get_next_job_avx_t2 +#define GET_COMPLETED_JOB get_completed_job_avx_t2 +#define GET_NEXT_BURST get_next_burst_avx_t2 +#define SUBMIT_BURST submit_burst_avx_t2 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_avx_t2 +#define FLUSH_BURST flush_burst_avx_t2 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_avx_t2 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_avx_t2 +#define SUBMIT_HASH_BURST submit_hash_burst_avx_t2 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_avx_t2 + +/* Hash */ +#define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_AVX_T2 +#define FLUSH_JOB_HASH FLUSH_JOB_HASH_AVX_T2 + +/* Cipher encrypt / decrypt */ +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_AVX_T2 +#define FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_AVX_T2 +#define SUBMIT_JOB_CIPHER_DEC SUBMIT_JOB_CIPHER_DEC_AVX_T2 + +/* AES-GCM */ +#define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_avx_gen2 +#define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_avx_gen2 +#define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_avx_gen2 +#define AES_GCM_ENC_IV_192 aes_gcm_enc_var_iv_192_avx_gen2 +#define AES_GCM_DEC_IV_256 aes_gcm_dec_var_iv_256_avx_gen2 +#define AES_GCM_ENC_IV_256 aes_gcm_enc_var_iv_256_avx_gen2 + +#define SUBMIT_JOB_AES_GCM_DEC submit_job_aes_gcm_dec_avx +#define SUBMIT_JOB_AES_GCM_ENC submit_job_aes_gcm_enc_avx + +/* AES-CBC */ +#define SUBMIT_JOB_AES_CBC_128_ENC submit_job_aes128_enc_avx +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_avx +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_avx + +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_avx +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_avx +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_avx + +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_avx +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_avx +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_avx + +#define AES_CBC_DEC_128 aes_cbc_dec_128_avx +#define AES_CBC_DEC_192 aes_cbc_dec_192_avx +#define AES_CBC_DEC_256 aes_cbc_dec_256_avx + +#define SUBMIT_JOB_AES128_DEC submit_job_aes128_dec_avx_t2 +#define SUBMIT_JOB_AES192_DEC submit_job_aes192_dec_avx_t2 +#define SUBMIT_JOB_AES256_DEC submit_job_aes256_dec_avx_t2 + +/* AES-CBCS */ +#define SUBMIT_JOB_AES128_CBCS_1_9_ENC submit_job_aes128_cbcs_1_9_enc_avx +#define FLUSH_JOB_AES128_CBCS_1_9_ENC flush_job_aes128_cbcs_1_9_enc_avx +#define SUBMIT_JOB_AES128_CBCS_1_9_DEC submit_job_aes128_cbcs_1_9_dec_avx +#define AES_CBCS_1_9_DEC_128 aes_cbcs_1_9_dec_128_avx + +/* AES-ECB */ +#define 
SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_avx_t2 +#define SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_avx_t2 +#define SUBMIT_JOB_AES_ECB_192_ENC submit_job_aes_ecb_192_enc_avx_t2 +#define SUBMIT_JOB_AES_ECB_192_DEC submit_job_aes_ecb_192_dec_avx_t2 +#define SUBMIT_JOB_AES_ECB_256_ENC submit_job_aes_ecb_256_enc_avx_t2 +#define SUBMIT_JOB_AES_ECB_256_DEC submit_job_aes_ecb_256_dec_avx_t2 + +#define AES_ECB_ENC_128 aes_ecb_enc_128_avx +#define AES_ECB_ENC_192 aes_ecb_enc_192_avx +#define AES_ECB_ENC_256 aes_ecb_enc_256_avx +#define AES_ECB_DEC_128 aes_ecb_dec_128_avx +#define AES_ECB_DEC_192 aes_ecb_dec_192_avx +#define AES_ECB_DEC_256 aes_ecb_dec_256_avx + +/* AES-CTR */ +#define AES_CTR_128 aes_cntr_128_avx +#define AES_CTR_192 aes_cntr_192_avx +#define AES_CTR_256 aes_cntr_256_avx +#define AES_CTR_128_BIT aes_cntr_bit_128_avx +#define AES_CTR_192_BIT aes_cntr_bit_192_avx +#define AES_CTR_256_BIT aes_cntr_bit_256_avx + +/* AES-CCM */ +#define AES_CNTR_CCM_128 aes_cntr_ccm_128_avx +#define AES_CNTR_CCM_256 aes_cntr_ccm_256_avx + +#define FLUSH_JOB_AES128_CCM_AUTH flush_job_aes128_ccm_auth_avx +#define SUBMIT_JOB_AES128_CCM_AUTH submit_job_aes128_ccm_auth_avx + +#define FLUSH_JOB_AES256_CCM_AUTH flush_job_aes256_ccm_auth_avx +#define SUBMIT_JOB_AES256_CCM_AUTH submit_job_aes256_ccm_auth_avx + +/* AES-CMAC */ +#define FLUSH_JOB_AES128_CMAC_AUTH flush_job_aes128_cmac_auth_avx +#define SUBMIT_JOB_AES128_CMAC_AUTH submit_job_aes128_cmac_auth_avx + +#define FLUSH_JOB_AES256_CMAC_AUTH flush_job_aes256_cmac_auth_avx +#define SUBMIT_JOB_AES256_CMAC_AUTH submit_job_aes256_cmac_auth_avx + +/* AES-CFB */ +#define AES_CFB_128_ONE aes_cfb_128_one_avx +#define AES_CFB_256_ONE aes_cfb_256_one_avx + +/* AES-XCBC */ +#define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_avx +#define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_avx + +/* PON */ +#define SUBMIT_JOB_PON_ENC submit_job_pon_enc_avx +#define SUBMIT_JOB_PON_DEC submit_job_pon_dec_avx +#define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_avx +#define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_avx + +/* SHA1/224/256/384/512 */ +#define SUBMIT_JOB_SHA1 submit_job_sha1_ni_sse +#define FLUSH_JOB_SHA1 flush_job_sha1_ni_sse +#define SUBMIT_JOB_SHA224 submit_job_sha224_ni_sse +#define FLUSH_JOB_SHA224 flush_job_sha224_ni_sse +#define SUBMIT_JOB_SHA256 submit_job_sha256_ni_sse +#define FLUSH_JOB_SHA256 flush_job_sha256_ni_sse +#define SUBMIT_JOB_SHA384 submit_job_sha384_avx +#define FLUSH_JOB_SHA384 flush_job_sha384_avx +#define SUBMIT_JOB_SHA512 submit_job_sha512_avx +#define FLUSH_JOB_SHA512 flush_job_sha512_avx + +/* HMAC-SHA1/224/256/384/512 */ +#define SUBMIT_JOB_HMAC submit_job_hmac_ni_sse +#define FLUSH_JOB_HMAC flush_job_hmac_ni_sse +#define SUBMIT_JOB_HMAC_SHA_224 submit_job_hmac_sha_224_ni_sse +#define FLUSH_JOB_HMAC_SHA_224 flush_job_hmac_sha_224_ni_sse +#define SUBMIT_JOB_HMAC_SHA_256 submit_job_hmac_sha_256_ni_sse +#define FLUSH_JOB_HMAC_SHA_256 flush_job_hmac_sha_256_ni_sse +#define SUBMIT_JOB_HMAC_SHA_384 submit_job_hmac_sha_384_avx +#define FLUSH_JOB_HMAC_SHA_384 flush_job_hmac_sha_384_avx +#define SUBMIT_JOB_HMAC_SHA_512 submit_job_hmac_sha_512_avx +#define FLUSH_JOB_HMAC_SHA_512 flush_job_hmac_sha_512_avx +#define SUBMIT_JOB_HMAC_MD5 submit_job_hmac_md5_avx +#define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_avx + +/* CHACHA20 & POLY1305 */ +#define SUBMIT_JOB_CHACHA20_ENC_DEC submit_job_chacha20_enc_dec_avx +#define SUBMIT_JOB_CHACHA20_POLY1305 aead_chacha20_poly1305_avx +#define SUBMIT_JOB_CHACHA20_POLY1305_SGL 
aead_chacha20_poly1305_sgl_avx
+#define POLY1305_MAC poly1305_mac_scalar
+
+/* ZUC EEA3 & EIA3 */
+#define SUBMIT_JOB_ZUC_EEA3 submit_job_zuc_eea3_avx
+#define FLUSH_JOB_ZUC_EEA3 flush_job_zuc_eea3_avx
+#define SUBMIT_JOB_ZUC_EIA3 submit_job_zuc_eia3_avx
+#define FLUSH_JOB_ZUC_EIA3 flush_job_zuc_eia3_avx
+#define SUBMIT_JOB_ZUC256_EEA3 submit_job_zuc256_eea3_avx
+#define FLUSH_JOB_ZUC256_EEA3 flush_job_zuc256_eea3_avx
+#define SUBMIT_JOB_ZUC256_EIA3 submit_job_zuc256_eia3_avx
+#define FLUSH_JOB_ZUC256_EIA3 flush_job_zuc256_eia3_avx
+
+/* SNOW-V */
+#define SUBMIT_JOB_SNOW_V snow_v_avx
+#define SUBMIT_JOB_SNOW_V_AEAD snow_v_aead_init_avx
+
+/* SNOW3G UEA2 & UIA2 */
+static IMB_JOB *
+submit_snow3g_uea2_job_avx_t2(IMB_MGR *state, IMB_JOB *job)
+{
+        MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo;
+
+        /* Jobs with bit-granularity (non byte-aligned) length or offset
+         * are routed to the generic submit path */
+        if ((job->msg_len_to_cipher_in_bits & 7) ||
+            (job->cipher_start_offset_in_bits & 7))
+                return def_submit_snow3g_uea2_job(state, job);
+
+        return submit_job_snow3g_uea2_sse(snow3g_uea2_ooo, job);
+}
+
+static IMB_JOB *
+flush_snow3g_uea2_job_avx_t2(IMB_MGR *state)
+{
+        MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo;
+
+        return flush_job_snow3g_uea2_sse(snow3g_uea2_ooo);
+}
+
+#define SUBMIT_JOB_SNOW3G_UEA2 submit_snow3g_uea2_job_avx_t2
+#define FLUSH_JOB_SNOW3G_UEA2 flush_snow3g_uea2_job_avx_t2
+
+#define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_sse
+#define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_sse
+
+/* AES-DOCSIS */
+#define ETHERNET_FCS ethernet_fcs_avx_local
+
+static void reset_ooo_mgrs(IMB_MGR *state)
+{
+        /* Init AES out-of-order fields */
+        ooo_mgr_aes_reset(state->aes128_ooo, 8);
+        ooo_mgr_aes_reset(state->aes192_ooo, 8);
+        ooo_mgr_aes_reset(state->aes256_ooo, 8);
+
+        /* DOCSIS SEC BPI (AES CBC + AES CFB for partial block)
+         * uses same settings as AES CBC.
+ */ + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 8); + + /* Init ZUC out-of-order fields */ + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 4); + + /* Init HMAC/SHA1 out-of-order fields */ + ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, 2); + + /* Init HMAC/SHA224 out-of-order fields */ + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, 2); + + /* Init HMAC/SHA_256 out-of-order fields */ + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, 2); + + /* Init HMAC/SHA384 out-of-order fields */ + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + AVX_NUM_SHA512_LANES); + + /* Init HMAC/SHA512 out-of-order fields */ + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + AVX_NUM_SHA512_LANES); + + /* Init HMAC/MD5 out-of-order fields */ + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, AVX_NUM_MD5_LANES); + + /* Init AES/XCBC OOO fields */ + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 8); + + /* Init AES-CCM auth out-of-order fields */ + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 8); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 8); + + /* Init AES-CMAC auth out-of-order fields */ + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 8); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 8); + + /* Init AES CBC-S out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 8); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, 2); + + /* Init SHA224 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_224_ooo, 2); + + /* Init SHA256 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, 2); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, AVX_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, AVX_NUM_SHA512_LANES); + + /* Init SNOW3G-UEA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 4); + + /* Init SNOW3G-UIA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 4); +} + +IMB_DLL_LOCAL void +init_mb_mgr_avx_t2_internal(IMB_MGR *state, const int reset_mgrs) +{ + /* Check if CPU flags needed for AVX interface are present */ + if ((state->features & IMB_CPUFLAGS_AVX) != IMB_CPUFLAGS_AVX) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + + /* Set architecture for future checks */ + state->used_arch = (uint32_t) IMB_ARCH_AVX; + + if (reset_mgrs) { + reset_ooo_mgrs(state); + + /* Init "in order" components */ + state->next_job = 0; + state->earliest_job = -1; + } + + /* set handlers */ + state->get_next_job = GET_NEXT_JOB; + state->submit_job = SUBMIT_JOB; + state->submit_job_nocheck = SUBMIT_JOB_NOCHECK; + state->get_completed_job = GET_COMPLETED_JOB; + state->flush_job = FLUSH_JOB; + state->queue_size = QUEUE_SIZE; + state->get_next_burst = GET_NEXT_BURST; + state->submit_burst = SUBMIT_BURST; + state->submit_burst_nocheck= SUBMIT_BURST_NOCHECK; + state->flush_burst = FLUSH_BURST; + state->submit_cipher_burst = SUBMIT_CIPHER_BURST; + state->submit_cipher_burst_nocheck = SUBMIT_CIPHER_BURST_NOCHECK; + state->submit_hash_burst = SUBMIT_HASH_BURST; + state->submit_hash_burst_nocheck = SUBMIT_HASH_BURST_NOCHECK; + + state->keyexp_128 = aes_keyexp_128_avx; + state->keyexp_192 = aes_keyexp_192_avx; + 
state->keyexp_256 = aes_keyexp_256_avx; + + state->cmac_subkey_gen_128 = aes_cmac_subkey_gen_avx; + state->cmac_subkey_gen_256 = aes_cmac_256_subkey_gen_avx; + + state->xcbc_keyexp = aes_xcbc_expand_key_avx; + state->des_key_sched = des_key_schedule; + + state->sha1_one_block = sha1_one_block_avx; + state->sha1 = sha1_avx; + state->sha224_one_block = sha224_one_block_avx; + state->sha224 = sha224_avx; + state->sha256_one_block = sha256_one_block_avx; + state->sha256 = sha256_avx; + state->sha384_one_block = sha384_one_block_avx; + state->sha384 = sha384_avx; + state->sha512_one_block = sha512_one_block_avx; + state->sha512 = sha512_avx; + state->md5_one_block = md5_one_block_avx; + + state->aes128_cfb_one = aes_cfb_128_one_avx; + + state->eea3_1_buffer = zuc_eea3_1_buffer_avx; + state->eea3_4_buffer = zuc_eea3_4_buffer_avx; + state->eea3_n_buffer = zuc_eea3_n_buffer_avx; + state->eia3_1_buffer = zuc_eia3_1_buffer_avx; + state->eia3_n_buffer = zuc_eia3_n_buffer_avx; + + state->f8_1_buffer = kasumi_f8_1_buffer_avx; + state->f8_1_buffer_bit = kasumi_f8_1_buffer_bit_avx; + state->f8_2_buffer = kasumi_f8_2_buffer_avx; + state->f8_3_buffer = kasumi_f8_3_buffer_avx; + state->f8_4_buffer = kasumi_f8_4_buffer_avx; + state->f8_n_buffer = kasumi_f8_n_buffer_avx; + state->f9_1_buffer = kasumi_f9_1_buffer_avx; + state->f9_1_buffer_user = kasumi_f9_1_buffer_user_avx; + state->kasumi_init_f8_key_sched = kasumi_init_f8_key_sched_avx; + state->kasumi_init_f9_key_sched = kasumi_init_f9_key_sched_avx; + state->kasumi_key_sched_size = kasumi_key_sched_size_avx; + + state->snow3g_f8_1_buffer_bit = snow3g_f8_1_buffer_bit_avx; + state->snow3g_f8_1_buffer = snow3g_f8_1_buffer_avx; + state->snow3g_f8_2_buffer = snow3g_f8_2_buffer_avx; + state->snow3g_f8_4_buffer = snow3g_f8_4_buffer_avx; + state->snow3g_f8_8_buffer = snow3g_f8_8_buffer_avx; + state->snow3g_f8_n_buffer = snow3g_f8_n_buffer_avx; + state->snow3g_f8_8_buffer_multikey = snow3g_f8_8_buffer_multikey_avx; + state->snow3g_f8_n_buffer_multikey = snow3g_f8_n_buffer_multikey_avx; + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_avx; + state->snow3g_init_key_sched = snow3g_init_key_sched_avx; + state->snow3g_key_sched_size = snow3g_key_sched_size_avx; + + state->hec_32 = hec_32_avx; + state->hec_64 = hec_64_avx; + + state->crc32_ethernet_fcs = ethernet_fcs_avx; + state->crc16_x25 = crc16_x25_avx; + state->crc32_sctp = crc32_sctp_avx; + state->crc24_lte_a = crc24_lte_a_avx; + state->crc24_lte_b = crc24_lte_b_avx; + state->crc16_fp_data = crc16_fp_data_avx; + state->crc11_fp_header = crc11_fp_header_avx; + state->crc7_fp_header = crc7_fp_header_avx; + state->crc10_iuup_data = crc10_iuup_data_avx; + state->crc6_iuup_header = crc6_iuup_header_avx; + state->crc32_wimax_ofdma_data = crc32_wimax_ofdma_data_avx; + state->crc8_wimax_ofdma_hcs = crc8_wimax_ofdma_hcs_avx; + + state->chacha20_poly1305_init = init_chacha20_poly1305_avx; + state->chacha20_poly1305_enc_update = update_enc_chacha20_poly1305_avx; + state->chacha20_poly1305_dec_update = update_dec_chacha20_poly1305_avx; + state->chacha20_poly1305_finalize = finalize_chacha20_poly1305_avx; + + state->gcm128_enc = aes_gcm_enc_128_avx_gen2; + state->gcm192_enc = aes_gcm_enc_192_avx_gen2; + state->gcm256_enc = aes_gcm_enc_256_avx_gen2; + state->gcm128_dec = aes_gcm_dec_128_avx_gen2; + state->gcm192_dec = aes_gcm_dec_192_avx_gen2; + state->gcm256_dec = aes_gcm_dec_256_avx_gen2; + state->gcm128_init = aes_gcm_init_128_avx_gen2; + state->gcm192_init = aes_gcm_init_192_avx_gen2; + state->gcm256_init = 
aes_gcm_init_256_avx_gen2; + state->gcm128_init_var_iv = aes_gcm_init_var_iv_128_avx_gen2; + state->gcm192_init_var_iv = aes_gcm_init_var_iv_192_avx_gen2; + state->gcm256_init_var_iv = aes_gcm_init_var_iv_256_avx_gen2; + state->gcm128_enc_update = aes_gcm_enc_128_update_avx_gen2; + state->gcm192_enc_update = aes_gcm_enc_192_update_avx_gen2; + state->gcm256_enc_update = aes_gcm_enc_256_update_avx_gen2; + state->gcm128_dec_update = aes_gcm_dec_128_update_avx_gen2; + state->gcm192_dec_update = aes_gcm_dec_192_update_avx_gen2; + state->gcm256_dec_update = aes_gcm_dec_256_update_avx_gen2; + state->gcm128_enc_finalize = aes_gcm_enc_128_finalize_avx_gen2; + state->gcm192_enc_finalize = aes_gcm_enc_192_finalize_avx_gen2; + state->gcm256_enc_finalize = aes_gcm_enc_256_finalize_avx_gen2; + state->gcm128_dec_finalize = aes_gcm_dec_128_finalize_avx_gen2; + state->gcm192_dec_finalize = aes_gcm_dec_192_finalize_avx_gen2; + state->gcm256_dec_finalize = aes_gcm_dec_256_finalize_avx_gen2; + state->gcm128_precomp = aes_gcm_precomp_128_avx_gen2; + state->gcm192_precomp = aes_gcm_precomp_192_avx_gen2; + state->gcm256_precomp = aes_gcm_precomp_256_avx_gen2; + state->gcm128_pre = aes_gcm_pre_128_avx_gen2; + state->gcm192_pre = aes_gcm_pre_192_avx_gen2; + state->gcm256_pre = aes_gcm_pre_256_avx_gen2; + + state->ghash = ghash_avx_gen2; + state->ghash_pre = ghash_pre_avx_gen2; + + state->gmac128_init = imb_aes_gmac_init_128_avx_gen2; + state->gmac192_init = imb_aes_gmac_init_192_avx_gen2; + state->gmac256_init = imb_aes_gmac_init_256_avx_gen2; + state->gmac128_update = imb_aes_gmac_update_128_avx_gen2; + state->gmac192_update = imb_aes_gmac_update_192_avx_gen2; + state->gmac256_update = imb_aes_gmac_update_256_avx_gen2; + state->gmac128_finalize = imb_aes_gmac_finalize_128_avx_gen2; + state->gmac192_finalize = imb_aes_gmac_finalize_192_avx_gen2; + state->gmac256_finalize = imb_aes_gmac_finalize_256_avx_gen2; +} + +#include "mb_mgr_code.h" diff --git a/lib/include/aes_common.asm b/lib/include/aes_common.asm index d428bb9dfb6fd1268f2776293cdc8141de17808d..f0410deaa1071f45e3d3f6af0faa5f285c351d7b 100644 --- a/lib/include/aes_common.asm +++ b/lib/include/aes_common.asm @@ -126,6 +126,41 @@ %endmacro +;; ============================================================================= +;; Loads specified number of AES blocks into YMM registers +%macro YMM_LOAD_BLOCKS_0_16 11 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 16) +%define %%INP %2 ; [in] input data pointer to read from +%define %%DATA_OFFSET %3 ; [in] offset to the output pointer (GP or numerical) +%define %%DST0 %4 ; [out] YMM register with loaded data +%define %%DST1 %5 ; [out] YMM register with loaded data +%define %%DST2 %6 ; [out] YMM register with loaded data +%define %%DST3 %7 ; [out] YMM register with loaded data +%define %%DST4 %8 ; [out] YMM register with loaded data +%define %%DST5 %9 ; [out] YMM register with loaded data +%define %%DST6 %10 ; [out] YMM register with loaded data +%define %%DST7 %11 ; [out] YMM register with loaded data + +%assign src_offset 0 +%assign dst_idx 0 + +%rep (%%NUM_BLOCKS / 2) +%xdefine %%DSTREG %%DST %+ dst_idx + vmovdqu8 %%DSTREG, [%%INP + %%DATA_OFFSET + src_offset] +%undef %%DSTREG +%assign src_offset (src_offset + 32) +%assign dst_idx (dst_idx + 1) +%endrep + +%assign blocks_left (%%NUM_BLOCKS % 2) +%xdefine %%DSTREG %%DST %+ dst_idx + +%if blocks_left == 1 + vmovdqu8 XWORD(%%DSTREG), [%%INP + %%DATA_OFFSET + src_offset] +%endif + +%endmacro + ;; 
============================================================================= ;; Loads specified number of AES blocks at offsets into ZMM registers ;; DATA_OFFSET specifies the offset between blocks to load @@ -276,6 +311,41 @@ %endmacro +;; ============================================================================= +;; Stores specified number of AES blocks from YMM registers +%macro YMM_STORE_BLOCKS_0_16 11 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 16) +%define %%OUTP %2 ; [in] output data pointer to write to +%define %%DATA_OFFSET %3 ; [in] offset to the output pointer (GP or numerical) +%define %%SRC0 %4 ; [in] YMM register with data to store +%define %%SRC1 %5 ; [in] YMM register with data to store +%define %%SRC2 %6 ; [in] YMM register with data to store +%define %%SRC3 %7 ; [in] YMM register with data to store +%define %%SRC4 %8 ; [in] YMM register with data to store +%define %%SRC5 %9 ; [in] YMM register with data to store +%define %%SRC6 %10 ; [in] YMM register with data to store +%define %%SRC7 %11 ; [in] YMM register with data to store + +%assign dst_offset 0 +%assign src_idx 0 + +%rep (%%NUM_BLOCKS / 2) +%xdefine %%SRCREG %%SRC %+ src_idx + vmovdqu8 [%%OUTP + %%DATA_OFFSET + dst_offset], %%SRCREG +%undef %%SRCREG +%assign dst_offset (dst_offset + 32) +%assign src_idx (src_idx + 1) +%endrep + +%assign blocks_left (%%NUM_BLOCKS % 2) +%xdefine %%SRCREG %%SRC %+ src_idx + +%if blocks_left == 1 + vmovdqu8 [%%OUTP + %%DATA_OFFSET + dst_offset], XWORD(%%SRCREG) +%endif + +%endmacro + ;; ============================================================================= ;; Stores specified number of AES blocks from ZMM registers with mask register ;; for the last loaded register (xmm, ymm or zmm). @@ -445,4 +515,1010 @@ %endmacro +;; ============================================================================= +;; Generic macro to produce code that executes %%OPCODE instruction +;; on selected number of AES blocks (16 bytes long) between 0 and 16. +;; All three operands of the instruction come from registers. 
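+;;
+;; Illustrative usage sketch (the YMM register numbers below are arbitrary
+;; examples, not taken from the original code): XOR four blocks held in
+;; ymm1-ymm2 with a round key broadcast in ymm15. With NUM_BLOCKS=4 only the
+;; first two destination/source pairs are touched; the remaining parameters
+;; are placeholders:
+;;
+;;      YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 4, vpxorq, \
+;;              ymm1, ymm2, ymm3, ymm4, ymm5, ymm6, ymm7, ymm8, \
+;;              ymm1, ymm2, ymm3, ymm4, ymm5, ymm6, ymm7, ymm8, \
+;;              ymm15, ymm15, ymm15, ymm15, ymm15, ymm15, ymm15, ymm15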
+%macro YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 26 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 16) +%define %%OPCODE %2 ; [in] instruction name +%define %%DST0 %3 ; [out] destination YMM register +%define %%DST1 %4 ; [out] destination YMM register +%define %%DST2 %5 ; [out] destination YMM register +%define %%DST3 %6 ; [out] destination YMM register +%define %%DST4 %7 ; [out] destination YMM register +%define %%DST5 %8 ; [out] destination YMM register +%define %%DST6 %9 ; [out] destination YMM register +%define %%DST7 %10 ; [out] destination YMM register +%define %%SRC1_0 %11 ; [in] source 1 YMM register +%define %%SRC1_1 %12 ; [in] source 1 YMM register +%define %%SRC1_2 %13 ; [in] source 1 YMM register +%define %%SRC1_3 %14 ; [in] source 1 YMM register +%define %%SRC1_4 %15 ; [in] source 1 YMM register +%define %%SRC1_5 %16 ; [in] source 1 YMM register +%define %%SRC1_6 %17 ; [in] source 1 YMM register +%define %%SRC1_7 %18 ; [in] source 1 YMM register +%define %%SRC2_0 %19 ; [in] source 2 YMM register +%define %%SRC2_1 %20 ; [in] source 2 YMM register +%define %%SRC2_2 %21 ; [in] source 2 YMM register +%define %%SRC2_3 %22 ; [in] source 2 YMM register +%define %%SRC2_4 %23 ; [in] source 2 YMM register +%define %%SRC2_5 %24 ; [in] source 2 YMM register +%define %%SRC2_6 %25 ; [in] source 2 YMM register +%define %%SRC2_7 %26 ; [in] source 2 YMM register + +%assign _reg_idx 0 +%assign _blocks_left %%NUM_BLOCKS + +%rep (%%NUM_BLOCKS / 2) +%xdefine %%DSTREG %%DST %+ _reg_idx +%xdefine %%SRC1REG %%SRC1_ %+ _reg_idx +%xdefine %%SRC2REG %%SRC2_ %+ _reg_idx + %%OPCODE %%DSTREG, %%SRC1REG, %%SRC2REG +%undef %%DSTREG +%undef %%SRC1REG +%undef %%SRC2REG +%assign _reg_idx (_reg_idx + 1) +%assign _blocks_left (_blocks_left - 2) +%endrep + +%xdefine %%DSTREG %%DST %+ _reg_idx +%xdefine %%SRC1REG %%SRC1_ %+ _reg_idx +%xdefine %%SRC2REG %%SRC2_ %+ _reg_idx + +%if _blocks_left == 1 + %%OPCODE XWORD(%%DSTREG), XWORD(%%SRC1REG), XWORD(%%SRC2REG) +%endif + +%endmacro + +;;; =========================================================================== +;;; Handles AES encryption rounds +;;; It handles special cases: the last and first rounds +;;; Optionally, it performs XOR with data after the last AES round. +;;; Uses NROUNDS parameter to check what needs to be done for the current round. 
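+;;;
+;;; Illustrative usage sketch (register numbers and block count are arbitrary
+;;; examples): a full AES-128 encryption pass over 8 blocks kept in ymm0-ymm3
+;;; is a sequence of 11 calls, one per round; e.g. the initial ARK round:
+;;;
+;;;     YMM_AESENC_ROUND_BLOCKS_0_16 ymm0, ymm1, ymm2, ymm3, ymm4, ymm5, ymm6, ymm7, \
+;;;                                  ymm15, 0, no_data, no_data, no_data, no_data, \
+;;;                                  no_data, no_data, no_data, no_data, 8, 9
+;;;
+;;; where ymm15 holds round key 0 broadcast to both 128-bit lanes, ROUND=0
+;;; selects the vpxorq path and NROUNDS=9 matches AES-128 (rounds 1-9 use
+;;; vaesenc, round 10 uses vaesenclast).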
+%macro YMM_AESENC_ROUND_BLOCKS_0_16 20 +%define %%L0B0_1 %1 ; [in/out] ymm; ciphered blocks +%define %%L0B2_3 %2 ; [in/out] ymm; ciphered blocks +%define %%L0B4_5 %3 ; [in/out] ymm; ciphered blocks +%define %%L0B6_7 %4 ; [in/out] ymm; ciphered blocks +%define %%L0B8_9 %5 ; [in/out] ymm; ciphered blocks +%define %%L0B10_11 %6 ; [in/out] ymm; ciphered blocks +%define %%L0B12_13 %7 ; [in/out] ymm; ciphered blocks +%define %%L0B14_15 %8 ; [in/out] ymm; ciphered blocks +%define %%KEY %9 ; [in] ymm containing round key +%define %%ROUND %10 ; [in] round number +%define %%D0_1 %11 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D2_3 %12 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D4_5 %13 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D6_7 %14 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D8_9 %15 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D10_11 %16 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D12_13 %17 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D14_15 %18 ; [in] ymm or no_data; plain/cipher text blocks +%define %%NUMBL %19 ; [in] number of blocks; numerical value +%define %%NROUNDS %20 ; [in] number of rounds; numerical value + +;;; === first AES round +%if (%%ROUND < 1) + ;; round 0 + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vpxorq, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; ROUND 0 + +;;; === middle AES rounds +%if (%%ROUND >= 1 && %%ROUND <= %%NROUNDS) + ;; rounds 1 to 9/11/13 + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vaesenc, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; rounds 1 to 9/11/13 + +;;; === last AES round +%if (%%ROUND > %%NROUNDS) + ;; the last round - mix enclast with text xor's + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vaesenclast, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY + +%ifnidn %%D0_1, no_data +%ifnidn %%D2_3, no_data +%ifnidn %%D4_5, no_data +%ifnidn %%D6_7, no_data +%ifnidn %%D8_9, no_data +%ifnidn %%D10_11, no_data +%ifnidn %%D12_13, no_data +%ifnidn %%D14_15, no_data + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vpxorq, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%D0_1, %%D2_3, %%D4_5, %%D6_7, \ + %%D8_9, %%D10_11, %%D12_13, %%D14_15 +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data + +%endif ; The last round + +%endmacro + +;;; =========================================================================== +;;; Handles AES decryption rounds +;;; It handles special cases: the last and first rounds +;;; Optionally, it performs XOR with data after the last AES round. +;;; Uses NROUNDS parameterto check what needs to be done for the current round. 
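+;; Example (illustrative, placeholder registers): final decrypt round
+;; (ROUND > NROUNDS) with the D* arguments in use. Passing, for instance, the
+;; corresponding previous ciphertext blocks in ymm9-ymm16 makes the macro
+;; follow vaesdeclast with a vpxorq against that data, i.e. the CBC-decrypt
+;; chaining XOR folded into the last round. If any D* argument is no_data,
+;; the extra XOR is not emitted. Here ymm8 holds the last round key and the
+;; call covers 16 blocks of AES-128 (ROUND 10, NROUNDS 9):
+;;
+;;      YMM_AESDEC_ROUND_BLOCKS_0_16 ymm0, ymm1, ymm2, ymm3, \
+;;              ymm4, ymm5, ymm6, ymm7, ymm8, 10, \
+;;              ymm9, ymm10, ymm11, ymm12, ymm13, ymm14, ymm15, ymm16, \
+;;              16, 9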
+%macro YMM_AESDEC_ROUND_BLOCKS_0_16 20 +%define %%L0B0_1 %1 ; [in/out] ymm; ciphered blocks +%define %%L0B2_3 %2 ; [in/out] ymm; ciphered blocks +%define %%L0B4_5 %3 ; [in/out] ymm; ciphered blocks +%define %%L0B6_7 %4 ; [in/out] ymm; ciphered blocks +%define %%L0B8_9 %5 ; [in/out] ymm; ciphered blocks +%define %%L0B10_11 %6 ; [in/out] ymm; ciphered blocks +%define %%L0B12_13 %7 ; [in/out] ymm; ciphered blocks +%define %%L0B14_15 %8 ; [in/out] ymm; ciphered blocks +%define %%KEY %9 ; [in] ymm containing round key +%define %%ROUND %10 ; [in] round number +%define %%D0_1 %11 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D2_3 %12 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D4_5 %13 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D6_7 %14 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D8_9 %15 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D10_11 %16 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D12_13 %17 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D14_15 %18 ; [in] ymm or no_data; plain/cipher text blocks +%define %%NUMBL %19 ; [in] number of blocks; numerical value +%define %%NROUNDS %20 ; [in] number of rounds; numerical value + +;;; === first AES round +%if (%%ROUND < 1) + ;; round 0 + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vpxorq, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; ROUND 0 + +;;; === middle AES rounds +%if (%%ROUND >= 1 && %%ROUND <= %%NROUNDS) + ;; rounds 1 to 9/11/13 + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vaesdec, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; rounds 1 to 9/11/13 + +;;; === last AES round +%if (%%ROUND > %%NROUNDS) + ;; the last round - mix enclast with text xor's + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vaesdeclast, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY + +;;; === XOR with data +%ifnidn %%D0_1, no_data +%ifnidn %%D2_3, no_data +%ifnidn %%D4_5, no_data +%ifnidn %%D6_7, no_data +%ifnidn %%D8_9, no_data +%ifnidn %%D10_11, no_data +%ifnidn %%D12_13, no_data +%ifnidn %%D14_15, no_data + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vpxorq, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%D0_1, %%D2_3, %%D4_5, %%D6_7, \ + %%D8_9, %%D10_11, %%D12_13, %%D14_15 +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data + +%endif ; The last round + +%endmacro + +;; ============================================================================= +;; Generic macro to produce code that executes %%OPCODE instruction with 3 +;; operands on selected number of AES blocks (16 bytes long) between 0 and 8. +;; All three operands of the instruction come from registers. 
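+;; Example (illustrative, placeholder registers): unlike the YMM variants
+;; above, each block maps to a single XMM register, so %%NUM_BLOCKS=3 with
+;; %%OPCODE=vpxor expands to one instruction per block:
+;;      vpxor xmm0, xmm0, xmm15
+;;      vpxor xmm1, xmm1, xmm15
+;;      vpxor xmm2, xmm2, xmm15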
+%macro XMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_8 26 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 8) +%define %%OPCODE %2 ; [in] instruction name +%define %%DST0 %3 ; [out] destination XMM register +%define %%DST1 %4 ; [out] destination XMM register +%define %%DST2 %5 ; [out] destination XMM register +%define %%DST3 %6 ; [out] destination XMM register +%define %%DST4 %7 ; [out] destination XMM register +%define %%DST5 %8 ; [out] destination XMM register +%define %%DST6 %9 ; [out] destination XMM register +%define %%DST7 %10 ; [out] destination XMM register +%define %%SRC1_0 %11 ; [in] source 1 XMM register +%define %%SRC1_1 %12 ; [in] source 1 XMM register +%define %%SRC1_2 %13 ; [in] source 1 XMM register +%define %%SRC1_3 %14 ; [in] source 1 XMM register +%define %%SRC1_4 %15 ; [in] source 1 XMM register +%define %%SRC1_5 %16 ; [in] source 1 XMM register +%define %%SRC1_6 %17 ; [in] source 1 XMM register +%define %%SRC1_7 %18 ; [in] source 1 XMM register +%define %%SRC2_0 %19 ; [in] source 2 XMM register +%define %%SRC2_1 %20 ; [in] source 2 XMM register +%define %%SRC2_2 %21 ; [in] source 2 XMM register +%define %%SRC2_3 %22 ; [in] source 2 XMM register +%define %%SRC2_4 %23 ; [in] source 2 XMM register +%define %%SRC2_5 %24 ; [in] source 2 XMM register +%define %%SRC2_6 %25 ; [in] source 2 XMM register +%define %%SRC2_7 %26 ; [in] source 2 XMM register + +%assign _reg_idx 0 + +%rep (%%NUM_BLOCKS) +%xdefine %%DSTREG %%DST %+ _reg_idx +%xdefine %%SRC1REG %%SRC1_ %+ _reg_idx +%xdefine %%SRC2REG %%SRC2_ %+ _reg_idx + %%OPCODE %%DSTREG, %%SRC1REG, %%SRC2REG +%undef %%DSTREG +%undef %%SRC1REG +%undef %%SRC2REG +%assign _reg_idx (_reg_idx + 1) +%endrep + +%endmacro + +;; ============================================================================= +;; Generic macro to produce code that executes %%OPCODE instruction with 2 +;; operands on selected number of AES blocks (16 bytes long) between 0 and 8. +;; Both operands of the instruction come from registers. +%macro XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8 18 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 8) +%define %%OPCODE %2 ; [in] instruction name +%define %%DST0 %3 ; [out] destination YMM register +%define %%DST1 %4 ; [out] destination YMM register +%define %%DST2 %5 ; [out] destination YMM register +%define %%DST3 %6 ; [out] destination YMM register +%define %%DST4 %7 ; [out] destination YMM register +%define %%DST5 %8 ; [out] destination YMM register +%define %%DST6 %9 ; [out] destination YMM register +%define %%DST7 %10 ; [out] destination YMM register +%define %%SRC0 %11 ; [in] source YMM register +%define %%SRC1 %12 ; [in] source YMM register +%define %%SRC2 %13 ; [in] source YMM register +%define %%SRC3 %14 ; [in] source YMM register +%define %%SRC4 %15 ; [in] source YMM register +%define %%SRC5 %16 ; [in] source YMM register +%define %%SRC6 %17 ; [in] source YMM register +%define %%SRC7 %18 ; [in] source YMM register + +%assign _reg_idx 0 + +%rep (%%NUM_BLOCKS) +%xdefine %%DSTREG %%DST %+ _reg_idx +%xdefine %%SRCREG %%SRC %+ _reg_idx + %%OPCODE %%DSTREG, %%SRCREG +%undef %%DSTREG +%undef %%SRCREG +%assign _reg_idx (_reg_idx + 1) +%endrep + +%endmacro + +;;; =========================================================================== +;;; Handles AES encryption rounds for 0 to 8 blocks on AVX +;;; It handles special cases: the last and first rounds +;;; Optionally, it performs XOR with data after the last AES round. 
+;;; Uses NROUNDS parameter to check what needs to be done for the current round. +%macro XMM_AESENC_ROUND_BLOCKS_AVX_0_8 20 +%define %%L0B0 %1 ; [in/out] xmm; ciphered blocks +%define %%L0B1 %2 ; [in/out] xmm; ciphered blocks +%define %%L0B2 %3 ; [in/out] xmm; ciphered blocks +%define %%L0B3 %4 ; [in/out] xmm; ciphered blocks +%define %%L0B4 %5 ; [in/out] xmm; ciphered blocks +%define %%L0B5 %6 ; [in/out] xmm; ciphered blocks +%define %%L0B6 %7 ; [in/out] xmm; ciphered blocks +%define %%L0B7 %8 ; [in/out] xmm; ciphered blocks +%define %%KEY %9 ; [in] xmm containing round key +%define %%ROUND %10 ; [in] round number +%define %%D0 %11 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D1 %12 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D2 %13 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D3 %14 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D4 %15 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D5 %16 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D6 %17 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D7 %18 ; [in] xmm or no_data; plain/cipher text blocks +%define %%NUMBL %19 ; [in] number of blocks; numerical value +%define %%NROUNDS %20 ; [in] number of rounds; numerical value + +;;; === first AES round +%if (%%ROUND < 1) + ;; round 0 + XMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_8 %%NUMBL, vpxor, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; ROUND 0 + +;;; === middle AES rounds +%if (%%ROUND >= 1 && %%ROUND <= %%NROUNDS) + ;; rounds 1 to 9/11/13 + XMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_8 %%NUMBL, vaesenc, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; rounds 1 to 9/11/13 + +;;; === last AES round +%if (%%ROUND > %%NROUNDS) + ;; the last round - mix enclast with text xor's + XMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_8 %%NUMBL, vaesenclast, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY + +;;; === XOR with data +%ifnidn %%D0, no_data +%ifnidn %%D1, no_data +%ifnidn %%D2, no_data +%ifnidn %%D3, no_data +%ifnidn %%D4, no_data +%ifnidn %%D5, no_data +%ifnidn %%D6, no_data +%ifnidn %%D7, no_data + XMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_8 %%NUMBL, vpxor, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%D0, %%D1, %%D2, %%D3, \ + %%D4, %%D5, %%D6, %%D7 +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data + +%endif ; The last round + +%endmacro + +;;; =========================================================================== +;;; Handles AES encryption rounds for 0 to 8 blocks on SSE +;;; It handles special cases: the last and first rounds +;;; Optionally, it performs XOR with data after the last AES round. +;;; Uses NROUNDS parameter to check what needs to be done for the current round. 
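+;; Example (illustrative, placeholder registers): SSE AES instructions are
+;; destructive two-operand forms, so this variant goes through
+;; XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8. A middle round (0 < ROUND <= NROUNDS)
+;; over 4 blocks with the round key in xmm8 expands to:
+;;      aesenc xmm0, xmm8
+;;      aesenc xmm1, xmm8
+;;      aesenc xmm2, xmm8
+;;      aesenc xmm3, xmm8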
+%macro XMM_AESENC_ROUND_BLOCKS_SSE_0_8 20 +%define %%L0B0 %1 ; [in/out] xmm; ciphered blocks +%define %%L0B1 %2 ; [in/out] xmm; ciphered blocks +%define %%L0B2 %3 ; [in/out] xmm; ciphered blocks +%define %%L0B3 %4 ; [in/out] xmm; ciphered blocks +%define %%L0B4 %5 ; [in/out] xmm; ciphered blocks +%define %%L0B5 %6 ; [in/out] xmm; ciphered blocks +%define %%L0B6 %7 ; [in/out] xmm; ciphered blocks +%define %%L0B7 %8 ; [in/out] xmm; ciphered blocks +%define %%KEY %9 ; [in] xmm containing round key +%define %%ROUND %10 ; [in] round number +%define %%D0 %11 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D1 %12 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D2 %13 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D3 %14 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D4 %15 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D5 %16 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D6 %17 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D7 %18 ; [in] xmm or no_data; plain/cipher text blocks +%define %%NUMBL %19 ; [in] number of blocks; numerical value +%define %%NROUNDS %20 ; [in] number of rounds; numerical value + +;;; === first AES round +%if (%%ROUND < 1) + ;; round 0 + XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8 %%NUMBL, pxor, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; ROUND 0 + +;;; === middle AES rounds +%if (%%ROUND >= 1 && %%ROUND <= %%NROUNDS) + ;; rounds 1 to 9/11/13 + XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8 %%NUMBL, aesenc, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; rounds 1 to 9/11/13 + +;;; === last AES round +%if (%%ROUND > %%NROUNDS) + ;; the last round - mix enclast with text xor's + XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8 %%NUMBL, aesenclast, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY + +;;; === XOR with data +%ifnidn %%D0, no_data +%ifnidn %%D1, no_data +%ifnidn %%D2, no_data +%ifnidn %%D3, no_data +%ifnidn %%D4, no_data +%ifnidn %%D5, no_data +%ifnidn %%D6, no_data +%ifnidn %%D7, no_data + XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8 %%NUMBL, pxor, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%D0, %%D1, %%D2, %%D3, \ + %%D4, %%D5, %%D6, %%D7 +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data + +%endif ; The last round + +%endmacro + +;;; =========================================================================== +;;; Handles AES decryption rounds for 0 to 8 blocks on AVX +;;; It handles special cases: the last and first rounds +;;; Optionally, it performs XOR with data after the last AES round. +;;; Uses NROUNDS parameter to check what needs to be done for the current round. 
+%macro XMM_AESDEC_ROUND_BLOCKS_AVX_0_8 20 +%define %%L0B0 %1 ; [in/out] xmm; ciphered blocks +%define %%L0B1 %2 ; [in/out] xmm; ciphered blocks +%define %%L0B2 %3 ; [in/out] xmm; ciphered blocks +%define %%L0B3 %4 ; [in/out] xmm; ciphered blocks +%define %%L0B4 %5 ; [in/out] xmm; ciphered blocks +%define %%L0B5 %6 ; [in/out] xmm; ciphered blocks +%define %%L0B6 %7 ; [in/out] xmm; ciphered blocks +%define %%L0B7 %8 ; [in/out] xmm; ciphered blocks +%define %%KEY %9 ; [in] xmm containing round key +%define %%ROUND %10 ; [in] round number +%define %%D0 %11 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D1 %12 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D2 %13 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D3 %14 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D4 %15 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D5 %16 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D6 %17 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D7 %18 ; [in] xmm or no_data; plain/cipher text blocks +%define %%NUMBL %19 ; [in] number of blocks; numerical value +%define %%NROUNDS %20 ; [in] number of rounds; numerical value + +;;; === first AES round +%if (%%ROUND < 1) + ;; round 0 + XMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_8 %%NUMBL, vpxor, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; ROUND 0 + +;;; === middle AES rounds +%if (%%ROUND >= 1 && %%ROUND <= %%NROUNDS) + ;; rounds 1 to 9/11/13 + XMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_8 %%NUMBL, vaesdec, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; rounds 1 to 9/11/13 + +;;; === last AES round +%if (%%ROUND > %%NROUNDS) + ;; the last round - mix enclast with text xor's + XMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_8 %%NUMBL, vaesdeclast, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY + +;;; === XOR with data +%ifnidn %%D0, no_data +%ifnidn %%D1, no_data +%ifnidn %%D2, no_data +%ifnidn %%D3, no_data +%ifnidn %%D4, no_data +%ifnidn %%D5, no_data +%ifnidn %%D6, no_data +%ifnidn %%D7, no_data + XMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_8 %%NUMBL, vpxor, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%D0, %%D1, %%D2, %%D3, \ + %%D4, %%D5, %%D6, %%D7 +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data + +%endif ; The last round + +%endmacro + %endif ;; _AES_COMMON_ASM + +;;; =========================================================================== +;;; Handles AES decryption rounds for 0 to 8 blocks on SSE +;;; It handles special cases: the last and first rounds +;;; Optionally, it performs XOR with data after the last AES round. +;;; Uses NROUNDS parameter to check what needs to be done for the current round. 
+%macro XMM_AESDEC_ROUND_BLOCKS_SSE_0_8 20 +%define %%L0B0 %1 ; [in/out] xmm; ciphered blocks +%define %%L0B1 %2 ; [in/out] xmm; ciphered blocks +%define %%L0B2 %3 ; [in/out] xmm; ciphered blocks +%define %%L0B3 %4 ; [in/out] xmm; ciphered blocks +%define %%L0B4 %5 ; [in/out] xmm; ciphered blocks +%define %%L0B5 %6 ; [in/out] xmm; ciphered blocks +%define %%L0B6 %7 ; [in/out] xmm; ciphered blocks +%define %%L0B7 %8 ; [in/out] xmm; ciphered blocks +%define %%KEY %9 ; [in] xmm containing round key +%define %%ROUND %10 ; [in] round number +%define %%D0 %11 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D1 %12 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D2 %13 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D3 %14 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D4 %15 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D5 %16 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D6 %17 ; [in] xmm or no_data; plain/cipher text blocks +%define %%D7 %18 ; [in] xmm or no_data; plain/cipher text blocks +%define %%NUMBL %19 ; [in] number of blocks; numerical value +%define %%NROUNDS %20 ; [in] number of rounds; numerical value + +;;; === first AES round +%if (%%ROUND < 1) + ;; round 0 + XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8 %%NUMBL, pxor, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; ROUND 0 + +;;; === middle AES rounds +%if (%%ROUND >= 1 && %%ROUND <= %%NROUNDS) + ;; rounds 1 to 9/11/13 + XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8 %%NUMBL, aesdec, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; rounds 1 to 9/11/13 + +;;; === last AES round +%if (%%ROUND > %%NROUNDS) + ;; the last round - mix enclast with text xor's + XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8 %%NUMBL, aesdeclast, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY + +;;; === XOR with data +%ifnidn %%D0, no_data +%ifnidn %%D1, no_data +%ifnidn %%D2, no_data +%ifnidn %%D3, no_data +%ifnidn %%D4, no_data +%ifnidn %%D5, no_data +%ifnidn %%D6, no_data +%ifnidn %%D7, no_data + XMM_OPCODE2_DSTR_SRCR_BLOCKS_0_8 %%NUMBL, pxor, \ + %%L0B0, %%L0B1, %%L0B2, %%L0B3, \ + %%L0B4, %%L0B5, %%L0B6, %%L0B7, \ + %%D0, %%D1, %%D2, %%D3, \ + %%D4, %%D5, %%D6, %%D7 + +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data + +%endif ; The last round + +%endmacro + +;; ============================================================================= +;; Loads up to 8 blocks into XMM registers on AVX +%macro XMM_LOAD_BLOCKS_AVX_0_8 11 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 16) +%define %%INP %2 ; [in] input data pointer to read from +%define %%DATA_OFFSET %3 ; [in] offset to the output pointer (GP or numerical) +%define %%DST0 %4 ; [out] XMM register with loaded data +%define %%DST1 %5 ; [out] XMM register with loaded data +%define %%DST2 %6 ; [out] XMM register with loaded data +%define %%DST3 %7 ; [out] XMM register with loaded data +%define %%DST4 %8 ; [out] XMM register with loaded data +%define %%DST5 %9 ; [out] XMM register with loaded data +%define %%DST6 %10 ; [out] XMM register with loaded data +%define %%DST7 %11 ; [out] XMM register with loaded data + +%assign src_offset 0 +%assign dst_idx 0 + +%rep (%%NUM_BLOCKS) 
+%xdefine %%DSTREG %%DST %+ dst_idx + vmovdqu %%DSTREG, [%%INP + %%DATA_OFFSET + src_offset] +%undef %%DSTREG +%assign src_offset (src_offset + 16) +%assign dst_idx (dst_idx + 1) +%endrep + +%endmacro + +;; ============================================================================= +;; Loads up to 8 AES blocks into XMM registers on SSE +%macro XMM_LOAD_BLOCKS_SSE_0_8 11 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 16) +%define %%INP %2 ; [in] input data pointer to read from +%define %%DATA_OFFSET %3 ; [in] offset to the output pointer (GP or numerical) +%define %%DST0 %4 ; [out] XMM register with loaded data +%define %%DST1 %5 ; [out] XMM register with loaded data +%define %%DST2 %6 ; [out] XMM register with loaded data +%define %%DST3 %7 ; [out] XMM register with loaded data +%define %%DST4 %8 ; [out] XMM register with loaded data +%define %%DST5 %9 ; [out] XMM register with loaded data +%define %%DST6 %10 ; [out] XMM register with loaded data +%define %%DST7 %11 ; [out] XMM register with loaded data + +%assign src_offset 0 +%assign dst_idx 0 + +%rep (%%NUM_BLOCKS) +%xdefine %%DSTREG %%DST %+ dst_idx + movdqu %%DSTREG, [%%INP + %%DATA_OFFSET + src_offset] +%undef %%DSTREG +%assign src_offset (src_offset + 16) +%assign dst_idx (dst_idx + 1) +%endrep + +%endmacro + +;; ============================================================================= +;; Stores up to 8 AES blocks from XMM registers on AVX +%macro XMM_STORE_BLOCKS_AVX_0_8 11 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 8) +%define %%OUTP %2 ; [in] output data pointer to write to +%define %%DATA_OFFSET %3 ; [in] offset to the output pointer (GP or numerical) +%define %%SRC0 %4 ; [in] XMM register with data to store +%define %%SRC1 %5 ; [in] XMM register with data to store +%define %%SRC2 %6 ; [in] XMM register with data to store +%define %%SRC3 %7 ; [in] XMM register with data to store +%define %%SRC4 %8 ; [in] XMM register with data to store +%define %%SRC5 %9 ; [in] XMM register with data to store +%define %%SRC6 %10 ; [in] XMM register with data to store +%define %%SRC7 %11 ; [in] XMM register with data to store + +%assign dst_offset 0 +%assign src_idx 0 + +%rep (%%NUM_BLOCKS) +%xdefine %%SRCREG %%SRC %+ src_idx + vmovdqu [%%OUTP + %%DATA_OFFSET + dst_offset], %%SRCREG +%undef %%SRCREG +%assign dst_offset (dst_offset + 16) +%assign src_idx (src_idx + 1) +%endrep + +%endmacro + +;; ============================================================================= +;; Stores up to 8 AES blocks from XMM registers on SSE +%macro XMM_STORE_BLOCKS_SSE_0_8 11 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 8) +%define %%OUTP %2 ; [in] output data pointer to write to +%define %%DATA_OFFSET %3 ; [in] offset to the output pointer (GP or numerical) +%define %%SRC0 %4 ; [in] XMM register with data to store +%define %%SRC1 %5 ; [in] XMM register with data to store +%define %%SRC2 %6 ; [in] XMM register with data to store +%define %%SRC3 %7 ; [in] XMM register with data to store +%define %%SRC4 %8 ; [in] XMM register with data to store +%define %%SRC5 %9 ; [in] XMM register with data to store +%define %%SRC6 %10 ; [in] XMM register with data to store +%define %%SRC7 %11 ; [in] XMM register with data to store + +%assign dst_offset 0 +%assign src_idx 0 + +%rep (%%NUM_BLOCKS) +%xdefine %%SRCREG %%SRC %+ src_idx + movdqu [%%OUTP + %%DATA_OFFSET + dst_offset], %%SRCREG +%undef %%SRCREG +%assign dst_offset (dst_offset + 16) +%assign src_idx (src_idx + 1) 
+%endrep + +%endmacro + +;; ============================================================================= +;; Loads specified number of AES blocks into YMM registers +%macro YMM_LOAD_BLOCKS_AVX2_0_16 11 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 16) +%define %%INP %2 ; [in] input data pointer to read from +%define %%DATA_OFFSET %3 ; [in] offset to the output pointer (GP or numerical) +%define %%DST0 %4 ; [out] YMM register with loaded data +%define %%DST1 %5 ; [out] YMM register with loaded data +%define %%DST2 %6 ; [out] YMM register with loaded data +%define %%DST3 %7 ; [out] YMM register with loaded data +%define %%DST4 %8 ; [out] YMM register with loaded data +%define %%DST5 %9 ; [out] YMM register with loaded data +%define %%DST6 %10 ; [out] YMM register with loaded data +%define %%DST7 %11 ; [out] YMM register with loaded data + +%assign src_offset 0 +%assign dst_idx 0 + +%rep (%%NUM_BLOCKS / 2) +%xdefine %%DSTREG %%DST %+ dst_idx + vmovdqu %%DSTREG, [%%INP + %%DATA_OFFSET + src_offset] +%undef %%DSTREG +%assign src_offset (src_offset + 32) +%assign dst_idx (dst_idx + 1) +%endrep + +%assign blocks_left (%%NUM_BLOCKS % 2) +%xdefine %%DSTREG %%DST %+ dst_idx + +%if blocks_left == 1 + vmovdqu XWORD(%%DSTREG), [%%INP + %%DATA_OFFSET + src_offset] +%endif + +%endmacro + +;; ============================================================================= +;; Stores specified number of AES blocks from YMM registers +%macro YMM_STORE_BLOCKS_AVX2_0_16 11 +%define %%NUM_BLOCKS %1 ; [in] numerical value, number of AES blocks (0 to 16) +%define %%OUTP %2 ; [in] output data pointer to write to +%define %%DATA_OFFSET %3 ; [in] offset to the output pointer (GP or numerical) +%define %%SRC0 %4 ; [in] YMM register with data to store +%define %%SRC1 %5 ; [in] YMM register with data to store +%define %%SRC2 %6 ; [in] YMM register with data to store +%define %%SRC3 %7 ; [in] YMM register with data to store +%define %%SRC4 %8 ; [in] YMM register with data to store +%define %%SRC5 %9 ; [in] YMM register with data to store +%define %%SRC6 %10 ; [in] YMM register with data to store +%define %%SRC7 %11 ; [in] YMM register with data to store + +%assign dst_offset 0 +%assign src_idx 0 + +%rep (%%NUM_BLOCKS / 2) +%xdefine %%SRCREG %%SRC %+ src_idx + vmovdqu [%%OUTP + %%DATA_OFFSET + dst_offset], %%SRCREG +%undef %%SRCREG +%assign dst_offset (dst_offset + 32) +%assign src_idx (src_idx + 1) +%endrep + +%assign blocks_left (%%NUM_BLOCKS % 2) +%xdefine %%SRCREG %%SRC %+ src_idx + +%if blocks_left == 1 + vmovdqu [%%OUTP + %%DATA_OFFSET + dst_offset], XWORD(%%SRCREG) +%endif + +%endmacro + +;;; =========================================================================== +;;; Handles AES encryption rounds +;;; It handles special cases: the last and first rounds +;;; Optionally, it performs XOR with data after the last AES round. +;;; Uses NROUNDS parameter to check what needs to be done for the current round. 
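+;; Usage sketch (commented out, for illustration only; this is how the macros
+;; in this file compose, not the library's actual ECB code path). Encrypting
+;; 16 blocks with AES-128 using AVX2/VAES encodings only: rax is assumed to
+;; point at the 11 expanded round keys, rsi at the input buffer and rdi at
+;; the output buffer.
+;;
+;;      YMM_LOAD_BLOCKS_AVX2_0_16 16, rsi, 0, ymm0, ymm1, ymm2, ymm3, \
+;;              ymm4, ymm5, ymm6, ymm7
+;; %assign round 0
+;; %rep 11
+;;      vbroadcasti128 ymm8, [rax + round*16]     ;; round key in both lanes
+;;      YMM_AESENC_ROUND_BLOCKS_AVX2_0_16 ymm0, ymm1, ymm2, ymm3, \
+;;              ymm4, ymm5, ymm6, ymm7, ymm8, round, \
+;;              no_data, no_data, no_data, no_data, \
+;;              no_data, no_data, no_data, no_data, 16, 9
+;; %assign round (round + 1)
+;; %endrep
+;;      YMM_STORE_BLOCKS_AVX2_0_16 16, rdi, 0, ymm0, ymm1, ymm2, ymm3, \
+;;              ymm4, ymm5, ymm6, ymm7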
+%macro YMM_AESENC_ROUND_BLOCKS_AVX2_0_16 20 +%define %%L0B0_1 %1 ; [in/out] ymm; ciphered blocks +%define %%L0B2_3 %2 ; [in/out] ymm; ciphered blocks +%define %%L0B4_5 %3 ; [in/out] ymm; ciphered blocks +%define %%L0B6_7 %4 ; [in/out] ymm; ciphered blocks +%define %%L0B8_9 %5 ; [in/out] ymm; ciphered blocks +%define %%L0B10_11 %6 ; [in/out] ymm; ciphered blocks +%define %%L0B12_13 %7 ; [in/out] ymm; ciphered blocks +%define %%L0B14_15 %8 ; [in/out] ymm; ciphered blocks +%define %%KEY %9 ; [in] ymm containing round key +%define %%ROUND %10 ; [in] round number +%define %%D0_1 %11 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D2_3 %12 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D4_5 %13 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D6_7 %14 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D8_9 %15 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D10_11 %16 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D12_13 %17 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D14_15 %18 ; [in] ymm or no_data; plain/cipher text blocks +%define %%NUMBL %19 ; [in] number of blocks; numerical value +%define %%NROUNDS %20 ; [in] number of rounds; numerical value + +;;; === first AES round +%if (%%ROUND < 1) + ;; round 0 + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vpxor, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; ROUND 0 + +;;; === middle AES rounds +%if (%%ROUND >= 1 && %%ROUND <= %%NROUNDS) + ;; rounds 1 to 9/11/13 + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vaesenc, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; rounds 1 to 9/11/13 + +;;; === last AES round +%if (%%ROUND > %%NROUNDS) + ;; the last round - mix enclast with text xor's + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vaesenclast, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY + +%ifnidn %%D0_1, no_data +%ifnidn %%D2_3, no_data +%ifnidn %%D4_5, no_data +%ifnidn %%D6_7, no_data +%ifnidn %%D8_9, no_data +%ifnidn %%D10_11, no_data +%ifnidn %%D12_13, no_data +%ifnidn %%D14_15, no_data + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vpxor, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%D0_1, %%D2_3, %%D4_5, %%D6_7, \ + %%D8_9, %%D10_11, %%D12_13, %%D14_15 +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data + +%endif ; The last round + +%endmacro + +;;; =========================================================================== +;;; Handles AES decryption rounds +;;; It handles special cases: the last and first rounds +;;; Optionally, it performs XOR with data after the last AES round. +;;; Uses NROUNDS parameterto check what needs to be done for the current round. 
+%macro YMM_AESDEC_ROUND_BLOCKS_AVX2_0_16 20 +%define %%L0B0_1 %1 ; [in/out] ymm; ciphered blocks +%define %%L0B2_3 %2 ; [in/out] ymm; ciphered blocks +%define %%L0B4_5 %3 ; [in/out] ymm; ciphered blocks +%define %%L0B6_7 %4 ; [in/out] ymm; ciphered blocks +%define %%L0B8_9 %5 ; [in/out] ymm; ciphered blocks +%define %%L0B10_11 %6 ; [in/out] ymm; ciphered blocks +%define %%L0B12_13 %7 ; [in/out] ymm; ciphered blocks +%define %%L0B14_15 %8 ; [in/out] ymm; ciphered blocks +%define %%KEY %9 ; [in] ymm containing round key +%define %%ROUND %10 ; [in] round number +%define %%D0_1 %11 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D2_3 %12 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D4_5 %13 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D6_7 %14 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D8_9 %15 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D10_11 %16 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D12_13 %17 ; [in] ymm or no_data; plain/cipher text blocks +%define %%D14_15 %18 ; [in] ymm or no_data; plain/cipher text blocks +%define %%NUMBL %19 ; [in] number of blocks; numerical value +%define %%NROUNDS %20 ; [in] number of rounds; numerical value + +;;; === first AES round +%if (%%ROUND < 1) + ;; round 0 + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vpxor, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; ROUND 0 + +;;; === middle AES rounds +%if (%%ROUND >= 1 && %%ROUND <= %%NROUNDS) + ;; rounds 1 to 9/11/13 + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vaesdec, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY +%endif ; rounds 1 to 9/11/13 + +;;; === last AES round +%if (%%ROUND > %%NROUNDS) + ;; the last round - mix enclast with text xor's + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vaesdeclast, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY, %%KEY + +;;; === XOR with data +%ifnidn %%D0_1, no_data +%ifnidn %%D2_3, no_data +%ifnidn %%D4_5, no_data +%ifnidn %%D6_7, no_data +%ifnidn %%D8_9, no_data +%ifnidn %%D10_11, no_data +%ifnidn %%D12_13, no_data +%ifnidn %%D14_15, no_data + YMM_OPCODE3_DSTR_SRC1R_SRC2R_BLOCKS_0_16 %%NUMBL, vpxor, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%L0B0_1, %%L0B2_3, %%L0B4_5, %%L0B6_7, \ + %%L0B8_9, %%L0B10_11, %%L0B12_13, %%L0B14_15, \ + %%D0_1, %%D2_3, %%D4_5, %%D6_7, \ + %%D8_9, %%D10_11, %%D12_13, %%D14_15 +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data +%endif ; !no_data + +%endif ; The last round + +%endmacro diff --git a/lib/include/arch_avx2_type1.h b/lib/include/arch_avx2_type1.h new file mode 100644 index 0000000000000000000000000000000000000000..874cd0db9215d59ebfe04d3cd79683aa78c9b1ca --- /dev/null +++ b/lib/include/arch_avx2_type1.h @@ -0,0 +1,115 @@ 
+/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +/* AVX2 + AESNI + PCLMULQDQ */ + +#ifndef IMB_ASM_AVX2_T1_H +#define IMB_ASM_AVX2_T1_H + +#include "ipsec-mb.h" +#include "ipsec_ooo_mgr.h" + +/* SHA */ +void call_sha1_x8_avx2_from_c(SHA1_ARGS *args, uint32_t size_in_blocks); +void call_sha256_oct_avx2_from_c(SHA256_ARGS *args, uint32_t size_in_blocks); +void call_sha512_x4_avx2_from_c(SHA512_ARGS *args, uint64_t size_in_blocks); + +/* moved from MB MGR */ +IMB_JOB *submit_job_zuc_eea3_avx2(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eea3_avx2(MB_MGR_ZUC_OOO *state); + +IMB_JOB *flush_job_zuc256_eea3_avx2(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eea3_avx2(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_zuc_eia3_avx2(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eia3_avx2(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eia3_avx2(MB_MGR_ZUC_OOO *state, + IMB_JOB *job, + const uint64_t tag_sz); +IMB_JOB *flush_job_zuc256_eia3_avx2(MB_MGR_ZUC_OOO *state, + const uint64_t tag_sz); + +IMB_JOB *submit_job_sha1_avx2(MB_MGR_SHA_1_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha1_avx2(MB_MGR_SHA_1_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha224_avx2(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha224_avx2(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha256_avx2(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha256_avx2(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha384_avx2(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha384_avx2(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha512_avx2(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha512_avx2(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); + +void aes_cmac_256_subkey_gen_avx2(const void *key_exp, + void *key1, void *key2); + +IMB_JOB 
*submit_job_chacha20_enc_dec_avx2(IMB_JOB *job); + +IMB_JOB *submit_job_hmac_avx2(MB_MGR_HMAC_SHA_1_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_avx2(MB_MGR_HMAC_SHA_1_OOO *state); + +IMB_JOB *submit_job_hmac_sha_224_avx2(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_224_avx2(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_hmac_sha_256_avx2(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_256_avx2(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_hmac_sha_384_avx2(MB_MGR_HMAC_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_384_avx2(MB_MGR_HMAC_SHA_512_OOO *state); + +IMB_JOB *submit_job_hmac_sha_512_avx2(MB_MGR_HMAC_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_512_avx2(MB_MGR_HMAC_SHA_512_OOO *state); + +IMB_JOB *submit_job_hmac_md5_avx2(MB_MGR_HMAC_MD5_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_md5_avx2(MB_MGR_HMAC_MD5_OOO *state); + +#endif /* IMB_ASM_AVX2_T1_H */ diff --git a/lib/include/arch_avx2_type2.h b/lib/include/arch_avx2_type2.h new file mode 100644 index 0000000000000000000000000000000000000000..47ec7d907b71f7fb331e596cecc4351b1b2fd4f6 --- /dev/null +++ b/lib/include/arch_avx2_type2.h @@ -0,0 +1,51 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +/* AVX2 + VAES + PCLMULQDQ */ + +#ifndef IMB_ASM_AVX2_T2_H +#define IMB_ASM_AVX2_T2_H + +#include "ipsec-mb.h" +#include "ipsec_ooo_mgr.h" + +/* AES-ECB */ +void aes_ecb_enc_256_vaes_avx2(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_192_vaes_avx2(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_128_vaes_avx2(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +void aes_ecb_dec_256_vaes_avx2(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_192_vaes_avx2(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_128_vaes_avx2(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +#endif /* IMB_ASM_AVX2_T2_H */ diff --git a/lib/include/arch_avx512_type1.h b/lib/include/arch_avx512_type1.h new file mode 100644 index 0000000000000000000000000000000000000000..523ba6d8055249765db91a0631821aa3d481d629 --- /dev/null +++ b/lib/include/arch_avx512_type1.h @@ -0,0 +1,161 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +/* AVX512 + AESNI + PCLMULQDQ */ + +#ifndef IMB_ASM_AVX512_T1_H +#define IMB_ASM_AVX512_T1_H + +#include "ipsec-mb.h" +#include "ipsec_ooo_mgr.h" + +IMB_JOB *submit_job_des_cbc_enc_avx512(MB_MGR_DES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_des_cbc_enc_avx512(MB_MGR_DES_OOO *state); + +IMB_JOB *submit_job_des_cbc_dec_avx512(MB_MGR_DES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_des_cbc_dec_avx512(MB_MGR_DES_OOO *state); + +IMB_JOB *submit_job_3des_cbc_enc_avx512(MB_MGR_DES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_3des_cbc_enc_avx512(MB_MGR_DES_OOO *state); + +IMB_JOB *submit_job_3des_cbc_dec_avx512(MB_MGR_DES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_3des_cbc_dec_avx512(MB_MGR_DES_OOO *state); + +IMB_JOB *submit_job_docsis_des_enc_avx512(MB_MGR_DES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_docsis_des_enc_avx512(MB_MGR_DES_OOO *state); + +IMB_JOB *submit_job_docsis_des_dec_avx512(MB_MGR_DES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_docsis_des_dec_avx512(MB_MGR_DES_OOO *state); + +IMB_JOB *submit_job_zuc_eea3_no_gfni_avx512(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eea3_no_gfni_avx512(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc_eia3_no_gfni_avx512(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eia3_no_gfni_avx512(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eea3_no_gfni_avx512(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc256_eea3_no_gfni_avx512(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eia3_no_gfni_avx512(MB_MGR_ZUC_OOO *state, + IMB_JOB *job, + const uint64_t tag_sz); +IMB_JOB *flush_job_zuc256_eia3_no_gfni_avx512(MB_MGR_ZUC_OOO *state, + const uint64_t tag_sz); + +IMB_JOB *submit_job_sha1_avx512(MB_MGR_SHA_1_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha1_avx512(MB_MGR_SHA_1_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha224_avx512(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha224_avx512(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha256_avx512(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha256_avx512(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha384_avx512(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha384_avx512(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha512_avx512(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha512_avx512(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_snow3g_uea2_avx512(MB_MGR_SNOW3G_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_snow3g_uea2_avx512(MB_MGR_SNOW3G_OOO *state); + +IMB_JOB *submit_job_snow3g_uia2_avx512(MB_MGR_SNOW3G_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_snow3g_uia2_avx512(MB_MGR_SNOW3G_OOO *state); + +void aes_cmac_256_subkey_gen_avx512(const void *key_exp, + void *key1, void *key2); + +IMB_JOB *submit_job_hmac_avx512(MB_MGR_HMAC_SHA_1_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_avx512(MB_MGR_HMAC_SHA_1_OOO *state); + +IMB_JOB *submit_job_hmac_sha_224_avx512(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_224_avx512(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_hmac_sha_256_avx512(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_256_avx512(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_hmac_sha_384_avx512(MB_MGR_HMAC_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB 
*flush_job_hmac_sha_384_avx512(MB_MGR_HMAC_SHA_512_OOO *state); + +IMB_JOB *submit_job_hmac_sha_512_avx512(MB_MGR_HMAC_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_512_avx512(MB_MGR_HMAC_SHA_512_OOO *state); + +void poly1305_mac_plain_avx512(IMB_JOB *job); + +IMB_JOB *submit_job_chacha20_enc_dec_avx512(IMB_JOB *job); + +void aes_docsis128_dec_crc32_avx512(IMB_JOB *job); +void aes_docsis256_dec_crc32_avx512(IMB_JOB *job); +IMB_JOB * +submit_job_aes_docsis128_enc_crc32_avx512(MB_MGR_DOCSIS_AES_OOO *state, + IMB_JOB *job); +IMB_JOB * +flush_job_aes_docsis128_enc_crc32_avx512(MB_MGR_DOCSIS_AES_OOO *state); + +IMB_JOB * +submit_job_aes_docsis256_enc_crc32_avx512(MB_MGR_DOCSIS_AES_OOO *state, + IMB_JOB *job); +IMB_JOB * +flush_job_aes_docsis256_enc_crc32_avx512(MB_MGR_DOCSIS_AES_OOO *state); + +/* SHA */ +void call_sha1_x16_avx512_from_c(SHA1_ARGS *args, uint32_t size_in_blocks); +void call_sha256_x16_avx512_from_c(SHA256_ARGS *args, uint32_t size_in_blocks); +void call_sha512_x8_avx512_from_c(SHA512_ARGS *args, uint64_t size_in_blocks); + +#endif /* IMB_ASM_AVX512_T1_H */ + + diff --git a/lib/include/arch_avx512_type2.h b/lib/include/arch_avx512_type2.h new file mode 100644 index 0000000000000000000000000000000000000000..c1170698c17bae39aa70a590c8c74e122117aebc --- /dev/null +++ b/lib/include/arch_avx512_type2.h @@ -0,0 +1,203 @@ +/******************************************************************************* + Copyright (c) 2012-2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +/* AVX512 + VAES + VPCLMULQDQ + GFNI + FMA */ + +#ifndef IMB_ASM_AVX512_T2_H +#define IMB_ASM_AVX512_T2_H + +#include "ipsec-mb.h" +#include "ipsec_ooo_mgr.h" + +/* Define interface to base asm code */ + +/* AES-CBC */ +void aes_cbc_dec_128_vaes_avx512(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes); +void aes_cbc_dec_192_vaes_avx512(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes); +void aes_cbc_dec_256_vaes_avx512(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes); + +/* AES-CTR */ +void aes_cntr_128_submit_vaes_avx512(IMB_JOB *job); +void aes_cntr_192_submit_vaes_avx512(IMB_JOB *job); +void aes_cntr_256_submit_vaes_avx512(IMB_JOB *job); + +/* AES-CTR-BITLEN */ +void aes_cntr_bit_128_submit_vaes_avx512(IMB_JOB *job); +void aes_cntr_bit_192_submit_vaes_avx512(IMB_JOB *job); +void aes_cntr_bit_256_submit_vaes_avx512(IMB_JOB *job); + +/* AES-ECB */ +void aes_ecb_enc_256_vaes_avx512(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_192_vaes_avx512(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_128_vaes_avx512(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +void aes_ecb_dec_256_vaes_avx512(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_192_vaes_avx512(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_128_vaes_avx512(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +/* AES-CBCS */ +void aes_cbcs_1_9_dec_128_vaes_avx512(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes, void *next_iv); + +/* moved from MB MGR */ + +IMB_JOB *submit_job_pon_enc_vaes_avx512(IMB_JOB *job); +IMB_JOB *submit_job_pon_dec_vaes_avx512(IMB_JOB *job); + +IMB_JOB *submit_job_pon_enc_no_ctr_vaes_avx512(IMB_JOB *job); +IMB_JOB *submit_job_pon_dec_no_ctr_vaes_avx512(IMB_JOB *job); + +IMB_JOB *submit_job_aes_xcbc_vaes_avx512(MB_MGR_AES_XCBC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes_xcbc_vaes_avx512(MB_MGR_AES_XCBC_OOO *state); + +IMB_JOB *submit_job_aes128_enc_vaes_avx512(MB_MGR_AES_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_enc_vaes_avx512(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes192_enc_vaes_avx512(MB_MGR_AES_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes192_enc_vaes_avx512(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes256_enc_vaes_avx512(MB_MGR_AES_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes256_enc_vaes_avx512(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_zuc_eea3_gfni_avx512(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eea3_gfni_avx512(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc_eia3_gfni_avx512(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eia3_gfni_avx512(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eea3_gfni_avx512(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc256_eea3_gfni_avx512(MB_MGR_ZUC_OOO *state); + +IMB_JOB *aes_cntr_ccm_128_vaes_avx512(IMB_JOB *job); + +IMB_JOB *aes_cntr_ccm_256_vaes_avx512(IMB_JOB *job); + +IMB_JOB *submit_job_zuc256_eia3_gfni_avx512(MB_MGR_ZUC_OOO *state, + IMB_JOB *job, + const uint64_t tag_sz); +IMB_JOB *flush_job_zuc256_eia3_gfni_avx512(MB_MGR_ZUC_OOO *state, + const uint64_t tag_sz); + +IMB_JOB 
*submit_job_aes128_cbcs_1_9_enc_vaes_avx512(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes128_cbcs_1_9_enc_vaes_avx512(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_snow3g_uea2_vaes_avx512(MB_MGR_SNOW3G_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_snow3g_uea2_vaes_avx512(MB_MGR_SNOW3G_OOO *state); + +IMB_JOB *submit_job_snow3g_uia2_vaes_avx512(MB_MGR_SNOW3G_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_snow3g_uia2_vaes_avx512(MB_MGR_SNOW3G_OOO *state); + +IMB_JOB *submit_job_aes128_cmac_auth_vaes_avx512(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_cmac_auth_vaes_avx512(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes256_cmac_auth_vaes_avx512(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes256_cmac_auth_vaes_avx512(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes128_ccm_auth_vaes_avx512(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_ccm_auth_vaes_avx512(MB_MGR_CCM_OOO *state); + +IMB_JOB *submit_job_aes256_ccm_auth_vaes_avx512(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes256_ccm_auth_vaes_avx512(MB_MGR_CCM_OOO *state); + +void poly1305_mac_fma_avx512(IMB_JOB *job); + +uint32_t ethernet_fcs_avx512(const void *msg, const uint64_t len); +uint32_t ethernet_fcs_avx512_local(const void *msg, const uint64_t len, + const void *tag_ouput); +uint32_t crc16_x25_avx512(const void *msg, const uint64_t len); +uint32_t crc32_sctp_avx512(const void *msg, const uint64_t len); +uint32_t crc24_lte_a_avx512(const void *msg, const uint64_t len); +uint32_t crc24_lte_b_avx512(const void *msg, const uint64_t len); +uint32_t crc16_fp_data_avx512(const void *msg, const uint64_t len); +uint32_t crc11_fp_header_avx512(const void *msg, const uint64_t len); +uint32_t crc7_fp_header_avx512(const void *msg, const uint64_t len); +uint32_t crc10_iuup_data_avx512(const void *msg, const uint64_t len); +uint32_t crc6_iuup_header_avx512(const void *msg, const uint64_t len); +uint32_t crc32_wimax_ofdma_data_avx512(const void *msg, const uint64_t len); +uint32_t crc8_wimax_ofdma_hcs_avx512(const void *msg, const uint64_t len); + +void snow3g_f9_1_buffer_vaes_avx512(const snow3g_key_schedule_t *pHandle, + const void *pIV, + const void *pBufferIn, + const uint64_t lengthInBits, + void *pDigest); + + +void aes_docsis128_dec_crc32_vaes_avx512(IMB_JOB *job); +void aes_docsis256_dec_crc32_vaes_avx512(IMB_JOB *job); + +IMB_JOB * +submit_job_aes_docsis128_enc_crc32_vaes_avx512(MB_MGR_DOCSIS_AES_OOO *state, + IMB_JOB *job); +IMB_JOB * +flush_job_aes_docsis128_enc_crc32_vaes_avx512(MB_MGR_DOCSIS_AES_OOO *state); + +IMB_JOB * +submit_job_aes_docsis256_enc_crc32_vaes_avx512(MB_MGR_DOCSIS_AES_OOO *state, + IMB_JOB *job); +IMB_JOB * +flush_job_aes_docsis256_enc_crc32_vaes_avx512(MB_MGR_DOCSIS_AES_OOO *state); + + +#endif /* IMB_ASM_AVX512_T2_H */ + diff --git a/lib/include/arch_avx_type1.h b/lib/include/arch_avx_type1.h new file mode 100644 index 0000000000000000000000000000000000000000..c0a8812fedd89ec26ea8cf7a3528cf0a96c1285c --- /dev/null +++ b/lib/include/arch_avx_type1.h @@ -0,0 +1,253 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +/* AVX + AESNI + PCLMULQDQ */ + +#ifndef IMB_ASM_AVX_T1_H +#define IMB_ASM_AVX_T1_H + +#include "ipsec-mb.h" +#include "ipsec_ooo_mgr.h" + + +/* AES-CBC */ +void aes_cbc_enc_128_x8(AES_ARGS *args, uint64_t len_in_bytes); +void aes_cbc_enc_192_x8(AES_ARGS *args, uint64_t len_in_bytes); +void aes_cbc_enc_256_x8(AES_ARGS *args, uint64_t len_in_bytes); + +void aes_cbc_dec_128_avx(const void *in, const uint8_t *IV, const void *keys, + void *out, uint64_t len_bytes); +void aes_cbc_dec_192_avx(const void *in, const uint8_t *IV, const void *keys, + void *out, uint64_t len_bytes); +void aes_cbc_dec_256_avx(const void *in, const uint8_t *IV, const void *keys, + void *out, uint64_t len_bytes); + +/* AES-CTR */ +void aes_cntr_256_avx(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bytes, uint64_t IV_len); +void aes_cntr_192_avx(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bytes, uint64_t IV_len); +void aes_cntr_128_avx(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bytes, uint64_t IV_len); + +/* AES-CTR-BITLEN */ +void aes_cntr_bit_256_avx(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bits, uint64_t IV_len); +void aes_cntr_bit_192_avx(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bits, uint64_t IV_len); +void aes_cntr_bit_128_avx(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bits, uint64_t IV_len); + +/* AES-CCM */ +IMB_JOB *aes_cntr_ccm_128_avx(IMB_JOB *job); +IMB_JOB *aes_cntr_ccm_256_avx(IMB_JOB *job); + +/* AES-ECB */ +void aes_ecb_enc_256_avx(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_192_avx(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_128_avx(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +void aes_ecb_dec_256_avx(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_192_avx(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_128_avx(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +void aes_ecb_dec_256_vaes_avx512(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void 
aes_ecb_dec_192_vaes_avx512(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_128_vaes_avx512(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +/* AES128-ECBENC */ +void aes128_ecbenc_x3_avx(const void *in, void *keys, + void *out1, void *out2, void *out3); + +/* AES-CBCS */ +void aes_cbcs_1_9_dec_128_avx(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes, void *next_iv); + +/* stitched AES128-CNTR, CRC32 and BIP */ +IMB_JOB *submit_job_pon_enc_avx(IMB_JOB *job); +IMB_JOB *submit_job_pon_dec_avx(IMB_JOB *job); + +IMB_JOB *submit_job_pon_enc_no_ctr_avx(IMB_JOB *job); +IMB_JOB *submit_job_pon_dec_no_ctr_avx(IMB_JOB *job); + +/* moved from MB MGR */ +IMB_JOB *submit_job_aes128_enc_avx(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes128_enc_avx(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes192_enc_avx(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes192_enc_avx(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes256_enc_avx(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes256_enc_avx(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes_xcbc_avx(MB_MGR_AES_XCBC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes_xcbc_avx(MB_MGR_AES_XCBC_OOO *state); + +IMB_JOB *submit_job_aes_cntr_avx(IMB_JOB *job); + +IMB_JOB *submit_job_aes_cntr_bit_avx(IMB_JOB *job); + +IMB_JOB *submit_job_zuc_eea3_avx(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eea3_avx(MB_MGR_ZUC_OOO *state); + +IMB_JOB *flush_job_zuc256_eea3_avx(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eea3_avx(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_zuc_eia3_avx(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eia3_avx(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eia3_avx(MB_MGR_ZUC_OOO *state, + IMB_JOB *job, + const uint64_t tag_sz); +IMB_JOB *flush_job_zuc256_eia3_avx(MB_MGR_ZUC_OOO *state, + const uint64_t tag_sz); + +IMB_JOB *submit_job_sha1_avx(MB_MGR_SHA_1_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha1_avx(MB_MGR_SHA_1_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha224_avx(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha224_avx(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha256_avx(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha256_avx(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha384_avx(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha384_avx(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha512_avx(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha512_avx(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); + +uint32_t hec_32_avx(const uint8_t *in); +uint64_t hec_64_avx(const uint8_t *in); + +IMB_JOB *submit_job_aes128_cbcs_1_9_enc_avx(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes128_cbcs_1_9_enc_avx(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_chacha20_enc_dec_avx(IMB_JOB *job); + +IMB_JOB *snow_v_avx(IMB_JOB *job); +IMB_JOB *snow_v_aead_init_avx(IMB_JOB *job); + +IMB_JOB *submit_job_hmac_avx(MB_MGR_HMAC_SHA_1_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_avx(MB_MGR_HMAC_SHA_1_OOO *state); + +IMB_JOB *submit_job_hmac_sha_224_avx(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_224_avx(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_hmac_sha_256_avx(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB 
*flush_job_hmac_sha_256_avx(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_hmac_sha_384_avx(MB_MGR_HMAC_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_384_avx(MB_MGR_HMAC_SHA_512_OOO *state); + +IMB_JOB *submit_job_hmac_sha_512_avx(MB_MGR_HMAC_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_512_avx(MB_MGR_HMAC_SHA_512_OOO *state); + +IMB_JOB *submit_job_hmac_md5_avx(MB_MGR_HMAC_MD5_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_md5_avx(MB_MGR_HMAC_MD5_OOO *state); + +IMB_JOB *submit_job_aes128_cmac_auth_avx(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_cmac_auth_avx(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes256_cmac_auth_avx(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes256_cmac_auth_avx(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes128_ccm_auth_avx(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_ccm_auth_avx(MB_MGR_CCM_OOO *state); + +IMB_JOB *submit_job_aes256_ccm_auth_avx(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes256_ccm_auth_avx(MB_MGR_CCM_OOO *state); + +void aes_cmac_256_subkey_gen_avx(const void *key_exp, + void *key1, void *key2); + +void aes128_cbc_mac_x8(AES_ARGS *args, uint64_t len); + +uint32_t ethernet_fcs_avx(const void *msg, const uint64_t len); +uint32_t ethernet_fcs_avx_local(const void *msg, const uint64_t len, + const void *tag_ouput); +uint32_t crc16_x25_avx(const void *msg, const uint64_t len); +uint32_t crc32_sctp_avx(const void *msg, const uint64_t len); +uint32_t crc24_lte_a_avx(const void *msg, const uint64_t len); +uint32_t crc24_lte_b_avx(const void *msg, const uint64_t len); +uint32_t crc16_fp_data_avx(const void *msg, const uint64_t len); +uint32_t crc11_fp_header_avx(const void *msg, const uint64_t len); +uint32_t crc7_fp_header_avx(const void *msg, const uint64_t len); +uint32_t crc10_iuup_data_avx(const void *msg, const uint64_t len); +uint32_t crc6_iuup_header_avx(const void *msg, const uint64_t len); +uint32_t crc32_wimax_ofdma_data_avx(const void *msg, const uint64_t len); +uint32_t crc8_wimax_ofdma_hcs_avx(const void *msg, const uint64_t len); + +/* SHA */ +void call_sha1_mult_avx_from_c(SHA1_ARGS *args, uint32_t size_in_blocks); +void call_sha_256_mult_avx_from_c(SHA256_ARGS *args, uint32_t size_in_blocks); +void call_sha512_x2_avx_from_c(SHA512_ARGS *args, uint64_t size_in_blocks); + +#endif /* IMB_ASM_AVX_T1_H */ diff --git a/lib/include/arch_noaesni.h b/lib/include/arch_noaesni.h new file mode 100644 index 0000000000000000000000000000000000000000..7edb9d2c41dc3674ca4c8ed6acb5dbb60e5df98d --- /dev/null +++ b/lib/include/arch_noaesni.h @@ -0,0 +1,191 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
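The CRC/FCS helpers declared above in arch_avx_type1.h all share the same (msg, len) -> checksum shape. Below is a minimal sketch of driving two of them; only the ethernet_fcs_avx and crc16_x25_avx prototypes are taken from the declarations above, the surrounding driver code is illustrative, and in the library these are internal per-arch helpers normally reached through the IMB_MGR dispatch table rather than called directly.

#include <stdint.h>
#include <stdio.h>

/* Prototypes copied from the declarations above; everything else in this
 * snippet is an illustrative sketch, not library code. */
uint32_t ethernet_fcs_avx(const void *msg, const uint64_t len);
uint32_t crc16_x25_avx(const void *msg, const uint64_t len);

static void crc_sketch(const uint8_t *frame, const uint64_t frame_len)
{
        /* CRC32 Ethernet FCS over the frame body (FCS field excluded) */
        const uint32_t fcs = ethernet_fcs_avx(frame, frame_len);

        /* CRC16 X.25 over the same buffer */
        const uint32_t crc16 = crc16_x25_avx(frame, frame_len);

        printf("fcs=0x%08x crc16=0x%04x\n", (unsigned) fcs, (unsigned) crc16);
}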
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +/* interface to asm routines */ + +#ifndef IMB_ARCH_NOAESNI_H +#define IMB_ARCH_NOAESNI_H + +#include "ipsec-mb.h" +#include "ipsec_ooo_mgr.h" + +/* AES-CBC */ + +void aes_cbc_dec_128_sse_no_aesni(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes); +void aes_cbc_dec_192_sse_no_aesni(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes); +void aes_cbc_dec_256_sse_no_aesni(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes); + +/* AES-CTR */ +void aes_cntr_256_sse_no_aesni(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bytes, uint64_t IV_len); +void aes_cntr_192_sse_no_aesni(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bytes, uint64_t IV_len); +void aes_cntr_128_sse_no_aesni(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bytes, uint64_t IV_len); + +/* AES-CTR-BITLEN */ +void aes_cntr_bit_256_sse_no_aesni(const void *in, const void *IV, + const void *keys, void *out, + uint64_t len_bits, uint64_t IV_len); +void aes_cntr_bit_192_sse_no_aesni(const void *in, const void *IV, + const void *keys, void *out, + uint64_t len_bits, uint64_t IV_len); +void aes_cntr_bit_128_sse_no_aesni(const void *in, const void *IV, + const void *keys, void *out, + uint64_t len_bits, uint64_t IV_len); + +/* AES-CCM */ +IMB_JOB *aes_cntr_ccm_128_sse_no_aesni(IMB_JOB *job); +IMB_JOB *aes_cntr_ccm_256_sse_no_aesni(IMB_JOB *job); + +/* AES-ECB */ +void aes_ecb_enc_256_sse_no_aesni(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_192_sse_no_aesni(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_128_sse_no_aesni(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +void aes_ecb_dec_256_sse_no_aesni(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_192_sse_no_aesni(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_128_sse_no_aesni(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +/* AES128-ECBENC */ +void aes128_ecbenc_x3_sse_no_aesni(const void *in, void *keys, + void *out1, void *out2, void *out3); + +/* AES-CBCS */ +void aes_cbcs_1_9_dec_128_sse_no_aesni(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes, void *next_iv); + +/* stitched AES128-CNTR, CRC32 and BIP */ +IMB_JOB *submit_job_pon_enc_sse_no_aesni(IMB_JOB *job); +IMB_JOB *submit_job_pon_dec_sse_no_aesni(IMB_JOB *job); + +IMB_JOB *submit_job_pon_enc_no_ctr_sse_no_aesni(IMB_JOB *job); +IMB_JOB 
*submit_job_pon_dec_no_ctr_sse_no_aesni(IMB_JOB *job); + +/* moved from MB MGR */ +IMB_JOB *submit_job_aes128_enc_sse_no_aesni(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes128_enc_sse_no_aesni(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes192_enc_sse_no_aesni(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes192_enc_sse_no_aesni(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes256_enc_sse_no_aesni(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes256_enc_sse_no_aesni(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes_xcbc_sse_no_aesni(MB_MGR_AES_XCBC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes_xcbc_sse_no_aesni(MB_MGR_AES_XCBC_OOO *state); + +IMB_JOB *submit_job_aes128_cmac_auth_sse_no_aesni(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes128_cmac_auth_sse_no_aesni(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes256_cmac_auth_sse_no_aesni(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes256_cmac_auth_sse_no_aesni(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes128_ccm_auth_sse_no_aesni(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_ccm_auth_sse_no_aesni(MB_MGR_CCM_OOO *state); + +IMB_JOB *submit_job_aes256_ccm_auth_sse_no_aesni(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes256_ccm_auth_sse_no_aesni(MB_MGR_CCM_OOO *state); + +IMB_JOB *submit_job_aes_cntr_sse_no_aesni(IMB_JOB *job); + +IMB_JOB *submit_job_aes_cntr_bit_sse_no_aesni(IMB_JOB *job); + +IMB_JOB *submit_job_zuc_eea3_sse_no_aesni(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eea3_sse_no_aesni(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eea3_sse_no_aesni(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc256_eea3_sse_no_aesni(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc_eia3_sse_no_aesni(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eia3_sse_no_aesni(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eia3_sse_no_aesni(MB_MGR_ZUC_OOO *state, + IMB_JOB *job, + const uint64_t tag_sz); +IMB_JOB *flush_job_zuc256_eia3_sse_no_aesni(MB_MGR_ZUC_OOO *state, + const uint64_t tag_sz); + +uint32_t hec_32_sse_no_aesni(const uint8_t *in); +uint64_t hec_64_sse_no_aesni(const uint8_t *in); + +IMB_JOB *submit_job_aes128_cbcs_1_9_enc_sse_no_aesni(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes128_cbcs_1_9_enc_sse_no_aesni(MB_MGR_AES_OOO *state); + +IMB_JOB *snow_v_sse_no_aesni(IMB_JOB *job); +IMB_JOB *snow_v_aead_init_sse_no_aesni(IMB_JOB *job); + +void aes128_cbc_mac_x4_no_aesni(AES_ARGS *args, uint64_t len); + +uint32_t ethernet_fcs_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc16_x25_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc32_sctp_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc24_lte_a_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc24_lte_b_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc16_fp_data_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc11_fp_header_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc7_fp_header_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc10_iuup_data_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc6_iuup_header_sse_no_aesni(const void *msg, const uint64_t len); +uint32_t crc32_wimax_ofdma_data_sse_no_aesni(const void *msg, + const uint64_t len); +uint32_t crc8_wimax_ofdma_hcs_sse_no_aesni(const void *msg, const uint64_t len); + +uint32_t 
+ethernet_fcs_sse_no_aesni_local(const void *msg, const uint64_t len, + const void *tag_ouput); + +#endif /* IMB_ARCH_NOAESNI_H */ + + diff --git a/lib/include/arch_sse_type1.h b/lib/include/arch_sse_type1.h new file mode 100644 index 0000000000000000000000000000000000000000..a74702c7dcd9057b5e3e612784a019d14a4c467c --- /dev/null +++ b/lib/include/arch_sse_type1.h @@ -0,0 +1,249 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +/* ARCH SSE TYPE 1: SSE4.2, AESNI, PCLMULQDQ, CMOV, BSWAP */ + +#ifndef IMB_ARCH_SSE_TYPE1_H +#define IMB_ARCH_SSE_TYPE1_H + +#include "ipsec-mb.h" +#include "ipsec_ooo_mgr.h" + +/* AES-CBC */ +void aes_cbc_enc_128_x8(AES_ARGS *args, uint64_t len_in_bytes); +void aes_cbc_enc_192_x8(AES_ARGS *args, uint64_t len_in_bytes); +void aes_cbc_enc_256_x8(AES_ARGS *args, uint64_t len_in_bytes); + + +void aes_cbc_dec_128_sse(const void *in, const uint8_t *IV, const void *keys, + void *out, uint64_t len_bytes); +void aes_cbc_dec_192_sse(const void *in, const uint8_t *IV, const void *keys, + void *out, uint64_t len_bytes); +void aes_cbc_dec_256_sse(const void *in, const uint8_t *IV, const void *keys, + void *out, uint64_t len_bytes); + +/* AES-CTR */ +void aes_cntr_256_sse(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bytes, uint64_t IV_len); +void aes_cntr_192_sse(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bytes, uint64_t IV_len); +void aes_cntr_128_sse(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bytes, uint64_t IV_len); + +/* AES-CTR-BITLEN */ +void aes_cntr_bit_256_sse(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bits, uint64_t IV_len); +void aes_cntr_bit_192_sse(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bits, uint64_t IV_len); +void aes_cntr_bit_128_sse(const void *in, const void *IV, const void *keys, + void *out, uint64_t len_bits, uint64_t IV_len); + +/* AES-CCM */ +IMB_JOB *aes_cntr_ccm_128_sse(IMB_JOB *job); + +IMB_JOB *aes_cntr_ccm_256_sse(IMB_JOB *job); + +/* AES-ECB */ +void aes_ecb_enc_256_by4_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_192_by4_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_128_by4_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +void aes_ecb_dec_256_by4_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_192_by4_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_128_by4_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +/* AES128-ECBENC */ +void aes128_ecbenc_x3_sse(const void *in, void *keys, + void *out1, void *out2, void *out3); + +/* AES-CBCS */ +void aes_cbcs_1_9_dec_128_sse(const void *in, const uint8_t *IV, + const void *keys, void *out, + uint64_t len_bytes, void *next_iv); + +/* stitched AES128-CNTR, CRC32 and BIP */ +IMB_JOB *submit_job_pon_enc_sse(IMB_JOB *job); +IMB_JOB *submit_job_pon_dec_sse(IMB_JOB *job); + +IMB_JOB *submit_job_pon_enc_no_ctr_sse(IMB_JOB *job); +IMB_JOB *submit_job_pon_dec_no_ctr_sse(IMB_JOB *job); + +/* CRC */ +uint32_t ethernet_fcs_sse(const void *msg, const uint64_t len); +uint32_t crc16_x25_sse(const void *msg, const uint64_t len); +uint32_t crc32_sctp_sse(const void *msg, const uint64_t len); +uint32_t crc24_lte_a_sse(const void *msg, const uint64_t len); +uint32_t crc24_lte_b_sse(const void *msg, const uint64_t len); +uint32_t crc16_fp_data_sse(const void *msg, const uint64_t len); +uint32_t crc11_fp_header_sse(const void *msg, const uint64_t len); +uint32_t crc7_fp_header_sse(const void *msg, const uint64_t len); +uint32_t crc10_iuup_data_sse(const void *msg, const uint64_t len); +uint32_t crc6_iuup_header_sse(const void *msg, const uint64_t len); +uint32_t 
crc32_wimax_ofdma_data_sse(const void *msg, const uint64_t len); +uint32_t crc8_wimax_ofdma_hcs_sse(const void *msg, const uint64_t len); +uint32_t ethernet_fcs_sse_local(const void *msg, const uint64_t len, + const void *tag_ouput); + +/* SHA */ +void call_sha1_mult_sse_from_c(SHA1_ARGS *args, uint32_t size_in_blocks); +void call_sha_256_mult_sse_from_c(SHA256_ARGS *args, uint32_t size_in_blocks); +void call_sha512_x2_sse_from_c(SHA512_ARGS *args, uint64_t size_in_blocks); + +/* moved from MB MGR */ +IMB_JOB *submit_job_aes128_enc_sse(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes128_enc_sse(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes192_enc_sse(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes192_enc_sse(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes256_enc_sse(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes256_enc_sse(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_hmac_sse(MB_MGR_HMAC_SHA_1_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sse(MB_MGR_HMAC_SHA_1_OOO *state); + +IMB_JOB *submit_job_hmac_sha_224_sse(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_224_sse(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_hmac_sha_256_sse(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_256_sse(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_hmac_sha_384_sse(MB_MGR_HMAC_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_384_sse(MB_MGR_HMAC_SHA_512_OOO *state); + +IMB_JOB *submit_job_hmac_sha_512_sse(MB_MGR_HMAC_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_512_sse(MB_MGR_HMAC_SHA_512_OOO *state); + +IMB_JOB *submit_job_hmac_md5_sse(MB_MGR_HMAC_MD5_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_md5_sse(MB_MGR_HMAC_MD5_OOO *state); + + +IMB_JOB *submit_job_aes_xcbc_sse(MB_MGR_AES_XCBC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes_xcbc_sse(MB_MGR_AES_XCBC_OOO *state); + +IMB_JOB *submit_job_aes128_cmac_auth_sse(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_cmac_auth_sse(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes256_cmac_auth_sse(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes256_cmac_auth_sse(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes128_ccm_auth_sse(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_aes256_ccm_auth_sse(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_ccm_auth_sse(MB_MGR_CCM_OOO *state); + +IMB_JOB *flush_job_aes256_ccm_auth_sse(MB_MGR_CCM_OOO *state); + +IMB_JOB *submit_job_zuc_eea3_no_gfni_sse(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eea3_no_gfni_sse(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eea3_no_gfni_sse(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc256_eea3_no_gfni_sse(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc_eia3_no_gfni_sse(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eia3_no_gfni_sse(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eia3_no_gfni_sse(MB_MGR_ZUC_OOO *state, + IMB_JOB *job, + const uint64_t tag_sz); +IMB_JOB *flush_job_zuc256_eia3_no_gfni_sse(MB_MGR_ZUC_OOO *state, + const uint64_t tag_sz); + +IMB_JOB *submit_job_sha1_sse(MB_MGR_SHA_1_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha1_sse(MB_MGR_SHA_1_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha224_sse(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha224_sse(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); + +IMB_JOB 
*submit_job_sha256_sse(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha256_sse(MB_MGR_SHA_256_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha384_sse(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha384_sse(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); + +IMB_JOB *submit_job_sha512_sse(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_sha512_sse(MB_MGR_SHA_512_OOO *state, + IMB_JOB *job); + +void aes_cmac_256_subkey_gen_sse(const void *key_exp, + void *key1, void *key2); +uint32_t hec_32_sse(const uint8_t *in); +uint64_t hec_64_sse(const uint8_t *in); + +IMB_JOB *submit_job_aes128_cbcs_1_9_enc_sse(MB_MGR_AES_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_cbcs_1_9_enc_sse(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_chacha20_enc_dec_sse(IMB_JOB *job); + +IMB_JOB *snow_v_sse(IMB_JOB *job); +IMB_JOB *snow_v_aead_init_sse(IMB_JOB *job); + +IMB_JOB *submit_job_snow3g_uea2_sse(MB_MGR_SNOW3G_OOO *state, IMB_JOB *job); +IMB_JOB *flush_job_snow3g_uea2_sse(MB_MGR_SNOW3G_OOO *state); + +IMB_JOB *submit_job_snow3g_uia2_sse(MB_MGR_SNOW3G_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_snow3g_uia2_sse(MB_MGR_SNOW3G_OOO *state); + +#endif /* IMB_ARCH_SSE_TYPE1_H */ diff --git a/lib/include/arch_sse_type2.h b/lib/include/arch_sse_type2.h new file mode 100644 index 0000000000000000000000000000000000000000..7910132b5071a3346d9d62ae34e79705539b9bb3 --- /dev/null +++ b/lib/include/arch_sse_type2.h @@ -0,0 +1,67 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +/* ARCH SSE TYPE 2: SSE4.2, AESNI, PCLMULQDQ, CMOV, BSWAP, SHANI */ + +#ifndef IMB_ARCH_SSE_TYPE2_H +#define IMB_ARCH_SSE_TYPE2_H + +#include "ipsec-mb.h" +#include "ipsec_ooo_mgr.h" + +/* SHA */ +void call_sha1_ni_x2_sse_from_c(SHA1_ARGS *args, uint32_t size_in_blocks); +void call_sha224_ni_x2_sse_from_c(SHA256_ARGS *args, uint32_t size_in_blocks); +void call_sha256_ni_x2_sse_from_c(SHA256_ARGS *args, uint32_t size_in_blocks); + +/* Moved from MB MGR */ + +IMB_JOB *submit_job_hmac_ni_sse(MB_MGR_HMAC_SHA_1_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_ni_sse(MB_MGR_HMAC_SHA_1_OOO *state); + +IMB_JOB *submit_job_hmac_sha_224_ni_sse(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_224_ni_sse(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_hmac_sha_256_ni_sse(MB_MGR_HMAC_SHA_256_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_hmac_sha_256_ni_sse(MB_MGR_HMAC_SHA_256_OOO *state); + +IMB_JOB *submit_job_sha1_ni_sse(MB_MGR_SHA_1_OOO *state, IMB_JOB *job); + +IMB_JOB *flush_job_sha1_ni_sse(MB_MGR_SHA_1_OOO *state, IMB_JOB *job); + +IMB_JOB *submit_job_sha224_ni_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job); + +IMB_JOB *flush_job_sha224_ni_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job); + +IMB_JOB *submit_job_sha256_ni_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job); + +IMB_JOB *flush_job_sha256_ni_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job); + +#endif /* IMB_ARCH_SSE_TYPE2_H */ diff --git a/lib/include/arch_sse_type3.h b/lib/include/arch_sse_type3.h new file mode 100644 index 0000000000000000000000000000000000000000..0eaae1dd1eb18b95c92fe0b0e85ba13e99dd0c22 --- /dev/null +++ b/lib/include/arch_sse_type3.h @@ -0,0 +1,115 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +/* ARCH SSE TYPE 3: SSE4.2, AESNI, PCLMULQDQ, CMOV, BSWAP, SHANI, GFNI */ + +#ifndef IMB_ASM_SSE_T3_H +#define IMB_ASM_SSE_T3_H + +#include "ipsec-mb.h" +#include "ipsec_ooo_mgr.h" + +/* AES-CBC */ +void aes_cbc_enc_128_x8_sse(AES_ARGS *args, uint64_t len_in_bytes); +void aes_cbc_enc_192_x8_sse(AES_ARGS *args, uint64_t len_in_bytes); +void aes_cbc_enc_256_x8_sse(AES_ARGS *args, uint64_t len_in_bytes); + +void aes_cbc_dec_128_by8_sse(const void *in, const uint8_t *IV, + const void *keys, void *out, uint64_t len_bytes); +void aes_cbc_dec_192_by8_sse(const void *in, const uint8_t *IV, + const void *keys, void *out, uint64_t len_bytes); +void aes_cbc_dec_256_by8_sse(const void *in, const uint8_t *IV, + const void *keys, void *out, uint64_t len_bytes); + +/* AES-ECB */ +void aes_ecb_enc_256_by8_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_192_by8_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_enc_128_by8_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +void aes_ecb_dec_256_by8_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_192_by8_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); +void aes_ecb_dec_128_by8_sse(const void *in, const void *keys, + void *out, uint64_t len_bytes); + +/* moved from MB MGR */ + +IMB_JOB *submit_job_aes128_enc_x8_sse(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes128_enc_x8_sse(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes192_enc_x8_sse(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes192_enc_x8_sse(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes256_enc_x8_sse(MB_MGR_AES_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_aes256_enc_x8_sse(MB_MGR_AES_OOO *state); + +IMB_JOB *submit_job_aes128_cmac_auth_x8_sse(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_cmac_auth_x8_sse(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes256_cmac_auth_x8_sse(MB_MGR_CMAC_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes256_cmac_auth_x8_sse(MB_MGR_CMAC_OOO *state); + +IMB_JOB *submit_job_aes128_ccm_auth_x8_sse(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes128_ccm_auth_x8_sse(MB_MGR_CCM_OOO *state); + +IMB_JOB *submit_job_aes256_ccm_auth_x8_sse(MB_MGR_CCM_OOO *state, + IMB_JOB *job); + +IMB_JOB *flush_job_aes256_ccm_auth_x8_sse(MB_MGR_CCM_OOO *state); + +IMB_JOB *submit_job_zuc_eea3_gfni_sse(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eea3_gfni_sse(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eea3_gfni_sse(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc256_eea3_gfni_sse(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc_eia3_gfni_sse(MB_MGR_ZUC_OOO *state, + IMB_JOB *job); +IMB_JOB *flush_job_zuc_eia3_gfni_sse(MB_MGR_ZUC_OOO *state); + +IMB_JOB *submit_job_zuc256_eia3_gfni_sse(MB_MGR_ZUC_OOO *state, + IMB_JOB *job, + const uint64_t tag_sz); +IMB_JOB *flush_job_zuc256_eia3_gfni_sse(MB_MGR_ZUC_OOO *state, + const uint64_t tag_sz); + +#endif /* IMB_ASM_SSE_T3_H */ diff --git a/lib/include/arch_x86_64.h b/lib/include/arch_x86_64.h new file mode 100644 index 0000000000000000000000000000000000000000..bb21f7feaf46148b7816fba92ec9df18e4a5a5fd --- /dev/null +++ b/lib/include/arch_x86_64.h @@ -0,0 +1,135 @@ +/******************************************************************************* + Copyright (c) 2022, Intel 
Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#ifndef IMB_ARCH_X86_64_H +#define IMB_ARCH_X86_64_H + +#include "ipsec-mb.h" + +IMB_DLL_LOCAL void *poly1305_mac_scalar(IMB_JOB *job); +IMB_DLL_LOCAL void +poly1305_aead_update_scalar(const void *msg, const uint64_t msg_len, + void *hash, const void *key); +IMB_DLL_LOCAL void +poly1305_aead_complete_scalar(const void *hash, const void *key, void *tag); + +/** + * @brief DES CBC encryption + * + * @param input source buffer with plain text + * @param output destination buffer for cipher text + * @param size number of bytes to encrypt (multiple of 8) + * @param ks pointer to key schedule structure + * @param ivec pointer to initialization vector + */ +IMB_DLL_LOCAL +void des_enc_cbc_basic(const void *input, void *output, const int size, + const uint64_t *ks, const uint64_t *ivec); + +/** + * @brief DES CBC decryption + * + * @param input source buffer with cipher text + * @param output destination buffer for plain text + * @param size number of bytes to decrypt (multiple of 8) + * @param ks pointer to key schedule structure + * @param ivec pointer to initialization vector + */ +IMB_DLL_LOCAL +void des_dec_cbc_basic(const void *input, void *output, const int size, + const uint64_t *ks, const uint64_t *ivec); + +/** + * @brief 3DES CBC encryption + * + * @param input source buffer with plain text + * @param output destination buffer for cipher text + * @param size number of bytes to encrypt (multiple of 8) + * @param ks1 pointer to key schedule 1 structure + * @param ks2 pointer to key schedule 2 structure + * @param ks3 pointer to key schedule 3 structure + * @param ivec pointer to initialization vector + */ +IMB_DLL_LOCAL +void des3_enc_cbc_basic(const void *input, void *output, const int size, + const uint64_t *ks1, const uint64_t *ks2, + const uint64_t *ks3, const uint64_t *ivec); + +/** + * @brief 3DES CBC decryption + * + * @param input source buffer with cipher text + * @param output destination buffer for plain text + * @param size number of bytes to 
decrypt (multiple of 8) + * @param ks1 pointer to key schedule 1 structure + * @param ks2 pointer to key schedule 2 structure + * @param ks3 pointer to key schedule 3 structure + * @param ivec pointer to initialization vector + */ +IMB_DLL_LOCAL +void des3_dec_cbc_basic(const void *input, void *output, const int size, + const uint64_t *ks1, const uint64_t *ks2, + const uint64_t *ks3, const uint64_t *ivec); + +/** + * @brief DOCSIS DES encryption + * + * @param input source buffer with plain text + * @param output destination buffer for cipher text + * @param size number of bytes to encrypt + * @param ks pointer to key schedule structure + * @param ivec pointer to initialization vector + */ +IMB_DLL_LOCAL +void docsis_des_enc_basic(const void *input, void *output, const int size, + const uint64_t *ks, const uint64_t *ivec); + +/** + * @brief DOCSIS DES decryption + * + * @param input source buffer with cipher text + * @param output destination buffer for plain text + * @param size number of bytes to decrypt + * @param ks pointer to key schedule structure + * @param ivec pointer to initialization vector + */ +IMB_DLL_LOCAL +void docsis_des_dec_basic(const void *input, void *output, const int size, + const uint64_t *ks, const uint64_t *ivec); + +/** + * @brief Runs self test on selected CAVP algorithms + * + * @param p_mgr initialized MB manager structure + * + * @return Self test status + * @retval 0 self test failed + * @retval 1 self test passed + */ +IMB_DLL_LOCAL int self_test(IMB_MGR *p_mgr); + +#endif /* IMB_ARCH_X86_64_H */ diff --git a/lib/include/cet.inc b/lib/include/cet.inc index c29730d11626c2e168bb0e7115eccee7e1894ac7..b8e9dde853919e5a73cabb468bb18a86468bbd87 100644 --- a/lib/include/cet.inc +++ b/lib/include/cet.inc @@ -29,7 +29,10 @@ %define _CET_INC_ %macro endbranch64 0 -db 0xf3, 0x0f, 0x1e, 0xfa + ;; generate enbranch64 opcode only for Linux/FreeBSD +%ifdef LINUX + db 0xf3, 0x0f, 0x1e, 0xfa +%endif %endmacro %endif ; _CET_INC_ diff --git a/lib/include/chacha20_poly1305.h b/lib/include/chacha20_poly1305.h index 1804c8531d9b49394b409a1096d040d47a65c9f3..233689c6e7745b9a5685eb0f729c6eea8f9a8d66 100644 --- a/lib/include/chacha20_poly1305.h +++ b/lib/include/chacha20_poly1305.h @@ -29,6 +29,7 @@ #define IMB_CHACHA20POLY1305_H #include "ipsec-mb.h" +#include "include/arch_x86_64.h" /* new internal API's */ IMB_JOB *aead_chacha20_poly1305_sse(IMB_MGR *mgr, IMB_JOB *job); @@ -68,11 +69,6 @@ IMB_JOB *submit_job_chacha20_poly_enc_sse(IMB_JOB *, void *poly_key); void poly1305_key_gen_sse(const void *key, const void *iv, void *poly_key); void poly1305_key_gen_avx(const void *key, const void *iv, void *poly_key); -void poly1305_aead_update_scalar(const void *msg, const uint64_t msg_len, - void *hash, const void *key); -void poly1305_aead_complete_scalar(const void *hash, const void *key, - void *tag); - void poly1305_aead_update_avx512(const void *msg, const uint64_t msg_len, void *hash, const void *key); void poly1305_aead_complete_avx512(const void *hash, const void *key, diff --git a/lib/include/clear_regs.asm b/lib/include/clear_regs.asm index 8461aa0305bf5150a25e3f1c058de75f6744e8b0..3df53f47a59d425f3f4f0413ec6e3a61dc39187f 100644 --- a/lib/include/clear_regs.asm +++ b/lib/include/clear_regs.asm @@ -170,7 +170,7 @@ %assign i (i+1) %endrep %endif ; LINUX - vzeroupper + vzeroupper %endmacro ; diff --git a/lib/include/des.h b/lib/include/des.h index f55509ddddff2cb6179dd37c1d6c5e614d60d3ac..f8944b3aff54d9a22ceac1d18c627c544273f10c 100644 --- a/lib/include/des.h +++ b/lib/include/des.h 
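The DES helpers removed from des.h below keep their documented contracts in arch_x86_64.h above: the CBC variants take a byte count that is a multiple of the 8-byte DES block, and the caller supplies both the expanded key schedule and the IV. A minimal round-trip sketch through the CBC pair, assuming a pre-expanded schedule; the prototypes are copied from the declarations above, while the buffer sizes and the schedule itself are placeholders.

#include <stdint.h>
#include <string.h>
#include <assert.h>

/* Prototypes copied from the declarations above; the key schedule and
 * buffers below are illustrative placeholders, not library data. */
void des_enc_cbc_basic(const void *input, void *output, const int size,
                       const uint64_t *ks, const uint64_t *ivec);
void des_dec_cbc_basic(const void *input, void *output, const int size,
                       const uint64_t *ks, const uint64_t *ivec);

static void des_cbc_roundtrip_sketch(const uint64_t *ks, const uint64_t iv)
{
        uint8_t plain[64], cipher[64], check[64];

        memset(plain, 0xa5, sizeof(plain));

        /* length must be a multiple of the 8-byte DES block */
        des_enc_cbc_basic(plain, cipher, (int) sizeof(plain), ks, &iv);
        des_dec_cbc_basic(cipher, check, (int) sizeof(cipher), ks, &iv);

        assert(memcmp(plain, check, sizeof(plain)) == 0);
}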
@@ -29,84 +29,7 @@ #define IMB_DES_H #include - -/** - * @brief DES CBC encryption - * - * @param input source buffer with plain text - * @param output destination buffer for cipher text - * @param size number of bytes to encrypt (multiple of 8) - * @param ks pointer to key schedule structure - * @param ivec pointer to initialization vector - */ -void des_enc_cbc_basic(const void *input, void *output, const int size, - const uint64_t *ks, const uint64_t *ivec); - -/** - * @brief DES CBC decryption - * - * @param input source buffer with cipher text - * @param output destination buffer for plain text - * @param size number of bytes to decrypt (multiple of 8) - * @param ks pointer to key schedule structure - * @param ivec pointer to initialization vector - */ -void des_dec_cbc_basic(const void *input, void *output, const int size, - const uint64_t *ks, const uint64_t *ivec); - -/** - * @brief 3DES CBC encryption - * - * @param input source buffer with plain text - * @param output destination buffer for cipher text - * @param size number of bytes to encrypt (multiple of 8) - * @param ks1 pointer to key schedule 1 structure - * @param ks2 pointer to key schedule 2 structure - * @param ks3 pointer to key schedule 3 structure - * @param ivec pointer to initialization vector - */ -void des3_enc_cbc_basic(const void *input, void *output, const int size, - const uint64_t *ks1, const uint64_t *ks2, - const uint64_t *ks3, const uint64_t *ivec); - -/** - * @brief 3DES CBC decryption - * - * @param input source buffer with cipher text - * @param output destination buffer for plain text - * @param size number of bytes to decrypt (multiple of 8) - * @param ks1 pointer to key schedule 1 structure - * @param ks2 pointer to key schedule 2 structure - * @param ks3 pointer to key schedule 3 structure - * @param ivec pointer to initialization vector - */ -void des3_dec_cbc_basic(const void *input, void *output, const int size, - const uint64_t *ks1, const uint64_t *ks2, - const uint64_t *ks3, const uint64_t *ivec); - -/** - * @brief DOCSIS DES encryption - * - * @param input source buffer with plain text - * @param output destination buffer for cipher text - * @param size number of bytes to encrypt - * @param ks pointer to key schedule structure - * @param ivec pointer to initialization vector - */ -void docsis_des_enc_basic(const void *input, void *output, const int size, - const uint64_t *ks, const uint64_t *ivec); - -/** - * @brief DOCSIS DES decryption - * - * @param input source buffer with cipher text - * @param output destination buffer for plain text - * @param size number of bytes to decrypt - * @param ks pointer to key schedule structure - * @param ivec pointer to initialization vector - */ -void docsis_des_dec_basic(const void *input, void *output, const int size, - const uint64_t *ks, const uint64_t *ivec); +#include "arch_x86_64.h" /* ========================================================================= */ /* DES and 3DES inline function for use in mb_mgr_code.h */ diff --git a/lib/include/docsis_common.h b/lib/include/docsis_common.h index b92dc6c737af3c55c2d11bde68c184b5752f17d7..ca57ab9539db7fe905fa182e435b8d2cea0979ad 100644 --- a/lib/include/docsis_common.h +++ b/lib/include/docsis_common.h @@ -31,9 +31,9 @@ * JOB submit and flush helper functions to be used from mb_mgr_code.h * * @note These need to be defined prior to including this file: - * ETHERNET_FCS, AES_CFB_ONE, SUBMIT_JOB_AES128_DEC and - * SUBMIT_JOB_AES128_ENC, SUBMIT_JOB_AES256_DEC and - * SUBMIT_JOB_AES256_DEC. 
+ * ETHERNET_FCS, AES_CFB_ONE, SUBMIT_JOB_AES_CBC_128_DEC and + * SUBMIT_JOB_AES_CBC_128_ENC, SUBMIT_JOB_AES_CBC_256_ENC and + * SUBMIT_JOB_AES_CBC_256_DEC. * * @note The file defines the following: * DOCSIS_LAST_BLOCK, DOCSIS_FIRST_BLOCK, @@ -180,17 +180,17 @@ SUBMIT_JOB_DOCSIS_SEC_ENC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job, if (key_size == 16) { if (job->msg_len_to_cipher_in_bytes >= IMB_AES_BLOCK_SIZE) { - tmp = SUBMIT_JOB_AES128_ENC((MB_MGR_AES_OOO *)state, - job); + MB_MGR_AES_OOO *aes_mgr = (MB_MGR_AES_OOO *)state; + tmp = SUBMIT_JOB_AES_CBC_128_ENC(aes_mgr, job); return DOCSIS_LAST_BLOCK(tmp, 16); } else return DOCSIS_FIRST_BLOCK(job, 16); } else { /* Key length = 32 */ if (job->msg_len_to_cipher_in_bytes >= IMB_AES_BLOCK_SIZE) { - tmp = SUBMIT_JOB_AES256_ENC((MB_MGR_AES_OOO *)state, - job); + MB_MGR_AES_OOO *aes_mgr = (MB_MGR_AES_OOO *)state; + tmp = SUBMIT_JOB_AES_CBC_256_ENC(aes_mgr, job); return DOCSIS_LAST_BLOCK(tmp, 32); } else return DOCSIS_FIRST_BLOCK(job, 32); @@ -225,11 +225,11 @@ FLUSH_JOB_DOCSIS_SEC_ENC(MB_MGR_DOCSIS_AES_OOO *state, const uint64_t key_size) IMB_JOB *tmp; if (key_size == 16) { - tmp = FLUSH_JOB_AES128_ENC((MB_MGR_AES_OOO *)state); + tmp = FLUSH_JOB_AES_CBC_128_ENC((MB_MGR_AES_OOO *)state); return DOCSIS_LAST_BLOCK(tmp, 16); } else { /* 32 */ - tmp = FLUSH_JOB_AES256_ENC((MB_MGR_AES_OOO *)state); + tmp = FLUSH_JOB_AES_CBC_256_ENC((MB_MGR_AES_OOO *)state); return DOCSIS_LAST_BLOCK(tmp, 32); } @@ -266,13 +266,13 @@ SUBMIT_JOB_DOCSIS_SEC_DEC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job, if (key_size == 16) { if (job->msg_len_to_cipher_in_bytes >= IMB_AES_BLOCK_SIZE) { DOCSIS_LAST_BLOCK(job, 16); - return SUBMIT_JOB_AES128_DEC(job); + return SUBMIT_JOB_AES_CBC_128_DEC(job); } else return DOCSIS_FIRST_BLOCK(job, 16); } else { /* 32 */ if (job->msg_len_to_cipher_in_bytes >= IMB_AES_BLOCK_SIZE) { DOCSIS_LAST_BLOCK(job, 32); - return SUBMIT_JOB_AES256_DEC(job); + return SUBMIT_JOB_AES_CBC_256_DEC(job); } else return DOCSIS_FIRST_BLOCK(job, 32); } @@ -367,9 +367,9 @@ SUBMIT_JOB_DOCSIS_SEC_CRC_DEC(MB_MGR_DOCSIS_AES_OOO *state, IMB_JOB *job, if (job->msg_len_to_cipher_in_bytes >= IMB_AES_BLOCK_SIZE) { DOCSIS_LAST_BLOCK(job, key_size); if (key_size == 16) - job = SUBMIT_JOB_AES128_DEC(job); + job = SUBMIT_JOB_AES_CBC_128_DEC(job); else /* 32 */ - job = SUBMIT_JOB_AES256_DEC(job); + job = SUBMIT_JOB_AES_CBC_256_DEC(job); } else { job = DOCSIS_FIRST_BLOCK(job, key_size); } diff --git a/lib/include/error.inc b/lib/include/error.inc index 65c8c9f8c51e5c59b466230db29d79f20bfd7159..54df265f764d2cd6d5bb192920da1827134ea00a 100644 --- a/lib/include/error.inc +++ b/lib/include/error.inc @@ -89,7 +89,16 @@ SET_ERRNO_TYPES \ IMB_ERR_JOB_NULL_HMAC_IPAD, \ IMB_ERR_JOB_NULL_XCBC_K1_EXP, \ IMB_ERR_JOB_NULL_XCBC_K2, \ - IMB_ERR_JOB_NULL_XCBC_K3 + IMB_ERR_JOB_NULL_XCBC_K3, \ + IMB_ERR_JOB_CIPH_DIR, \ + IMB_ERR_JOB_NULL_GHASH_INIT_TAG, \ + IMB_ERR_MISSING_CPUFLAGS_INIT_MGR, \ + IMB_ERR_NULL_JOB, \ + IMB_ERR_QUEUE_SPACE, \ + IMB_ERR_NULL_BURST, \ + IMB_ERR_BURST_SIZE, \ + IMB_ERR_BURST_OOO, \ + IMB_ERR_SELFTEST ;; Reset global imb_errno to 0 %macro IMB_ERR_CHECK_RESET 0 diff --git a/lib/include/gcm.h b/lib/include/gcm.h index 154f7ed07c134328cbcfd6811188be9f2743415f..82ba93b481e4ba4254f2b4333c9a7d0a6274d5d3 100644 --- a/lib/include/gcm.h +++ b/lib/include/gcm.h @@ -844,25 +844,25 @@ aes_gcm_dec_var_iv_256_sse_no_aesni(const struct gcm_key_data *key_data, */ IMB_DLL_EXPORT void -ghash_sse_no_aesni(struct gcm_key_data *key_data, const void *in, +ghash_sse_no_aesni(const struct 
gcm_key_data *key_data, const void *in, const uint64_t in_len, void *io_tag, const uint64_t tag_len); IMB_DLL_EXPORT void -ghash_sse(struct gcm_key_data *key_data, const void *in, +ghash_sse(const struct gcm_key_data *key_data, const void *in, const uint64_t in_len, void *io_tag, const uint64_t tag_len); IMB_DLL_EXPORT void -ghash_avx_gen2(struct gcm_key_data *key_data, const void *in, +ghash_avx_gen2(const struct gcm_key_data *key_data, const void *in, const uint64_t in_len, void *io_tag, const uint64_t tag_len); IMB_DLL_EXPORT void -ghash_avx_gen4(struct gcm_key_data *key_data, const void *in, +ghash_avx_gen4(const struct gcm_key_data *key_data, const void *in, const uint64_t in_len, void *io_tag, const uint64_t tag_len); IMB_DLL_EXPORT void -ghash_avx512(struct gcm_key_data *key_data, const void *in, +ghash_avx512(const struct gcm_key_data *key_data, const void *in, const uint64_t in_len, void *io_tag, const uint64_t tag_len); IMB_DLL_EXPORT void -ghash_vaes_avx512(struct gcm_key_data *key_data, const void *in, +ghash_vaes_avx512(const struct gcm_key_data *key_data, const void *in, const uint64_t in_len, void *io_tag, const uint64_t tag_len); diff --git a/lib/include/ipsec_ooo_mgr.h b/lib/include/ipsec_ooo_mgr.h index f5a8334041a12ff7c211a2d54d310a2bd4cbeeed..982aaee8dc8706ff035027be1c8fead67fcffeff 100644 --- a/lib/include/ipsec_ooo_mgr.h +++ b/lib/include/ipsec_ooo_mgr.h @@ -114,17 +114,17 @@ typedef struct { typedef struct { DECLARE_ALIGNED(uint32_t digest[SHA1_DIGEST_SZ], 32); - uint8_t *data_ptr[AVX512_NUM_SHA1_LANES]; + const uint8_t *data_ptr[AVX512_NUM_SHA1_LANES]; } SHA1_ARGS; typedef struct { DECLARE_ALIGNED(uint32_t digest[SHA256_DIGEST_SZ], 32); - uint8_t *data_ptr[AVX512_NUM_SHA256_LANES]; + const uint8_t *data_ptr[AVX512_NUM_SHA256_LANES]; } SHA256_ARGS; typedef struct { DECLARE_ALIGNED(uint64_t digest[SHA512_DIGEST_SZ], 32); - uint8_t *data_ptr[AVX512_NUM_SHA512_LANES]; + const uint8_t *data_ptr[AVX512_NUM_SHA512_LANES]; } SHA512_ARGS; typedef struct { @@ -155,7 +155,7 @@ typedef struct { DECLARE_ALIGNED(uint8_t *out[16], 64); const uint8_t *keys[16]; DECLARE_ALIGNED(uint8_t iv[16*32], 32); - DECLARE_ALIGNED(uint32_t digest[16], 64); + DECLARE_ALIGNED(uint32_t digest[16*4], 64); /* Memory for 128 bytes of KS for 16 buffers */ DECLARE_ALIGNED(uint32_t ks[16 * 2 * 16], 64); } ZUC_ARGS_x16; @@ -338,6 +338,15 @@ typedef struct { uint64_t road_block; } MB_MGR_HMAC_SHA_1_OOO; +typedef struct { + SHA1_ARGS args; + DECLARE_ALIGNED(uint64_t lens[AVX512_NUM_SHA1_LANES], 32); + uint64_t unused_lanes; + HMAC_SHA1_LANE_DATA ldata[AVX512_NUM_SHA1_LANES]; + uint32_t num_lanes_inuse; + uint64_t road_block; +} MB_MGR_SHA_1_OOO; + typedef struct { SHA256_ARGS args; DECLARE_ALIGNED(uint16_t lens[16], 16); @@ -347,6 +356,15 @@ typedef struct { uint64_t road_block; } MB_MGR_HMAC_SHA_256_OOO; +typedef struct { + SHA256_ARGS args; + DECLARE_ALIGNED(uint64_t lens[AVX512_NUM_SHA256_LANES], 16); + uint64_t unused_lanes; + HMAC_SHA1_LANE_DATA ldata[AVX512_NUM_SHA256_LANES]; + uint32_t num_lanes_inuse; + uint64_t road_block; +} MB_MGR_SHA_256_OOO; + typedef struct { SHA512_ARGS args; DECLARE_ALIGNED(uint16_t lens[8], 16); @@ -355,6 +373,15 @@ typedef struct { uint64_t road_block; } MB_MGR_HMAC_SHA_512_OOO; +typedef struct { + SHA512_ARGS args; + DECLARE_ALIGNED(uint64_t lens[AVX512_NUM_SHA512_LANES], 16); + uint64_t unused_lanes; + HMAC_SHA512_LANE_DATA ldata[AVX512_NUM_SHA512_LANES]; + uint32_t num_lanes_inuse; + uint64_t road_block; +} MB_MGR_SHA_512_OOO; + /* MD5-HMAC out-of-order scheduler 
fields */ typedef struct { MD5_ARGS args; @@ -389,11 +416,370 @@ init_mb_mgr_sse_no_aesni_internal(IMB_MGR *state, const int reset_mgrs); IMB_DLL_LOCAL void init_mb_mgr_sse_internal(IMB_MGR *state, const int reset_mgrs); IMB_DLL_LOCAL void +init_mb_mgr_sse_t1_internal(IMB_MGR *state, const int reset_mgrs); +IMB_DLL_LOCAL void +init_mb_mgr_sse_t2_internal(IMB_MGR *state, const int reset_mgrs); +IMB_DLL_LOCAL void +init_mb_mgr_sse_t3_internal(IMB_MGR *state, const int reset_mgrs); +IMB_DLL_LOCAL void init_mb_mgr_avx_internal(IMB_MGR *state, const int reset_mgrs); IMB_DLL_LOCAL void +init_mb_mgr_avx_t1_internal(IMB_MGR *state, const int reset_mgrs); +IMB_DLL_LOCAL void +init_mb_mgr_avx_t2_internal(IMB_MGR *state, const int reset_mgrs); +IMB_DLL_LOCAL void init_mb_mgr_avx2_internal(IMB_MGR *state, const int reset_mgrs); IMB_DLL_LOCAL void +init_mb_mgr_avx2_t1_internal(IMB_MGR *state, const int reset_mgrs); +IMB_DLL_LOCAL void +init_mb_mgr_avx2_t2_internal(IMB_MGR *state, const int reset_mgrs); +IMB_DLL_LOCAL void init_mb_mgr_avx512_internal(IMB_MGR *state, const int reset_mgrs); +IMB_DLL_LOCAL void +init_mb_mgr_avx512_t1_internal(IMB_MGR *state, const int reset_mgrs); +IMB_DLL_LOCAL void +init_mb_mgr_avx512_t2_internal(IMB_MGR *state, const int reset_mgrs); + +IMB_DLL_EXPORT uint32_t +get_next_burst_sse_t1(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +get_next_burst_sse_t2(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +get_next_burst_sse_t3(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +get_next_burst_avx_t1(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +get_next_burst_avx_t2(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +get_next_burst_avx2_t1(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +get_next_burst_avx2_t2(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +get_next_burst_avx512_t1(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +get_next_burst_avx512_t2(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); + +IMB_DLL_EXPORT uint32_t +submit_burst_sse_t1(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_sse_t2(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_sse_t3(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_avx_t1(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_avx_t2(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_avx2_t1(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_avx2_t2(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_avx512_t1(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_avx512_t2(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs); + +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_sse_t1(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_sse_t2(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_sse_t3(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_avx_t1(IMB_MGR 
*state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_avx_t2(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_avx2_t1(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_avx2_t2(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_avx512_t1(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_avx512_t2(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); + +IMB_DLL_EXPORT uint32_t +flush_burst_sse_t1(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +flush_burst_sse_t2(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +flush_burst_sse_t3(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +flush_burst_avx_t1(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +flush_burst_avx_t2(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +flush_burst_avx2_t1(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +flush_burst_avx2_t2(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +flush_burst_avx512_t1(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +flush_burst_avx512_t2(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs); + +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_sse_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_sse_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_sse_t3(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_avx_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_avx_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_avx2_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_avx2_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_avx512_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_avx512_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); + +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_sse_t1(IMB_MGR 
*state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_sse_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_sse_t3(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_avx_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_avx_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_avx2_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_avx2_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_avx512_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_avx512_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); + +IMB_DLL_EXPORT uint32_t +submit_hash_burst_sse_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_sse_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_sse_t3(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_avx_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_avx_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_avx2_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_avx2_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_avx512_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_avx512_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); + +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_sse_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_sse_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_sse_t3(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t 
n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_avx_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_avx_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_avx2_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_avx2_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_avx512_t1(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_avx512_t2(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); + +/* SSE TYPE1 manager functions */ +IMB_DLL_EXPORT IMB_JOB *submit_job_sse_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *flush_job_sse_t1(IMB_MGR *state); +IMB_DLL_EXPORT uint32_t queue_size_sse_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_sse_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_next_job_sse_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_completed_job_sse_t1(IMB_MGR *state); + +/* SSE TYPE2 manager functions */ +IMB_DLL_EXPORT IMB_JOB *submit_job_sse_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *flush_job_sse_t2(IMB_MGR *state); +IMB_DLL_EXPORT uint32_t queue_size_sse_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_sse_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_next_job_sse_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_completed_job_sse_t2(IMB_MGR *state); + +/* SSE TYPE3 manager functions */ +IMB_DLL_EXPORT IMB_JOB *submit_job_sse_t3(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *flush_job_sse_t3(IMB_MGR *state); +IMB_DLL_EXPORT uint32_t queue_size_sse_t3(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_sse_t3(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_next_job_sse_t3(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_completed_job_sse_t3(IMB_MGR *state); + +/* AVX TYPE1 manager functions */ +IMB_DLL_EXPORT IMB_JOB *submit_job_avx_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *flush_job_avx_t1(IMB_MGR *state); +IMB_DLL_EXPORT uint32_t queue_size_avx_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_next_job_avx_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx_t1(IMB_MGR *state); + +/* AVX TYPE2 manager functions */ +IMB_DLL_EXPORT IMB_JOB *submit_job_avx_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *flush_job_avx_t2(IMB_MGR *state); +IMB_DLL_EXPORT uint32_t queue_size_avx_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_next_job_avx_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx_t2(IMB_MGR *state); + +/* AVX2 TYPE1 manager functions */ +IMB_DLL_EXPORT IMB_JOB *submit_job_avx2_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *flush_job_avx2_t1(IMB_MGR *state); +IMB_DLL_EXPORT uint32_t queue_size_avx2_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx2_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_next_job_avx2_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx2_t1(IMB_MGR *state); + +/* AVX2 TYPE2 manager functions */ +IMB_DLL_EXPORT IMB_JOB *submit_job_avx2_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *flush_job_avx2_t2(IMB_MGR *state); 
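The per-architecture submit/flush/queue entry points declared in this header are not intended to be called directly: the matching init_mb_mgr_*_internal() routines install them into the IMB_MGR function-pointer table and applications reach them through the public job and burst API. The sketch below shows that path for AES-128-CBC encryption of one buffer. It is illustrative only and not part of this patch; the IMB_* names are the public intel-ipsec-mb API (several of them appear elsewhere in this patch), while cbc_encrypt_one() and its arguments are invented for the example and the expanded keys are assumed to come from IMB_AES_KEYEXP_128().

/* Illustrative usage sketch - not part of the patched header */
#include <intel-ipsec-mb.h>  /* installed header name; in-tree code includes "ipsec-mb.h" */

static int
cbc_encrypt_one(const void *enc_exp_keys, const void *dec_exp_keys,
                const void *iv, const void *in, void *out, const uint64_t len)
{
        IMB_MGR *mgr = alloc_mb_mgr(0);
        IMB_JOB *job;
        int ret = -1;

        if (mgr == NULL)
                return -1;

        /* picks the best SSE/AVX/AVX2/AVX512 type1/2/3 path for this CPU */
        init_mb_mgr_auto(mgr, NULL);

        job = IMB_GET_NEXT_JOB(mgr);
        job->cipher_mode = IMB_CIPHER_CBC;
        job->hash_alg = IMB_AUTH_NULL;
        job->cipher_direction = IMB_DIR_ENCRYPT;
        job->chain_order = IMB_ORDER_CIPHER_HASH;
        job->enc_keys = enc_exp_keys;
        job->dec_keys = dec_exp_keys;
        job->key_len_in_bytes = IMB_KEY_128_BYTES;
        job->iv = iv;
        job->iv_len_in_bytes = 16;
        job->src = in;
        job->dst = out;
        job->cipher_start_src_offset_in_bytes = 0;
        job->msg_len_to_cipher_in_bytes = len;

        /* dispatches to the selected submit_job_<arch>_t<N>() internally */
        job = IMB_SUBMIT_JOB(mgr);
        if (job == NULL)
                job = IMB_FLUSH_JOB(mgr);

        if (job != NULL && job->status == IMB_STATUS_COMPLETED)
                ret = 0;

        free_mb_mgr(mgr);
        return ret;
}

The burst entry points declared above are reached the same way, through the corresponding IMB_GET_NEXT_BURST()/IMB_SUBMIT_BURST()/IMB_FLUSH_BURST() and IMB_SUBMIT_CIPHER_BURST()/IMB_SUBMIT_HASH_BURST() wrappers on an initialized IMB_MGR.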
+IMB_DLL_EXPORT uint32_t queue_size_avx2_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx2_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_next_job_avx2_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx2_t2(IMB_MGR *state); + +/* AVX512 TYPE1 manager functions */ +IMB_DLL_EXPORT IMB_JOB *submit_job_avx512_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *flush_job_avx512_t1(IMB_MGR *state); +IMB_DLL_EXPORT uint32_t queue_size_avx512_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx512_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_next_job_avx512_t1(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx512_t1(IMB_MGR *state); + +/* AVX512 TYPE2 manager functions */ +IMB_DLL_EXPORT IMB_JOB *submit_job_avx512_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *flush_job_avx512_t2(IMB_MGR *state); +IMB_DLL_EXPORT uint32_t queue_size_avx512_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx512_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_next_job_avx512_t2(IMB_MGR *state); +IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx512_t2(IMB_MGR *state); IMB_DLL_LOCAL void init_mb_mgr_aarch64_no_aesni_internal(IMB_MGR *state, const int reset_mgrs); diff --git a/lib/include/job_api_docsis.h b/lib/include/job_api_docsis.h new file mode 100644 index 0000000000000000000000000000000000000000..17236b01e7f811c1163ba3456eb04e453f6b8ccd --- /dev/null +++ b/lib/include/job_api_docsis.h @@ -0,0 +1,127 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include "ipsec-mb.h" +#include "include/docsis_common.h" + +#ifndef JOB_API_DOCSIS_H +#define JOB_API_DOCSIS_H + +__forceinline +IMB_JOB * +submit_docsis_enc_job(IMB_MGR *state, IMB_JOB *job) +{ + if (16 == job->key_len_in_bytes) { + if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis128_crc32_sec_ooo; + + return SUBMIT_JOB_DOCSIS128_SEC_CRC_ENC(p_ooo, job); + } else { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis128_sec_ooo; + + return SUBMIT_JOB_DOCSIS128_SEC_ENC(p_ooo, job); + } + } else { /* 32 */ + if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis256_crc32_sec_ooo; + + return SUBMIT_JOB_DOCSIS256_SEC_CRC_ENC(p_ooo, job); + } else { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis256_sec_ooo; + + return SUBMIT_JOB_DOCSIS256_SEC_ENC(p_ooo, job); + } + } +} + +__forceinline +IMB_JOB * +flush_docsis_enc_job(IMB_MGR *state, IMB_JOB *job) +{ + if (16 == job->key_len_in_bytes) { + if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis128_crc32_sec_ooo; + + return FLUSH_JOB_DOCSIS128_SEC_CRC_ENC(p_ooo); + } else { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis128_sec_ooo; + + return FLUSH_JOB_DOCSIS128_SEC_ENC(p_ooo); + } + } else { /* 32 */ + if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis256_crc32_sec_ooo; + + return FLUSH_JOB_DOCSIS256_SEC_CRC_ENC(p_ooo); + } else { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis256_sec_ooo; + + return FLUSH_JOB_DOCSIS256_SEC_ENC(p_ooo); + } + } +} + +__forceinline +IMB_JOB * +submit_docsis_dec_job(IMB_MGR *state, IMB_JOB *job) +{ + if (16 == job->key_len_in_bytes) { + if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis128_crc32_sec_ooo; + + return SUBMIT_JOB_DOCSIS128_SEC_CRC_DEC(p_ooo, job); + } else { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis128_sec_ooo; + + return SUBMIT_JOB_DOCSIS128_SEC_DEC(p_ooo, job); + } + } else { /* 32 */ + if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis256_crc32_sec_ooo; + + return SUBMIT_JOB_DOCSIS256_SEC_CRC_DEC(p_ooo, job); + } else { + MB_MGR_DOCSIS_AES_OOO *p_ooo = + state->docsis256_sec_ooo; + + return SUBMIT_JOB_DOCSIS256_SEC_DEC(p_ooo, job); + } + } +} + +#endif /* JOB_API_DOCSIS_H */ diff --git a/lib/include/job_api_gcm.h b/lib/include/job_api_gcm.h new file mode 100644 index 0000000000000000000000000000000000000000..9efb852b5f29e523af6fe0c006c6f8ce449b1cb4 --- /dev/null +++ b/lib/include/job_api_gcm.h @@ -0,0 +1,341 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#include "ipsec-mb.h" + +#ifndef JOB_API_GCM_H +#define JOB_API_GCM_H + +__forceinline +IMB_JOB * +submit_gcm_sgl_enc(IMB_MGR *state, IMB_JOB *job) +{ + switch (job->key_len_in_bytes) { + case IMB_KEY_128_BYTES: + if (job->sgl_state == IMB_SGL_INIT) + IMB_AES128_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + else if (job->sgl_state == IMB_SGL_UPDATE) + IMB_AES128_GCM_ENC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->dst, job->src, + job->msg_len_to_cipher_in_bytes); + else if (job->sgl_state == IMB_SGL_COMPLETE) + IMB_AES128_GCM_ENC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + else { /* IMB_SGL_ALL */ + unsigned int i; + + IMB_AES128_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + for (i = 0; i < job->num_sgl_io_segs; i++) + IMB_AES128_GCM_ENC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->sgl_io_segs[i].out, + job->sgl_io_segs[i].in, + job->sgl_io_segs[i].len); + IMB_AES128_GCM_ENC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } + break; + case IMB_KEY_192_BYTES: + if (job->sgl_state == IMB_SGL_INIT) + IMB_AES192_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + else if (job->sgl_state == IMB_SGL_UPDATE) + IMB_AES192_GCM_ENC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->dst, job->src, + job->msg_len_to_cipher_in_bytes); + else if (job->sgl_state == IMB_SGL_COMPLETE) + IMB_AES192_GCM_ENC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + else { /* IMB_SGL_ALL */ + unsigned int i; + + IMB_AES192_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + for (i = 0; i < job->num_sgl_io_segs; i++) + IMB_AES192_GCM_ENC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->sgl_io_segs[i].out, + job->sgl_io_segs[i].in, + job->sgl_io_segs[i].len); + IMB_AES192_GCM_ENC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } + break; + case IMB_KEY_256_BYTES: + default: + if (job->sgl_state == IMB_SGL_INIT) + IMB_AES256_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + else if (job->sgl_state == IMB_SGL_UPDATE) + 
IMB_AES256_GCM_ENC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->dst, job->src, + job->msg_len_to_cipher_in_bytes); + else if (job->sgl_state == IMB_SGL_COMPLETE) + IMB_AES256_GCM_ENC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + else { /* IMB_SGL_ALL */ + unsigned int i; + + IMB_AES256_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + for (i = 0; i < job->num_sgl_io_segs; i++) + IMB_AES256_GCM_ENC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->sgl_io_segs[i].out, + job->sgl_io_segs[i].in, + job->sgl_io_segs[i].len); + IMB_AES256_GCM_ENC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } + break; + } + + job->status = IMB_STATUS_COMPLETED; + + return job; +} + +__forceinline +IMB_JOB * +submit_gcm_sgl_dec(IMB_MGR *state, IMB_JOB *job) +{ + switch (job->key_len_in_bytes) { + case IMB_KEY_128_BYTES: + if (job->sgl_state == IMB_SGL_INIT) + IMB_AES128_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + else if (job->sgl_state == IMB_SGL_UPDATE) + IMB_AES128_GCM_DEC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->dst, job->src, + job->msg_len_to_cipher_in_bytes); + else if (job->sgl_state == IMB_SGL_COMPLETE) + IMB_AES128_GCM_DEC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + else { /* IMB_SGL_ALL */ + unsigned int i; + + IMB_AES128_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + for (i = 0; i < job->num_sgl_io_segs; i++) + IMB_AES128_GCM_DEC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->sgl_io_segs[i].out, + job->sgl_io_segs[i].in, + job->sgl_io_segs[i].len); + IMB_AES128_GCM_ENC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } + break; + case IMB_KEY_192_BYTES: + if (job->sgl_state == IMB_SGL_INIT) + IMB_AES192_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + else if (job->sgl_state == IMB_SGL_UPDATE) + IMB_AES192_GCM_DEC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->dst, job->src, + job->msg_len_to_cipher_in_bytes); + else if (job->sgl_state == IMB_SGL_COMPLETE) + IMB_AES192_GCM_DEC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + else { /* IMB_SGL_ALL */ + unsigned int i; + + IMB_AES192_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + for (i = 0; i < job->num_sgl_io_segs; i++) + IMB_AES192_GCM_DEC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->sgl_io_segs[i].out, + job->sgl_io_segs[i].in, + job->sgl_io_segs[i].len); + IMB_AES192_GCM_ENC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } + break; + case IMB_KEY_256_BYTES: + default: + if (job->sgl_state == IMB_SGL_INIT) + IMB_AES256_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + else if (job->sgl_state == IMB_SGL_UPDATE) + IMB_AES256_GCM_DEC_UPDATE(state, 
job->enc_keys, + job->u.GCM.ctx, + job->dst, job->src, + job->msg_len_to_cipher_in_bytes); + else if (job->sgl_state == IMB_SGL_COMPLETE) + IMB_AES256_GCM_DEC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + else { /* IMB_SGL_ALL */ + unsigned int i; + + IMB_AES256_GCM_INIT_VAR_IV(state, job->enc_keys, + job->u.GCM.ctx, + job->iv, + job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes); + for (i = 0; i < job->num_sgl_io_segs; i++) + IMB_AES256_GCM_DEC_UPDATE(state, job->enc_keys, + job->u.GCM.ctx, + job->sgl_io_segs[i].out, + job->sgl_io_segs[i].in, + job->sgl_io_segs[i].len); + IMB_AES256_GCM_ENC_FINALIZE(state, job->enc_keys, + job->u.GCM.ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } + break; + } + + job->status = IMB_STATUS_COMPLETED; + + return job; +} + +__forceinline +void +process_gmac(IMB_MGR *state, IMB_JOB *job, const IMB_KEY_SIZE_BYTES key_size) +{ + struct gcm_context_data ctx; + const struct gcm_key_data *key = job->u.GMAC._key; + const uint8_t *iv = job->u.GMAC._iv; + const uint64_t iv_len = job->u.GMAC.iv_len_in_bytes; + const uint8_t *src = job->src + job->hash_start_src_offset_in_bytes; + const uint64_t src_len = job->msg_len_to_hash_in_bytes; + + if (key_size == IMB_KEY_128_BYTES) { + IMB_AES128_GMAC_INIT(state, key, &ctx, iv, iv_len); + IMB_AES128_GMAC_UPDATE(state, key, &ctx, src, src_len); + IMB_AES128_GMAC_FINALIZE(state, key, &ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } else if (key_size == IMB_KEY_192_BYTES) { + IMB_AES192_GMAC_INIT(state, key, &ctx, iv, iv_len); + IMB_AES192_GMAC_UPDATE(state, key, &ctx, src, src_len); + IMB_AES192_GMAC_FINALIZE(state, key, &ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } else { /* key_size == 256 */ + IMB_AES256_GMAC_INIT(state, key, &ctx, iv, iv_len); + IMB_AES256_GMAC_UPDATE(state, key, &ctx, src, src_len); + IMB_AES256_GMAC_FINALIZE(state, key, &ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } +} + +__forceinline IMB_JOB *process_ghash(IMB_MGR *state, IMB_JOB *job) +{ + /* copy initial tag value to the destination */ + memcpy(job->auth_tag_output, job->u.GHASH._init_tag, + job->auth_tag_output_len_in_bytes); + + /* compute new tag value */ + IMB_GHASH(state, job->u.GHASH._key, + job->src + job->hash_start_src_offset_in_bytes, + job->msg_len_to_hash_in_bytes, + job->auth_tag_output, job->auth_tag_output_len_in_bytes); + + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; +} + +#endif /* JOB_API_GCM_H */ diff --git a/lib/include/job_api_kasumi.h b/lib/include/job_api_kasumi.h new file mode 100644 index 0000000000000000000000000000000000000000..462b3423029871ebb525007e7b32e427fb45bd80 --- /dev/null +++ b/lib/include/job_api_kasumi.h @@ -0,0 +1,66 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#include "ipsec-mb.h" + +#ifndef JOB_API_KASUMI_H +#define JOB_API_KASUMI_H + +__forceinline +IMB_JOB * +submit_kasumi_uea1_job(IMB_MGR *state, IMB_JOB *job) +{ + const kasumi_key_sched_t *key = job->enc_keys; + const uint64_t iv = *(const uint64_t *)job->iv; + const uint32_t msg_bitlen = + (const uint32_t)job->msg_len_to_cipher_in_bits; + const uint32_t msg_bitoff = + (const uint32_t)job->cipher_start_src_offset_in_bits; + + /* Use bit length API if + * - msg length is not a multiple of bytes + * - bit offset is not a multiple of bytes + */ + if ((msg_bitlen & 0x07) || (msg_bitoff & 0x07)) { + IMB_KASUMI_F8_1_BUFFER_BIT(state, key, iv, job->src, job->dst, + msg_bitlen, msg_bitoff); + + } else { + const uint32_t msg_bytelen = msg_bitlen >> 3; + const uint32_t msg_byteoff = msg_bitoff >> 3; + const void *src = job->src + msg_byteoff; + void *dst = job->dst + msg_byteoff; + + IMB_KASUMI_F8_1_BUFFER(state, key, iv, src, dst, + msg_bytelen); + } + + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +#endif /* JOB_API_KASUMI_H */ diff --git a/lib/include/job_api_snowv.h b/lib/include/job_api_snowv.h new file mode 100644 index 0000000000000000000000000000000000000000..ff0dfe7d18930fabad12292bd3104022728559c5 --- /dev/null +++ b/lib/include/job_api_snowv.h @@ -0,0 +1,93 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#include "ipsec-mb.h" + +#ifndef JOB_API_SNOWV_H +#define JOB_API_SNOWV_H + +__forceinline +IMB_JOB * +submit_snow_v_aead_job(IMB_MGR *state, IMB_JOB *job) +{ + struct gcm_key_data gdata_key; + imb_uint128_t *auth = (imb_uint128_t *) job->auth_tag_output; + imb_uint128_t temp; + imb_uint128_t hkey_endpad[2]; + + temp.low = BSWAP64((job->u.SNOW_V_AEAD.aad_len_in_bytes << 3)); + temp.high = BSWAP64((job->msg_len_to_cipher_in_bytes << 3)); + + /* if hkey_endpad[1].high == 0: + * SUBMIT_JOB_SNOW_V_AEAD does enc/decrypt operation + * and fills hkey_endpad with first 2 keystreams + * else + * SUBMIT_JOB_SNOW_V_AEAD fills hkey_endpad with first + * 2 keystreams (no operations on src vector are done) + */ + if(job->cipher_direction == IMB_DIR_ENCRYPT) + hkey_endpad[1].high = 0; + else + hkey_endpad[1].high = 1; + + job->u.SNOW_V_AEAD.reserved = hkey_endpad; + job = SUBMIT_JOB_SNOW_V_AEAD(job); + + memset(auth, 0, sizeof(imb_uint128_t)); + + /* GHASH key H */ + IMB_GHASH_PRE(state, (void *)hkey_endpad, &gdata_key); + + /* push AAD into GHASH */ + IMB_GHASH(state, &gdata_key, job->u.SNOW_V_AEAD.aad, + job->u.SNOW_V_AEAD.aad_len_in_bytes, + (void *)auth, sizeof(imb_uint128_t)); + + if (job->cipher_direction == IMB_DIR_ENCRYPT) + IMB_GHASH(state, &gdata_key, job->dst, + job->msg_len_to_cipher_in_bytes, + (void *)auth, sizeof(imb_uint128_t)); + else + IMB_GHASH(state, &gdata_key, job->src, + job->msg_len_to_cipher_in_bytes, + (void *)auth, sizeof(imb_uint128_t)); + + IMB_GHASH(state, &gdata_key, (void *)&temp, sizeof(temp), + (void *)auth, sizeof(imb_uint128_t)); + + /* The resulting AuthTag */ + auth->low = auth->low ^ hkey_endpad[1].low; + auth->high = auth->high ^ hkey_endpad[1].high; + + if (job->cipher_direction == IMB_DIR_DECRYPT) { + hkey_endpad[1].high = 0; + job = SUBMIT_JOB_SNOW_V_AEAD(job); + } + return job; +} + +#endif /* JOB_API_SNOWV_H */ diff --git a/lib/include/kasumi_interface.h b/lib/include/kasumi_interface.h new file mode 100644 index 0000000000000000000000000000000000000000..7ac6c5fbd5847b69738c03afd17b416b43552e90 --- /dev/null +++ b/lib/include/kasumi_interface.h @@ -0,0 +1,141 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + + +#ifndef _KASUMI_INTERFACE_H_ +#define _KASUMI_INTERFACE_H_ + +#include +#include "ipsec-mb.h" + +/* Range of input data for KASUMI is from 1 to 20000 bits */ +#define KASUMI_MIN_LEN 1 +#define KASUMI_MAX_LEN 20000 + +#define BYTESIZE (8) +#define BITSIZE(x) ((int)(sizeof(x)*BYTESIZE)) + +/* SSE */ +size_t kasumi_key_sched_size_sse(void); +int kasumi_init_f8_key_sched_sse(const void *pKey, kasumi_key_sched_t *pCtx); +int kasumi_init_f9_key_sched_sse(const void *pKey, kasumi_key_sched_t *pCtx); + +void kasumi_f8_1_buffer_sse(const kasumi_key_sched_t *pCtx, const uint64_t IV, + const void *pBufferIn, void *pBufferOut, + const uint32_t cipherLengthInBytes); + +void kasumi_f8_1_buffer_bit_sse(const kasumi_key_sched_t *pCtx, + const uint64_t IV, + const void *pBufferIn, void *pBufferOut, + const uint32_t cipherLengthInBits, + const uint32_t offsetInBits); + +void kasumi_f8_2_buffer_sse(const kasumi_key_sched_t *pCtx, + const uint64_t IV1, const uint64_t IV2, + const void *pBufferIn1, void *pBufferOut1, + const uint32_t lengthInBytes1, + const void *pBufferIn2, void *pBufferOut2, + const uint32_t lengthInBytes2); + +void kasumi_f8_3_buffer_sse(const kasumi_key_sched_t *pCtx, const uint64_t IV1, + const uint64_t IV2, const uint64_t IV3, + const void *pBufferIn1, void *pBufferOut1, + const void *pBufferIn2, void *pBufferOut2, + const void *pBufferIn3, void *pBufferOut3, + const uint32_t lengthInBytes); + +void kasumi_f8_4_buffer_sse(const kasumi_key_sched_t *pCtx, + const uint64_t IV1, const uint64_t IV2, + const uint64_t IV3, const uint64_t IV4, + const void *pBufferIn1, void *pBufferOut1, + const void *pBufferIn2, void *pBufferOut2, + const void *pBufferIn3, void *pBufferOut3, + const void *pBufferIn4, void *pBufferOut4, + const uint32_t lengthInBytes); + +void kasumi_f8_n_buffer_sse(const kasumi_key_sched_t *pKeySchedule, + const uint64_t IV[], + const void * const pDataIn[], void *pDataOut[], + const uint32_t dataLen[], const uint32_t dataCount); + +void kasumi_f9_1_buffer_sse(const kasumi_key_sched_t *pCtx, + const void *pBufferIn, + const uint32_t lengthInBytes, void *pDigest); + +void kasumi_f9_1_buffer_user_sse(const kasumi_key_sched_t *pCtx, + const uint64_t IV, const void *pBufferIn, + const uint32_t lengthInBits, + void *pDigest, const uint32_t direction); + +/* AVX */ +size_t kasumi_key_sched_size_avx(void); +int kasumi_init_f8_key_sched_avx(const void *pKey, kasumi_key_sched_t *pCtx); +int kasumi_init_f9_key_sched_avx(const void *pKey, kasumi_key_sched_t *pCtx); + +void kasumi_f8_1_buffer_avx(const kasumi_key_sched_t *pCtx, const uint64_t IV, + const void *pBufferIn, void *pBufferOut, + const uint32_t cipherLengthInBytes); +void 
kasumi_f8_1_buffer_bit_avx(const kasumi_key_sched_t *pCtx, + const uint64_t IV, + const void *pBufferIn, void *pBufferOut, + const uint32_t cipherLengthInBits, + const uint32_t offsetInBits); +void kasumi_f8_2_buffer_avx(const kasumi_key_sched_t *pCtx, + const uint64_t IV1, const uint64_t IV2, + const void *pBufferIn1, void *pBufferOut1, + const uint32_t lengthInBytes1, + const void *pBufferIn2, void *pBufferOut2, + const uint32_t lengthInBytes2); +void kasumi_f8_3_buffer_avx(const kasumi_key_sched_t *pCtx, const uint64_t IV1, + const uint64_t IV2, const uint64_t IV3, + const void *pBufferIn1, void *pBufferOut1, + const void *pBufferIn2, void *pBufferOut2, + const void *pBufferIn3, void *pBufferOut3, + const uint32_t lengthInBytes); +void kasumi_f8_4_buffer_avx(const kasumi_key_sched_t *pCtx, + const uint64_t IV1, const uint64_t IV2, + const uint64_t IV3, const uint64_t IV4, + const void *pBufferIn1, void *pBufferOut1, + const void *pBufferIn2, void *pBufferOut2, + const void *pBufferIn3, void *pBufferOut3, + const void *pBufferIn4, void *pBufferOut4, + const uint32_t lengthInBytes); +void kasumi_f8_n_buffer_avx(const kasumi_key_sched_t *pKeySchedule, + const uint64_t IV[], + const void * const pDataIn[], void *pDataOut[], + const uint32_t dataLen[], const uint32_t dataCount); + +void kasumi_f9_1_buffer_avx(const kasumi_key_sched_t *pCtx, + const void *pBufferIn, + const uint32_t lengthInBytes, void *pDigest); + +void kasumi_f9_1_buffer_user_avx(const kasumi_key_sched_t *pCtx, + const uint64_t IV, const void *pBufferIn, + const uint32_t lengthInBits, + void *pDigest, const uint32_t direction); +#endif /*_KASUMI_INTERFACE_H_*/ + diff --git a/lib/include/kasumi_internal.h b/lib/include/kasumi_internal.h index b5240a131229b370f5d56b1b888105b5a85cfe09..1bbb81d3f11192e2f15b6321b8be600b443db992 100755 --- a/lib/include/kasumi_internal.h +++ b/lib/include/kasumi_internal.h @@ -42,7 +42,9 @@ #include "wireless_common.h" #include "include/clear_regs_mem.h" #include "include/constant_lookup.h" +#include "memcpy.h" #include "error.h" +#include "kasumi_interface.h" /*--------------------------------------------------------------------- * Kasumi Inner S-Boxes @@ -151,19 +153,12 @@ static const uint16_t sso_kasumi_S9e[] = { 0x1008, 0xdaed, 0x1e0f, 0xf178, 0x69b4, 0xa1d0, 0x763b, 0x9bcd }; -/* Range of input data for KASUMI is from 1 to 20000 bits */ -#define KASUMI_MIN_LEN 1 -#define KASUMI_MAX_LEN 20000 - /* KASUMI cipher definitions */ #define NUM_KASUMI_ROUNDS (8) /* 8 rounds in the kasumi spec */ #define QWORDSIZEINBITS (64) #define QWORDSIZEINBYTES (8) #define LAST_PADDING_BIT (1) -#define BYTESIZE (8) -#define BITSIZE(x) ((int)(sizeof(x)*BYTESIZE)) - /*--------- 16 bit rotate left ------------------------------------------*/ #define ROL16(a,b) (uint16_t)((a<<b)|(a>>(16-b))) @@ -746,25 +741,6 @@ kasumi_init_f9_key_sched(const void *const pKey, return kasumi_compute_sched(0xAA, pKey, pCtx); } -size_t -kasumi_key_sched_size_sse(void); - -int -kasumi_init_f8_key_sched_sse(const void *pKey, kasumi_key_sched_t *pCtx); - -int -kasumi_init_f9_key_sched_sse(const void *pKey, kasumi_key_sched_t *pCtx); - -size_t -kasumi_key_sched_size_avx(void); - -int -kasumi_init_f8_key_sched_avx(const void *pKey, kasumi_key_sched_t *pCtx); - -int -kasumi_init_f9_key_sched_avx(const void *pKey, kasumi_key_sched_t *pCtx); - - static inline void kasumi_f8_1_buffer(const kasumi_key_sched_t *pCtx, const uint64_t IV, const void *pIn, void *pOut, @@ -780,7 +756,7 @@ kasumi_f8_1_buffer(const kasumi_key_sched_t *pCtx, const uint64_t IV,
uint8_t *pBufferOut = (uint8_t *) pOut; uint32_t lengthInBytes = length; - /* IV Endianity */ + /* IV Endianness */ a.b64[0] = BSWAP64(IV); /* First encryption to create modifier */ @@ -881,7 +857,7 @@ kasumi_f8_1_buffer_bit(const kasumi_key_sched_t *pCtx, const uint64_t IV, SafeBuf safeOutBuf = {0}; SafeBuf safeInBuf = {0}; - /* IV Endianity */ + /* IV Endianness */ a.b64[0] = BSWAP64(IV); /* First encryption to create modifier */ @@ -1026,7 +1002,7 @@ kasumi_f8_2_buffer(const kasumi_key_sched_t *pCtx, kasumi_union_t temp; - /* IV Endianity */ + /* IV Endianness */ a1.b64[0] = BSWAP64(IV1); a2.b64[0] = BSWAP64(IV2); @@ -1209,7 +1185,7 @@ kasumi_f8_3_buffer(const kasumi_key_sched_t *pCtx, kasumi_union_t a2, b2; /* the modifier */ kasumi_union_t a3, b3; /* the modifier */ - /* IV Endianity */ + /* IV Endianness */ a1.b64[0] = BSWAP64(IV1); a2.b64[0] = BSWAP64(IV2); a3.b64[0] = BSWAP64(IV3); @@ -1328,7 +1304,7 @@ kasumi_f8_4_buffer(const kasumi_key_sched_t *pCtx, const uint64_t IV1, kasumi_union_t a4, b4; /* the modifier */ uint16_t *pTemp[4] = {b1.b16, b2.b16, b3.b16, b4.b16}; - /* IV Endianity */ + /* IV Endianness */ b1.b64[0] = BSWAP64(IV1); b2.b64[0] = BSWAP64(IV2); b3.b64[0] = BSWAP64(IV3); @@ -1680,7 +1656,7 @@ kasumi_f9_1_buffer(const kasumi_key_sched_t *pCtx, const void *dataIn, /* Not a whole 8 byte block remaining */ mask.b64[0] = ~(mask.b64[0] >> (BYTESIZE * lengthInBytes)); - memcpy(&safeBuf.b64, pIn, lengthInBytes); + safe_memcpy(&safeBuf.b64, pIn, lengthInBytes); mask.b64[0] &= BSWAP64(safeBuf.b64); a.b64[0] ^= mask.b64[0]; @@ -1768,7 +1744,7 @@ kasumi_f9_1_buffer_user(const kasumi_key_sched_t *pCtx, const uint64_t IV, message.b64[0] = 0; mask.b64[0] = ~(mask.b64[0] >> lengthInBits); /*round up and copy last lengthInBits */ - memcpy(&safebuff.b64[0], pIn, (lengthInBits + 7) / 8); + safe_memcpy(&safebuff.b64[0], pIn, (lengthInBits + 7) / 8); message.b64[0] = BSWAP64(safebuff.b64[0]); temp.b64[0] = mask.b64[0] & message.b64[0]; temp.b64[0] |= @@ -1811,94 +1787,5 @@ kasumi_f9_1_buffer_user(const kasumi_key_sched_t *pCtx, const uint64_t IV, #endif } -void kasumi_f8_1_buffer_sse(const kasumi_key_sched_t *pCtx, const uint64_t IV, - const void *pBufferIn, void *pBufferOut, - const uint32_t cipherLengthInBytes); - -void kasumi_f8_1_buffer_bit_sse(const kasumi_key_sched_t *pCtx, - const uint64_t IV, - const void *pBufferIn, void *pBufferOut, - const uint32_t cipherLengthInBits, - const uint32_t offsetInBits); - -void kasumi_f8_2_buffer_sse(const kasumi_key_sched_t *pCtx, - const uint64_t IV1, const uint64_t IV2, - const void *pBufferIn1, void *pBufferOut1, - const uint32_t lengthInBytes1, - const void *pBufferIn2, void *pBufferOut2, - const uint32_t lengthInBytes2); - -void kasumi_f8_3_buffer_sse(const kasumi_key_sched_t *pCtx, const uint64_t IV1, - const uint64_t IV2, const uint64_t IV3, - const void *pBufferIn1, void *pBufferOut1, - const void *pBufferIn2, void *pBufferOut2, - const void *pBufferIn3, void *pBufferOut3, - const uint32_t lengthInBytes); - -void kasumi_f8_4_buffer_sse(const kasumi_key_sched_t *pCtx, - const uint64_t IV1, const uint64_t IV2, - const uint64_t IV3, const uint64_t IV4, - const void *pBufferIn1, void *pBufferOut1, - const void *pBufferIn2, void *pBufferOut2, - const void *pBufferIn3, void *pBufferOut3, - const void *pBufferIn4, void *pBufferOut4, - const uint32_t lengthInBytes); - -void kasumi_f8_n_buffer_sse(const kasumi_key_sched_t *pKeySchedule, - const uint64_t IV[], - const void * const pDataIn[], void *pDataOut[], - const uint32_t dataLen[], const 
uint32_t dataCount); - -void kasumi_f9_1_buffer_sse(const kasumi_key_sched_t *pCtx, - const void *pBufferIn, - const uint32_t lengthInBytes, void *pDigest); - -void kasumi_f9_1_buffer_user_sse(const kasumi_key_sched_t *pCtx, - const uint64_t IV, const void *pBufferIn, - const uint32_t lengthInBits, - void *pDigest, const uint32_t direction); - - -void kasumi_f8_1_buffer_avx(const kasumi_key_sched_t *pCtx, const uint64_t IV, - const void *pBufferIn, void *pBufferOut, - const uint32_t cipherLengthInBytes); -void kasumi_f8_1_buffer_bit_avx(const kasumi_key_sched_t *pCtx, - const uint64_t IV, - const void *pBufferIn, void *pBufferOut, - const uint32_t cipherLengthInBits, - const uint32_t offsetInBits); -void kasumi_f8_2_buffer_avx(const kasumi_key_sched_t *pCtx, - const uint64_t IV1, const uint64_t IV2, - const void *pBufferIn1, void *pBufferOut1, - const uint32_t lengthInBytes1, - const void *pBufferIn2, void *pBufferOut2, - const uint32_t lengthInBytes2); -void kasumi_f8_3_buffer_avx(const kasumi_key_sched_t *pCtx, const uint64_t IV1, - const uint64_t IV2, const uint64_t IV3, - const void *pBufferIn1, void *pBufferOut1, - const void *pBufferIn2, void *pBufferOut2, - const void *pBufferIn3, void *pBufferOut3, - const uint32_t lengthInBytes); -void kasumi_f8_4_buffer_avx(const kasumi_key_sched_t *pCtx, - const uint64_t IV1, const uint64_t IV2, - const uint64_t IV3, const uint64_t IV4, - const void *pBufferIn1, void *pBufferOut1, - const void *pBufferIn2, void *pBufferOut2, - const void *pBufferIn3, void *pBufferOut3, - const void *pBufferIn4, void *pBufferOut4, - const uint32_t lengthInBytes); -void kasumi_f8_n_buffer_avx(const kasumi_key_sched_t *pKeySchedule, - const uint64_t IV[], - const void * const pDataIn[], void *pDataOut[], - const uint32_t dataLen[], const uint32_t dataCount); - -void kasumi_f9_1_buffer_avx(const kasumi_key_sched_t *pCtx, - const void *pBufferIn, - const uint32_t lengthInBytes, void *pDigest); - -void kasumi_f9_1_buffer_user_avx(const kasumi_key_sched_t *pCtx, - const uint64_t IV, const void *pBufferIn, - const uint32_t lengthInBits, - void *pDigest, const uint32_t direction); #endif /*_KASUMI_INTERNAL_H_*/ diff --git a/lib/include/mb_mgr_burst.h b/lib/include/mb_mgr_burst.h new file mode 100644 index 0000000000000000000000000000000000000000..9186c7be0671393258379f4a7ef58462fad66d19 --- /dev/null +++ b/lib/include/mb_mgr_burst.h @@ -0,0 +1,561 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#ifndef MB_MGR_BURST_H +#define MB_MGR_BURST_H + +/* synchronous cipher and hash burst API */ + +#include "ipsec-mb.h" +#include "include/error.h" +#include "include/mb_mgr_job_check.h" /* is_job_invalid() */ + +__forceinline +uint32_t submit_aes_cbc_burst_enc(IMB_MGR *state, + IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_KEY_SIZE_BYTES key_size, + const int run_check) +{ + uint32_t completed_jobs = 0; + + if (run_check) { + uint32_t i; + + /* validate jobs */ + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + /* validate job */ + if (is_job_invalid(state, job, + IMB_CIPHER_CBC, IMB_AUTH_NULL, + IMB_DIR_ENCRYPT, key_size)) { + job->status = IMB_STATUS_INVALID_ARGS; + return 0; + } + } + } + + if (key_size == IMB_KEY_128_BYTES) { + MB_MGR_AES_OOO *aes_ooo = state->aes128_ooo; + uint32_t i; + + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + job = SUBMIT_JOB_AES_CBC_128_ENC(aes_ooo, job); + if (job != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + + if (completed_jobs != n_jobs) { + IMB_JOB *job = NULL; + + while((job = FLUSH_JOB_AES_CBC_128_ENC(aes_ooo)) + != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + } else if (key_size == IMB_KEY_192_BYTES) { + MB_MGR_AES_OOO *aes_ooo = state->aes192_ooo; + uint32_t i; + + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + job = SUBMIT_JOB_AES_CBC_192_ENC(aes_ooo, job); + if (job != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + + if (completed_jobs != n_jobs) { + IMB_JOB *job = NULL; + + while((job = FLUSH_JOB_AES_CBC_192_ENC(aes_ooo)) + != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + } else { /* assume 256-bit key */ + MB_MGR_AES_OOO *aes_ooo = state->aes256_ooo; + uint32_t i; + + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + job = SUBMIT_JOB_AES_CBC_256_ENC(aes_ooo, job); + if (job != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + + if (completed_jobs != n_jobs) { + IMB_JOB *job = NULL; + + while((job = FLUSH_JOB_AES_CBC_256_ENC(aes_ooo)) + != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + } + + return completed_jobs; +} + +__forceinline +uint32_t submit_aes_cbc_burst_dec(IMB_MGR *state, + IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_KEY_SIZE_BYTES key_size, + const int run_check) +{ + (void) state; + + if (run_check) { + uint32_t i; + + /* validate jobs */ + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + /* validate job */ + if (is_job_invalid(state, job, + IMB_CIPHER_CBC, IMB_AUTH_NULL, + IMB_DIR_DECRYPT, key_size)) { + job->status = IMB_STATUS_INVALID_ARGS; + return 0; + } + } + } + + if (key_size == IMB_KEY_128_BYTES) { + uint32_t i; + + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + AES_CBC_DEC_128(job->src + + job->cipher_start_src_offset_in_bytes, + job->iv, + 
job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & + (~15)); + job->status = IMB_STATUS_COMPLETED; + } + } else if (key_size == IMB_KEY_192_BYTES) { + uint32_t i; + + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + AES_CBC_DEC_192(job->src + + job->cipher_start_src_offset_in_bytes, + job->iv, + job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & + (~15)); + job->status = IMB_STATUS_COMPLETED; + } + } else /* assume 256-bit key */ { + uint32_t i; + + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + AES_CBC_DEC_256(job->src + + job->cipher_start_src_offset_in_bytes, + job->iv, + job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & + (~15)); + job->status = IMB_STATUS_COMPLETED; + } + } + + return n_jobs; +} + +__forceinline +uint32_t submit_aes_ctr_burst(IMB_MGR *state, + IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_KEY_SIZE_BYTES key_size, + const int run_check) +{ + if (run_check) { + uint32_t i; + + /* validate jobs */ + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + /* validate job */ + if (is_job_invalid(state, job, + IMB_CIPHER_CNTR, IMB_AUTH_NULL, + IMB_DIR_ENCRYPT, key_size)) { + job->status = IMB_STATUS_INVALID_ARGS; + return 0; + } + } + } + + if (key_size == IMB_KEY_128_BYTES) { + uint32_t i; + + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + +#ifdef SUBMIT_JOB_AES_CTR_128 + SUBMIT_JOB_AES_CTR_128(job); +#else + AES_CTR_128(job->src + + job->cipher_start_src_offset_in_bytes, + job->iv, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bytes, + job->iv_len_in_bytes); +#endif + job->status = IMB_STATUS_COMPLETED; + } + } else if (key_size == IMB_KEY_192_BYTES) { + uint32_t i; + + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + +#ifdef SUBMIT_JOB_AES_CTR_192 + SUBMIT_JOB_AES_CTR_192(job); +#else + AES_CTR_192(job->src + + job->cipher_start_src_offset_in_bytes, + job->iv, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bytes, + job->iv_len_in_bytes); +#endif + job->status = IMB_STATUS_COMPLETED; + } + } else /* assume 256-bit key */ { + uint32_t i; + + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + +#ifdef SUBMIT_JOB_AES_CTR_256 + SUBMIT_JOB_AES_CTR_256(job); +#else + AES_CTR_256(job->src + + job->cipher_start_src_offset_in_bytes, + job->iv, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bytes, + job->iv_len_in_bytes); +#endif + job->status = IMB_STATUS_COMPLETED; + } + } + + return n_jobs; +} + +__forceinline +uint32_t submit_cipher_burst_and_check(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size, + const int run_check) +{ + /* reset error status */ + imb_set_errno(state, 0); + + if (run_check) + if (jobs == NULL) { + imb_set_errno(state, IMB_ERR_NULL_BURST); + return 0; + } + + switch (cipher) { + case IMB_CIPHER_CBC: + if (dir == IMB_DIR_ENCRYPT) + return submit_aes_cbc_burst_enc(state, jobs, n_jobs, + key_size, run_check); + else + return submit_aes_cbc_burst_dec(state, jobs, n_jobs, + key_size, run_check); + case IMB_CIPHER_CNTR: + return submit_aes_ctr_burst(state, jobs, n_jobs, + key_size, run_check); + default: + break; + } + + /* unsupported cipher mode */ + imb_set_errno(state, IMB_ERR_CIPH_MODE); + + return 0; +} + +uint32_t +SUBMIT_CIPHER_BURST(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size) +{ + return 
submit_cipher_burst_and_check(state, jobs, n_jobs, + cipher, dir, key_size, 1); +} + +uint32_t +SUBMIT_CIPHER_BURST_NOCHECK(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size) +{ + return submit_cipher_burst_and_check(state, jobs, n_jobs, + cipher, dir, key_size, 0); +} + +__forceinline +uint32_t submit_burst_hmac_sha_x(IMB_MGR *state, + IMB_JOB *jobs, + const uint32_t n_jobs, + const int run_check, + const IMB_HASH_ALG hash_alg) +{ + uint32_t i, completed_jobs = 0; + + if (run_check) { + /* validate jobs */ + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + /* validate job */ + if (is_job_invalid(state, job, + IMB_CIPHER_NULL, + hash_alg, + IMB_DIR_ENCRYPT, + job->key_len_in_bytes)) { + job->status = IMB_STATUS_INVALID_ARGS; + return 0; + } + } + } + + if (hash_alg == IMB_AUTH_HMAC_SHA_1) { + /* submit all jobs */ + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + job = SUBMIT_JOB_HMAC(state->hmac_sha_1_ooo, job); + if (job != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + /* flush any outstanding jobs */ + if (completed_jobs != n_jobs) { + IMB_JOB *job = NULL; + + while ((job = FLUSH_JOB_HMAC(state->hmac_sha_1_ooo)) + != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + } else if (hash_alg == IMB_AUTH_HMAC_SHA_224) { + /* submit all jobs */ + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + job = SUBMIT_JOB_HMAC_SHA_224(state->hmac_sha_224_ooo, + job); + if (job != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + /* flush any outstanding jobs */ + if (completed_jobs != n_jobs) { + IMB_JOB *job = NULL; + + while ((job = + FLUSH_JOB_HMAC_SHA_224(state->hmac_sha_224_ooo)) + != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + } else if (hash_alg == IMB_AUTH_HMAC_SHA_256) { + /* submit all jobs */ + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + job = SUBMIT_JOB_HMAC_SHA_256(state->hmac_sha_256_ooo, + job); + if (job != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + /* flush any outstanding jobs */ + if (completed_jobs != n_jobs) { + IMB_JOB *job = NULL; + + while ((job = + FLUSH_JOB_HMAC_SHA_256(state->hmac_sha_256_ooo)) + != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + } else if (hash_alg == IMB_AUTH_HMAC_SHA_384) { + /* submit all jobs */ + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + job = SUBMIT_JOB_HMAC_SHA_384(state->hmac_sha_384_ooo, + job); + if (job != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + /* flush any outstanding jobs */ + if (completed_jobs != n_jobs) { + IMB_JOB *job = NULL; + + while ((job = + FLUSH_JOB_HMAC_SHA_384(state->hmac_sha_384_ooo)) + != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + } else if (hash_alg == IMB_AUTH_HMAC_SHA_512) { + /* submit all jobs */ + for (i = 0; i < n_jobs; i++) { + IMB_JOB *job = &jobs[i]; + + job = SUBMIT_JOB_HMAC_SHA_512(state->hmac_sha_512_ooo, + job); + if (job != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + /* flush any outstanding jobs */ + if (completed_jobs != n_jobs) { + IMB_JOB *job = NULL; + + while ((job = + FLUSH_JOB_HMAC_SHA_512(state->hmac_sha_512_ooo)) + != NULL) { + job->status = IMB_STATUS_COMPLETED; + completed_jobs++; + } + } + } + + return completed_jobs; +} + +__forceinline +uint32_t 
submit_hash_burst_and_check(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash, + const int run_check) +{ + /* reset error status */ + imb_set_errno(state, 0); + + if (run_check) { + if (jobs == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_JOB); + return 0; + } + } + + switch (hash) { + case IMB_AUTH_HMAC_SHA_1: + return submit_burst_hmac_sha_x(state, jobs, n_jobs, run_check, + IMB_AUTH_HMAC_SHA_1); + case IMB_AUTH_HMAC_SHA_224: + return submit_burst_hmac_sha_x(state, jobs, n_jobs, run_check, + IMB_AUTH_HMAC_SHA_224); + case IMB_AUTH_HMAC_SHA_256: + return submit_burst_hmac_sha_x(state, jobs, n_jobs, run_check, + IMB_AUTH_HMAC_SHA_256); + case IMB_AUTH_HMAC_SHA_384: + return submit_burst_hmac_sha_x(state, jobs, n_jobs, run_check, + IMB_AUTH_HMAC_SHA_384); + case IMB_AUTH_HMAC_SHA_512: + return submit_burst_hmac_sha_x(state, jobs, n_jobs, run_check, + IMB_AUTH_HMAC_SHA_512); + default: + break; + } + + /* unsupported hash alg */ + imb_set_errno(state, IMB_ERR_HASH_ALGO); + + return 0; +} + +uint32_t +SUBMIT_HASH_BURST(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash) +{ + return submit_hash_burst_and_check(state, jobs, n_jobs, hash, 1); +} + +uint32_t +SUBMIT_HASH_BURST_NOCHECK(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash) +{ + return submit_hash_burst_and_check(state, jobs, n_jobs, hash, 0); +} + +#endif /* MB_MGR_BURST_H */ diff --git a/lib/include/mb_mgr_burst_async.h b/lib/include/mb_mgr_burst_async.h new file mode 100644 index 0000000000000000000000000000000000000000..5c436e3c756b21eb66e6c18ac7bca7d82e9bc51c --- /dev/null +++ b/lib/include/mb_mgr_burst_async.h @@ -0,0 +1,270 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
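/*
 * Illustrative sketch (not part of the patch): driving the synchronous HMAC
 * hash burst path defined above. It assumes the public IMB_SUBMIT_HASH_BURST()
 * wrapper forwards to SUBMIT_HASH_BURST() with the same argument order, and
 * that the caller has already derived the HMAC inner/outer pad digests
 * (ipad/opad) for the key; the helper name, burst size and truncated 12-byte
 * tag are hypothetical.
 */
#include <string.h>
#include <intel-ipsec-mb.h>

#define EX_HASH_JOBS 16u

static uint32_t hmac_sha1_hash_burst_example(IMB_MGR *mb_mgr,
                                             const uint8_t *msgs[EX_HASH_JOBS],
                                             const uint64_t lens[EX_HASH_JOBS],
                                             const uint8_t ipad_hash[20],
                                             const uint8_t opad_hash[20],
                                             uint8_t tags[EX_HASH_JOBS][12])
{
        IMB_JOB jobs[EX_HASH_JOBS];
        uint32_t i;

        memset(jobs, 0, sizeof(jobs));
        for (i = 0; i < EX_HASH_JOBS; i++) {
                jobs[i].src = msgs[i];
                jobs[i].hash_start_src_offset_in_bytes = 0;
                jobs[i].msg_len_to_hash_in_bytes = lens[i];
                jobs[i].auth_tag_output = tags[i];
                jobs[i].auth_tag_output_len_in_bytes = 12; /* truncated tag */
                jobs[i].u.HMAC._hashed_auth_key_xor_ipad = ipad_hash;
                jobs[i].u.HMAC._hashed_auth_key_xor_opad = opad_hash;
        }

        /*
         * Submits the whole burst and flushes the HMAC out-of-order manager
         * internally; returns the number of completed jobs.
         */
        return IMB_SUBMIT_HASH_BURST(mb_mgr, jobs, EX_HASH_JOBS,
                                     IMB_AUTH_HMAC_SHA_1);
}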
+*******************************************************************************/ + +#ifndef MB_MGR_BURST_ASYNC_H +#define MB_MGR_BURST_ASYNC_H + +/* asynchronous burst API (chained cipher & hash) */ + +#include "ipsec-mb.h" +#include "include/error.h" +#include "include/mb_mgr_job_check.h" /* is_job_invalid() */ + +__forceinline +void ADV_N_JOBS(int *ptr, const uint32_t n_jobs) +{ + *ptr += (sizeof(IMB_JOB) * n_jobs); + if (*ptr >= (int) (IMB_MAX_JOBS * sizeof(IMB_JOB))) + *ptr -= (int) (IMB_MAX_JOBS * sizeof(IMB_JOB)); +} + + +/* get number of jobs between job_offset and the end of the queue */ +__forceinline uint32_t +get_queue_sz_end(const int job_offset) +{ + return IMB_MAX_JOBS - (job_offset / sizeof(IMB_JOB)); +} + +__forceinline uint32_t +queue_sz_remaining(IMB_MGR *state) +{ + if (state->earliest_job < 0) + return IMB_MAX_JOBS; + + return IMB_MAX_JOBS - get_queue_sz(state); +} + +uint32_t +GET_NEXT_BURST(IMB_MGR *state, const uint32_t n_req_jobs, IMB_JOB **jobs) +{ + uint32_t i, num_jobs, n_ret_jobs, filled_jobs = 0; + IMB_JOB *job = NULL; + + /* reset error status */ + imb_set_errno(state, 0); + +#ifdef SAFE_PARAM + if (jobs == NULL) { + imb_set_errno(state, IMB_ERR_NULL_BURST); + return 0; + } + if (n_req_jobs > IMB_MAX_BURST_SIZE) { + imb_set_errno(state, IMB_ERR_BURST_SIZE); + return 0; + } +#endif + /* set number of jobs to return */ + n_ret_jobs = queue_sz_remaining(state); + if (n_ret_jobs > n_req_jobs) + n_ret_jobs = n_req_jobs; + + /* start filling list from next available job */ + job = JOBS(state, state->next_job); + + /* check enough jobs available before end of queue */ + num_jobs = get_queue_sz_end(state->next_job); + + if (num_jobs < n_ret_jobs) { + /* fill jobs to the end of the queue */ + for (i = 0; i < num_jobs; i++) { + jobs[filled_jobs++] = job; + job++; + } + /* fill remaining jobs from beginning of queue */ + num_jobs = n_ret_jobs - num_jobs; + job = &state->jobs[0]; + } else + /* fill all jobs */ + num_jobs = n_ret_jobs; + + for (i = 0; i < num_jobs; i++) { + jobs[filled_jobs++] = job; + job++; + } + + return filled_jobs; +} + +__forceinline uint32_t +submit_burst_and_check(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs, const int run_check) +{ + uint32_t i, n_ret_jobs = 0, num_jobs = n_jobs; + IMB_JOB *job = NULL; + + /* reset error status */ + imb_set_errno(state, 0); + + if (run_check) { + int job_offset = state->next_job; + + if (jobs == NULL) { + imb_set_errno(state, IMB_ERR_NULL_BURST); + return 0; + } + if (n_jobs > IMB_MAX_BURST_SIZE) { + imb_set_errno(state, IMB_ERR_BURST_SIZE); + return 0; + } + /* check enough space in queue */ + if (queue_sz_remaining(state) < n_jobs) { + imb_set_errno(state, IMB_ERR_QUEUE_SPACE); + return 0; + } + + for (i = 0; i < n_jobs; i++) { + if (jobs[i] == NULL) { + imb_set_errno(state, IMB_ERR_NULL_JOB); + return 0; + } + if (jobs[i] != JOBS(state, job_offset)) { + imb_set_errno(state, IMB_ERR_BURST_OOO); + goto return_invalid_job; + } + ADV_JOBS(&job_offset); + + /* validate job */ + if (is_job_invalid(state, jobs[i], + jobs[i]->cipher_mode, + jobs[i]->hash_alg, + jobs[i]->cipher_direction, + jobs[i]->key_len_in_bytes)) { + goto return_invalid_job; + } + } + } + + /* state was previously empty */ + if (state->earliest_job < 0) + state->earliest_job = state->next_job; + + /* submit all jobs */ + for (i = 0; i < n_jobs; i++) { + jobs[i]->status = IMB_STATUS_BEING_PROCESSED; + submit_new_job(state, jobs[i]); + } + ADV_N_JOBS(&state->next_job, n_jobs); + + /* + * return completed jobs + * - may need 2 passes if 
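/*
 * Illustrative sketch (not part of the patch): a self-contained model of the
 * byte-offset ring arithmetic used by ADV_N_JOBS() and get_queue_sz_end()
 * above. It assumes only what the code itself relies on: next_job and
 * earliest_job are byte offsets into a ring of IMB_MAX_JOBS jobs, and a burst
 * never exceeds the ring size (bursts are rejected above IMB_MAX_BURST_SIZE),
 * so a single conditional subtraction is enough to wrap the offset.
 */
#include <assert.h>
#include <stdint.h>
#include <intel-ipsec-mb.h>

static void ring_offset_model(void)
{
        const int ring_bytes = (int) (IMB_MAX_JOBS * sizeof(IMB_JOB));
        int next_job = ring_bytes - (int) sizeof(IMB_JOB); /* last slot */
        const uint32_t n = 3; /* hypothetical burst of 3 jobs: wraps the ring */

        /* same arithmetic as ADV_N_JOBS() */
        next_job += (int) (sizeof(IMB_JOB) * n);
        if (next_job >= ring_bytes)
                next_job -= ring_bytes;

        /* the offset wrapped around to the third slot (index 2) */
        assert(next_job == (int) (2 * sizeof(IMB_JOB)));
}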
jobs wrap in queue + */ + num_jobs = get_queue_sz_end(state->earliest_job); + if (num_jobs > n_jobs) + num_jobs = n_jobs; + + /* start returning from earliest job */ + job = JOBS(state, state->earliest_job); + +return_jobs: + for (i = 0; i < num_jobs; i++) { + if (job->status < IMB_STATUS_COMPLETED) + goto return_jobs_done; + jobs[n_ret_jobs++] = job; + job++; + } + + /* check if all jobs returned + * if not, return remaining jobs from beginning of queue + */ + if (n_ret_jobs < n_jobs) { + num_jobs = n_jobs - num_jobs; + job = &state->jobs[0]; + goto return_jobs; + } + +return_jobs_done: + ADV_N_JOBS(&state->earliest_job, n_ret_jobs); + + if (state->earliest_job == state->next_job) { + state->earliest_job = -1; /* becomes empty */ + state->next_job = 0; + } + + return n_ret_jobs; + +return_invalid_job: + jobs[i]->status = IMB_STATUS_INVALID_ARGS; + jobs[0] = jobs[i]; + return 0; +} + +uint32_t +SUBMIT_BURST(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs) +{ + return submit_burst_and_check(state, n_jobs, jobs, 1); +} + +uint32_t +SUBMIT_BURST_NOCHECK(IMB_MGR *state, const uint32_t n_jobs, IMB_JOB **jobs) +{ + return submit_burst_and_check(state, n_jobs, jobs, 0); +} + +uint32_t +FLUSH_BURST(IMB_MGR *state, const uint32_t max_jobs, IMB_JOB **jobs) +{ + uint32_t i, max_ret_jobs, n_ret_jobs = 0; + + /* reset error status */ + imb_set_errno(state, 0); + +#ifdef SAFE_PARAM + if (jobs == NULL) { + imb_set_errno(state, IMB_ERR_NULL_BURST); + return 0; + } +#endif + /* check if any jobs in queue */ + max_ret_jobs = queue_sz(state); + if (max_ret_jobs == 0) + return 0; + + /* set max number of jobs to return */ + if (max_ret_jobs > max_jobs) + max_ret_jobs = max_jobs; + + for (i = 0; i < max_ret_jobs; i++) { + IMB_JOB *job = JOBS(state, state->earliest_job); + + if (job->status < IMB_STATUS_COMPLETED) + complete_job(state, job); + + jobs[n_ret_jobs++] = job; + ADV_JOBS(&state->earliest_job); + } + + if (state->earliest_job == state->next_job) { + state->earliest_job = -1; /* becomes empty */ + state->next_job = 0; + } + + return n_ret_jobs; +} + +#endif /* MB_MGR_BURST_ASYNC_H */ diff --git a/lib/include/mb_mgr_code.h b/lib/include/mb_mgr_code.h index 61a0c9042ef43029a30ab8712340739a0a849b0f..d5e1107152be68c102f7f0e705755b07da47f456 100644 --- a/lib/include/mb_mgr_code.h +++ b/lib/include/mb_mgr_code.h @@ -28,39 +28,11 @@ #ifndef MB_MGR_CODE_H #define MB_MGR_CODE_H -/* - * This contains the bulk of the mb_mgr code, with #define's to build - * an SSE, AVX, AVX2 or AVX512 version (see mb_mgr_sse.c, mb_mgr_avx.c, etc.) - * - * get_next_job() returns a job object. This must be filled in and returned - * via submit_job() before get_next_job() is called again. - * - * submit_job() and flush_job() returns a job object. This job object ceases - * to be usable at the next call to get_next_job() - */ - -#include /* memcpy(), memset() */ +#include -#include "include/clear_regs_mem.h" -#include "include/des.h" #include "ipsec-mb.h" -#include "error.h" -#include "include/snow3g_submit.h" +#include "include/error.h" -#ifdef LINUX -#define BSWAP64 __builtin_bswap64 -#else -#define BSWAP64 _byteswap_uint64 -#endif - -#define CRC(func, state, job) *((uint32_t *)job->auth_tag_output) = \ - func(state, job->src + job->hash_start_src_offset_in_bytes, \ - job->msg_len_to_hash_in_bytes) -/* - * JOBS() and ADV_JOBS() moved into mb_mgr_code.h - * get_next_job() and get_completed_job() API's are no longer inlines. - * For binary compatibility they have been made proper symbols. 
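/*
 * Illustrative sketch (not part of the patch): a typical processing loop for
 * the asynchronous burst API defined above. It assumes the public
 * IMB_GET_NEXT_BURST()/IMB_SUBMIT_BURST()/IMB_FLUSH_BURST() wrappers forward
 * to GET_NEXT_BURST()/SUBMIT_BURST()/FLUSH_BURST() with the same argument
 * order; app_fill_job() and app_consume_job() are hypothetical application
 * helpers that set up cipher/hash parameters and consume completed jobs.
 */
#include <intel-ipsec-mb.h>

extern void app_fill_job(IMB_JOB *job);    /* hypothetical helper */
extern void app_consume_job(IMB_JOB *job); /* hypothetical helper */

static void async_burst_example(IMB_MGR *mb_mgr)
{
        IMB_JOB *jobs[IMB_MAX_BURST_SIZE];
        uint32_t n, n_filled, n_done, i;

        /* reserve job slots from the manager's internal ring */
        n_filled = IMB_GET_NEXT_BURST(mb_mgr, IMB_MAX_BURST_SIZE, jobs);

        for (i = 0; i < n_filled; i++)
                app_fill_job(jobs[i]);

        /*
         * Submit the burst; jobs[] is overwritten with jobs that completed
         * during this call - possibly fewer than were submitted and possibly
         * jobs queued by earlier bursts (out-of-order completion).
         */
        n_done = IMB_SUBMIT_BURST(mb_mgr, n_filled, jobs);
        for (i = 0; i < n_done; i++)
                app_consume_job(jobs[i]);

        /* drain anything still held in the out-of-order managers */
        while ((n = IMB_FLUSH_BURST(mb_mgr, IMB_MAX_BURST_SIZE, jobs)) != 0)
                for (i = 0; i < n; i++)
                        app_consume_job(jobs[i]);
}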
- */ __forceinline IMB_JOB *JOBS(IMB_MGR *state, const int offset) { @@ -77,2917 +49,54 @@ void ADV_JOBS(int *ptr) *ptr = 0; } -/* ========================================================================= */ -/* Lower level "out of order" schedulers */ -/* ========================================================================= */ - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES128_DEC(IMB_JOB *job) -{ - AES_CBC_DEC_128(job->src + job->cipher_start_src_offset_in_bytes, - job->iv, - job->dec_keys, - job->dst, - job->msg_len_to_cipher_in_bytes & (~15)); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES192_DEC(IMB_JOB *job) -{ - AES_CBC_DEC_192(job->src + job->cipher_start_src_offset_in_bytes, - job->iv, - job->dec_keys, - job->dst, - job->msg_len_to_cipher_in_bytes); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES256_DEC(IMB_JOB *job) -{ - AES_CBC_DEC_256(job->src + job->cipher_start_src_offset_in_bytes, - job->iv, - job->dec_keys, - job->dst, - job->msg_len_to_cipher_in_bytes & (~15)); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES_ECB_128_ENC(IMB_JOB *job) -{ - AES_ECB_ENC_128(job->src + job->cipher_start_src_offset_in_bytes, - job->enc_keys, - job->dst, - job->msg_len_to_cipher_in_bytes & (~15)); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES_ECB_192_ENC(IMB_JOB *job) -{ - AES_ECB_ENC_192(job->src + job->cipher_start_src_offset_in_bytes, - job->enc_keys, - job->dst, - job->msg_len_to_cipher_in_bytes & (~15)); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES_ECB_256_ENC(IMB_JOB *job) -{ - AES_ECB_ENC_256(job->src + job->cipher_start_src_offset_in_bytes, - job->enc_keys, - job->dst, - job->msg_len_to_cipher_in_bytes & (~15)); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES_ECB_128_DEC(IMB_JOB *job) -{ - AES_ECB_DEC_128(job->src + job->cipher_start_src_offset_in_bytes, - job->dec_keys, - job->dst, - job->msg_len_to_cipher_in_bytes & (~15)); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES_ECB_192_DEC(IMB_JOB *job) -{ - AES_ECB_DEC_192(job->src + job->cipher_start_src_offset_in_bytes, - job->dec_keys, - job->dst, - job->msg_len_to_cipher_in_bytes & (~15)); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES_ECB_256_DEC(IMB_JOB *job) -{ - AES_ECB_DEC_256(job->src + job->cipher_start_src_offset_in_bytes, - job->dec_keys, - job->dst, - job->msg_len_to_cipher_in_bytes & (~15)); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES128_CBCS_1_9_DEC(IMB_JOB *job) -{ - AES_CBCS_1_9_DEC_128(job->src + job->cipher_start_src_offset_in_bytes, - job->iv, - job->dec_keys, - job->dst, - job->msg_len_to_cipher_in_bytes & (~15), - job->cipher_fields.CBCS.next_iv); - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -/* ========================================================================= */ -/* DOCSIS functions */ -/* ========================================================================= */ - -#include "include/docsis_common.h" - -/* ========================================================================= */ -/* Custom hash / cipher */ -/* 
========================================================================= */ - -__forceinline -IMB_JOB * -JOB_CUSTOM_CIPHER(IMB_JOB *job) -{ - if (!(job->status & IMB_STATUS_COMPLETED_CIPHER)) { - if (job->cipher_func(job)) - job->status = IMB_STATUS_INTERNAL_ERROR; - else - job->status |= IMB_STATUS_COMPLETED_CIPHER; - } - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_CUSTOM_CIPHER(IMB_JOB *job) -{ - return JOB_CUSTOM_CIPHER(job); -} - -__forceinline -IMB_JOB * -FLUSH_JOB_CUSTOM_CIPHER(IMB_JOB *job) -{ - return JOB_CUSTOM_CIPHER(job); -} - -__forceinline -IMB_JOB * -JOB_CUSTOM_HASH(IMB_JOB *job) -{ - if (!(job->status & IMB_STATUS_COMPLETED_AUTH)) { - if (job->hash_func(job)) - job->status = IMB_STATUS_INTERNAL_ERROR; - else - job->status |= IMB_STATUS_COMPLETED_AUTH; - } - return job; -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_CUSTOM_HASH(IMB_JOB *job) -{ - return JOB_CUSTOM_HASH(job); -} - -__forceinline -IMB_JOB * -FLUSH_JOB_CUSTOM_HASH(IMB_JOB *job) -{ - return JOB_CUSTOM_HASH(job); -} - -__forceinline -IMB_JOB * -submit_kasumi_uea1_job(IMB_MGR *state, IMB_JOB *job) -{ - const kasumi_key_sched_t *key = job->enc_keys; - const uint64_t iv = *(const uint64_t *)job->iv; - const uint32_t msg_bitlen = - (const uint32_t)job->msg_len_to_cipher_in_bits; - const uint32_t msg_bitoff = - (const uint32_t)job->cipher_start_src_offset_in_bits; - - /* Use bit length API if - * - msg length is not a multiple of bytes - * - bit offset is not a multiple of bytes - */ - if ((msg_bitlen & 0x07) || (msg_bitoff & 0x07)) { - IMB_KASUMI_F8_1_BUFFER_BIT(state, key, iv, job->src, job->dst, - msg_bitlen, msg_bitoff); - - } else { - const uint32_t msg_bytelen = msg_bitlen >> 3; - const uint32_t msg_byteoff = msg_bitoff >> 3; - const void *src = job->src + msg_byteoff; - void *dst = job->dst + msg_byteoff; - - IMB_KASUMI_F8_1_BUFFER(state, key, iv, src, dst, - msg_bytelen); - } - - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -__forceinline -IMB_JOB * -submit_docsis_enc_job(IMB_MGR *state, IMB_JOB *job) -{ - if (16 == job->key_len_in_bytes) { - if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis128_crc32_sec_ooo; - - return SUBMIT_JOB_DOCSIS128_SEC_CRC_ENC(p_ooo, job); - } else { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis128_sec_ooo; - - return SUBMIT_JOB_DOCSIS128_SEC_ENC(p_ooo, job); - } - } else { /* 32 */ - if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis256_crc32_sec_ooo; - - return SUBMIT_JOB_DOCSIS256_SEC_CRC_ENC(p_ooo, job); - } else { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis256_sec_ooo; - - return SUBMIT_JOB_DOCSIS256_SEC_ENC(p_ooo, job); - } - } -} - -__forceinline -IMB_JOB * -flush_docsis_enc_job(IMB_MGR *state, IMB_JOB *job) -{ - if (16 == job->key_len_in_bytes) { - if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis128_crc32_sec_ooo; - - return FLUSH_JOB_DOCSIS128_SEC_CRC_ENC(p_ooo); - } else { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis128_sec_ooo; - - return FLUSH_JOB_DOCSIS128_SEC_ENC(p_ooo); - } - } else { /* 32 */ - if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis256_crc32_sec_ooo; - - return FLUSH_JOB_DOCSIS256_SEC_CRC_ENC(p_ooo); - } else { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis256_sec_ooo; - - return FLUSH_JOB_DOCSIS256_SEC_ENC(p_ooo); - } - } -} - -__forceinline -IMB_JOB * -submit_docsis_dec_job(IMB_MGR *state, IMB_JOB *job) -{ - if (16 == job->key_len_in_bytes) 
{ - if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis128_crc32_sec_ooo; - - return SUBMIT_JOB_DOCSIS128_SEC_CRC_DEC(p_ooo, job); - } else { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis128_sec_ooo; - - return SUBMIT_JOB_DOCSIS128_SEC_DEC(p_ooo, job); - } - } else { /* 32 */ - if (job->hash_alg == IMB_AUTH_DOCSIS_CRC32) { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis256_crc32_sec_ooo; - - return SUBMIT_JOB_DOCSIS256_SEC_CRC_DEC(p_ooo, job); - } else { - MB_MGR_DOCSIS_AES_OOO *p_ooo = - state->docsis256_sec_ooo; - - return SUBMIT_JOB_DOCSIS256_SEC_DEC(p_ooo, job); - } - } -} - -__forceinline -IMB_JOB * -submit_snow_v_aead_job(IMB_MGR *state, IMB_JOB *job) -{ - struct gcm_key_data gdata_key; - imb_uint128_t *auth = (imb_uint128_t *) job->auth_tag_output; - imb_uint128_t temp; - imb_uint128_t hkey_endpad[2]; - - temp.low = BSWAP64((job->u.SNOW_V_AEAD.aad_len_in_bytes << 3)); - temp.high = BSWAP64((job->msg_len_to_cipher_in_bytes << 3)); - - /* if hkey_endpad[1].high == 0: - * SUBMIT_JOB_SNOW_V_AEAD does enc/decrypt operation - * and fills hkey_endpad with first 2 keystreams - * else - * SUBMIT_JOB_SNOW_V_AEAD fills hkey_endpad with first - * 2 keystreams (no operations on src vector are done) - */ - if(job->cipher_direction == IMB_DIR_ENCRYPT) - hkey_endpad[1].high = 0; - else - hkey_endpad[1].high = 1; - - job->u.SNOW_V_AEAD.reserved = hkey_endpad; - job = SUBMIT_JOB_SNOW_V_AEAD(job); - - memset(auth, 0, sizeof(imb_uint128_t)); - - /* GHASH key H */ - IMB_GHASH_PRE(state, (void *)hkey_endpad, &gdata_key); - - /* push AAD into GHASH */ - IMB_GHASH(state, &gdata_key, job->u.SNOW_V_AEAD.aad, - job->u.SNOW_V_AEAD.aad_len_in_bytes, - (void *)auth, sizeof(imb_uint128_t)); - - if (job->cipher_direction == IMB_DIR_ENCRYPT) - IMB_GHASH(state, &gdata_key, job->dst, - job->msg_len_to_cipher_in_bytes, - (void *)auth, sizeof(imb_uint128_t)); - else - IMB_GHASH(state, &gdata_key, job->src, - job->msg_len_to_cipher_in_bytes, - (void *)auth, sizeof(imb_uint128_t)); - - IMB_GHASH(state, &gdata_key, (void *)&temp, sizeof(temp), - (void *)auth, sizeof(imb_uint128_t)); - - /* The resulting AuthTag */ - auth->low = auth->low ^ hkey_endpad[1].low; - auth->high = auth->high ^ hkey_endpad[1].high; - - if (job->cipher_direction == IMB_DIR_DECRYPT) { - hkey_endpad[1].high = 0; - job = SUBMIT_JOB_SNOW_V_AEAD(job); - } - return job; -} - -__forceinline -IMB_JOB * -submit_gcm_sgl_enc(IMB_MGR *state, IMB_JOB *job) -{ - switch (job->key_len_in_bytes) { - case IMB_KEY_128_BYTES: - if (job->sgl_state == IMB_SGL_INIT) - IMB_AES128_GCM_INIT_VAR_IV(state, job->enc_keys, - job->u.GCM.ctx, - job->iv, - job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes); - else if (job->sgl_state == IMB_SGL_UPDATE) - IMB_AES128_GCM_ENC_UPDATE(state, job->enc_keys, - job->u.GCM.ctx, - job->dst, job->src, - job->msg_len_to_cipher_in_bytes); - else /* FINALIZE */ - IMB_AES128_GCM_ENC_FINALIZE(state, job->enc_keys, - job->u.GCM.ctx, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - break; - case IMB_KEY_192_BYTES: - if (job->sgl_state == IMB_SGL_INIT) - IMB_AES192_GCM_INIT_VAR_IV(state, job->enc_keys, - job->u.GCM.ctx, - job->iv, - job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes); - else if (job->sgl_state == IMB_SGL_UPDATE) - IMB_AES192_GCM_ENC_UPDATE(state, job->enc_keys, - job->u.GCM.ctx, - job->dst, job->src, - job->msg_len_to_cipher_in_bytes); - else /* FINALIZE */ - IMB_AES192_GCM_ENC_FINALIZE(state, job->enc_keys, - job->u.GCM.ctx, - 
job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - break; - case IMB_KEY_256_BYTES: - default: - if (job->sgl_state == IMB_SGL_INIT) - IMB_AES256_GCM_INIT_VAR_IV(state, job->enc_keys, - job->u.GCM.ctx, - job->iv, - job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes); - else if (job->sgl_state == IMB_SGL_UPDATE) - IMB_AES256_GCM_ENC_UPDATE(state, job->enc_keys, - job->u.GCM.ctx, - job->dst, job->src, - job->msg_len_to_cipher_in_bytes); - else /* FINALIZE */ - IMB_AES256_GCM_ENC_FINALIZE(state, job->enc_keys, - job->u.GCM.ctx, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - break; - } - - job->status |= IMB_STATUS_COMPLETED_CIPHER; - - return job; -} - -__forceinline -IMB_JOB * -submit_gcm_sgl_dec(IMB_MGR *state, IMB_JOB *job) -{ - switch (job->key_len_in_bytes) { - case IMB_KEY_128_BYTES: - if (job->sgl_state == IMB_SGL_INIT) - IMB_AES128_GCM_INIT_VAR_IV(state, job->enc_keys, - job->u.GCM.ctx, - job->iv, - job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes); - else if (job->sgl_state == IMB_SGL_UPDATE) - IMB_AES128_GCM_DEC_UPDATE(state, job->enc_keys, - job->u.GCM.ctx, - job->dst, job->src, - job->msg_len_to_cipher_in_bytes); - else /* FINALIZE */ - IMB_AES128_GCM_DEC_FINALIZE(state, job->enc_keys, - job->u.GCM.ctx, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - break; - case IMB_KEY_192_BYTES: - if (job->sgl_state == IMB_SGL_INIT) - IMB_AES192_GCM_INIT_VAR_IV(state, job->enc_keys, - job->u.GCM.ctx, - job->iv, - job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes); - else if (job->sgl_state == IMB_SGL_UPDATE) - IMB_AES192_GCM_DEC_UPDATE(state, job->enc_keys, - job->u.GCM.ctx, - job->dst, job->src, - job->msg_len_to_cipher_in_bytes); - else /* FINALIZE */ - IMB_AES192_GCM_DEC_FINALIZE(state, job->enc_keys, - job->u.GCM.ctx, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - break; - case IMB_KEY_256_BYTES: - default: - if (job->sgl_state == IMB_SGL_INIT) - IMB_AES256_GCM_INIT_VAR_IV(state, job->enc_keys, - job->u.GCM.ctx, - job->iv, - job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes); - else if (job->sgl_state == IMB_SGL_UPDATE) - IMB_AES256_GCM_DEC_UPDATE(state, job->enc_keys, - job->u.GCM.ctx, - job->dst, job->src, - job->msg_len_to_cipher_in_bytes); - else /* FINALIZE */ - IMB_AES256_GCM_DEC_FINALIZE(state, job->enc_keys, - job->u.GCM.ctx, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - break; - } - - job->status |= IMB_STATUS_COMPLETED_CIPHER; - - return job; -} - -/* ========================================================================= */ -/* Cipher submit & flush functions */ -/* ========================================================================= */ -__forceinline -IMB_JOB * -SUBMIT_JOB_AES_ENC(IMB_MGR *state, IMB_JOB *job) -{ - if (IMB_CIPHER_CBC == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - MB_MGR_AES_OOO *aes128_ooo = state->aes128_ooo; - - return SUBMIT_JOB_AES128_ENC(aes128_ooo, job); - } else if (24 == job->key_len_in_bytes) { - MB_MGR_AES_OOO *aes192_ooo = state->aes192_ooo; - - return SUBMIT_JOB_AES192_ENC(aes192_ooo, job); - } else { /* assume 32 */ - MB_MGR_AES_OOO *aes256_ooo = state->aes256_ooo; - - return SUBMIT_JOB_AES256_ENC(aes256_ooo, job); - } - } else if (IMB_CIPHER_CNTR == job->cipher_mode) { - return SUBMIT_JOB_AES_CNTR(job); - } else if (IMB_CIPHER_CNTR_BITLEN == job->cipher_mode) { - return SUBMIT_JOB_AES_CNTR_BIT(job); - } else if (IMB_CIPHER_ECB == job->cipher_mode) { - if (16 == 
job->key_len_in_bytes) { - return SUBMIT_JOB_AES_ECB_128_ENC(job); - } else if (24 == job->key_len_in_bytes) { - return SUBMIT_JOB_AES_ECB_192_ENC(job); - } else { /* assume 32 */ - return SUBMIT_JOB_AES_ECB_256_ENC(job); - } - } else if (IMB_CIPHER_DOCSIS_SEC_BPI == job->cipher_mode) { - return submit_docsis_enc_job(state, job); - } else if (IMB_CIPHER_PON_AES_CNTR == job->cipher_mode) { - if (job->msg_len_to_cipher_in_bytes == 0) - return SUBMIT_JOB_PON_ENC_NO_CTR(job); - else - return SUBMIT_JOB_PON_ENC(job); - } else if (IMB_CIPHER_GCM == job->cipher_mode) { - return SUBMIT_JOB_AES_GCM_ENC(state, job); - } else if (IMB_CIPHER_GCM_SGL == job->cipher_mode) { - return submit_gcm_sgl_enc(state, job); - } else if (IMB_CIPHER_CUSTOM == job->cipher_mode) { - return SUBMIT_JOB_CUSTOM_CIPHER(job); - } else if (IMB_CIPHER_DES == job->cipher_mode) { -#ifdef SUBMIT_JOB_DES_CBC_ENC - MB_MGR_DES_OOO *des_enc_ooo = state->des_enc_ooo; - - return SUBMIT_JOB_DES_CBC_ENC(des_enc_ooo, job); -#else - return DES_CBC_ENC(job); -#endif /* SUBMIT_JOB_DES_CBC_ENC */ - } else if (IMB_CIPHER_CHACHA20 == job->cipher_mode) { - return SUBMIT_JOB_CHACHA20_ENC_DEC(job); - } else if (IMB_CIPHER_CHACHA20_POLY1305 == job->cipher_mode) { - return SUBMIT_JOB_CHACHA20_POLY1305(state, job); - } else if (IMB_CIPHER_CHACHA20_POLY1305_SGL == job->cipher_mode) { - return SUBMIT_JOB_CHACHA20_POLY1305_SGL(state, job); - } else if (IMB_CIPHER_DOCSIS_DES == job->cipher_mode) { -#ifdef SUBMIT_JOB_DOCSIS_DES_ENC - MB_MGR_DES_OOO *docsis_des_enc_ooo = state->docsis_des_enc_ooo; - - return SUBMIT_JOB_DOCSIS_DES_ENC(docsis_des_enc_ooo, - job); -#else - return DOCSIS_DES_ENC(job); -#endif /* SUBMIT_JOB_DOCSIS_DES_ENC */ - } else if (IMB_CIPHER_DES3 == job->cipher_mode) { -#ifdef SUBMIT_JOB_3DES_CBC_ENC - MB_MGR_DES_OOO *des3_enc_ooo = state->des3_enc_ooo; - - return SUBMIT_JOB_3DES_CBC_ENC(des3_enc_ooo, job); -#else - return DES3_CBC_ENC(job); -#endif - } else if (IMB_CIPHER_CCM == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - return AES_CNTR_CCM_128(job); - } else { /* assume 32 */ - return AES_CNTR_CCM_256(job); - } - } else if (IMB_CIPHER_ZUC_EEA3 == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - MB_MGR_ZUC_OOO *zuc_eea3_ooo = state->zuc_eea3_ooo; - - return SUBMIT_JOB_ZUC_EEA3(zuc_eea3_ooo, job); - } else { /* assume 32 */ - MB_MGR_ZUC_OOO *zuc256_eea3_ooo = - state->zuc256_eea3_ooo; - - return SUBMIT_JOB_ZUC256_EEA3(zuc256_eea3_ooo, job); - } - } else if (IMB_CIPHER_SNOW3G_UEA2_BITLEN == job->cipher_mode) { -#ifdef SUBMIT_JOB_SNOW3G_UEA2 - return SUBMIT_JOB_SNOW3G_UEA2(state, job); -#else - return def_submit_snow3g_uea2_job(state, job); -#endif - } else if (IMB_CIPHER_KASUMI_UEA1_BITLEN == job->cipher_mode) { - return submit_kasumi_uea1_job(state, job); - } else if (IMB_CIPHER_CBCS_1_9 == job->cipher_mode) { - MB_MGR_AES_OOO *aes128_cbcs_ooo = state->aes128_cbcs_ooo; - - return SUBMIT_JOB_AES128_CBCS_1_9_ENC(aes128_cbcs_ooo, job); - } else if (IMB_CIPHER_SNOW_V == job->cipher_mode) { - return SUBMIT_JOB_SNOW_V(job); - } else if (IMB_CIPHER_SNOW_V_AEAD == job->cipher_mode) { - return submit_snow_v_aead_job(state, job); - } else { /* assume IMB_CIPHER_NULL */ - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; - } -} - -__forceinline -IMB_JOB * -FLUSH_JOB_AES_ENC(IMB_MGR *state, IMB_JOB *job) -{ - if (IMB_CIPHER_CBC == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - MB_MGR_AES_OOO *aes128_ooo = state->aes128_ooo; - - return FLUSH_JOB_AES128_ENC(aes128_ooo); - } else if (24 == 
job->key_len_in_bytes) { - MB_MGR_AES_OOO *aes192_ooo = state->aes192_ooo; - - return FLUSH_JOB_AES192_ENC(aes192_ooo); - } else { /* assume 32 */ - MB_MGR_AES_OOO *aes256_ooo = state->aes256_ooo; - - return FLUSH_JOB_AES256_ENC(aes256_ooo); - } - } else if (IMB_CIPHER_DOCSIS_SEC_BPI == job->cipher_mode) { - return flush_docsis_enc_job(state, job); -#ifdef FLUSH_JOB_DES_CBC_ENC - } else if (IMB_CIPHER_DES == job->cipher_mode) { - MB_MGR_DES_OOO *des_enc_ooo = state->des_enc_ooo; - - return FLUSH_JOB_DES_CBC_ENC(des_enc_ooo); -#endif /* FLUSH_JOB_DES_CBC_ENC */ -#ifdef FLUSH_JOB_3DES_CBC_ENC - } else if (IMB_CIPHER_DES3 == job->cipher_mode) { - MB_MGR_DES_OOO *des3_enc_ooo = state->des3_enc_ooo; - - return FLUSH_JOB_3DES_CBC_ENC(des3_enc_ooo); -#endif /* FLUSH_JOB_3DES_CBC_ENC */ -#ifdef FLUSH_JOB_DOCSIS_DES_ENC - } else if (IMB_CIPHER_DOCSIS_DES == job->cipher_mode) { - MB_MGR_DES_OOO *docsis_des_enc_ooo = state->docsis_des_enc_ooo; - - return FLUSH_JOB_DOCSIS_DES_ENC(docsis_des_enc_ooo); -#endif /* FLUSH_JOB_DOCSIS_DES_ENC */ - } else if (IMB_CIPHER_CUSTOM == job->cipher_mode) { - return FLUSH_JOB_CUSTOM_CIPHER(job); - } else if (IMB_CIPHER_ZUC_EEA3 == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - MB_MGR_ZUC_OOO *zuc_eea3_ooo = state->zuc_eea3_ooo; - - return FLUSH_JOB_ZUC_EEA3(zuc_eea3_ooo); - } else { /* assume 32 */ - MB_MGR_ZUC_OOO *zuc256_eea3_ooo = - state->zuc256_eea3_ooo; - - return FLUSH_JOB_ZUC256_EEA3(zuc256_eea3_ooo); - } - } else if (IMB_CIPHER_CBCS_1_9 == job->cipher_mode) { - MB_MGR_AES_OOO *aes128_cbcs_ooo = state->aes128_cbcs_ooo; - - return FLUSH_JOB_AES128_CBCS_1_9_ENC(aes128_cbcs_ooo); -#ifdef FLUSH_JOB_SNOW3G_UEA2 - } else if (IMB_CIPHER_SNOW3G_UEA2_BITLEN == job->cipher_mode) { - return FLUSH_JOB_SNOW3G_UEA2(state); -#endif - /** - * assume IMB_CIPHER_CNTR/CNTR_BITLEN, IMB_CIPHER_ECB, - * IMB_CIPHER_CCM, IMB_CIPHER_NULL or IMB_CIPHER_GCM - */ - } else { - return NULL; - } -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_AES_DEC(IMB_MGR *state, IMB_JOB *job) +__forceinline uint32_t +get_queue_sz(IMB_MGR *state) { - if (IMB_CIPHER_CBC == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - return SUBMIT_JOB_AES128_DEC(job); - } else if (24 == job->key_len_in_bytes) { - return SUBMIT_JOB_AES192_DEC(job); - } else { /* assume 32 */ - return SUBMIT_JOB_AES256_DEC(job); - } - } else if (IMB_CIPHER_CNTR == job->cipher_mode) { - return SUBMIT_JOB_AES_CNTR(job); - } else if (IMB_CIPHER_CNTR_BITLEN == job->cipher_mode) { - return SUBMIT_JOB_AES_CNTR_BIT(job); - } else if (IMB_CIPHER_ECB == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - return SUBMIT_JOB_AES_ECB_128_DEC(job); - } else if (24 == job->key_len_in_bytes) { - return SUBMIT_JOB_AES_ECB_192_DEC(job); - } else { /* assume 32 */ - return SUBMIT_JOB_AES_ECB_256_DEC(job); - } - } else if (IMB_CIPHER_DOCSIS_SEC_BPI == job->cipher_mode) { - return submit_docsis_dec_job(state, job); - } else if (IMB_CIPHER_PON_AES_CNTR == job->cipher_mode) { - if (job->msg_len_to_cipher_in_bytes == 0) - return SUBMIT_JOB_PON_DEC_NO_CTR(job); - else - return SUBMIT_JOB_PON_DEC(job); - } else if (IMB_CIPHER_GCM == job->cipher_mode) { - return SUBMIT_JOB_AES_GCM_DEC(state, job); - } else if (IMB_CIPHER_GCM_SGL == job->cipher_mode) { - return submit_gcm_sgl_dec(state, job); - } else if (IMB_CIPHER_DES == job->cipher_mode) { -#ifdef SUBMIT_JOB_DES_CBC_DEC - MB_MGR_DES_OOO *des_dec_ooo = state->des_dec_ooo; - - return SUBMIT_JOB_DES_CBC_DEC(des_dec_ooo, job); -#else - (void) state; - return DES_CBC_DEC(job); -#endif /* 
SUBMIT_JOB_DES_CBC_DEC */ - } else if (IMB_CIPHER_CHACHA20 == job->cipher_mode) { - return SUBMIT_JOB_CHACHA20_ENC_DEC(job); - } else if (IMB_CIPHER_CHACHA20_POLY1305 == job->cipher_mode) { - return SUBMIT_JOB_CHACHA20_POLY1305(state, job); - } else if (IMB_CIPHER_CHACHA20_POLY1305_SGL == job->cipher_mode) { - return SUBMIT_JOB_CHACHA20_POLY1305_SGL(state, job); - } else if (IMB_CIPHER_DOCSIS_DES == job->cipher_mode) { -#ifdef SUBMIT_JOB_DOCSIS_DES_DEC - MB_MGR_DES_OOO *docsis_des_dec_ooo = state->docsis_des_dec_ooo; - - return SUBMIT_JOB_DOCSIS_DES_DEC(docsis_des_dec_ooo, - job); -#else - return DOCSIS_DES_DEC(job); -#endif /* SUBMIT_JOB_DOCSIS_DES_DEC */ - } else if (IMB_CIPHER_DES3 == job->cipher_mode) { -#ifdef SUBMIT_JOB_3DES_CBC_DEC - MB_MGR_DES_OOO *des3_dec_ooo = state->des3_dec_ooo; - - return SUBMIT_JOB_3DES_CBC_DEC(des3_dec_ooo, job); -#else - return DES3_CBC_DEC(job); -#endif - } else if (IMB_CIPHER_CUSTOM == job->cipher_mode) { - return SUBMIT_JOB_CUSTOM_CIPHER(job); - } else if (IMB_CIPHER_CCM == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - return AES_CNTR_CCM_128(job); - } else { /* assume 32 */ - return AES_CNTR_CCM_256(job); - } - } else if (IMB_CIPHER_ZUC_EEA3 == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - MB_MGR_ZUC_OOO *zuc_eea3_ooo = state->zuc_eea3_ooo; + const int a = (state->next_job - state->earliest_job) / sizeof(IMB_JOB); - return SUBMIT_JOB_ZUC_EEA3(zuc_eea3_ooo, job); - } else { /* assume 32 */ - MB_MGR_ZUC_OOO *zuc256_eea3_ooo = - state->zuc256_eea3_ooo; - - return SUBMIT_JOB_ZUC256_EEA3(zuc256_eea3_ooo, job); - } - } else if (IMB_CIPHER_SNOW3G_UEA2_BITLEN == job->cipher_mode) { -#ifdef SUBMIT_JOB_SNOW3G_UEA2 - return SUBMIT_JOB_SNOW3G_UEA2(state, job); -#else - return def_submit_snow3g_uea2_job(state, job); -#endif - } else if (IMB_CIPHER_KASUMI_UEA1_BITLEN == job->cipher_mode) { - return submit_kasumi_uea1_job(state, job); - } else if (IMB_CIPHER_CBCS_1_9 == job->cipher_mode) { - return SUBMIT_JOB_AES128_CBCS_1_9_DEC(job); - } else if (IMB_CIPHER_SNOW_V == job->cipher_mode) { - return SUBMIT_JOB_SNOW_V(job); - } else if (IMB_CIPHER_SNOW_V_AEAD == job->cipher_mode) { - return submit_snow_v_aead_job(state, job); - } else { - /* assume IMB_CIPHER_NULL */ - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; - } + return a & (IMB_MAX_JOBS-1); } -__forceinline -IMB_JOB * -FLUSH_JOB_AES_DEC(IMB_MGR *state, IMB_JOB *job) +__forceinline uint32_t +queue_sz(IMB_MGR *state) { -#ifdef FLUSH_JOB_SNOW3G_UEA2 - if (IMB_CIPHER_SNOW3G_UEA2_BITLEN == job->cipher_mode) - return FLUSH_JOB_SNOW3G_UEA2(state); -#endif - -#ifdef FLUSH_JOB_DES_CBC_DEC - if (IMB_CIPHER_DES == job->cipher_mode) { - MB_MGR_DES_OOO *des_dec_ooo = state->des_dec_ooo; - - return FLUSH_JOB_DES_CBC_DEC(des_dec_ooo); - } -#endif /* FLUSH_JOB_DES_CBC_DEC */ - -#ifdef FLUSH_JOB_3DES_CBC_DEC - if (IMB_CIPHER_DES3 == job->cipher_mode) { - MB_MGR_DES_OOO *des3_dec_ooo = state->des3_dec_ooo; - - return FLUSH_JOB_3DES_CBC_DEC(des3_dec_ooo); - } -#endif /* FLUSH_JOB_3DES_CBC_DEC */ - -#ifdef FLUSH_JOB_DOCSIS_DES_DEC - - if (IMB_CIPHER_DOCSIS_DES == job->cipher_mode) { - MB_MGR_DES_OOO *docsis_des_dec_ooo = state->docsis_des_dec_ooo; - - return FLUSH_JOB_DOCSIS_DES_DEC(docsis_des_dec_ooo); - } -#endif /* FLUSH_JOB_DOCSIS_DES_DEC */ - - if (IMB_CIPHER_ZUC_EEA3 == job->cipher_mode) { - if (16 == job->key_len_in_bytes) { - MB_MGR_ZUC_OOO *zuc_eea3_ooo = state->zuc_eea3_ooo; - - return FLUSH_JOB_ZUC_EEA3(zuc_eea3_ooo); - } else { /* assume 32 */ - MB_MGR_ZUC_OOO *zuc256_eea3_ooo = - 
state->zuc256_eea3_ooo; - - return FLUSH_JOB_ZUC256_EEA3(zuc256_eea3_ooo); - } - } + if (state->earliest_job < 0) + return 0; - return NULL; + return get_queue_sz(state); } /* ========================================================================= */ -/* Hash submit & flush functions */ -/* ========================================================================= */ - -__forceinline -void -process_gmac(IMB_MGR *state, IMB_JOB *job, const IMB_KEY_SIZE_BYTES key_size) -{ - struct gcm_context_data ctx; - const struct gcm_key_data *key = job->u.GMAC._key; - const uint8_t *iv = job->u.GMAC._iv; - const uint64_t iv_len = job->u.GMAC.iv_len_in_bytes; - const uint8_t *src = job->src + job->hash_start_src_offset_in_bytes; - const uint64_t src_len = job->msg_len_to_hash_in_bytes; - - if (key_size == IMB_KEY_128_BYTES) { - IMB_AES128_GMAC_INIT(state, key, &ctx, iv, iv_len); - IMB_AES128_GMAC_UPDATE(state, key, &ctx, src, src_len); - IMB_AES128_GMAC_FINALIZE(state, key, &ctx, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - } else if (key_size == IMB_KEY_192_BYTES) { - IMB_AES192_GMAC_INIT(state, key, &ctx, iv, iv_len); - IMB_AES192_GMAC_UPDATE(state, key, &ctx, src, src_len); - IMB_AES192_GMAC_FINALIZE(state, key, &ctx, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - } else { /* key_size == 256 */ - IMB_AES256_GMAC_INIT(state, key, &ctx, iv, iv_len); - IMB_AES256_GMAC_UPDATE(state, key, &ctx, src, src_len); - IMB_AES256_GMAC_FINALIZE(state, key, &ctx, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - } -} - -__forceinline -IMB_JOB * -SUBMIT_JOB_HASH(IMB_MGR *state, IMB_JOB *job) -{ - MB_MGR_HMAC_SHA_1_OOO *hmac_sha_1_ooo = state->hmac_sha_1_ooo; - MB_MGR_HMAC_SHA_256_OOO *hmac_sha_224_ooo = state->hmac_sha_224_ooo; - MB_MGR_HMAC_SHA_256_OOO *hmac_sha_256_ooo = state->hmac_sha_256_ooo; - MB_MGR_HMAC_SHA_512_OOO *hmac_sha_384_ooo = state->hmac_sha_384_ooo; - MB_MGR_HMAC_SHA_512_OOO *hmac_sha_512_ooo = state->hmac_sha_512_ooo; - MB_MGR_HMAC_MD5_OOO *hmac_md5_ooo = state->hmac_md5_ooo; - MB_MGR_AES_XCBC_OOO *aes_xcbc_ooo = state->aes_xcbc_ooo; - MB_MGR_CCM_OOO *aes_ccm_ooo = state->aes_ccm_ooo; - MB_MGR_CCM_OOO *aes256_ccm_ooo = state->aes256_ccm_ooo; - MB_MGR_CMAC_OOO *aes_cmac_ooo = state->aes_cmac_ooo; - MB_MGR_CMAC_OOO *aes256_cmac_ooo = state->aes256_cmac_ooo; - MB_MGR_ZUC_OOO *zuc_eia3_ooo = state->zuc_eia3_ooo; - MB_MGR_ZUC_OOO *zuc256_eia3_ooo = state->zuc256_eia3_ooo; -#ifdef AVX512 - MB_MGR_SNOW3G_OOO *snow3g_uia2_ooo = state->snow3g_uia2_ooo; -#endif - - - switch (job->hash_alg) { - case IMB_AUTH_HMAC_SHA_1: -#ifdef HASH_USE_SHAEXT - if (state->features & IMB_FEATURE_SHANI) - return SUBMIT_JOB_HMAC_NI(hmac_sha_1_ooo, job); -#endif - return SUBMIT_JOB_HMAC(hmac_sha_1_ooo, job); - case IMB_AUTH_HMAC_SHA_224: -#ifdef HASH_USE_SHAEXT - if (state->features & IMB_FEATURE_SHANI) - return SUBMIT_JOB_HMAC_SHA_224_NI - (hmac_sha_224_ooo, job); -#endif - return SUBMIT_JOB_HMAC_SHA_224(hmac_sha_224_ooo, job); - case IMB_AUTH_HMAC_SHA_256: -#ifdef HASH_USE_SHAEXT - if (state->features & IMB_FEATURE_SHANI) - return SUBMIT_JOB_HMAC_SHA_256_NI - (hmac_sha_256_ooo, job); -#endif - return SUBMIT_JOB_HMAC_SHA_256(hmac_sha_256_ooo, job); - case IMB_AUTH_HMAC_SHA_384: - return SUBMIT_JOB_HMAC_SHA_384(hmac_sha_384_ooo, job); - case IMB_AUTH_HMAC_SHA_512: - return SUBMIT_JOB_HMAC_SHA_512(hmac_sha_512_ooo, job); - case IMB_AUTH_AES_XCBC: - return SUBMIT_JOB_AES_XCBC(aes_xcbc_ooo, job); - case IMB_AUTH_MD5: - return SUBMIT_JOB_HMAC_MD5(hmac_md5_ooo, job); - case 
IMB_AUTH_CUSTOM: - return SUBMIT_JOB_CUSTOM_HASH(job); - case IMB_AUTH_AES_CCM: - if (16 == job->key_len_in_bytes) { - return SUBMIT_JOB_AES128_CCM_AUTH(aes_ccm_ooo, job); - } else { /* assume 32 */ - return SUBMIT_JOB_AES256_CCM_AUTH(aes256_ccm_ooo, job); - } - case IMB_AUTH_AES_CMAC: - /* - * CMAC OOO MGR assumes job len in bits - * (for CMAC length is provided in bytes) - */ - job->msg_len_to_hash_in_bits = - job->msg_len_to_hash_in_bytes * 8; - return SUBMIT_JOB_AES128_CMAC_AUTH(aes_cmac_ooo, job); - case IMB_AUTH_AES_CMAC_BITLEN: - return SUBMIT_JOB_AES128_CMAC_AUTH(aes_cmac_ooo, job); - case IMB_AUTH_AES_CMAC_256: - job->msg_len_to_hash_in_bits = - job->msg_len_to_hash_in_bytes * 8; - return SUBMIT_JOB_AES256_CMAC_AUTH(aes256_cmac_ooo, job); - case IMB_AUTH_SHA_1: - IMB_SHA1(state, - job->src + job->hash_start_src_offset_in_bytes, - job->msg_len_to_hash_in_bytes, job->auth_tag_output); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_SHA_224: - IMB_SHA224(state, - job->src + job->hash_start_src_offset_in_bytes, - job->msg_len_to_hash_in_bytes, job->auth_tag_output); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_SHA_256: - IMB_SHA256(state, - job->src + job->hash_start_src_offset_in_bytes, - job->msg_len_to_hash_in_bytes, job->auth_tag_output); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_SHA_384: - IMB_SHA384(state, - job->src + job->hash_start_src_offset_in_bytes, - job->msg_len_to_hash_in_bytes, job->auth_tag_output); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_SHA_512: - IMB_SHA512(state, - job->src + job->hash_start_src_offset_in_bytes, - job->msg_len_to_hash_in_bytes, job->auth_tag_output); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_ZUC_EIA3_BITLEN: - return SUBMIT_JOB_ZUC_EIA3(zuc_eia3_ooo, job); - case IMB_AUTH_ZUC256_EIA3_BITLEN: - return SUBMIT_JOB_ZUC256_EIA3(zuc256_eia3_ooo, job, - job->auth_tag_output_len_in_bytes); - case IMB_AUTH_SNOW3G_UIA2_BITLEN: -#ifdef AVX512 - return SUBMIT_JOB_SNOW3G_UIA2(snow3g_uia2_ooo, job); -#else - IMB_SNOW3G_F9_1_BUFFER(state, (const snow3g_key_schedule_t *) - job->u.SNOW3G_UIA2._key, - job->u.SNOW3G_UIA2._iv, - job->src + job->hash_start_src_offset_in_bytes, - job->msg_len_to_hash_in_bits, - job->auth_tag_output); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; -#endif - case IMB_AUTH_KASUMI_UIA1: - IMB_KASUMI_F9_1_BUFFER(state, (const kasumi_key_sched_t *) - job->u.KASUMI_UIA1._key, - job->src + job->hash_start_src_offset_in_bytes, - (const uint32_t) job->msg_len_to_hash_in_bytes, - job->auth_tag_output); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_AES_GMAC_128: - process_gmac(state, job, IMB_KEY_128_BYTES); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_AES_GMAC_192: - process_gmac(state, job, IMB_KEY_192_BYTES); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_AES_GMAC_256: - process_gmac(state, job, IMB_KEY_256_BYTES); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_POLY1305: - POLY1305_MAC(job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC32_ETHERNET_FCS: - CRC(IMB_CRC32_ETHERNET_FCS, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC32_SCTP: - CRC(IMB_CRC32_SCTP, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC32_WIMAX_OFDMA_DATA: - CRC(IMB_CRC32_WIMAX_OFDMA_DATA, 
state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC24_LTE_A: - CRC(IMB_CRC24_LTE_A, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC24_LTE_B: - CRC(IMB_CRC24_LTE_B, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC16_X25: - CRC(IMB_CRC16_X25, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC16_FP_DATA: - CRC(IMB_CRC16_FP_DATA, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC11_FP_HEADER: - CRC(IMB_CRC11_FP_HEADER, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC10_IUUP_DATA: - CRC(IMB_CRC10_IUUP_DATA, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC8_WIMAX_OFDMA_HCS: - CRC(IMB_CRC8_WIMAX_OFDMA_HCS, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC7_FP_HEADER: - CRC(IMB_CRC7_FP_HEADER, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - case IMB_AUTH_CRC6_IUUP_HEADER: - CRC(IMB_CRC6_IUUP_HEADER, state, job); - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - /** - * assume IMB_AUTH_GCM, IMB_AUTH_PON_CRC_BIP, - * IMB_AUTH_SNOW_V_AEAD or IMB_AUTH_NULL - */ - default: - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - } -} - -__forceinline -IMB_JOB * -FLUSH_JOB_HASH(IMB_MGR *state, IMB_JOB *job) -{ - MB_MGR_HMAC_SHA_1_OOO *hmac_sha_1_ooo = state->hmac_sha_1_ooo; - MB_MGR_HMAC_SHA_256_OOO *hmac_sha_224_ooo = state->hmac_sha_224_ooo; - MB_MGR_HMAC_SHA_256_OOO *hmac_sha_256_ooo = state->hmac_sha_256_ooo; - MB_MGR_HMAC_SHA_512_OOO *hmac_sha_384_ooo = state->hmac_sha_384_ooo; - MB_MGR_HMAC_SHA_512_OOO *hmac_sha_512_ooo = state->hmac_sha_512_ooo; - MB_MGR_HMAC_MD5_OOO *hmac_md5_ooo = state->hmac_md5_ooo; - MB_MGR_AES_XCBC_OOO *aes_xcbc_ooo = state->aes_xcbc_ooo; - MB_MGR_CCM_OOO *aes_ccm_ooo = state->aes_ccm_ooo; - MB_MGR_CCM_OOO *aes256_ccm_ooo = state->aes256_ccm_ooo; - MB_MGR_CMAC_OOO *aes_cmac_ooo = state->aes_cmac_ooo; - MB_MGR_CMAC_OOO *aes256_cmac_ooo = state->aes256_cmac_ooo; - MB_MGR_ZUC_OOO *zuc_eia3_ooo = state->zuc_eia3_ooo; - MB_MGR_ZUC_OOO *zuc256_eia3_ooo = state->zuc256_eia3_ooo; -#ifdef AVX512 - MB_MGR_SNOW3G_OOO *snow3g_uia2_ooo = state->snow3g_uia2_ooo; -#endif - - switch (job->hash_alg) { - case IMB_AUTH_HMAC_SHA_1: -#ifdef HASH_USE_SHAEXT - if (state->features & IMB_FEATURE_SHANI) - return FLUSH_JOB_HMAC_NI(hmac_sha_1_ooo); -#endif - return FLUSH_JOB_HMAC(hmac_sha_1_ooo); - case IMB_AUTH_HMAC_SHA_224: -#ifdef HASH_USE_SHAEXT - if (state->features & IMB_FEATURE_SHANI) - return FLUSH_JOB_HMAC_SHA_224_NI - (hmac_sha_224_ooo); -#endif - return FLUSH_JOB_HMAC_SHA_224(hmac_sha_224_ooo); - case IMB_AUTH_HMAC_SHA_256: -#ifdef HASH_USE_SHAEXT - if (state->features & IMB_FEATURE_SHANI) - return FLUSH_JOB_HMAC_SHA_256_NI - (hmac_sha_256_ooo); -#endif - return FLUSH_JOB_HMAC_SHA_256(hmac_sha_256_ooo); - case IMB_AUTH_HMAC_SHA_384: - return FLUSH_JOB_HMAC_SHA_384(hmac_sha_384_ooo); - case IMB_AUTH_HMAC_SHA_512: - return FLUSH_JOB_HMAC_SHA_512(hmac_sha_512_ooo); - case IMB_AUTH_AES_XCBC: - return FLUSH_JOB_AES_XCBC(aes_xcbc_ooo); - case IMB_AUTH_MD5: - return FLUSH_JOB_HMAC_MD5(hmac_md5_ooo); - case IMB_AUTH_CUSTOM: - return FLUSH_JOB_CUSTOM_HASH(job); - case IMB_AUTH_AES_CCM: - if (16 == job->key_len_in_bytes) { - return FLUSH_JOB_AES128_CCM_AUTH(aes_ccm_ooo); - } else { /* assume 32 */ - return 
FLUSH_JOB_AES256_CCM_AUTH(aes256_ccm_ooo); - } - case IMB_AUTH_AES_CMAC: - case IMB_AUTH_AES_CMAC_BITLEN: - return FLUSH_JOB_AES128_CMAC_AUTH(aes_cmac_ooo); - case IMB_AUTH_AES_CMAC_256: - return FLUSH_JOB_AES256_CMAC_AUTH(aes256_cmac_ooo); - case IMB_AUTH_ZUC_EIA3_BITLEN: - return FLUSH_JOB_ZUC_EIA3(zuc_eia3_ooo); - case IMB_AUTH_ZUC256_EIA3_BITLEN: - return FLUSH_JOB_ZUC256_EIA3(zuc256_eia3_ooo, - job->auth_tag_output_len_in_bytes); -#ifdef AVX512 - case IMB_AUTH_SNOW3G_UIA2_BITLEN: - return FLUSH_JOB_SNOW3G_UIA2(snow3g_uia2_ooo); -#endif - default: /* assume GCM or IMB_AUTH_NULL */ - if (!(job->status & IMB_STATUS_COMPLETED_AUTH)) { - job->status |= IMB_STATUS_COMPLETED_AUTH; - return job; - } - /* if HMAC is complete then return NULL */ - return NULL; - } -} - +/* + * Implements: + * GET_NEXT_JOB + * GET_COMPLETED_JOB + * QUEUE_SIZE + * FLUSH_JOB + * SUBMIT_JOB_NOCHECK + * SUBMIT_JOB + */ +#include "include/mb_mgr_job_api.h" /* JOB API */ /* ========================================================================= */ -/* Job submit & flush functions */ -/* ========================================================================= */ - -/* GCM NIST standard: len(M) < 2^39 - 256 */ -#define GCM_MAX_LEN UINT64_C(((1ULL << 39) - 256) - 1) -#define SNOW3G_MAX_BITLEN (UINT32_MAX) -#define MB_MAX_LEN16 ((1 << 16) - 2) - -__forceinline int -is_job_invalid(IMB_MGR *state, const IMB_JOB *job) -{ - const uint64_t auth_tag_len_fips[] = { - 0, /* INVALID selection */ - 20, /* IMB_AUTH_HMAC_SHA_1 */ - 28, /* IMB_AUTH_HMAC_SHA_224 */ - 32, /* IMB_AUTH_HMAC_SHA_256 */ - 48, /* IMB_AUTH_HMAC_SHA_384 */ - 64, /* IMB_AUTH_HMAC_SHA_512 */ - 12, /* IMB_AUTH_AES_XCBC */ - 16, /* IMB_AUTH_MD5 */ - 0, /* IMB_AUTH_NULL */ - 16, /* IMB_AUTH_AES_GMAC */ - 0, /* IMB_AUTH_CUSTOM */ - 0, /* IMB_AUTH_AES_CCM */ - 16, /* IMB_AUTH_AES_CMAC */ - 20, /* IMB_AUTH_SHA_1 */ - 28, /* IMB_AUTH_SHA_224 */ - 32, /* IMB_AUTH_SHA_256 */ - 48, /* IMB_AUTH_SHA_384 */ - 64, /* IMB_AUTH_SHA_512 */ - 4, /* IMB_AUTH_AES_CMAC 3GPP */ - 8, /* IMB_AUTH_PON_CRC_BIP */ - 4, /* IMB_AUTH_ZUC_EIA3_BITLEN */ - 4, /* IMB_AUTH_DOCSIS_CRC32 */ - 4, /* IMB_AUTH_SNOW3G_UIA2_BITLEN */ - 4, /* IMB_AUTH_KASUMI_UIA1 */ - 16, /* IMB_AUTH_AES_GMAC_128 */ - 16, /* IMB_AUTH_AES_GMAC_192 */ - 16, /* IMB_AUTH_AES_GMAC_256 */ - 16, /* IMB_AUTH_AES_CMAC_256 */ - 16, /* IMB_AUTH_POLY1305 */ - 16, /* IMB_AUTH_CHACHA_POLY1305 */ - 16, /* IMB_AUTH_CHACHA_POLY1305_SGL */ - 4, /* IMB_AUTH_ZUC256_EIA3_BITLEN */ - 16, /* IMB_AUTH_SNOW_V_AEAD */ - 16, /* IMB_AUTH_AES_GCM_SGL */ - 4, /* IMB_AUTH_CRC32_ETHERNET_FCS */ - 4, /* IMB_AUTH_CRC32_SCTP */ - 4, /* IMB_AUTH_CRC32_WIMAX_OFDMA_DATA */ - 4, /* IMB_AUTH_CRC24_LTE_A */ - 4, /* IMB_AUTH_CRC24_LTE_B */ - 4, /* IMB_AUTH_CRC16_X25 */ - 4, /* IMB_AUTH_CRC16_FP_DATA */ - 4, /* IMB_AUTH_CRC11_FP_HEADER */ - 4, /* IMB_AUTH_CRC10_IUUP_DATA */ - 4, /* IMB_AUTH_CRC8_WIMAX_OFDMA_HCS */ - 4, /* IMB_AUTH_CRC7_FP_HEADER */ - 4, /* IMB_AUTH_CRC6_IUUP_HEADER */ - }; - const uint64_t auth_tag_len_ipsec[] = { - 0, /* INVALID selection */ - 12, /* IMB_AUTH_HMAC_SHA_1 */ - 14, /* IMB_AUTH_HMAC_SHA_224 */ - 16, /* IMB_AUTH_HMAC_SHA_256 */ - 24, /* IMB_AUTH_HMAC_SHA_384 */ - 32, /* IMB_AUTH_HMAC_SHA_512 */ - 12, /* IMB_AUTH_AES_XCBC */ - 12, /* IMB_AUTH_MD5 */ - 0, /* IMB_AUTH_NULL */ - 16, /* IMB_AUTH_AES_GMAC */ - 0, /* IMB_AUTH_CUSTOM */ - 0, /* IMB_AUTH_AES_CCM */ - 16, /* IMB_AUTH_AES_CMAC */ - 20, /* IMB_AUTH_SHA_1 */ - 28, /* IMB_AUTH_SHA_224 */ - 32, /* IMB_AUTH_SHA_256 */ - 48, /* IMB_AUTH_SHA_384 */ - 64, /* IMB_AUTH_SHA_512 */ - 
4, /* IMB_AUTH_AES_CMAC 3GPP */ - 8, /* IMB_AUTH_PON_CRC_BIP */ - 4, /* IMB_AUTH_ZUC_EIA3_BITLEN */ - 4, /* IMB_AUTH_DOCSIS_CRC32 */ - 4, /* IMB_AUTH_SNOW3G_UIA2_BITLEN */ - 4, /* IMB_AUTH_KASUMI_UIA1 */ - 16, /* IMB_AUTH_AES_GMAC_128 */ - 16, /* IMB_AUTH_AES_GMAC_192 */ - 16, /* IMB_AUTH_AES_GMAC_256 */ - 16, /* IMB_AUTH_AES_CMAC_256 */ - 16, /* IMB_AUTH_POLY1305 */ - 16, /* IMB_AUTH_CHACHA_POLY1305 */ - 16, /* IMB_AUTH_CHACHA_POLY1305_SGL */ - 4, /* IMB_AUTH_ZUC256_EIA3_BITLEN */ - 16, /* IMB_AUTH_SNOW_V_AEAD */ - 16, /* IMB_AUTH_AES_GCM_SGL */ - 4, /* IMB_AUTH_CRC32_ETHERNET_FCS */ - 4, /* IMB_AUTH_CRC32_SCTP */ - 4, /* IMB_AUTH_CRC32_WIMAX_OFDMA_DATA */ - 4, /* IMB_AUTH_CRC24_LTE_A */ - 4, /* IMB_AUTH_CRC24_LTE_B */ - 4, /* IMB_AUTH_CRC16_X25 */ - 4, /* IMB_AUTH_CRC16_FP_DATA */ - 4, /* IMB_AUTH_CRC11_FP_HEADER */ - 4, /* IMB_AUTH_CRC10_IUUP_DATA */ - 4, /* IMB_AUTH_CRC8_WIMAX_OFDMA_HCS */ - 4, /* IMB_AUTH_CRC7_FP_HEADER */ - 4, /* IMB_AUTH_CRC6_IUUP_HEADER */ - }; - - /* Maximum length of buffer in PON is 2^14 + 8, since maximum - * PLI value is 2^14 - 1 + 1 extra byte of padding + 8 bytes - * of XGEM header */ - const uint64_t max_pon_len = (1 << 14) + 8; - - switch (job->cipher_mode) { - case IMB_CIPHER_CBC: - case IMB_CIPHER_CBCS_1_9: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->cipher_direction == IMB_DIR_ENCRYPT && - job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->cipher_direction == IMB_DIR_DECRYPT && - job->dec_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(16) && - job->key_len_in_bytes != UINT64_C(24) && - job->key_len_in_bytes != UINT64_C(32)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes == 0) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes & UINT64_C(15)) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->cipher_mode == IMB_CIPHER_CBCS_1_9) { - if (job->msg_len_to_cipher_in_bytes > - ((1ULL << (60)) - 1)) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->cipher_fields.CBCS.next_iv == NULL) { - imb_set_errno(state, - IMB_ERR_JOB_NULL_NEXT_IV); - return 1; - } - } else if (job->cipher_direction == IMB_DIR_ENCRYPT && - job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - break; - case IMB_CIPHER_ECB: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->cipher_direction == IMB_DIR_ENCRYPT && - job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->cipher_direction == IMB_DIR_DECRYPT && - job->dec_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(16) && - job->key_len_in_bytes != UINT64_C(24) && - job->key_len_in_bytes != UINT64_C(32)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes == 0 || - job->msg_len_to_cipher_in_bytes 
> MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes & UINT64_C(15)) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(0)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - break; - case IMB_CIPHER_CNTR: - case IMB_CIPHER_CNTR_BITLEN: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(16) && - job->key_len_in_bytes != UINT64_C(24) && - job->key_len_in_bytes != UINT64_C(32)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if ((job->cipher_mode == IMB_CIPHER_CNTR && - job->iv_len_in_bytes != UINT64_C(16) && - job->iv_len_in_bytes != UINT64_C(12)) || - (job->cipher_mode == IMB_CIPHER_CNTR_BITLEN && - job->iv_len_in_bytes != UINT64_C(16))) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - /* - * msg_len_to_cipher_in_bits is used with CNTR_BITLEN, but it is - * effectively the same field as msg_len_to_cipher_in_bytes, - * since it is part of the same union - */ - if (job->msg_len_to_cipher_in_bytes == 0) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - break; - case IMB_CIPHER_NULL: - /* - * No checks required for this mode - * @note NULL cipher doesn't perform memory copy operation - * from source to destination - */ - break; - case IMB_CIPHER_DOCSIS_SEC_BPI: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->enc_keys == NULL) { - /* it has to be set regardless of direction (AES-CFB) */ - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->cipher_direction == IMB_DIR_DECRYPT && - job->dec_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if ((job->key_len_in_bytes != UINT64_C(16)) && - (job->key_len_in_bytes != UINT64_C(32))) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - break; - case IMB_CIPHER_GCM: - case IMB_CIPHER_GCM_SGL: - if (job->msg_len_to_cipher_in_bytes > GCM_MAX_LEN) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes != 0 && job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->msg_len_to_cipher_in_bytes != 0 && job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - /* Same key structure used for encrypt and decrypt */ - if (job->cipher_direction == IMB_DIR_ENCRYPT && - job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->cipher_direction == IMB_DIR_DECRYPT && - job->dec_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(16) && - job->key_len_in_bytes 
!= UINT64_C(24) && - job->key_len_in_bytes != UINT64_C(32)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->iv_len_in_bytes == 0) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - if (job->cipher_mode == IMB_CIPHER_GCM && - job->hash_alg != IMB_AUTH_AES_GMAC) { - imb_set_errno(state, IMB_ERR_HASH_ALGO); - return 1; - } - if (job->cipher_mode == IMB_CIPHER_GCM_SGL && - job->hash_alg != IMB_AUTH_GCM_SGL) { - imb_set_errno(state, IMB_ERR_HASH_ALGO); - return 1; - } - break; - case IMB_CIPHER_CUSTOM: - /* no checks here */ - if (job->cipher_func == NULL) { - imb_set_errno(state, EFAULT); - return 1; - } - break; - case IMB_CIPHER_DES: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->cipher_direction == IMB_DIR_ENCRYPT && - job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->cipher_direction == IMB_DIR_DECRYPT && - job->dec_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(8)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes == 0 || - job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes & UINT64_C(7)) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(8)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - break; - case IMB_CIPHER_DOCSIS_DES: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->cipher_direction == IMB_DIR_ENCRYPT && - job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->cipher_direction == IMB_DIR_DECRYPT && - job->dec_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(8)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes == 0 || - job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(8)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - break; - case IMB_CIPHER_CCM: - if (job->msg_len_to_cipher_in_bytes != 0) { - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - } - if (job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->enc_keys == NULL) { - /* AES-CTR and CBC-MAC use only encryption keys */ - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - /* currently only AES-CCM-128 and AES-CCM-256 supported */ - if (job->key_len_in_bytes != UINT64_C(16) && - job->key_len_in_bytes != UINT64_C(32)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - /* - * From RFC3610: - * Nonce length = 15 - L - * Valid L values are: 2 
to 8 - * Then valid nonce lengths 13 to 7 (inclusive). - */ - if (job->iv_len_in_bytes > UINT64_C(13) || - job->iv_len_in_bytes < UINT64_C(7)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - if (job->hash_alg != IMB_AUTH_AES_CCM) { - imb_set_errno(state, IMB_ERR_HASH_ALGO); - return 1; - } - break; - case IMB_CIPHER_DES3: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(24)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes == 0 || - job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes & UINT64_C(7)) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(8)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - if (job->cipher_direction == IMB_DIR_ENCRYPT) { - const void * const *ks_ptr = - (const void * const *)job->enc_keys; - - if (ks_ptr == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (ks_ptr[0] == NULL || ks_ptr[1] == NULL || - ks_ptr[2] == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - } else { - const void * const *ks_ptr = - (const void * const *)job->dec_keys; - - if (ks_ptr == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (ks_ptr[0] == NULL || ks_ptr[1] == NULL || - ks_ptr[2] == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - } - break; - case IMB_CIPHER_PON_AES_CNTR: - /* - * CRC and cipher are done together. 
A few assumptions: - * - CRC and cipher start offsets are the same - * - last 4 bytes (32 bits) of the buffer is CRC - * - updated CRC value is put into the source buffer - * (encryption only) - * - CRC length is msg_len_to_cipher_in_bytes - 4 bytes - * - msg_len_to_cipher_in_bytes is aligned to 4 bytes - * - If msg_len_to_cipher_in_bytes is 0, IV and key pointers - * are not required, as encryption is not done - */ - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - - /* source and destination buffer pointers cannot be the same, - * as there are always 8 bytes that are not ciphered */ - if ((job->src + job->cipher_start_src_offset_in_bytes) - != job->dst) { - imb_set_errno(state, EINVAL); - return 1; - } - if (job->hash_alg != IMB_AUTH_PON_CRC_BIP) { - imb_set_errno(state, IMB_ERR_HASH_ALGO); - return 1; - } - /* - * If message length to cipher != 0, AES-CTR is performed and - * key and IV require to be set properly - */ - if (job->msg_len_to_cipher_in_bytes != UINT64_C(0)) { - - /* message size needs to be aligned to 4 bytes */ - if ((job->msg_len_to_cipher_in_bytes & 3) != 0) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - - /* Subtract 8 bytes to maximum length since - * XGEM header is not ciphered */ - if ((job->msg_len_to_cipher_in_bytes > - (max_pon_len - 8))) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - - if (job->key_len_in_bytes != UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - } - if (job->msg_len_to_cipher_in_bytes >= 4) { - const uint64_t xgem_hdr = *(const uint64_t *) - (job->src + - job->hash_start_src_offset_in_bytes); - - /* PLI is 14 MS bits of XGEM header */ - const uint16_t pli = BSWAP64(xgem_hdr) >> 50; - - /* CRC only if PLI is more than 4 bytes */ - if (pli > 4) { - const uint16_t crc_len = pli - 4; - - if (crc_len > - job->msg_len_to_cipher_in_bytes - 4) { - imb_set_errno(state, - IMB_ERR_JOB_PON_PLI); - return 1; - } - } - } - break; - case IMB_CIPHER_ZUC_EEA3: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(16) && - job->key_len_in_bytes != UINT64_C(32)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bytes == 0 || - job->msg_len_to_cipher_in_bytes > ZUC_MAX_BYTELEN) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->key_len_in_bytes == UINT64_C(16)) { - if (job->iv_len_in_bytes != UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - } else { - if (job->iv_len_in_bytes != UINT64_C(23) && - job->iv_len_in_bytes != UINT64_C(25)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - } - break; - case IMB_CIPHER_SNOW3G_UEA2_BITLEN: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - 
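As an aside on the XGEM header handling in the PON_AES_CNTR checks above: the PLI extraction can be reproduced in isolation. The sketch below is illustrative only (the helper name and sample bytes are not from the library) and mirrors the BSWAP64() >> 50 step, i.e. PLI being the 14 most significant bits of the big-endian 8-byte XGEM header.

#include <stdint.h>
#include <stdio.h>

/* Illustrative helper (not part of the library): extract the 14-bit PLI
 * from an XGEM header given as 8 bytes in wire (big-endian) order. */
static uint16_t xgem_pli(const uint8_t hdr[8])
{
        uint64_t be = 0;

        for (int i = 0; i < 8; i++)
                be = (be << 8) | hdr[i];        /* big-endian load */

        return (uint16_t) (be >> 50);           /* top 14 bits = PLI */
}

int main(void)
{
        /* made-up header with PLI = 40 */
        const uint8_t hdr[8] = { 0x00, 0xA0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };
        const uint16_t pli = xgem_pli(hdr);

        if (pli > 4)    /* CRC present only when PLI > 4, as checked above */
                printf("PLI=%u, CRC covers %u bytes\n", pli, pli - 4);
        return 0;
}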
imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bits == 0 || - job->msg_len_to_cipher_in_bits > SNOW3G_MAX_BITLEN) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - break; - case IMB_CIPHER_KASUMI_UEA1_BITLEN: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->msg_len_to_cipher_in_bits == 0 || - job->msg_len_to_cipher_in_bits > KASUMI_MAX_LEN) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(8)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - break; - case IMB_CIPHER_CHACHA20: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(32)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - /* Per RFC 7539, max cipher size is (2^32 - 1) x 64 */ - if (job->msg_len_to_cipher_in_bytes == 0 || - job->msg_len_to_cipher_in_bytes > ((1ULL << 38) - 64)) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(12)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - break; - case IMB_CIPHER_CHACHA20_POLY1305: - case IMB_CIPHER_CHACHA20_POLY1305_SGL: - if (job->msg_len_to_cipher_in_bytes != 0 && job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->msg_len_to_cipher_in_bytes != 0 && job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(32)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - /* Per RFC 7539, max cipher size is (2^32 - 1) x 64 */ - if (job->msg_len_to_cipher_in_bytes > ((1ULL << 38) - 64)) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(12)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - break; - case IMB_CIPHER_SNOW_V_AEAD: - case IMB_CIPHER_SNOW_V: - if (job->msg_len_to_cipher_in_bytes != 0 && job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->msg_len_to_cipher_in_bytes != 0 && job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if 
(job->enc_keys == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->key_len_in_bytes != UINT64_C(32)) { - imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); - return 1; - } - if (job->iv_len_in_bytes != UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - if (job->cipher_mode == IMB_CIPHER_SNOW_V_AEAD && - job->hash_alg != IMB_AUTH_SNOW_V_AEAD) { - imb_set_errno(state, IMB_ERR_HASH_ALGO); - return 1; - } - break; - default: - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - - switch (job->hash_alg) { - case IMB_AUTH_HMAC_SHA_1: - case IMB_AUTH_MD5: - case IMB_AUTH_HMAC_SHA_224: - case IMB_AUTH_HMAC_SHA_256: - case IMB_AUTH_HMAC_SHA_384: - case IMB_AUTH_HMAC_SHA_512: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg] && - job->auth_tag_output_len_in_bytes != - auth_tag_len_fips[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->msg_len_to_hash_in_bytes == 0 || - job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->u.HMAC._hashed_auth_key_xor_ipad == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_HMAC_IPAD); - return 1; - } - if (job->u.HMAC._hashed_auth_key_xor_opad == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_HMAC_OPAD); - return 1; - } - break; - case IMB_AUTH_AES_XCBC: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg] && - job->auth_tag_output_len_in_bytes != - auth_tag_len_fips[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - if (job->u.XCBC._k1_expanded == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_XCBC_K1_EXP); - return 1; - } - if (job->u.XCBC._k2 == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_XCBC_K2); - return 1; - } - if (job->u.XCBC._k3 == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_XCBC_K3); - return 1; - } - break; - case IMB_AUTH_NULL: - break; - case IMB_AUTH_CRC32_ETHERNET_FCS: - case IMB_AUTH_CRC32_SCTP: - case IMB_AUTH_CRC32_WIMAX_OFDMA_DATA: - case IMB_AUTH_CRC24_LTE_A: - case IMB_AUTH_CRC24_LTE_B: - case IMB_AUTH_CRC16_X25: - case IMB_AUTH_CRC16_FP_DATA: - case IMB_AUTH_CRC11_FP_HEADER: - case IMB_AUTH_CRC10_IUUP_DATA: - case IMB_AUTH_CRC8_WIMAX_OFDMA_HCS: - case IMB_AUTH_CRC7_FP_HEADER: - case IMB_AUTH_CRC6_IUUP_HEADER: - if (job->src == NULL && job->msg_len_to_hash_in_bytes != 0) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - break; - case IMB_AUTH_AES_GMAC: - if (job->auth_tag_output_len_in_bytes < UINT64_C(1) || - job->auth_tag_output_len_in_bytes > UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if ((job->u.GCM.aad_len_in_bytes > 0) && - (job->u.GCM.aad == NULL)) { - imb_set_errno(state, 
IMB_ERR_JOB_NULL_AAD); - return 1; - } - if (job->cipher_mode != IMB_CIPHER_GCM) { - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - /* - * msg_len_to_hash_in_bytes not checked against zero. - * It is not used for AES-GCM & GMAC - see - * SUBMIT_JOB_AES_GCM_ENC and SUBMIT_JOB_AES_GCM_DEC functions. - */ - break; - case IMB_AUTH_GCM_SGL: - if (job->cipher_mode != IMB_CIPHER_GCM_SGL) { - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - if (job->u.GCM.ctx == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SGL_CTX); - return 1; - } - if (job->sgl_state == IMB_SGL_COMPLETE) { - if (job->auth_tag_output_len_in_bytes < UINT64_C(1) || - job->auth_tag_output_len_in_bytes > UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - } - if (job->sgl_state == IMB_SGL_INIT) { - if ((job->u.GCM.aad_len_in_bytes > 0) && - (job->u.GCM.aad == NULL)) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); - return 1; - } - } - break; - case IMB_AUTH_AES_GMAC_128: - case IMB_AUTH_AES_GMAC_192: - case IMB_AUTH_AES_GMAC_256: - if (job->auth_tag_output_len_in_bytes < UINT64_C(1) || - job->auth_tag_output_len_in_bytes > UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - /* This GMAC mode is to be used as stand-alone, - * not combined with GCM */ - if (job->cipher_mode == IMB_CIPHER_GCM) { - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - if (job->u.GMAC._key == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->u.GMAC._iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->u.GMAC.iv_len_in_bytes == 0) { - imb_set_errno(state, IMB_ERR_JOB_IV_LEN); - return 1; - } - if (job->msg_len_to_hash_in_bytes != 0 && job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - break; - case IMB_AUTH_CUSTOM: - if (job->hash_func == NULL) { - imb_set_errno(state, EFAULT); - return 1; - } - break; - case IMB_AUTH_AES_CCM: - if (job->msg_len_to_hash_in_bytes != 0 && job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->u.CCM.aad_len_in_bytes > 46) { - /* 3 x AES_BLOCK - 2 bytes for AAD len */ - imb_set_errno(state, IMB_ERR_JOB_AAD_LEN); - return 1; - } - if ((job->u.CCM.aad_len_in_bytes > 0) && - (job->u.CCM.aad == NULL)) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); - return 1; - } - /* M can be any even number from 4 to 16 */ - if (job->auth_tag_output_len_in_bytes < UINT64_C(4) || - job->auth_tag_output_len_in_bytes > UINT64_C(16) || - ((job->auth_tag_output_len_in_bytes & 1) != 0)) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->cipher_mode != IMB_CIPHER_CCM) { - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - /* - * AES-CCM allows for only one message for - * cipher and authentication. - * AAD can be used to extend authentication over - * clear text fields. 
- */ - if (job->msg_len_to_cipher_in_bytes != - job->msg_len_to_hash_in_bytes) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->cipher_start_src_offset_in_bytes != - job->hash_start_src_offset_in_bytes) { - imb_set_errno(state, IMB_ERR_JOB_SRC_OFFSET); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - break; - case IMB_AUTH_AES_CMAC: - case IMB_AUTH_AES_CMAC_BITLEN: - case IMB_AUTH_AES_CMAC_256: - /* - * WARNING: When using IMB_AUTH_AES_CMAC_BITLEN, length of - * message is passed in bits, using job->msg_len_to_hash_in_bits - * (unlike "normal" IMB_AUTH_AES_CMAC, where is passed in bytes, - * using job->msg_len_to_hash_in_bytes). - */ - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if ((job->u.CMAC._key_expanded == NULL) || - (job->u.CMAC._skey1 == NULL) || - (job->u.CMAC._skey2 == NULL)) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - /* T is 128 bits but 96 bits is also allowed due to - * IPsec use case (RFC 4494) and 32 bits for CMAC 3GPP. - */ - if (job->auth_tag_output_len_in_bytes < UINT64_C(4) || - job->auth_tag_output_len_in_bytes > UINT64_C(16)) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - break; - case IMB_AUTH_SHA_1: - case IMB_AUTH_SHA_224: - case IMB_AUTH_SHA_256: - case IMB_AUTH_SHA_384: - case IMB_AUTH_SHA_512: - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - break; - case IMB_AUTH_PON_CRC_BIP: - /* - * Authentication tag in PON is BIP 32-bit value only - * CRC is done together with cipher, - * its initial value is read from the source buffer and - * updated value put into the destination buffer. 
- * - msg_len_to_hash_in_bytes is aligned to 4 bytes - */ - if (((job->msg_len_to_hash_in_bytes & UINT64_C(3)) != 0) || - (job->msg_len_to_hash_in_bytes < UINT64_C(8)) || - (job->msg_len_to_hash_in_bytes > max_pon_len)) { - /* - * Length aligned to 4 bytes (and at least 8 bytes, - * including 8-byte XGEM header and no more - * than max length) - */ - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - /* 64-bits: - * - BIP 32-bits - * - CRC 32-bits - */ - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->cipher_mode != IMB_CIPHER_PON_AES_CNTR) { - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - break; - case IMB_AUTH_ZUC_EIA3_BITLEN: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if ((job->msg_len_to_hash_in_bits < ZUC_MIN_BITLEN) || - (job->msg_len_to_hash_in_bits > ZUC_MAX_BITLEN)) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - if (job->u.ZUC_EIA3._key == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->u.ZUC_EIA3._iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - break; - case IMB_AUTH_ZUC256_EIA3_BITLEN: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if ((job->msg_len_to_hash_in_bits < ZUC_MIN_BITLEN) || - (job->msg_len_to_hash_in_bits > ZUC_MAX_BITLEN)) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - if (job->u.ZUC_EIA3._key == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->u.ZUC_EIA3._iv == NULL) { - /* If 25-byte IV is NULL, check 23-byte IV */ - if (job->u.ZUC_EIA3._iv23 == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - } - if ((job->auth_tag_output_len_in_bytes != 4) && - (job->auth_tag_output_len_in_bytes != 8) && - (job->auth_tag_output_len_in_bytes != 16)) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - break; - case IMB_AUTH_DOCSIS_CRC32: - /** - * Use only in combination with DOCSIS_SEC_BPI. 
- * Assumptions about Ethernet PDU carried over DOCSIS: - * - cipher_start_src_offset_in_bytes <= - * (hash_start_src_offset_in_bytes + 12) - * - msg_len_to_cipher_in_bytes <= - * (msg_len_to_hash_in_bytes - 12 + 4) - * - @note: in-place operation allowed only - * - authentication tag size is 4 bytes - * - @note: in encrypt direction, computed CRC value is put into - * the source buffer - * - encrypt chain order: hash, cipher - * - decrypt chain order: cipher, hash - */ - if (job->cipher_mode != IMB_CIPHER_DOCSIS_SEC_BPI) { - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - if (job->msg_len_to_cipher_in_bytes && - job->msg_len_to_hash_in_bytes) { - const uint64_t ciph_adjust = - IMB_DOCSIS_CRC32_MIN_ETH_PDU_SIZE - - 2 - /* ETH TYPE */ - IMB_DOCSIS_CRC32_TAG_SIZE; - - if ((job->msg_len_to_cipher_in_bytes + ciph_adjust) > - job->msg_len_to_hash_in_bytes) { - imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); - return 1; - } - if (job->cipher_start_src_offset_in_bytes < - (job->hash_start_src_offset_in_bytes + 12)) { - imb_set_errno(state, IMB_ERR_JOB_SRC_OFFSET); - return 1; - } - } - if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - /* Ethernet FCS CRC is 32-bits */ - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if ((job->cipher_direction == IMB_DIR_ENCRYPT && - job->chain_order != IMB_ORDER_HASH_CIPHER) || - (job->cipher_direction == IMB_DIR_DECRYPT && - job->chain_order != IMB_ORDER_CIPHER_HASH)) { - imb_set_errno(state, IMB_ERR_JOB_CHAIN_ORDER); - return 1; - } - break; - case IMB_AUTH_SNOW3G_UIA2_BITLEN: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if ((job->msg_len_to_hash_in_bits == 0) || - (job->msg_len_to_hash_in_bits > SNOW3G_MAX_BITLEN)) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - if (job->u.SNOW3G_UIA2._key == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->u.SNOW3G_UIA2._iv == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_IV); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - break; - case IMB_AUTH_KASUMI_UIA1: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - /* - * KASUMI-UIA1 needs to be at least 8 bytes - * (IV + direction bit + '1' + 0s to align to byte boundary) - */ - if ((job->msg_len_to_hash_in_bytes < - (IMB_KASUMI_BLOCK_SIZE + 1)) || - (job->msg_len_to_hash_in_bytes > - (KASUMI_MAX_LEN / BYTESIZE))) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); - return 1; - } - if (job->u.KASUMI_UIA1._key == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - break; - case IMB_AUTH_POLY1305: - if (job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->u.POLY1305._key == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH_KEY); - return 1; - } - if 
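To make the DOCSIS CRC32 offset/length constraints above concrete, here is a small numeric check; the helper and the example frame layout are hypothetical and only restate the two inequalities from the comment (cipher starts at most 12 bytes after the hash start, cipher length at most hash length - 12 + 4).

#include <stdint.h>
#include <stdio.h>

/* Hypothetical helper (not library code): the two relationships described
 * in the IMB_AUTH_DOCSIS_CRC32 comment above. */
static int docsis_crc32_layout_ok(uint64_t hash_off, uint64_t hash_len,
                                  uint64_t ciph_off, uint64_t ciph_len)
{
        return (ciph_off <= hash_off + 12) &&
               (ciph_len <= hash_len - 12 + 4);
}

int main(void)
{
        /* example Ethernet PDU: hash from DA (offset 0) over 60 bytes,
         * cipher starting after DA + SA (offset 12) over 52 bytes */
        printf("%s\n", docsis_crc32_layout_ok(0, 60, 12, 52) ? "ok" : "bad");
        return 0;
}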
(job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - break; - case IMB_AUTH_CHACHA20_POLY1305: - if (job->msg_len_to_hash_in_bytes != 0 && job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->msg_len_to_hash_in_bytes != 0 && job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->cipher_mode != IMB_CIPHER_CHACHA20_POLY1305) { - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - if (job->u.CHACHA20_POLY1305.aad == NULL && - job->u.CHACHA20_POLY1305.aad_len_in_bytes > 0) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - break; - case IMB_AUTH_CHACHA20_POLY1305_SGL: - if (job->msg_len_to_hash_in_bytes != 0 && job->src == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); - return 1; - } - if (job->msg_len_to_hash_in_bytes != 0 && job->dst == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_DST); - return 1; - } - if (job->cipher_mode != IMB_CIPHER_CHACHA20_POLY1305_SGL) { - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - if (job->u.CHACHA20_POLY1305.aad == NULL && - job->u.CHACHA20_POLY1305.aad_len_in_bytes > 0) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->u.CHACHA20_POLY1305.ctx == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_SGL_CTX); - return 1; - } - break; - case IMB_AUTH_SNOW_V_AEAD: - if ((job->u.SNOW_V_AEAD.aad_len_in_bytes > 0) && - (job->u.SNOW_V_AEAD.aad == NULL)) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); - return 1; - } - if (job->auth_tag_output == NULL) { - imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); - return 1; - } - if (job->auth_tag_output_len_in_bytes != - auth_tag_len_ipsec[job->hash_alg]) { - imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); - return 1; - } - if (job->cipher_mode != IMB_CIPHER_SNOW_V_AEAD) { - imb_set_errno(state, IMB_ERR_CIPH_MODE); - return 1; - } - break; - default: - imb_set_errno(state, IMB_ERR_HASH_ALGO); - return 1; - } - return 0; -} - -__forceinline -IMB_JOB *SUBMIT_JOB_AES(IMB_MGR *state, IMB_JOB *job) -{ - if (job->cipher_direction == IMB_DIR_ENCRYPT) - job = SUBMIT_JOB_AES_ENC(state, job); - else - job = SUBMIT_JOB_AES_DEC(state, job); - - return job; -} - -__forceinline -IMB_JOB *FLUSH_JOB_AES(IMB_MGR *state, IMB_JOB *job) -{ - if (job->cipher_direction == IMB_DIR_ENCRYPT) - job = FLUSH_JOB_AES_ENC(state, job); - else - job = FLUSH_JOB_AES_DEC(state, job); - - return job; -} - -/* submit a half-completed job, based on the status */ -__forceinline -IMB_JOB *RESUBMIT_JOB(IMB_MGR *state, IMB_JOB *job) -{ - while (job != NULL && job->status < IMB_STATUS_COMPLETED) { - if (job->status == IMB_STATUS_COMPLETED_AUTH) - job = SUBMIT_JOB_AES(state, job); - else /* assumed job->status = IMB_STATUS_COMPLETED_CIPHER */ - job = SUBMIT_JOB_HASH(state, job); - } - - return job; -} - -__forceinline -IMB_JOB *submit_new_job(IMB_MGR 
*state, IMB_JOB *job) -{ - if (job->chain_order == IMB_ORDER_CIPHER_HASH) - job = SUBMIT_JOB_AES(state, job); - else - job = SUBMIT_JOB_HASH(state, job); - - job = RESUBMIT_JOB(state, job); - return job; -} - -__forceinline -void complete_job(IMB_MGR *state, IMB_JOB *job) -{ - if (job->chain_order == IMB_ORDER_CIPHER_HASH) { - /* while() loop optimized for cipher_hash order */ - while (job->status < IMB_STATUS_COMPLETED) { - IMB_JOB *tmp = FLUSH_JOB_AES(state, job); - - if (tmp == NULL) - tmp = FLUSH_JOB_HASH(state, job); - - (void) RESUBMIT_JOB(state, tmp); - } - } else { - /* while() loop optimized for hash_cipher order */ - while (job->status < IMB_STATUS_COMPLETED) { - IMB_JOB *tmp = FLUSH_JOB_HASH(state, job); - - if (tmp == NULL) - tmp = FLUSH_JOB_AES(state, job); - - (void) RESUBMIT_JOB(state, tmp); - } - } -} - -__forceinline -IMB_JOB * -submit_job_and_check(IMB_MGR *state, const int run_check) -{ - IMB_JOB *job = NULL; -#ifndef LINUX - DECLARE_ALIGNED(imb_uint128_t xmm_save[10], 16); - - SAVE_XMMS(xmm_save); -#endif - - job = JOBS(state, state->next_job); - - if (run_check) { - if (is_job_invalid(state, job)) { - job->status = IMB_STATUS_INVALID_ARGS; - } else { - job->status = IMB_STATUS_BEING_PROCESSED; - job = submit_new_job(state, job); - } - } else { - job->status = IMB_STATUS_BEING_PROCESSED; - job = submit_new_job(state, job); - } - - if (state->earliest_job < 0) { - /* state was previously empty */ - if (job == NULL) - state->earliest_job = state->next_job; - ADV_JOBS(&state->next_job); - goto exit; - } - - ADV_JOBS(&state->next_job); - - if (state->earliest_job == state->next_job) { - /* Full */ - job = JOBS(state, state->earliest_job); - complete_job(state, job); - ADV_JOBS(&state->earliest_job); - goto exit; - } - - /* not full */ - job = JOBS(state, state->earliest_job); - if (job->status < IMB_STATUS_COMPLETED) { - job = NULL; - goto exit; - } - - ADV_JOBS(&state->earliest_job); -exit: -#ifdef SAFE_DATA - CLEAR_SCRATCH_GPS(); - CLEAR_SCRATCH_SIMD_REGS(); -#endif /* SAFE_DATA */ - -#ifndef LINUX - RESTORE_XMMS(xmm_save); -#endif - return job; -} - -IMB_JOB * -SUBMIT_JOB(IMB_MGR *state) -{ - /* reset error status */ - imb_set_errno(state, 0); - -#ifdef SAFE_PARAM - if (state == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); - return NULL; - } -#endif - - return submit_job_and_check(state, 1); -} - -IMB_JOB * -SUBMIT_JOB_NOCHECK(IMB_MGR *state) -{ - /* reset error status */ - imb_set_errno(state, 0); - -#ifdef SAFE_PARAM - if (state == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); - return NULL; - } -#endif - - return submit_job_and_check(state, 0); -} - -IMB_JOB * -FLUSH_JOB(IMB_MGR *state) -{ - /* reset error status */ - imb_set_errno(state, 0); - -#ifdef SAFE_PARAM - if (state == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); - return NULL; - } -#endif - IMB_JOB *job; -#ifndef LINUX - DECLARE_ALIGNED(imb_uint128_t xmm_save[10], 16); -#endif - if (state->earliest_job < 0) - return NULL; /* empty */ - -#ifndef LINUX - SAVE_XMMS(xmm_save); -#endif - job = JOBS(state, state->earliest_job); - complete_job(state, job); - - ADV_JOBS(&state->earliest_job); - - if (state->earliest_job == state->next_job) - state->earliest_job = -1; /* becomes empty */ - -#ifdef SAFE_DATA - CLEAR_SCRATCH_GPS(); - CLEAR_SCRATCH_SIMD_REGS(); -#endif /* SAFE_DATA */ - -#ifndef LINUX - RESTORE_XMMS(xmm_save); -#endif - return job; -} +/* + * Implements: + * GET_NEXT_BURST + * SUBMIT_BURST + * SUBMIT_BURST_NOCHECK + * FLUSH_BURST + */ +#include "include/mb_mgr_burst_async.h" /* 
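A note on the status handling that drives the RESUBMIT_JOB() and complete_job() loops above: the job status is treated as a pair of completion flags, and the fully-completed value is reached only once both the cipher flag and the authentication flag have been set. A minimal model of that idea is sketched below; the flag names and values are assumptions for illustration, not the IMB_STATUS_* definitions.

/* Assumed flag model, for illustration only. */
enum example_status {
        EX_DONE_CIPHER = 1 << 0,
        EX_DONE_AUTH   = 1 << 1,
        EX_DONE_BOTH   = EX_DONE_CIPHER | EX_DONE_AUTH
};

/* Mirrors the "while (job->status < IMB_STATUS_COMPLETED)" tests above:
 * a job is finished only when both flags are present. */
static inline int example_job_done(const unsigned status)
{
        return status >= EX_DONE_BOTH;
}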
asynchronous burst API */ /* ========================================================================= */ -/* ========================================================================= */ - -uint32_t -QUEUE_SIZE(IMB_MGR *state) -{ - /* reset error status */ - imb_set_errno(state, 0); - -#ifdef SAFE_PARAM - if (state == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); - return 0; - } -#endif - int a, b; - - if (state->earliest_job < 0) - return 0; - a = state->next_job / sizeof(IMB_JOB); - b = state->earliest_job / sizeof(IMB_JOB); - return ((a-b) & (IMB_MAX_JOBS-1)); -} - -IMB_JOB * -GET_COMPLETED_JOB(IMB_MGR *state) -{ - /* reset error status */ - imb_set_errno(state, 0); - -#ifdef SAFE_PARAM - if (state == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); - return NULL; - } -#endif - IMB_JOB *job; - - if (state->earliest_job < 0) - return NULL; - - job = JOBS(state, state->earliest_job); - if (job->status < IMB_STATUS_COMPLETED) - return NULL; - - ADV_JOBS(&state->earliest_job); - - if (state->earliest_job == state->next_job) - state->earliest_job = -1; - - return job; -} - -IMB_JOB * -GET_NEXT_JOB(IMB_MGR *state) -{ - /* reset error status */ - imb_set_errno(state, 0); - -#ifdef SAFE_PARAM - if (state == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); - return NULL; - } -#endif +/* + * Implements: + * SUBMIT_CIPHER_BURST + * SUBMIT_CIPHER_BURST_NOCHECK + * SUBMIT_HASH_BURST + * SUBMIT_HASH_BURST_NOCHECK + */ - return JOBS(state, state->next_job); -} +#include "include/mb_mgr_burst.h" /* synchronous cipher/hash burst API */ #endif /* MB_MGR_CODE_H */ diff --git a/lib/include/mb_mgr_datastruct.asm b/lib/include/mb_mgr_datastruct.asm index f61595cf6af6cd58faf06ee22a3d0c3d4fb012ef..162820468a2731781647a046ff3eb5954a78edbc 100644 --- a/lib/include/mb_mgr_datastruct.asm +++ b/lib/include/mb_mgr_datastruct.asm @@ -239,7 +239,7 @@ FIELD _zucarg_in, 16*8, 64 ; array of 16 pointers to in text FIELD _zucarg_out, 16*8, 64 ; array of 16 pointers to out text FIELD _zucarg_keys, 16*8, 8 ; array of 16 pointers to keys FIELD _zucarg_IV, 16*32, 32 ; array of IVs (up to 25 bytes each) -FIELD _zucarg_digest, 16*4, 64 ; array of 16 digests +FIELD _zucarg_digest, 16*16, 64 ; array of 16 digests FIELD _zucarg_KS, 16*128, 64 ; array of 128-byte keystream of 16 buffers END_FIELDS %assign _ZUC_ARGS_X16_size _FIELD_OFFSET diff --git a/lib/include/mb_mgr_job_api.h b/lib/include/mb_mgr_job_api.h new file mode 100644 index 0000000000000000000000000000000000000000..bc480ce889d93a955efe6c6755619ec7538f9e5d --- /dev/null +++ b/lib/include/mb_mgr_job_api.h @@ -0,0 +1,1273 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#ifndef MB_MGR_JOB_API_H +#define MB_MGR_JOB_API_H + +/* + * This contains the bulk of the mb_mgr code, with #define's to build + * an SSE, AVX, AVX2 or AVX512 version (see mb_mgr_sse.c, mb_mgr_avx.c, etc.) + * + * get_next_job() returns a job object. This must be filled in and returned + * via submit_job() before get_next_job() is called again. + * + * submit_job() and flush_job() returns a job object. This job object ceases + * to be usable at the next call to get_next_job() + */ + +#include + +#include "include/clear_regs_mem.h" +#include "include/des.h" +#include "ipsec-mb.h" +#include "include/error.h" +#include "include/snow3g_submit.h" +#include "include/job_api_gcm.h" +#include "include/job_api_snowv.h" +#include "include/job_api_kasumi.h" +#include "include/mb_mgr_job_check.h" /* is_job_invalid() */ + +#define CRC(func, state, job) *((uint32_t *)job->auth_tag_output) = \ + func(state, job->src + job->hash_start_src_offset_in_bytes, \ + job->msg_len_to_hash_in_bytes) + +/* ========================================================================= */ +/* AES-CBC */ +/* ========================================================================= */ + +__forceinline IMB_JOB *SUBMIT_JOB_AES_CBC_128_DEC(IMB_JOB *job) +{ + AES_CBC_DEC_128(job->src + job->cipher_start_src_offset_in_bytes, + job->iv, + job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & (~15)); + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_AES_CBC_192_DEC(IMB_JOB *job) +{ + AES_CBC_DEC_192(job->src + job->cipher_start_src_offset_in_bytes, + job->iv, + job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes); + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_AES_CBC_256_DEC(IMB_JOB *job) +{ + AES_CBC_DEC_256(job->src + job->cipher_start_src_offset_in_bytes, + job->iv, + job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & (~15)); + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +/* ========================================================================= */ +/* AES-ECB */ +/* ========================================================================= */ + +__forceinline IMB_JOB *SUBMIT_JOB_AES_ECB_128_ENC(IMB_JOB *job) +{ + AES_ECB_ENC_128(job->src + job->cipher_start_src_offset_in_bytes, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & (~15)); + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_AES_ECB_192_ENC(IMB_JOB *job) +{ + AES_ECB_ENC_192(job->src + job->cipher_start_src_offset_in_bytes, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & (~15)); + 
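The get_next_job()/submit_job()/flush_job() contract described in the header comment above corresponds to the public IMB_* macros; a minimal usage sketch follows (field setup and error handling elided, the manager assumed already allocated and initialized; the installed public header name may differ between versions).

#include <intel-ipsec-mb.h>

static void process_one_buffer(IMB_MGR *mgr)
{
        IMB_JOB *job = IMB_GET_NEXT_JOB(mgr);

        /* ... fill in cipher/hash fields of *job here ... */

        job = IMB_SUBMIT_JOB(mgr);      /* may return an older, completed job */
        while (job != NULL) {
                /* consume the completed job; it becomes invalid at the
                 * next IMB_GET_NEXT_JOB() call */
                job = IMB_GET_COMPLETED_JOB(mgr);
        }

        /* at the end of a batch, drain jobs still held by the OOO managers */
        while ((job = IMB_FLUSH_JOB(mgr)) != NULL)
                ;       /* consume flushed jobs */
}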
job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_AES_ECB_256_ENC(IMB_JOB *job) +{ + AES_ECB_ENC_256(job->src + job->cipher_start_src_offset_in_bytes, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & (~15)); + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_AES_ECB_128_DEC(IMB_JOB *job) +{ + AES_ECB_DEC_128(job->src + job->cipher_start_src_offset_in_bytes, + job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & (~15)); + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_AES_ECB_192_DEC(IMB_JOB *job) +{ + AES_ECB_DEC_192(job->src + job->cipher_start_src_offset_in_bytes, + job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & (~15)); + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_AES_ECB_256_DEC(IMB_JOB *job) +{ + AES_ECB_DEC_256(job->src + job->cipher_start_src_offset_in_bytes, + job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & (~15)); + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +/* ========================================================================= */ +/* AES-CBCS */ +/* ========================================================================= */ + +__forceinline IMB_JOB * SUBMIT_JOB_AES128_CBCS_1_9_DEC(IMB_JOB *job) +{ + AES_CBCS_1_9_DEC_128(job->src + job->cipher_start_src_offset_in_bytes, + job->iv, + job->dec_keys, + job->dst, + job->msg_len_to_cipher_in_bytes & (~15), + job->cipher_fields.CBCS.next_iv); + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +/* ========================================================================= */ +/* DOCSIS - it has to be below AES DEC */ +/* ========================================================================= */ + +#include "include/job_api_docsis.h" + +/* ========================================================================= */ +/* AES-GCM */ +/* ========================================================================= */ +__forceinline IMB_JOB *SUBMIT_JOB_AES_GCM_DEC(IMB_MGR *state, IMB_JOB *job) +{ + DECLARE_ALIGNED(struct gcm_context_data ctx, 16); + (void) state; + + if (16 == job->key_len_in_bytes) { + AES_GCM_DEC_IV_128(job->dec_keys, + &ctx, job->dst, + job->src + + job->cipher_start_src_offset_in_bytes, + job->msg_len_to_cipher_in_bytes, + job->iv, job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } else if (24 == job->key_len_in_bytes) { + AES_GCM_DEC_IV_192(job->dec_keys, + &ctx, job->dst, + job->src + + job->cipher_start_src_offset_in_bytes, + job->msg_len_to_cipher_in_bytes, + job->iv, job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } else { /* assume 32 bytes */ + AES_GCM_DEC_IV_256(job->dec_keys, + &ctx, job->dst, + job->src + + job->cipher_start_src_offset_in_bytes, + job->msg_len_to_cipher_in_bytes, + job->iv, job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } + + job->status = IMB_STATUS_COMPLETED; + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_AES_GCM_ENC(IMB_MGR *state, IMB_JOB *job) +{ + DECLARE_ALIGNED(struct gcm_context_data ctx, 16); + (void) state; + + if (16 == job->key_len_in_bytes) { + AES_GCM_ENC_IV_128(job->enc_keys, + &ctx, job->dst, + job->src + + 
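Regarding the & (~15) masking used by the CBC/ECB submit helpers above: it simply rounds the byte count down to a whole number of 16-byte AES blocks. A quick, self-contained check (the value 100 is an arbitrary example, not taken from the library):

#include <assert.h>
#include <stdint.h>

int main(void)
{
        const uint64_t len = 100;

        /* 100 bytes -> 6 whole AES blocks -> 96 bytes processed */
        assert((len & ~(uint64_t) 15) == 96);
        return 0;
}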
job->cipher_start_src_offset_in_bytes, + job->msg_len_to_cipher_in_bytes, + job->iv, job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } else if (24 == job->key_len_in_bytes) { + AES_GCM_ENC_IV_192(job->enc_keys, + &ctx, job->dst, + job->src + + job->cipher_start_src_offset_in_bytes, + job->msg_len_to_cipher_in_bytes, + job->iv, job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } else { /* assume 32 bytes */ + AES_GCM_ENC_IV_256(job->enc_keys, + &ctx, job->dst, + job->src + + job->cipher_start_src_offset_in_bytes, + job->msg_len_to_cipher_in_bytes, + job->iv, job->iv_len_in_bytes, + job->u.GCM.aad, + job->u.GCM.aad_len_in_bytes, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes); + } + + job->status = IMB_STATUS_COMPLETED; + return job; +} +/* ========================================================================= */ +/* AES-CTR */ +/* ========================================================================= */ +__forceinline IMB_JOB *SUBMIT_JOB_AES_CTR(IMB_JOB *job) +{ + if (IMB_KEY_128_BYTES == job->key_len_in_bytes) { +#ifdef SUBMIT_JOB_AES_CTR_128 + SUBMIT_JOB_AES_CTR_128(job); +#else + AES_CTR_128(job->src + job->cipher_start_src_offset_in_bytes, + job->iv, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bytes, + job->iv_len_in_bytes); +#endif + } else if (IMB_KEY_192_BYTES == job->key_len_in_bytes) { +#ifdef SUBMIT_JOB_AES_CTR_192 + SUBMIT_JOB_AES_CTR_192(job); +#else + AES_CTR_192(job->src + job->cipher_start_src_offset_in_bytes, + job->iv, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bytes, + job->iv_len_in_bytes); +#endif + } else /* assume 256-bit key */ { +#ifdef SUBMIT_JOB_AES_CTR_256 + SUBMIT_JOB_AES_CTR_256(job); +#else + AES_CTR_256(job->src + job->cipher_start_src_offset_in_bytes, + job->iv, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bytes, + job->iv_len_in_bytes); +#endif + } + + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_AES_CTR_BIT(IMB_JOB *job) +{ + if (IMB_KEY_128_BYTES == job->key_len_in_bytes) { +#ifdef SUBMIT_JOB_AES_CTR_128_BIT + SUBMIT_JOB_AES_CTR_128_BIT(job); +#else + AES_CTR_128_BIT(job->src + + job->cipher_start_src_offset_in_bytes, + job->iv, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bits, + job->iv_len_in_bytes); +#endif + } else if (IMB_KEY_192_BYTES == job->key_len_in_bytes) { +#ifdef SUBMIT_JOB_AES_CTR_192_BIT + SUBMIT_JOB_AES_CTR_192_BIT(job); +#else + AES_CTR_192_BIT(job->src + + job->cipher_start_src_offset_in_bytes, + job->iv, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bits, + job->iv_len_in_bytes); +#endif + } else /* assume 256-bit key */ { +#ifdef SUBMIT_JOB_AES_CTR_256_BIT + SUBMIT_JOB_AES_CTR_256_BIT(job); +#else + AES_CTR_256_BIT(job->src + + job->cipher_start_src_offset_in_bytes, + job->iv, + job->enc_keys, + job->dst, + job->msg_len_to_cipher_in_bits, + job->iv_len_in_bytes); +#endif + } + + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; +} + +/* ========================================================================= */ +/* Custom hash / cipher */ +/* ========================================================================= */ + +__forceinline IMB_JOB *JOB_CUSTOM_CIPHER(IMB_JOB *job) +{ + if (!(job->status & IMB_STATUS_COMPLETED_CIPHER)) { + if (job->cipher_func(job)) + job->status = IMB_STATUS_INTERNAL_ERROR; + else + job->status |= 
IMB_STATUS_COMPLETED_CIPHER; + } + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_CUSTOM_CIPHER(IMB_JOB *job) +{ + return JOB_CUSTOM_CIPHER(job); +} + +__forceinline IMB_JOB *FLUSH_JOB_CUSTOM_CIPHER(IMB_JOB *job) +{ + return JOB_CUSTOM_CIPHER(job); +} + +__forceinline IMB_JOB *JOB_CUSTOM_HASH(IMB_JOB *job) +{ + if (!(job->status & IMB_STATUS_COMPLETED_AUTH)) { + if (job->hash_func(job)) + job->status = IMB_STATUS_INTERNAL_ERROR; + else + job->status |= IMB_STATUS_COMPLETED_AUTH; + } + return job; +} + +__forceinline IMB_JOB *SUBMIT_JOB_CUSTOM_HASH(IMB_JOB *job) +{ + return JOB_CUSTOM_HASH(job); +} + +__forceinline IMB_JOB *FLUSH_JOB_CUSTOM_HASH(IMB_JOB *job) +{ + return JOB_CUSTOM_HASH(job); +} + +/* ========================================================================= */ +/* Cipher submit & flush functions */ +/* ========================================================================= */ +__forceinline IMB_JOB *SUBMIT_JOB_CIPHER_ENC(IMB_MGR *state, IMB_JOB *job) +{ + if (IMB_CIPHER_GCM == job->cipher_mode) { + return SUBMIT_JOB_AES_GCM_ENC(state, job); + } else if (IMB_CIPHER_GCM_SGL == job->cipher_mode) { + return submit_gcm_sgl_enc(state, job); + } else if (IMB_CIPHER_CBC == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + MB_MGR_AES_OOO *aes128_ooo = state->aes128_ooo; + + return SUBMIT_JOB_AES_CBC_128_ENC(aes128_ooo, job); + } else if (24 == job->key_len_in_bytes) { + MB_MGR_AES_OOO *aes192_ooo = state->aes192_ooo; + + return SUBMIT_JOB_AES_CBC_192_ENC(aes192_ooo, job); + } else { /* assume 32 */ + MB_MGR_AES_OOO *aes256_ooo = state->aes256_ooo; + + return SUBMIT_JOB_AES_CBC_256_ENC(aes256_ooo, job); + } + } else if (IMB_CIPHER_CNTR == job->cipher_mode) { + return SUBMIT_JOB_AES_CTR(job); + } else if (IMB_CIPHER_CNTR_BITLEN == job->cipher_mode) { + return SUBMIT_JOB_AES_CTR_BIT(job); + } else if (IMB_CIPHER_ECB == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + return SUBMIT_JOB_AES_ECB_128_ENC(job); + } else if (24 == job->key_len_in_bytes) { + return SUBMIT_JOB_AES_ECB_192_ENC(job); + } else { /* assume 32 */ + return SUBMIT_JOB_AES_ECB_256_ENC(job); + } + } else if (IMB_CIPHER_DOCSIS_SEC_BPI == job->cipher_mode) { + return submit_docsis_enc_job(state, job); + } else if (IMB_CIPHER_PON_AES_CNTR == job->cipher_mode) { + if (job->msg_len_to_cipher_in_bytes == 0) + return SUBMIT_JOB_PON_ENC_NO_CTR(job); + else + return SUBMIT_JOB_PON_ENC(job); + } else if (IMB_CIPHER_CUSTOM == job->cipher_mode) { + return SUBMIT_JOB_CUSTOM_CIPHER(job); + } else if (IMB_CIPHER_DES == job->cipher_mode) { +#ifdef SUBMIT_JOB_DES_CBC_ENC + MB_MGR_DES_OOO *des_enc_ooo = state->des_enc_ooo; + + return SUBMIT_JOB_DES_CBC_ENC(des_enc_ooo, job); +#else + return DES_CBC_ENC(job); +#endif /* SUBMIT_JOB_DES_CBC_ENC */ + } else if (IMB_CIPHER_CHACHA20 == job->cipher_mode) { + return SUBMIT_JOB_CHACHA20_ENC_DEC(job); + } else if (IMB_CIPHER_CHACHA20_POLY1305 == job->cipher_mode) { + return SUBMIT_JOB_CHACHA20_POLY1305(state, job); + } else if (IMB_CIPHER_CHACHA20_POLY1305_SGL == job->cipher_mode) { + return SUBMIT_JOB_CHACHA20_POLY1305_SGL(state, job); + } else if (IMB_CIPHER_DOCSIS_DES == job->cipher_mode) { +#ifdef SUBMIT_JOB_DOCSIS_DES_ENC + MB_MGR_DES_OOO *docsis_des_enc_ooo = state->docsis_des_enc_ooo; + + return SUBMIT_JOB_DOCSIS_DES_ENC(docsis_des_enc_ooo, + job); +#else + return DOCSIS_DES_ENC(job); +#endif /* SUBMIT_JOB_DOCSIS_DES_ENC */ + } else if (IMB_CIPHER_DES3 == job->cipher_mode) { +#ifdef SUBMIT_JOB_3DES_CBC_ENC + MB_MGR_DES_OOO *des3_enc_ooo = state->des3_enc_ooo; + + 
return SUBMIT_JOB_3DES_CBC_ENC(des3_enc_ooo, job); +#else + return DES3_CBC_ENC(job); +#endif + } else if (IMB_CIPHER_CCM == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + return AES_CNTR_CCM_128(job); + } else { /* assume 32 */ + return AES_CNTR_CCM_256(job); + } + } else if (IMB_CIPHER_ZUC_EEA3 == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + MB_MGR_ZUC_OOO *zuc_eea3_ooo = state->zuc_eea3_ooo; + + return SUBMIT_JOB_ZUC_EEA3(zuc_eea3_ooo, job); + } else { /* assume 32 */ + MB_MGR_ZUC_OOO *zuc256_eea3_ooo = + state->zuc256_eea3_ooo; + + return SUBMIT_JOB_ZUC256_EEA3(zuc256_eea3_ooo, job); + } + } else if (IMB_CIPHER_SNOW3G_UEA2_BITLEN == job->cipher_mode) { +#ifdef SUBMIT_JOB_SNOW3G_UEA2 + return SUBMIT_JOB_SNOW3G_UEA2(state, job); +#else + return def_submit_snow3g_uea2_job(state, job); +#endif + } else if (IMB_CIPHER_KASUMI_UEA1_BITLEN == job->cipher_mode) { + return submit_kasumi_uea1_job(state, job); + } else if (IMB_CIPHER_CBCS_1_9 == job->cipher_mode) { + MB_MGR_AES_OOO *aes128_cbcs_ooo = state->aes128_cbcs_ooo; + + return SUBMIT_JOB_AES128_CBCS_1_9_ENC(aes128_cbcs_ooo, job); + } else if (IMB_CIPHER_SNOW_V == job->cipher_mode) { + return SUBMIT_JOB_SNOW_V(job); + } else if (IMB_CIPHER_SNOW_V_AEAD == job->cipher_mode) { + return submit_snow_v_aead_job(state, job); + } else { /* assume IMB_CIPHER_NULL */ + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; + } +} + +__forceinline IMB_JOB *FLUSH_JOB_CIPHER_ENC(IMB_MGR *state, IMB_JOB *job) +{ + if (IMB_CIPHER_CBC == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + MB_MGR_AES_OOO *aes128_ooo = state->aes128_ooo; + + return FLUSH_JOB_AES_CBC_128_ENC(aes128_ooo); + } else if (24 == job->key_len_in_bytes) { + MB_MGR_AES_OOO *aes192_ooo = state->aes192_ooo; + + return FLUSH_JOB_AES_CBC_192_ENC(aes192_ooo); + } else { /* assume 32 */ + MB_MGR_AES_OOO *aes256_ooo = state->aes256_ooo; + + return FLUSH_JOB_AES_CBC_256_ENC(aes256_ooo); + } + } else if (IMB_CIPHER_DOCSIS_SEC_BPI == job->cipher_mode) { + return flush_docsis_enc_job(state, job); +#ifdef FLUSH_JOB_DES_CBC_ENC + } else if (IMB_CIPHER_DES == job->cipher_mode) { + MB_MGR_DES_OOO *des_enc_ooo = state->des_enc_ooo; + + return FLUSH_JOB_DES_CBC_ENC(des_enc_ooo); +#endif /* FLUSH_JOB_DES_CBC_ENC */ +#ifdef FLUSH_JOB_3DES_CBC_ENC + } else if (IMB_CIPHER_DES3 == job->cipher_mode) { + MB_MGR_DES_OOO *des3_enc_ooo = state->des3_enc_ooo; + + return FLUSH_JOB_3DES_CBC_ENC(des3_enc_ooo); +#endif /* FLUSH_JOB_3DES_CBC_ENC */ +#ifdef FLUSH_JOB_DOCSIS_DES_ENC + } else if (IMB_CIPHER_DOCSIS_DES == job->cipher_mode) { + MB_MGR_DES_OOO *docsis_des_enc_ooo = state->docsis_des_enc_ooo; + + return FLUSH_JOB_DOCSIS_DES_ENC(docsis_des_enc_ooo); +#endif /* FLUSH_JOB_DOCSIS_DES_ENC */ + } else if (IMB_CIPHER_CUSTOM == job->cipher_mode) { + return FLUSH_JOB_CUSTOM_CIPHER(job); + } else if (IMB_CIPHER_ZUC_EEA3 == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + MB_MGR_ZUC_OOO *zuc_eea3_ooo = state->zuc_eea3_ooo; + + return FLUSH_JOB_ZUC_EEA3(zuc_eea3_ooo); + } else { /* assume 32 */ + MB_MGR_ZUC_OOO *zuc256_eea3_ooo = + state->zuc256_eea3_ooo; + + return FLUSH_JOB_ZUC256_EEA3(zuc256_eea3_ooo); + } + } else if (IMB_CIPHER_CBCS_1_9 == job->cipher_mode) { + MB_MGR_AES_OOO *aes128_cbcs_ooo = state->aes128_cbcs_ooo; + + return FLUSH_JOB_AES128_CBCS_1_9_ENC(aes128_cbcs_ooo); +#ifdef FLUSH_JOB_SNOW3G_UEA2 + } else if (IMB_CIPHER_SNOW3G_UEA2_BITLEN == job->cipher_mode) { + return FLUSH_JOB_SNOW3G_UEA2(state); +#endif + /** + * assume IMB_CIPHER_CNTR/CNTR_BITLEN, 
IMB_CIPHER_ECB, + * IMB_CIPHER_CCM, IMB_CIPHER_NULL or IMB_CIPHER_GCM + */ + } else { + return NULL; + } +} + +__forceinline IMB_JOB *SUBMIT_JOB_CIPHER_DEC(IMB_MGR *state, IMB_JOB *job) +{ + if (IMB_CIPHER_GCM == job->cipher_mode) { + return SUBMIT_JOB_AES_GCM_DEC(state, job); + } else if (IMB_CIPHER_GCM_SGL == job->cipher_mode) { + return submit_gcm_sgl_dec(state, job); + } else if (IMB_CIPHER_CBC == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + return SUBMIT_JOB_AES_CBC_128_DEC(job); + } else if (24 == job->key_len_in_bytes) { + return SUBMIT_JOB_AES_CBC_192_DEC(job); + } else { /* assume 32 */ + return SUBMIT_JOB_AES_CBC_256_DEC(job); + } + } else if (IMB_CIPHER_CNTR == job->cipher_mode) { + return SUBMIT_JOB_AES_CTR(job); + } else if (IMB_CIPHER_CNTR_BITLEN == job->cipher_mode) { + return SUBMIT_JOB_AES_CTR_BIT(job); + } else if (IMB_CIPHER_ECB == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + return SUBMIT_JOB_AES_ECB_128_DEC(job); + } else if (24 == job->key_len_in_bytes) { + return SUBMIT_JOB_AES_ECB_192_DEC(job); + } else { /* assume 32 */ + return SUBMIT_JOB_AES_ECB_256_DEC(job); + } + } else if (IMB_CIPHER_DOCSIS_SEC_BPI == job->cipher_mode) { + return submit_docsis_dec_job(state, job); + } else if (IMB_CIPHER_PON_AES_CNTR == job->cipher_mode) { + if (job->msg_len_to_cipher_in_bytes == 0) + return SUBMIT_JOB_PON_DEC_NO_CTR(job); + else + return SUBMIT_JOB_PON_DEC(job); + } else if (IMB_CIPHER_DES == job->cipher_mode) { +#ifdef SUBMIT_JOB_DES_CBC_DEC + MB_MGR_DES_OOO *des_dec_ooo = state->des_dec_ooo; + + return SUBMIT_JOB_DES_CBC_DEC(des_dec_ooo, job); +#else + (void) state; + return DES_CBC_DEC(job); +#endif /* SUBMIT_JOB_DES_CBC_DEC */ + } else if (IMB_CIPHER_CHACHA20 == job->cipher_mode) { + return SUBMIT_JOB_CHACHA20_ENC_DEC(job); + } else if (IMB_CIPHER_CHACHA20_POLY1305 == job->cipher_mode) { + return SUBMIT_JOB_CHACHA20_POLY1305(state, job); + } else if (IMB_CIPHER_CHACHA20_POLY1305_SGL == job->cipher_mode) { + return SUBMIT_JOB_CHACHA20_POLY1305_SGL(state, job); + } else if (IMB_CIPHER_DOCSIS_DES == job->cipher_mode) { +#ifdef SUBMIT_JOB_DOCSIS_DES_DEC + MB_MGR_DES_OOO *docsis_des_dec_ooo = state->docsis_des_dec_ooo; + + return SUBMIT_JOB_DOCSIS_DES_DEC(docsis_des_dec_ooo, + job); +#else + return DOCSIS_DES_DEC(job); +#endif /* SUBMIT_JOB_DOCSIS_DES_DEC */ + } else if (IMB_CIPHER_DES3 == job->cipher_mode) { +#ifdef SUBMIT_JOB_3DES_CBC_DEC + MB_MGR_DES_OOO *des3_dec_ooo = state->des3_dec_ooo; + + return SUBMIT_JOB_3DES_CBC_DEC(des3_dec_ooo, job); +#else + return DES3_CBC_DEC(job); +#endif + } else if (IMB_CIPHER_CUSTOM == job->cipher_mode) { + return SUBMIT_JOB_CUSTOM_CIPHER(job); + } else if (IMB_CIPHER_CCM == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + return AES_CNTR_CCM_128(job); + } else { /* assume 32 */ + return AES_CNTR_CCM_256(job); + } + } else if (IMB_CIPHER_ZUC_EEA3 == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + MB_MGR_ZUC_OOO *zuc_eea3_ooo = state->zuc_eea3_ooo; + + return SUBMIT_JOB_ZUC_EEA3(zuc_eea3_ooo, job); + } else { /* assume 32 */ + MB_MGR_ZUC_OOO *zuc256_eea3_ooo = + state->zuc256_eea3_ooo; + + return SUBMIT_JOB_ZUC256_EEA3(zuc256_eea3_ooo, job); + } + } else if (IMB_CIPHER_SNOW3G_UEA2_BITLEN == job->cipher_mode) { +#ifdef SUBMIT_JOB_SNOW3G_UEA2 + return SUBMIT_JOB_SNOW3G_UEA2(state, job); +#else + return def_submit_snow3g_uea2_job(state, job); +#endif + } else if (IMB_CIPHER_KASUMI_UEA1_BITLEN == job->cipher_mode) { + return submit_kasumi_uea1_job(state, job); + } else if (IMB_CIPHER_CBCS_1_9 == 
job->cipher_mode) { + return SUBMIT_JOB_AES128_CBCS_1_9_DEC(job); + } else if (IMB_CIPHER_SNOW_V == job->cipher_mode) { + return SUBMIT_JOB_SNOW_V(job); + } else if (IMB_CIPHER_SNOW_V_AEAD == job->cipher_mode) { + return submit_snow_v_aead_job(state, job); + } else { + /* assume IMB_CIPHER_NULL */ + job->status |= IMB_STATUS_COMPLETED_CIPHER; + return job; + } +} + +__forceinline IMB_JOB *FLUSH_JOB_CIPHER_DEC(IMB_MGR *state, IMB_JOB *job) +{ +#ifdef FLUSH_JOB_SNOW3G_UEA2 + if (IMB_CIPHER_SNOW3G_UEA2_BITLEN == job->cipher_mode) + return FLUSH_JOB_SNOW3G_UEA2(state); +#endif + +#ifdef FLUSH_JOB_DES_CBC_DEC + if (IMB_CIPHER_DES == job->cipher_mode) { + MB_MGR_DES_OOO *des_dec_ooo = state->des_dec_ooo; + + return FLUSH_JOB_DES_CBC_DEC(des_dec_ooo); + } +#endif /* FLUSH_JOB_DES_CBC_DEC */ + +#ifdef FLUSH_JOB_3DES_CBC_DEC + if (IMB_CIPHER_DES3 == job->cipher_mode) { + MB_MGR_DES_OOO *des3_dec_ooo = state->des3_dec_ooo; + + return FLUSH_JOB_3DES_CBC_DEC(des3_dec_ooo); + } +#endif /* FLUSH_JOB_3DES_CBC_DEC */ + +#ifdef FLUSH_JOB_DOCSIS_DES_DEC + + if (IMB_CIPHER_DOCSIS_DES == job->cipher_mode) { + MB_MGR_DES_OOO *docsis_des_dec_ooo = state->docsis_des_dec_ooo; + + return FLUSH_JOB_DOCSIS_DES_DEC(docsis_des_dec_ooo); + } +#endif /* FLUSH_JOB_DOCSIS_DES_DEC */ + + if (IMB_CIPHER_ZUC_EEA3 == job->cipher_mode) { + if (16 == job->key_len_in_bytes) { + MB_MGR_ZUC_OOO *zuc_eea3_ooo = state->zuc_eea3_ooo; + + return FLUSH_JOB_ZUC_EEA3(zuc_eea3_ooo); + } else { /* assume 32 */ + MB_MGR_ZUC_OOO *zuc256_eea3_ooo = + state->zuc256_eea3_ooo; + + return FLUSH_JOB_ZUC256_EEA3(zuc256_eea3_ooo); + } + } + + return NULL; +} + +/* ========================================================================= */ +/* Hash submit & flush functions */ +/* ========================================================================= */ + +__forceinline +IMB_JOB * +SUBMIT_JOB_HASH(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_HMAC_SHA_1_OOO *hmac_sha_1_ooo = state->hmac_sha_1_ooo; + MB_MGR_HMAC_SHA_256_OOO *hmac_sha_224_ooo = state->hmac_sha_224_ooo; + MB_MGR_HMAC_SHA_256_OOO *hmac_sha_256_ooo = state->hmac_sha_256_ooo; + MB_MGR_HMAC_SHA_512_OOO *hmac_sha_384_ooo = state->hmac_sha_384_ooo; + MB_MGR_HMAC_SHA_512_OOO *hmac_sha_512_ooo = state->hmac_sha_512_ooo; + MB_MGR_HMAC_MD5_OOO *hmac_md5_ooo = state->hmac_md5_ooo; + MB_MGR_AES_XCBC_OOO *aes_xcbc_ooo = state->aes_xcbc_ooo; + MB_MGR_CCM_OOO *aes_ccm_ooo = state->aes_ccm_ooo; + MB_MGR_CCM_OOO *aes256_ccm_ooo = state->aes256_ccm_ooo; + MB_MGR_CMAC_OOO *aes_cmac_ooo = state->aes_cmac_ooo; + MB_MGR_CMAC_OOO *aes256_cmac_ooo = state->aes256_cmac_ooo; + MB_MGR_ZUC_OOO *zuc_eia3_ooo = state->zuc_eia3_ooo; + MB_MGR_ZUC_OOO *zuc256_eia3_ooo = state->zuc256_eia3_ooo; + MB_MGR_SHA_1_OOO *sha_1_ooo = state->sha_1_ooo; + MB_MGR_SHA_256_OOO *sha_224_ooo = state->sha_224_ooo; + MB_MGR_SHA_256_OOO *sha_256_ooo = state->sha_256_ooo; + MB_MGR_SHA_512_OOO *sha_384_ooo = state->sha_384_ooo; + MB_MGR_SHA_512_OOO *sha_512_ooo = state->sha_512_ooo; +#if (defined(SAFE_LOOKUP) || defined(AVX512)) && !defined(SSE_AESNI_EMU) + MB_MGR_SNOW3G_OOO *snow3g_uia2_ooo = state->snow3g_uia2_ooo; +#endif + + + switch (job->hash_alg) { + case IMB_AUTH_HMAC_SHA_1: + return SUBMIT_JOB_HMAC(hmac_sha_1_ooo, job); + case IMB_AUTH_HMAC_SHA_224: + return SUBMIT_JOB_HMAC_SHA_224(hmac_sha_224_ooo, job); + case IMB_AUTH_HMAC_SHA_256: + return SUBMIT_JOB_HMAC_SHA_256(hmac_sha_256_ooo, job); + case IMB_AUTH_HMAC_SHA_384: + return SUBMIT_JOB_HMAC_SHA_384(hmac_sha_384_ooo, job); + case IMB_AUTH_HMAC_SHA_512: + return 
SUBMIT_JOB_HMAC_SHA_512(hmac_sha_512_ooo, job); + case IMB_AUTH_AES_XCBC: + return SUBMIT_JOB_AES_XCBC(aes_xcbc_ooo, job); + case IMB_AUTH_MD5: + return SUBMIT_JOB_HMAC_MD5(hmac_md5_ooo, job); + case IMB_AUTH_CUSTOM: + return SUBMIT_JOB_CUSTOM_HASH(job); + case IMB_AUTH_AES_CCM: + if (16 == job->key_len_in_bytes) { + return SUBMIT_JOB_AES128_CCM_AUTH(aes_ccm_ooo, job); + } else { /* assume 32 */ + return SUBMIT_JOB_AES256_CCM_AUTH(aes256_ccm_ooo, job); + } + case IMB_AUTH_AES_CMAC: + /* + * CMAC OOO MGR assumes job len in bits + * (for CMAC length is provided in bytes) + */ + job->msg_len_to_hash_in_bits = + job->msg_len_to_hash_in_bytes * 8; + return SUBMIT_JOB_AES128_CMAC_AUTH(aes_cmac_ooo, job); + case IMB_AUTH_AES_CMAC_BITLEN: + return SUBMIT_JOB_AES128_CMAC_AUTH(aes_cmac_ooo, job); + case IMB_AUTH_AES_CMAC_256: + job->msg_len_to_hash_in_bits = + job->msg_len_to_hash_in_bytes * 8; + return SUBMIT_JOB_AES256_CMAC_AUTH(aes256_cmac_ooo, job); + case IMB_AUTH_SHA_1: + return SUBMIT_JOB_SHA1(sha_1_ooo, job); + case IMB_AUTH_SHA_224: + return SUBMIT_JOB_SHA224(sha_224_ooo, job); + case IMB_AUTH_SHA_256: + return SUBMIT_JOB_SHA256(sha_256_ooo, job); + case IMB_AUTH_SHA_384: + return SUBMIT_JOB_SHA384(sha_384_ooo, job); + case IMB_AUTH_SHA_512: + return SUBMIT_JOB_SHA512(sha_512_ooo, job); + case IMB_AUTH_ZUC_EIA3_BITLEN: + return SUBMIT_JOB_ZUC_EIA3(zuc_eia3_ooo, job); + case IMB_AUTH_ZUC256_EIA3_BITLEN: + return SUBMIT_JOB_ZUC256_EIA3(zuc256_eia3_ooo, job, + job->auth_tag_output_len_in_bytes); + case IMB_AUTH_SNOW3G_UIA2_BITLEN: +#if (defined(SAFE_LOOKUP) || defined(AVX512)) && !defined(SSE_AESNI_EMU) + return SUBMIT_JOB_SNOW3G_UIA2(snow3g_uia2_ooo, job); +#else + IMB_SNOW3G_F9_1_BUFFER(state, (const snow3g_key_schedule_t *) + job->u.SNOW3G_UIA2._key, + job->u.SNOW3G_UIA2._iv, + job->src + job->hash_start_src_offset_in_bytes, + job->msg_len_to_hash_in_bits, + job->auth_tag_output); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; +#endif + case IMB_AUTH_KASUMI_UIA1: + IMB_KASUMI_F9_1_BUFFER(state, (const kasumi_key_sched_t *) + job->u.KASUMI_UIA1._key, + job->src + job->hash_start_src_offset_in_bytes, + (const uint32_t) job->msg_len_to_hash_in_bytes, + job->auth_tag_output); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_AES_GMAC_128: + process_gmac(state, job, IMB_KEY_128_BYTES); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_AES_GMAC_192: + process_gmac(state, job, IMB_KEY_192_BYTES); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_AES_GMAC_256: + process_gmac(state, job, IMB_KEY_256_BYTES); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_POLY1305: + POLY1305_MAC(job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC32_ETHERNET_FCS: + CRC(IMB_CRC32_ETHERNET_FCS, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC32_SCTP: + CRC(IMB_CRC32_SCTP, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC32_WIMAX_OFDMA_DATA: + CRC(IMB_CRC32_WIMAX_OFDMA_DATA, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC24_LTE_A: + CRC(IMB_CRC24_LTE_A, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC24_LTE_B: + CRC(IMB_CRC24_LTE_B, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC16_X25: + CRC(IMB_CRC16_X25, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return 
job; + case IMB_AUTH_CRC16_FP_DATA: + CRC(IMB_CRC16_FP_DATA, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC11_FP_HEADER: + CRC(IMB_CRC11_FP_HEADER, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC10_IUUP_DATA: + CRC(IMB_CRC10_IUUP_DATA, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC8_WIMAX_OFDMA_HCS: + CRC(IMB_CRC8_WIMAX_OFDMA_HCS, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC7_FP_HEADER: + CRC(IMB_CRC7_FP_HEADER, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_CRC6_IUUP_HEADER: + CRC(IMB_CRC6_IUUP_HEADER, state, job); + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + case IMB_AUTH_GHASH: + return process_ghash(state, job); + default: + /** + * assume IMB_AUTH_GCM, IMB_AUTH_PON_CRC_BIP, + * IMB_AUTH_SNOW_V_AEAD or IMB_AUTH_NULL + */ + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + } +} + +__forceinline +IMB_JOB * +FLUSH_JOB_HASH(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_HMAC_SHA_1_OOO *hmac_sha_1_ooo = state->hmac_sha_1_ooo; + MB_MGR_HMAC_SHA_256_OOO *hmac_sha_224_ooo = state->hmac_sha_224_ooo; + MB_MGR_HMAC_SHA_256_OOO *hmac_sha_256_ooo = state->hmac_sha_256_ooo; + MB_MGR_HMAC_SHA_512_OOO *hmac_sha_384_ooo = state->hmac_sha_384_ooo; + MB_MGR_HMAC_SHA_512_OOO *hmac_sha_512_ooo = state->hmac_sha_512_ooo; + MB_MGR_HMAC_MD5_OOO *hmac_md5_ooo = state->hmac_md5_ooo; + MB_MGR_AES_XCBC_OOO *aes_xcbc_ooo = state->aes_xcbc_ooo; + MB_MGR_CCM_OOO *aes_ccm_ooo = state->aes_ccm_ooo; + MB_MGR_CCM_OOO *aes256_ccm_ooo = state->aes256_ccm_ooo; + MB_MGR_CMAC_OOO *aes_cmac_ooo = state->aes_cmac_ooo; + MB_MGR_CMAC_OOO *aes256_cmac_ooo = state->aes256_cmac_ooo; + MB_MGR_ZUC_OOO *zuc_eia3_ooo = state->zuc_eia3_ooo; + MB_MGR_ZUC_OOO *zuc256_eia3_ooo = state->zuc256_eia3_ooo; + MB_MGR_SHA_1_OOO *sha_1_ooo = state->sha_1_ooo; + MB_MGR_SHA_256_OOO *sha_224_ooo = state->sha_224_ooo; + MB_MGR_SHA_256_OOO *sha_256_ooo = state->sha_256_ooo; + MB_MGR_SHA_512_OOO *sha_384_ooo = state->sha_384_ooo; + MB_MGR_SHA_512_OOO *sha_512_ooo = state->sha_512_ooo; +#if (defined(SAFE_LOOKUP) || defined(AVX512)) && !defined(SSE_AESNI_EMU) + MB_MGR_SNOW3G_OOO *snow3g_uia2_ooo = state->snow3g_uia2_ooo; +#endif + + switch (job->hash_alg) { + case IMB_AUTH_HMAC_SHA_1: + return FLUSH_JOB_HMAC(hmac_sha_1_ooo); + case IMB_AUTH_HMAC_SHA_224: + return FLUSH_JOB_HMAC_SHA_224(hmac_sha_224_ooo); + case IMB_AUTH_HMAC_SHA_256: + return FLUSH_JOB_HMAC_SHA_256(hmac_sha_256_ooo); + case IMB_AUTH_HMAC_SHA_384: + return FLUSH_JOB_HMAC_SHA_384(hmac_sha_384_ooo); + case IMB_AUTH_HMAC_SHA_512: + return FLUSH_JOB_HMAC_SHA_512(hmac_sha_512_ooo); + case IMB_AUTH_SHA_1: + return FLUSH_JOB_SHA1(sha_1_ooo, job); + case IMB_AUTH_SHA_224: + return FLUSH_JOB_SHA224(sha_224_ooo, job); + case IMB_AUTH_SHA_256: + return FLUSH_JOB_SHA256(sha_256_ooo, job); + case IMB_AUTH_SHA_384: + return FLUSH_JOB_SHA384(sha_384_ooo, job); + case IMB_AUTH_SHA_512: + return FLUSH_JOB_SHA512(sha_512_ooo, job); + case IMB_AUTH_AES_XCBC: + return FLUSH_JOB_AES_XCBC(aes_xcbc_ooo); + case IMB_AUTH_MD5: + return FLUSH_JOB_HMAC_MD5(hmac_md5_ooo); + case IMB_AUTH_CUSTOM: + return FLUSH_JOB_CUSTOM_HASH(job); + case IMB_AUTH_AES_CCM: + if (16 == job->key_len_in_bytes) { + return FLUSH_JOB_AES128_CCM_AUTH(aes_ccm_ooo); + } else { /* assume 32 */ + return FLUSH_JOB_AES256_CCM_AUTH(aes256_ccm_ooo); + } + case IMB_AUTH_AES_CMAC: + case IMB_AUTH_AES_CMAC_BITLEN: + 
return FLUSH_JOB_AES128_CMAC_AUTH(aes_cmac_ooo); + case IMB_AUTH_AES_CMAC_256: + return FLUSH_JOB_AES256_CMAC_AUTH(aes256_cmac_ooo); + case IMB_AUTH_ZUC_EIA3_BITLEN: + return FLUSH_JOB_ZUC_EIA3(zuc_eia3_ooo); + case IMB_AUTH_ZUC256_EIA3_BITLEN: + return FLUSH_JOB_ZUC256_EIA3(zuc256_eia3_ooo, + job->auth_tag_output_len_in_bytes); +#if (defined(SAFE_LOOKUP) || defined(AVX512)) && !defined(SSE_AESNI_EMU) + case IMB_AUTH_SNOW3G_UIA2_BITLEN: + return FLUSH_JOB_SNOW3G_UIA2(snow3g_uia2_ooo); +#endif + default: /* assume GCM or IMB_AUTH_NULL */ + if (!(job->status & IMB_STATUS_COMPLETED_AUTH)) { + job->status |= IMB_STATUS_COMPLETED_AUTH; + return job; + } + /* if HMAC is complete then return NULL */ + return NULL; + } +} + +/* ========================================================================= */ +/* Job submit & flush functions */ +/* ========================================================================= */ + +__forceinline +IMB_JOB *SUBMIT_JOB_CIPHER(IMB_MGR *state, IMB_JOB *job) +{ + if (job->cipher_direction == IMB_DIR_ENCRYPT) + job = SUBMIT_JOB_CIPHER_ENC(state, job); + else + job = SUBMIT_JOB_CIPHER_DEC(state, job); + + return job; +} + +__forceinline +IMB_JOB *FLUSH_JOB_CIPHER(IMB_MGR *state, IMB_JOB *job) +{ + if (job->cipher_direction == IMB_DIR_ENCRYPT) + job = FLUSH_JOB_CIPHER_ENC(state, job); + else + job = FLUSH_JOB_CIPHER_DEC(state, job); + + return job; +} + +/* submit a half-completed job, based on the status */ +__forceinline +IMB_JOB *RESUBMIT_JOB(IMB_MGR *state, IMB_JOB *job) +{ + while (job != NULL && job->status < IMB_STATUS_COMPLETED) { + if (job->status == IMB_STATUS_COMPLETED_AUTH) + job = SUBMIT_JOB_CIPHER(state, job); + else /* assumed job->status = IMB_STATUS_COMPLETED_CIPHER */ + job = SUBMIT_JOB_HASH(state, job); + } + + return job; +} + +__forceinline +IMB_JOB *submit_new_job(IMB_MGR *state, IMB_JOB *job) +{ + if (job->chain_order == IMB_ORDER_CIPHER_HASH) + job = SUBMIT_JOB_CIPHER(state, job); + else + job = SUBMIT_JOB_HASH(state, job); + + job = RESUBMIT_JOB(state, job); + return job; +} + +__forceinline +uint32_t complete_job(IMB_MGR *state, IMB_JOB *job) +{ + uint32_t completed_jobs = 0; + + /** + * complete as many jobs as necessary + * until specified 'job' has completed + */ + if (job->chain_order == IMB_ORDER_CIPHER_HASH) { + /* while() loop optimized for cipher_hash order */ + while (job->status < IMB_STATUS_COMPLETED) { + IMB_JOB *tmp = FLUSH_JOB_CIPHER(state, job); + + if (tmp == NULL) + tmp = FLUSH_JOB_HASH(state, job); + + (void) RESUBMIT_JOB(state, tmp); + completed_jobs++; + } + } else { + /* while() loop optimized for hash_cipher order */ + while (job->status < IMB_STATUS_COMPLETED) { + IMB_JOB *tmp = FLUSH_JOB_HASH(state, job); + + if (tmp == NULL) + tmp = FLUSH_JOB_CIPHER(state, job); + + (void) RESUBMIT_JOB(state, tmp); + completed_jobs++; + } + } + + return completed_jobs; +} + +__forceinline +IMB_JOB * +submit_job_and_check(IMB_MGR *state, const int run_check) +{ + IMB_JOB *job = NULL; + + /* reset error status */ + imb_set_errno(state, 0); + + if (run_check) { + if (state == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); + return NULL; + } + } + +#ifndef LINUX + DECLARE_ALIGNED(imb_uint128_t xmm_save[10], 16); + + SAVE_XMMS(xmm_save); +#endif + + job = JOBS(state, state->next_job); + + if (run_check) { + if (is_job_invalid(state, job, + job->cipher_mode, job->hash_alg, + job->cipher_direction, + job->key_len_in_bytes)) { + job->status = IMB_STATUS_INVALID_ARGS; + } else { + job->status = IMB_STATUS_BEING_PROCESSED; + job = 
submit_new_job(state, job); + } + } else { + job->status = IMB_STATUS_BEING_PROCESSED; + job = submit_new_job(state, job); + } + + if (state->earliest_job < 0) { + /* state was previously empty */ + if (job == NULL) + state->earliest_job = state->next_job; + ADV_JOBS(&state->next_job); + goto exit; + } + + ADV_JOBS(&state->next_job); + + if (state->earliest_job == state->next_job) { + /* Full */ + job = JOBS(state, state->earliest_job); + (void) complete_job(state, job); + ADV_JOBS(&state->earliest_job); + goto exit; + } + + /* not full */ + job = JOBS(state, state->earliest_job); + if (job->status < IMB_STATUS_COMPLETED) { + job = NULL; + goto exit; + } + + ADV_JOBS(&state->earliest_job); +exit: + +#ifndef LINUX + RESTORE_XMMS(xmm_save); +#endif + return job; +} + +IMB_JOB * +SUBMIT_JOB(IMB_MGR *state) +{ + return submit_job_and_check(state, 1); +} + +IMB_JOB * +SUBMIT_JOB_NOCHECK(IMB_MGR *state) +{ + return submit_job_and_check(state, 0); +} + +IMB_JOB * +FLUSH_JOB(IMB_MGR *state) +{ + /* reset error status */ + imb_set_errno(state, 0); + +#ifdef SAFE_PARAM + if (state == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); + return NULL; + } +#endif + IMB_JOB *job; +#ifndef LINUX + DECLARE_ALIGNED(imb_uint128_t xmm_save[10], 16); +#endif + if (state->earliest_job < 0) + return NULL; /* empty */ + +#ifndef LINUX + SAVE_XMMS(xmm_save); +#endif + job = JOBS(state, state->earliest_job); + (void) complete_job(state, job); + + ADV_JOBS(&state->earliest_job); + + if (state->earliest_job == state->next_job) + state->earliest_job = -1; /* becomes empty */ + +#ifndef LINUX + RESTORE_XMMS(xmm_save); +#endif + return job; +} + +/* ========================================================================= */ +/* ========================================================================= */ + +uint32_t +QUEUE_SIZE(IMB_MGR *state) +{ + /* reset error status */ + imb_set_errno(state, 0); + +#ifdef SAFE_PARAM + if (state == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); + return 0; + } +#endif + return queue_sz(state); +} + +IMB_JOB * +GET_COMPLETED_JOB(IMB_MGR *state) +{ + /* reset error status */ + imb_set_errno(state, 0); + +#ifdef SAFE_PARAM + if (state == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); + return NULL; + } +#endif + IMB_JOB *job; + + if (state->earliest_job < 0) + return NULL; + + job = JOBS(state, state->earliest_job); + if (job->status < IMB_STATUS_COMPLETED) + return NULL; + + ADV_JOBS(&state->earliest_job); + + if (state->earliest_job == state->next_job) + state->earliest_job = -1; + + return job; +} + +IMB_JOB * +GET_NEXT_JOB(IMB_MGR *state) +{ + /* reset error status */ + imb_set_errno(state, 0); + +#ifdef SAFE_PARAM + if (state == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); + return NULL; + } +#endif + + return JOBS(state, state->next_job); +} + +#endif /* MB_MGR_JOB_API_H */ diff --git a/lib/include/mb_mgr_job_check.h b/lib/include/mb_mgr_job_check.h new file mode 100644 index 0000000000000000000000000000000000000000..32cda0fbe74b8d19e1b71c52c026b3871c3cafe1 --- /dev/null +++ b/lib/include/mb_mgr_job_check.h @@ -0,0 +1,1517 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#ifndef MB_MGR_JOB_CHECK_H +#define MB_MGR_JOB_CHECK_H + +#include "ipsec-mb.h" +#include "include/error.h" + +/* GCM NIST standard: len(M) < 2^39 - 256 */ +#define GCM_MAX_LEN UINT64_C(((1ULL << 39) - 256) - 1) +#define SNOW3G_MAX_BITLEN (UINT32_MAX) +#define MB_MAX_LEN16 ((1 << 16) - 2) + +__forceinline int +is_job_invalid(IMB_MGR *state, const IMB_JOB *job, + const IMB_CIPHER_MODE cipher_mode, const IMB_HASH_ALG hash_alg, + const IMB_CIPHER_DIRECTION cipher_direction, + const IMB_KEY_SIZE_BYTES key_len_in_bytes) +{ + const uint64_t auth_tag_len_fips[] = { + 0, /* INVALID selection */ + 20, /* IMB_AUTH_HMAC_SHA_1 */ + 28, /* IMB_AUTH_HMAC_SHA_224 */ + 32, /* IMB_AUTH_HMAC_SHA_256 */ + 48, /* IMB_AUTH_HMAC_SHA_384 */ + 64, /* IMB_AUTH_HMAC_SHA_512 */ + 12, /* IMB_AUTH_AES_XCBC */ + 16, /* IMB_AUTH_MD5 */ + 0, /* IMB_AUTH_NULL */ + 16, /* IMB_AUTH_AES_GMAC */ + 0, /* IMB_AUTH_CUSTOM */ + 0, /* IMB_AUTH_AES_CCM */ + 16, /* IMB_AUTH_AES_CMAC */ + 20, /* IMB_AUTH_SHA_1 */ + 28, /* IMB_AUTH_SHA_224 */ + 32, /* IMB_AUTH_SHA_256 */ + 48, /* IMB_AUTH_SHA_384 */ + 64, /* IMB_AUTH_SHA_512 */ + 4, /* IMB_AUTH_AES_CMAC 3GPP */ + 8, /* IMB_AUTH_PON_CRC_BIP */ + 4, /* IMB_AUTH_ZUC_EIA3_BITLEN */ + 4, /* IMB_AUTH_DOCSIS_CRC32 */ + 4, /* IMB_AUTH_SNOW3G_UIA2_BITLEN */ + 4, /* IMB_AUTH_KASUMI_UIA1 */ + 16, /* IMB_AUTH_AES_GMAC_128 */ + 16, /* IMB_AUTH_AES_GMAC_192 */ + 16, /* IMB_AUTH_AES_GMAC_256 */ + 16, /* IMB_AUTH_AES_CMAC_256 */ + 16, /* IMB_AUTH_POLY1305 */ + 16, /* IMB_AUTH_CHACHA_POLY1305 */ + 16, /* IMB_AUTH_CHACHA_POLY1305_SGL */ + 4, /* IMB_AUTH_ZUC256_EIA3_BITLEN */ + 16, /* IMB_AUTH_SNOW_V_AEAD */ + 16, /* IMB_AUTH_AES_GCM_SGL */ + 4, /* IMB_AUTH_CRC32_ETHERNET_FCS */ + 4, /* IMB_AUTH_CRC32_SCTP */ + 4, /* IMB_AUTH_CRC32_WIMAX_OFDMA_DATA */ + 4, /* IMB_AUTH_CRC24_LTE_A */ + 4, /* IMB_AUTH_CRC24_LTE_B */ + 4, /* IMB_AUTH_CRC16_X25 */ + 4, /* IMB_AUTH_CRC16_FP_DATA */ + 4, /* IMB_AUTH_CRC11_FP_HEADER */ + 4, /* IMB_AUTH_CRC10_IUUP_DATA */ + 4, /* IMB_AUTH_CRC8_WIMAX_OFDMA_HCS */ + 4, /* IMB_AUTH_CRC7_FP_HEADER */ + 4, /* IMB_AUTH_CRC6_IUUP_HEADER */ + 16, /* IMB_AUTH_GHASH */ + }; + const uint64_t auth_tag_len_ipsec[] = { + 0, /* INVALID selection */ + 12, /* IMB_AUTH_HMAC_SHA_1 */ + 14, /* IMB_AUTH_HMAC_SHA_224 */ + 16, /* IMB_AUTH_HMAC_SHA_256 */ + 
24, /* IMB_AUTH_HMAC_SHA_384 */ + 32, /* IMB_AUTH_HMAC_SHA_512 */ + 12, /* IMB_AUTH_AES_XCBC */ + 12, /* IMB_AUTH_MD5 */ + 0, /* IMB_AUTH_NULL */ + 16, /* IMB_AUTH_AES_GMAC */ + 0, /* IMB_AUTH_CUSTOM */ + 0, /* IMB_AUTH_AES_CCM */ + 16, /* IMB_AUTH_AES_CMAC */ + 20, /* IMB_AUTH_SHA_1 */ + 28, /* IMB_AUTH_SHA_224 */ + 32, /* IMB_AUTH_SHA_256 */ + 48, /* IMB_AUTH_SHA_384 */ + 64, /* IMB_AUTH_SHA_512 */ + 4, /* IMB_AUTH_AES_CMAC 3GPP */ + 8, /* IMB_AUTH_PON_CRC_BIP */ + 4, /* IMB_AUTH_ZUC_EIA3_BITLEN */ + 4, /* IMB_AUTH_DOCSIS_CRC32 */ + 4, /* IMB_AUTH_SNOW3G_UIA2_BITLEN */ + 4, /* IMB_AUTH_KASUMI_UIA1 */ + 16, /* IMB_AUTH_AES_GMAC_128 */ + 16, /* IMB_AUTH_AES_GMAC_192 */ + 16, /* IMB_AUTH_AES_GMAC_256 */ + 16, /* IMB_AUTH_AES_CMAC_256 */ + 16, /* IMB_AUTH_POLY1305 */ + 16, /* IMB_AUTH_CHACHA_POLY1305 */ + 16, /* IMB_AUTH_CHACHA_POLY1305_SGL */ + 4, /* IMB_AUTH_ZUC256_EIA3_BITLEN */ + 16, /* IMB_AUTH_SNOW_V_AEAD */ + 16, /* IMB_AUTH_AES_GCM_SGL */ + 4, /* IMB_AUTH_CRC32_ETHERNET_FCS */ + 4, /* IMB_AUTH_CRC32_SCTP */ + 4, /* IMB_AUTH_CRC32_WIMAX_OFDMA_DATA */ + 4, /* IMB_AUTH_CRC24_LTE_A */ + 4, /* IMB_AUTH_CRC24_LTE_B */ + 4, /* IMB_AUTH_CRC16_X25 */ + 4, /* IMB_AUTH_CRC16_FP_DATA */ + 4, /* IMB_AUTH_CRC11_FP_HEADER */ + 4, /* IMB_AUTH_CRC10_IUUP_DATA */ + 4, /* IMB_AUTH_CRC8_WIMAX_OFDMA_HCS */ + 4, /* IMB_AUTH_CRC7_FP_HEADER */ + 4, /* IMB_AUTH_CRC6_IUUP_HEADER */ + 16, /* IMB_AUTH_GHASH */ + }; + + /* Maximum length of buffer in PON is 2^14 + 8, since maximum + * PLI value is 2^14 - 1 + 1 extra byte of padding + 8 bytes + * of XGEM header */ + const uint64_t max_pon_len = (1 << 14) + 8; + + if (cipher_direction != IMB_DIR_DECRYPT && + cipher_direction != IMB_DIR_ENCRYPT && + cipher_mode != IMB_CIPHER_NULL) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_DIR); + return 1; + } + switch (cipher_mode) { + case IMB_CIPHER_CBC: + case IMB_CIPHER_CBCS_1_9: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (cipher_direction == IMB_DIR_ENCRYPT && + job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (cipher_direction == IMB_DIR_DECRYPT && + job->dec_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(16) && + key_len_in_bytes != UINT64_C(24) && + key_len_in_bytes != UINT64_C(32)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes == 0) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes & UINT64_C(15)) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (cipher_mode == IMB_CIPHER_CBCS_1_9) { + if (job->msg_len_to_cipher_in_bytes > + ((1ULL << (60)) - 1)) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->cipher_fields.CBCS.next_iv == NULL) { + imb_set_errno(state, + IMB_ERR_JOB_NULL_NEXT_IV); + return 1; + } + } else if (cipher_direction == IMB_DIR_ENCRYPT && + job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + break; + case IMB_CIPHER_ECB: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, 
IMB_ERR_JOB_NULL_DST); + return 1; + } + if (cipher_direction == IMB_DIR_ENCRYPT && + job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (cipher_direction == IMB_DIR_DECRYPT && + job->dec_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(16) && + key_len_in_bytes != UINT64_C(24) && + key_len_in_bytes != UINT64_C(32)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes == 0 || + job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes & UINT64_C(15)) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(0)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + break; + case IMB_CIPHER_CNTR: + case IMB_CIPHER_CNTR_BITLEN: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(16) && + key_len_in_bytes != UINT64_C(24) && + key_len_in_bytes != UINT64_C(32)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if ((cipher_mode == IMB_CIPHER_CNTR && + job->iv_len_in_bytes != UINT64_C(16) && + job->iv_len_in_bytes != UINT64_C(12)) || + (cipher_mode == IMB_CIPHER_CNTR_BITLEN && + job->iv_len_in_bytes != UINT64_C(16))) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + /* + * msg_len_to_cipher_in_bits is used with CNTR_BITLEN, but it is + * effectively the same field as msg_len_to_cipher_in_bytes, + * since it is part of the same union + */ + if (job->msg_len_to_cipher_in_bytes == 0) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + break; + case IMB_CIPHER_NULL: + /* + * No checks required for this mode + * @note NULL cipher doesn't perform memory copy operation + * from source to destination + */ + break; + case IMB_CIPHER_DOCSIS_SEC_BPI: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + /* it has to be set regardless of direction (AES-CFB) */ + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (cipher_direction == IMB_DIR_DECRYPT && + job->dec_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if ((key_len_in_bytes != UINT64_C(16)) && + (key_len_in_bytes != UINT64_C(32))) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + break; + case IMB_CIPHER_GCM: + case IMB_CIPHER_GCM_SGL: + if (job->msg_len_to_cipher_in_bytes > GCM_MAX_LEN) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes != 0 && job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->msg_len_to_cipher_in_bytes != 0 && job->dst == NULL) { + imb_set_errno(state, 
IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + /* Same key structure used for encrypt and decrypt */ + if (cipher_direction == IMB_DIR_ENCRYPT && + job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (cipher_direction == IMB_DIR_DECRYPT && + job->dec_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(16) && + key_len_in_bytes != UINT64_C(24) && + key_len_in_bytes != UINT64_C(32)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->iv_len_in_bytes == 0) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + if (cipher_mode == IMB_CIPHER_GCM && + hash_alg != IMB_AUTH_AES_GMAC) { + imb_set_errno(state, IMB_ERR_HASH_ALGO); + return 1; + } + if (cipher_mode == IMB_CIPHER_GCM_SGL && + hash_alg != IMB_AUTH_GCM_SGL) { + imb_set_errno(state, IMB_ERR_HASH_ALGO); + return 1; + } + break; + case IMB_CIPHER_CUSTOM: + /* no checks here */ + if (job->cipher_func == NULL) { + imb_set_errno(state, EFAULT); + return 1; + } + break; + case IMB_CIPHER_DES: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (cipher_direction == IMB_DIR_ENCRYPT && + job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (cipher_direction == IMB_DIR_DECRYPT && + job->dec_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(8)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes == 0 || + job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes & UINT64_C(7)) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(8)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + break; + case IMB_CIPHER_DOCSIS_DES: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (cipher_direction == IMB_DIR_ENCRYPT && + job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (cipher_direction == IMB_DIR_DECRYPT && + job->dec_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(8)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes == 0 || + job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(8)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + break; + case IMB_CIPHER_CCM: + if (job->msg_len_to_cipher_in_bytes != 0) { + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + } + if (job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, 
IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + /* AES-CTR and CBC-MAC use only encryption keys */ + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + /* currently only AES-CCM-128 and AES-CCM-256 supported */ + if (key_len_in_bytes != UINT64_C(16) && + key_len_in_bytes != UINT64_C(32)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + /* + * From RFC3610: + * Nonce length = 15 - L + * Valid L values are: 2 to 8 + * Then valid nonce lengths 13 to 7 (inclusive). + */ + if (job->iv_len_in_bytes > UINT64_C(13) || + job->iv_len_in_bytes < UINT64_C(7)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + if (hash_alg != IMB_AUTH_AES_CCM) { + imb_set_errno(state, IMB_ERR_HASH_ALGO); + return 1; + } + break; + case IMB_CIPHER_DES3: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (key_len_in_bytes != UINT64_C(24)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes == 0 || + job->msg_len_to_cipher_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes & UINT64_C(7)) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(8)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + if (cipher_direction == IMB_DIR_ENCRYPT) { + const void * const *ks_ptr = + (const void * const *)job->enc_keys; + + if (ks_ptr == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (ks_ptr[0] == NULL || ks_ptr[1] == NULL || + ks_ptr[2] == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + } else { + const void * const *ks_ptr = + (const void * const *)job->dec_keys; + + if (ks_ptr == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (ks_ptr[0] == NULL || ks_ptr[1] == NULL || + ks_ptr[2] == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + } + break; + case IMB_CIPHER_PON_AES_CNTR: + /* + * CRC and cipher are done together. 
A few assumptions: + * - CRC and cipher start offsets are the same + * - last 4 bytes (32 bits) of the buffer is CRC + * - updated CRC value is put into the source buffer + * (encryption only) + * - CRC length is msg_len_to_cipher_in_bytes - 4 bytes + * - msg_len_to_cipher_in_bytes is aligned to 4 bytes + * - If msg_len_to_cipher_in_bytes is 0, IV and key pointers + * are not required, as encryption is not done + */ + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + + /* source and destination buffer pointers cannot be the same, + * as there are always 8 bytes that are not ciphered */ + if ((job->src + job->cipher_start_src_offset_in_bytes) + != job->dst) { + imb_set_errno(state, EINVAL); + return 1; + } + if (hash_alg != IMB_AUTH_PON_CRC_BIP) { + imb_set_errno(state, IMB_ERR_HASH_ALGO); + return 1; + } + /* + * If message length to cipher != 0, AES-CTR is performed and + * key and IV require to be set properly + */ + if (job->msg_len_to_cipher_in_bytes != UINT64_C(0)) { + + /* message size needs to be aligned to 4 bytes */ + if ((job->msg_len_to_cipher_in_bytes & 3) != 0) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + + /* Subtract 8 bytes to maximum length since + * XGEM header is not ciphered */ + if ((job->msg_len_to_cipher_in_bytes > + (max_pon_len - 8))) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + + if (key_len_in_bytes != UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + } + if (job->msg_len_to_cipher_in_bytes >= 4) { + const uint64_t xgem_hdr = *(const uint64_t *) + (job->src + + job->hash_start_src_offset_in_bytes); + + /* PLI is 14 MS bits of XGEM header */ + const uint16_t pli = BSWAP64(xgem_hdr) >> 50; + + /* CRC only if PLI is more than 4 bytes */ + if (pli > 4) { + const uint16_t crc_len = pli - 4; + + if (crc_len > + job->msg_len_to_cipher_in_bytes - 4) { + imb_set_errno(state, + IMB_ERR_JOB_PON_PLI); + return 1; + } + } + } + break; + case IMB_CIPHER_ZUC_EEA3: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(16) && + key_len_in_bytes != UINT64_C(32)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bytes == 0 || + job->msg_len_to_cipher_in_bytes > ZUC_MAX_BYTELEN) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (key_len_in_bytes == UINT64_C(16)) { + if (job->iv_len_in_bytes != UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + } else { + if (job->iv_len_in_bytes != UINT64_C(23) && + job->iv_len_in_bytes != UINT64_C(25)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + } + break; + case IMB_CIPHER_SNOW3G_UEA2_BITLEN: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, 
IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bits == 0 || + job->msg_len_to_cipher_in_bits > SNOW3G_MAX_BITLEN) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + break; + case IMB_CIPHER_KASUMI_UEA1_BITLEN: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->msg_len_to_cipher_in_bits == 0 || + job->msg_len_to_cipher_in_bits > KASUMI_MAX_LEN) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(8)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + break; + case IMB_CIPHER_CHACHA20: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(32)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + /* Per RFC 7539, max cipher size is (2^32 - 1) x 64 */ + if (job->msg_len_to_cipher_in_bytes == 0 || + job->msg_len_to_cipher_in_bytes > ((1ULL << 38) - 64)) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(12)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + break; + case IMB_CIPHER_CHACHA20_POLY1305: + case IMB_CIPHER_CHACHA20_POLY1305_SGL: + if (job->msg_len_to_cipher_in_bytes != 0 && job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->msg_len_to_cipher_in_bytes != 0 && job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(32)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + /* Per RFC 7539, max cipher size is (2^32 - 1) x 64 */ + if (job->msg_len_to_cipher_in_bytes > ((1ULL << 38) - 64)) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(12)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + break; + case IMB_CIPHER_SNOW_V_AEAD: + case IMB_CIPHER_SNOW_V: + if (job->msg_len_to_cipher_in_bytes != 0 && job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->msg_len_to_cipher_in_bytes != 0 && job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (job->iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->enc_keys == NULL) { + 
imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (key_len_in_bytes != UINT64_C(32)) { + imb_set_errno(state, IMB_ERR_JOB_KEY_LEN); + return 1; + } + if (job->iv_len_in_bytes != UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + if (cipher_mode == IMB_CIPHER_SNOW_V_AEAD && + hash_alg != IMB_AUTH_SNOW_V_AEAD) { + imb_set_errno(state, IMB_ERR_HASH_ALGO); + return 1; + } + break; + default: + imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + + switch (hash_alg) { + case IMB_AUTH_HMAC_SHA_1: + case IMB_AUTH_MD5: + case IMB_AUTH_HMAC_SHA_224: + case IMB_AUTH_HMAC_SHA_256: + case IMB_AUTH_HMAC_SHA_384: + case IMB_AUTH_HMAC_SHA_512: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg] && + job->auth_tag_output_len_in_bytes != + auth_tag_len_fips[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->msg_len_to_hash_in_bytes == 0 || + job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->u.HMAC._hashed_auth_key_xor_ipad == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_HMAC_IPAD); + return 1; + } + if (job->u.HMAC._hashed_auth_key_xor_opad == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_HMAC_OPAD); + return 1; + } + break; + case IMB_AUTH_AES_XCBC: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg] && + job->auth_tag_output_len_in_bytes != + auth_tag_len_fips[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + if (job->u.XCBC._k1_expanded == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_XCBC_K1_EXP); + return 1; + } + if (job->u.XCBC._k2 == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_XCBC_K2); + return 1; + } + if (job->u.XCBC._k3 == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_XCBC_K3); + return 1; + } + break; + case IMB_AUTH_NULL: + break; + case IMB_AUTH_CRC32_ETHERNET_FCS: + case IMB_AUTH_CRC32_SCTP: + case IMB_AUTH_CRC32_WIMAX_OFDMA_DATA: + case IMB_AUTH_CRC24_LTE_A: + case IMB_AUTH_CRC24_LTE_B: + case IMB_AUTH_CRC16_X25: + case IMB_AUTH_CRC16_FP_DATA: + case IMB_AUTH_CRC11_FP_HEADER: + case IMB_AUTH_CRC10_IUUP_DATA: + case IMB_AUTH_CRC8_WIMAX_OFDMA_HCS: + case IMB_AUTH_CRC7_FP_HEADER: + case IMB_AUTH_CRC6_IUUP_HEADER: + if (job->src == NULL && job->msg_len_to_hash_in_bytes != 0) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + break; + case IMB_AUTH_AES_GMAC: + if (job->auth_tag_output_len_in_bytes < UINT64_C(1) || + job->auth_tag_output_len_in_bytes > UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if ((job->u.GCM.aad_len_in_bytes > 0) && + (job->u.GCM.aad == NULL)) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); + return 1; + } + if (cipher_mode != IMB_CIPHER_GCM) { + 
imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + /* + * msg_len_to_hash_in_bytes not checked against zero. + * It is not used for AES-GCM & GMAC - see + * SUBMIT_JOB_AES_GCM_ENC and SUBMIT_JOB_AES_GCM_DEC functions. + */ + break; + case IMB_AUTH_GCM_SGL: + if (cipher_mode != IMB_CIPHER_GCM_SGL) { + imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + if (job->u.GCM.ctx == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SGL_CTX); + return 1; + } + if (job->sgl_state == IMB_SGL_COMPLETE) { + if (job->auth_tag_output_len_in_bytes < UINT64_C(1) || + job->auth_tag_output_len_in_bytes > UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + } + if (job->sgl_state == IMB_SGL_INIT) { + if ((job->u.GCM.aad_len_in_bytes > 0) && + (job->u.GCM.aad == NULL)) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); + return 1; + } + } + break; + case IMB_AUTH_AES_GMAC_128: + case IMB_AUTH_AES_GMAC_192: + case IMB_AUTH_AES_GMAC_256: + if (job->auth_tag_output_len_in_bytes < UINT64_C(1) || + job->auth_tag_output_len_in_bytes > UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + /* This GMAC mode is to be used as stand-alone, + * not combined with GCM */ + if (cipher_mode == IMB_CIPHER_GCM) { + imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + if (job->u.GMAC._key == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH_KEY); + return 1; + } + if (job->u.GMAC._iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->u.GMAC.iv_len_in_bytes == 0) { + imb_set_errno(state, IMB_ERR_JOB_IV_LEN); + return 1; + } + if (job->msg_len_to_hash_in_bytes != 0 && job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + break; + case IMB_AUTH_GHASH: + if (job->auth_tag_output_len_in_bytes < UINT64_C(1) || + job->auth_tag_output_len_in_bytes > UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->u.GHASH._key == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH_KEY); + return 1; + } + if (job->u.GHASH._init_tag == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_GHASH_INIT_TAG); + return 1; + } + if (job->msg_len_to_hash_in_bytes != 0 && job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + break; + case IMB_AUTH_CUSTOM: + if (job->hash_func == NULL) { + imb_set_errno(state, EFAULT); + return 1; + } + break; + case IMB_AUTH_AES_CCM: + if (job->msg_len_to_hash_in_bytes != 0 && job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->u.CCM.aad_len_in_bytes > 46) { + /* 3 x AES_BLOCK - 2 bytes for AAD len */ + imb_set_errno(state, IMB_ERR_JOB_AAD_LEN); + return 1; + } + if ((job->u.CCM.aad_len_in_bytes > 0) && + (job->u.CCM.aad == NULL)) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); + return 1; + } + /* M can be any even number from 4 to 16 */ + if (job->auth_tag_output_len_in_bytes < UINT64_C(4) || + job->auth_tag_output_len_in_bytes > UINT64_C(16) || + ((job->auth_tag_output_len_in_bytes & 1) != 0)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (cipher_mode != 
IMB_CIPHER_CCM) { + imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + /* + * AES-CCM allows for only one message for + * cipher and authentication. + * AAD can be used to extend authentication over + * clear text fields. + */ + if (job->msg_len_to_cipher_in_bytes != + job->msg_len_to_hash_in_bytes) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->cipher_start_src_offset_in_bytes != + job->hash_start_src_offset_in_bytes) { + imb_set_errno(state, IMB_ERR_JOB_SRC_OFFSET); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + break; + case IMB_AUTH_AES_CMAC: + case IMB_AUTH_AES_CMAC_BITLEN: + case IMB_AUTH_AES_CMAC_256: + /* + * WARNING: When using IMB_AUTH_AES_CMAC_BITLEN, length of + * message is passed in bits, using job->msg_len_to_hash_in_bits + * (unlike "normal" IMB_AUTH_AES_CMAC, where is passed in bytes, + * using job->msg_len_to_hash_in_bytes). + */ + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if ((job->u.CMAC._key_expanded == NULL) || + (job->u.CMAC._skey1 == NULL) || + (job->u.CMAC._skey2 == NULL)) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + /* T is 128 bits but 96 bits is also allowed due to + * IPsec use case (RFC 4494) and 32 bits for CMAC 3GPP. + * ACVP validation requires tag size of 8 bits. + */ + if (job->auth_tag_output_len_in_bytes < UINT64_C(1) || + job->auth_tag_output_len_in_bytes > UINT64_C(16)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + break; + case IMB_AUTH_SHA_1: + case IMB_AUTH_SHA_224: + case IMB_AUTH_SHA_256: + case IMB_AUTH_SHA_384: + case IMB_AUTH_SHA_512: + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + break; + case IMB_AUTH_PON_CRC_BIP: + /* + * Authentication tag in PON is BIP 32-bit value only + * CRC is done together with cipher, + * its initial value is read from the source buffer and + * updated value put into the destination buffer. 
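+ * Illustrative sizing example (derived from the checks below, not
+ * taken from the PON spec): an XGEM frame with PLI = 40 bytes gives
+ * msg_len_to_hash_in_bytes = 8 (XGEM header) + 40 (payload including
+ * the 4-byte CRC) = 48, which is a multiple of 4, at least 8 and no
+ * more than 2^14 + 8, so it passes validation.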
+ * - msg_len_to_hash_in_bytes is aligned to 4 bytes + */ + if (((job->msg_len_to_hash_in_bytes & UINT64_C(3)) != 0) || + (job->msg_len_to_hash_in_bytes < UINT64_C(8)) || + (job->msg_len_to_hash_in_bytes > max_pon_len)) { + /* + * Length aligned to 4 bytes (and at least 8 bytes, + * including 8-byte XGEM header and no more + * than max length) + */ + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + /* 64-bits: + * - BIP 32-bits + * - CRC 32-bits + */ + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (cipher_mode != IMB_CIPHER_PON_AES_CNTR) { + imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + break; + case IMB_AUTH_ZUC_EIA3_BITLEN: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if ((job->msg_len_to_hash_in_bits < ZUC_MIN_BITLEN) || + (job->msg_len_to_hash_in_bits > ZUC_MAX_BITLEN)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + if (job->u.ZUC_EIA3._key == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (job->u.ZUC_EIA3._iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + break; + case IMB_AUTH_ZUC256_EIA3_BITLEN: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if ((job->msg_len_to_hash_in_bits < ZUC_MIN_BITLEN) || + (job->msg_len_to_hash_in_bits > ZUC_MAX_BITLEN)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + if (job->u.ZUC_EIA3._key == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (job->u.ZUC_EIA3._iv == NULL) { + /* If 25-byte IV is NULL, check 23-byte IV */ + if (job->u.ZUC_EIA3._iv23 == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + } + if ((job->auth_tag_output_len_in_bytes != 4) && + (job->auth_tag_output_len_in_bytes != 8) && + (job->auth_tag_output_len_in_bytes != 16)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + break; + case IMB_AUTH_DOCSIS_CRC32: + /** + * Use only in combination with DOCSIS_SEC_BPI. 
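+ * Typically (assumption, not mandated here) the 12-byte difference in
+ * the offsets/lengths below corresponds to the Ethernet DA + SA, which
+ * are covered by the CRC but not ciphered, while the extra 4 bytes are
+ * the CRC itself, which is ciphered but not hashed.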
+ * Assumptions about Ethernet PDU carried over DOCSIS: + * - cipher_start_src_offset_in_bytes <= + * (hash_start_src_offset_in_bytes + 12) + * - msg_len_to_cipher_in_bytes <= + * (msg_len_to_hash_in_bytes - 12 + 4) + * - @note: in-place operation allowed only + * - authentication tag size is 4 bytes + * - @note: in encrypt direction, computed CRC value is put into + * the source buffer + * - encrypt chain order: hash, cipher + * - decrypt chain order: cipher, hash + */ + if (cipher_mode != IMB_CIPHER_DOCSIS_SEC_BPI) { + imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + if (job->msg_len_to_cipher_in_bytes && + job->msg_len_to_hash_in_bytes) { + const uint64_t ciph_adjust = + IMB_DOCSIS_CRC32_MIN_ETH_PDU_SIZE - + 2 - /* ETH TYPE */ + IMB_DOCSIS_CRC32_TAG_SIZE; + + if ((job->msg_len_to_cipher_in_bytes + ciph_adjust) > + job->msg_len_to_hash_in_bytes) { + imb_set_errno(state, IMB_ERR_JOB_CIPH_LEN); + return 1; + } + if (job->cipher_start_src_offset_in_bytes < + (job->hash_start_src_offset_in_bytes + 12)) { + imb_set_errno(state, IMB_ERR_JOB_SRC_OFFSET); + return 1; + } + } + if (job->msg_len_to_hash_in_bytes > MB_MAX_LEN16) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + /* Ethernet FCS CRC is 32-bits */ + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if ((cipher_direction == IMB_DIR_ENCRYPT && + job->chain_order != IMB_ORDER_HASH_CIPHER) || + (cipher_direction == IMB_DIR_DECRYPT && + job->chain_order != IMB_ORDER_CIPHER_HASH)) { + imb_set_errno(state, IMB_ERR_JOB_CHAIN_ORDER); + return 1; + } + break; + case IMB_AUTH_SNOW3G_UIA2_BITLEN: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if ((job->msg_len_to_hash_in_bits == 0) || + (job->msg_len_to_hash_in_bits > SNOW3G_MAX_BITLEN)) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + if (job->u.SNOW3G_UIA2._key == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (job->u.SNOW3G_UIA2._iv == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_IV); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + break; + case IMB_AUTH_KASUMI_UIA1: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + /* + * KASUMI-UIA1 needs to be at least 8 bytes + * (IV + direction bit + '1' + 0s to align to byte boundary) + */ + if ((job->msg_len_to_hash_in_bytes < + (IMB_KASUMI_BLOCK_SIZE + 1)) || + (job->msg_len_to_hash_in_bytes > + (KASUMI_MAX_LEN / BYTESIZE))) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_LEN); + return 1; + } + if (job->u.KASUMI_UIA1._key == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_KEY); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + break; + case IMB_AUTH_POLY1305: + if (job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->u.POLY1305._key == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH_KEY); + return 1; + } + if (job->auth_tag_output == NULL) { + 
imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + break; + case IMB_AUTH_CHACHA20_POLY1305: + if (job->msg_len_to_hash_in_bytes != 0 && job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->msg_len_to_hash_in_bytes != 0 && job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (cipher_mode != IMB_CIPHER_CHACHA20_POLY1305) { + imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + if (job->u.CHACHA20_POLY1305.aad == NULL && + job->u.CHACHA20_POLY1305.aad_len_in_bytes > 0) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + break; + case IMB_AUTH_CHACHA20_POLY1305_SGL: + if (job->msg_len_to_hash_in_bytes != 0 && job->src == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SRC); + return 1; + } + if (job->msg_len_to_hash_in_bytes != 0 && job->dst == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_DST); + return 1; + } + if (cipher_mode != IMB_CIPHER_CHACHA20_POLY1305_SGL) { + imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + if (job->u.CHACHA20_POLY1305.aad == NULL && + job->u.CHACHA20_POLY1305.aad_len_in_bytes > 0) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (job->u.CHACHA20_POLY1305.ctx == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_SGL_CTX); + return 1; + } + break; + case IMB_AUTH_SNOW_V_AEAD: + if ((job->u.SNOW_V_AEAD.aad_len_in_bytes > 0) && + (job->u.SNOW_V_AEAD.aad == NULL)) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AAD); + return 1; + } + if (job->auth_tag_output == NULL) { + imb_set_errno(state, IMB_ERR_JOB_NULL_AUTH); + return 1; + } + if (job->auth_tag_output_len_in_bytes != + auth_tag_len_ipsec[hash_alg]) { + imb_set_errno(state, IMB_ERR_JOB_AUTH_TAG_LEN); + return 1; + } + if (cipher_mode != IMB_CIPHER_SNOW_V_AEAD) { + imb_set_errno(state, IMB_ERR_CIPH_MODE); + return 1; + } + break; + default: + imb_set_errno(state, IMB_ERR_HASH_ALGO); + return 1; + } + return 0; +} + +#endif /* MB_MGR_JOB_CHECK_H */ diff --git a/lib/include/memcpy.asm b/lib/include/memcpy.asm index 6c6b95b2bfe1cf7a78fe1a12af48448cfde956ab..ad85b20684ef26849f4a8b77b7d7d8a8ef64bc60 100644 --- a/lib/include/memcpy.asm +++ b/lib/include/memcpy.asm @@ -565,33 +565,26 @@ or %%SIZE, %%SIZE je %%end_load %endif - cmp %%SIZE, 1 - je %%_size_1 cmp %%SIZE, 2 + jb %%_size_1 je %%_size_2 - cmp %%SIZE, 3 - je %%_size_3 cmp %%SIZE, 4 + jb %%_size_3 je %%_size_4 - cmp %%SIZE, 5 - je %%_size_5 cmp %%SIZE, 6 + jb %%_size_5 je %%_size_6 - cmp %%SIZE, 7 - je %%_size_7 cmp %%SIZE, 8 + jb %%_size_7 je %%_size_8 - cmp %%SIZE, 9 - je %%_size_9 cmp %%SIZE, 10 + jb %%_size_9 je %%_size_10 - cmp %%SIZE, 11 - je %%_size_11 cmp %%SIZE, 12 + jb %%_size_11 je %%_size_12 - cmp %%SIZE, 13 - je %%_size_13 cmp %%SIZE, 14 + jb %%_size_13 je %%_size_14 %%_size_15: @@ -654,12 +647,29 @@ sub %%TMP, 16 %%_check_size: -%assign %%I 1 -%rep 16 - cmp %%TMP, %%I - je APPEND(%%_size_, %%I) -%assign 
%%I (%%I+1) -%endrep + cmp %%TMP, 2 + jb %%_size_1 + je %%_size_2 + cmp %%TMP, 4 + jb %%_size_3 + je %%_size_4 + cmp %%TMP, 6 + jb %%_size_5 + je %%_size_6 + cmp %%TMP, 8 + jb %%_size_7 + je %%_size_8 + cmp %%TMP, 10 + jb %%_size_9 + je %%_size_10 + cmp %%TMP, 12 + jb %%_size_11 + je %%_size_12 + cmp %%TMP, 14 + jb %%_size_13 + je %%_size_14 + cmp %%TMP, 15 + je %%_size_15 %%_size_16: vmovdqu XWORD(%%DST), [%%IDX] diff --git a/lib/include/memcpy.h b/lib/include/memcpy.h index 92cd2c56666ef6f1318356fb927a2abf722a5658..20e9ac353b4f7be6a41a701de18fd739aa1c71e5 100644 --- a/lib/include/memcpy.h +++ b/lib/include/memcpy.h @@ -37,4 +37,7 @@ void memcpy_fn_avx_16(void *dst, const void *src, const size_t size); /* Memcpy 128 bytes with SSE instructions */ void memcpy_fn_sse_128(void *dst, const void *src); +/* Basic memcpy that doesn't use stack */ +void safe_memcpy(void *dst, const void *src, const size_t size); + #endif /* MEMCPY_H */ diff --git a/lib/include/noaesni.h b/lib/include/noaesni.h index 57f564dd19acbe09f6a230a913c5fb3b48dc5f4b..be68ad85ac104e24e346aeacc5cb4c1abf8a77c5 100644 --- a/lib/include/noaesni.h +++ b/lib/include/noaesni.h @@ -46,6 +46,39 @@ IMB_DLL_EXPORT uint32_t queue_size_sse_no_aesni(IMB_MGR *state); IMB_DLL_EXPORT IMB_JOB *get_completed_job_sse_no_aesni(IMB_MGR *state); IMB_DLL_EXPORT IMB_JOB *get_next_job_sse_no_aesni(IMB_MGR *state); +IMB_DLL_EXPORT uint32_t +get_next_burst_sse_no_aesni(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_sse_no_aesni(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +submit_burst_nocheck_sse_no_aesni(IMB_MGR *state, const uint32_t n_jobs, + IMB_JOB **jobs); +IMB_DLL_EXPORT uint32_t +flush_burst_sse_no_aesni(IMB_MGR *state, const uint32_t max_jobs, + IMB_JOB **jobs); + +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_sse_no_aesni(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_cipher_burst_nocheck_sse_no_aesni(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_sse_no_aesni(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); +IMB_DLL_EXPORT uint32_t +submit_hash_burst_nocheck_sse_no_aesni(IMB_MGR *state, IMB_JOB *jobs, + const uint32_t n_jobs, + const IMB_HASH_ALG hash); IMB_DLL_EXPORT void aes_keyexp_128_sse_no_aesni(const void *key, void *enc_exp_keys, void *dec_exp_keys); diff --git a/lib/include/ooo_mgr_reset.h b/lib/include/ooo_mgr_reset.h new file mode 100644 index 0000000000000000000000000000000000000000..1d4c94761d2dfec67cf72c19b9bfe098a14a9eb5 --- /dev/null +++ b/lib/include/ooo_mgr_reset.h @@ -0,0 +1,82 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. 
+ * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#ifndef OOO_MGR_RESET_H +#define OOO_MGR_RESET_H + +IMB_DLL_LOCAL void +ooo_mgr_aes_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL void +ooo_mgr_docsis_aes_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL void +ooo_mgr_cmac_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL void +ooo_mgr_ccm_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_aes_xcbc_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha1_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha224_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha256_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha384_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha512_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_hmac_md5_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_zuc_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_sha1_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_sha256_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_sha512_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_des_reset(void *p_ooo_mgr, const unsigned num_lanes); + +IMB_DLL_LOCAL +void ooo_mgr_snow3g_reset(void *p_ooo_mgr, const unsigned num_lanes); + +#endif /* OOO_MGR_RESET_H */ diff --git a/lib/include/os.asm b/lib/include/os.asm index b0f293c379e0b1ecdd764593d4f82e9fdeecde78..90d1c5ab6491393f4e37baa1566ee9e4f21bc116 100644 --- a/lib/include/os.asm +++ b/lib/include/os.asm @@ -76,4 +76,42 @@ %endif %endmacro +;; Macro to reserve stack space before function call, +;; based on number of arguments +%macro RESERVE_STACK_SPACE 1 +%define %%N_ARGS %1 ; [immediate] Number of arguments + +%ifdef LINUX +%if %%N_ARGS > 6 + sub rsp, 8*(%%N_ARGS - 6) +%endif +%else ; Windows +%if %%N_ARGS <= 4 + ; Reserve 32 bytes if number of arguments is <= 4 + sub rsp, 8*4 +%else + sub rsp, 8*%%N_ARGS +%endif +%endif ; LINUX +%endmacro + +;; Macro to restore stack pointer after function call, +;; based on number of arguments +%macro RESTORE_STACK_SPACE 1 +%define %%N_ARGS %1 ; [immediate] Number of arguments + +%ifdef LINUX +%if %%N_ARGS > 6 + add rsp, 8*(%%N_ARGS - 6) +%endif +%else ; Windows 
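;; Illustrative note (editorial sketch; the callee name below is hypothetical):
;; on Windows x64 the ABI requires the caller to provide 32 bytes of shadow
;; space for every call, regardless of how many arguments are used, which is
;; why a fixed 8*4 bytes is reserved below for 4 or fewer arguments (and
;; 8*N_ARGS, i.e. shadow space plus stack slots, for more). On Linux
;; (SysV AMD64) the first 6 integer arguments travel in registers, so space
;; is only reserved for arguments 7 and up. A possible call site, assuming
;; RSP alignment is handled elsewhere:
;;
;;      RESERVE_STACK_SPACE 7
;;      call    some_seven_arg_fn       ; hypothetical external helper
;;      RESTORE_STACK_SPACE 7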
+%if %%N_ARGS <= 4 + ; Reserve 32 bytes if number of arguments is <= 4 + add rsp, 8*4 +%else + add rsp, 8*%%N_ARGS +%endif +%endif ; LINUX +%endmacro + %endif ; OS_ASM_FILE diff --git a/lib/include/sha_mb_mgr.h b/lib/include/sha_mb_mgr.h new file mode 100644 index 0000000000000000000000000000000000000000..8b4ce7216797e37ab1f1baf369c696686dd22137 --- /dev/null +++ b/lib/include/sha_mb_mgr.h @@ -0,0 +1,707 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include "include/sha_generic.h" +#include "ipsec_ooo_mgr.h" +#include "constants.h" +#include "include/arch_sse_type1.h" +#include "include/arch_sse_type2.h" +#include "include/arch_avx_type1.h" +#include "include/arch_avx2_type1.h" +#include "include/arch_avx512_type1.h" + +__forceinline +void copy_bswap4_array_mb(void *dst, const void *src, const size_t num, + const size_t offset, const unsigned lane) +{ + uint32_t *outp = (uint32_t *) dst; + const uint32_t *inp = (const uint32_t *) src; + size_t i; + + for (i = 0; i < num; i++) + outp[i] = bswap4(inp[lane + i*offset]); +} + +__forceinline +void copy_bswap4_array_mb_ni(void *dst, const void *src, const size_t num, + const unsigned lane, const int digest_row_sz) +{ + uint32_t *outp = (uint32_t *) dst; + const uint32_t *inp = (const uint32_t *) src; + size_t i; + + for (i = 0; i < num; i++) + outp[i] = bswap4(inp[digest_row_sz*lane + i]); +} + +__forceinline +void copy_bswap8_array_mb(void *dst, const void *src, const size_t num, + const size_t offset, const unsigned lane) +{ + uint64_t *outp = (uint64_t *) dst; + const uint64_t *inp = (const uint64_t *) src; + size_t i; + + for (i = 0; i < num; i++) + outp[i] = bswap8(inp[lane + i*offset]); +} + +__forceinline +void sha1_mb_init_digest(uint32_t *digest, const unsigned lane) +{ + digest[lane + 0*16] = H0; + digest[lane + 1*16] = H1; + digest[lane + 2*16] = H2; + digest[lane + 3*16] = H3; + digest[lane + 4*16] = H4; +} + +__forceinline +void sha1_ni_mb_init_digest(uint32_t *digest, const unsigned lane) +{ + digest[5*lane + 0] = H0; + digest[5*lane + 1] = H1; + digest[5*lane + 2] = H2; + digest[5*lane + 3] = H3; + digest[5*lane + 4] = H4; +} + +__forceinline +void sha224_mb_init_digest(uint32_t *digest, const unsigned lane) +{ + digest[lane + 0*16] = SHA224_H0; + digest[lane + 1*16] = SHA224_H1; + digest[lane + 2*16] = SHA224_H2; + digest[lane + 3*16] = SHA224_H3; + digest[lane + 4*16] = SHA224_H4; + digest[lane + 5*16] = SHA224_H5; + digest[lane + 6*16] = SHA224_H6; + digest[lane + 7*16] = SHA224_H7; +} + +__forceinline +void sha224_ni_mb_init_digest(uint32_t *digest, const unsigned lane) +{ + digest[8*lane + 0] = SHA224_H0; + digest[8*lane + 1] = SHA224_H1; + digest[8*lane + 2] = SHA224_H2; + digest[8*lane + 3] = SHA224_H3; + digest[8*lane + 4] = SHA224_H4; + digest[8*lane + 5] = SHA224_H5; + digest[8*lane + 6] = SHA224_H6; + digest[8*lane + 7] = SHA224_H7; +} + +__forceinline +void sha256_mb_init_digest(uint32_t *digest, const unsigned lane) +{ + digest[lane + 0*16] = SHA256_H0; + digest[lane + 1*16] = SHA256_H1; + digest[lane + 2*16] = SHA256_H2; + digest[lane + 3*16] = SHA256_H3; + digest[lane + 4*16] = SHA256_H4; + digest[lane + 5*16] = SHA256_H5; + digest[lane + 6*16] = SHA256_H6; + digest[lane + 7*16] = SHA256_H7; +} + +__forceinline +void sha256_ni_mb_init_digest(uint32_t *digest, const unsigned lane) +{ + digest[8*lane + 0] = SHA256_H0; + digest[8*lane + 1] = SHA256_H1; + digest[8*lane + 2] = SHA256_H2; + digest[8*lane + 3] = SHA256_H3; + digest[8*lane + 4] = SHA256_H4; + digest[8*lane + 5] = SHA256_H5; + digest[8*lane + 6] = SHA256_H6; + digest[8*lane + 7] = SHA256_H7; +} + +__forceinline +void sha384_mb_init_digest(uint64_t *digest, const unsigned lane) +{ + digest[lane + 0*8] = SHA384_H0; + digest[lane + 1*8] = SHA384_H1; + digest[lane + 2*8] = SHA384_H2; + digest[lane + 3*8] = SHA384_H3; + digest[lane + 4*8] = SHA384_H4; + digest[lane + 5*8] = SHA384_H5; + digest[lane + 6*8] = SHA384_H6; + 
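/*
 * Illustrative note: the non-SHANI out-of-order managers keep lane digests
 * interleaved ("structure of arrays"), so word i of lane n sits at
 * digest[n + i*stride] (stride 8 for the 64-bit SHA-384/512 words here,
 * 16 for the 32-bit SHA-1/224/256 variants above). For example, lane 3's
 * H5 is digest[3 + 5*8] = digest[43]. The *_ni_* helpers use the opposite,
 * per-lane contiguous layout: digest[words_per_digest*lane + i].
 */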
digest[lane + 7*8] = SHA384_H7; +} + +__forceinline +void sha512_mb_init_digest(uint64_t *digest, const unsigned lane) +{ + digest[lane + 0*8] = SHA512_H0; + digest[lane + 1*8] = SHA512_H1; + digest[lane + 2*8] = SHA512_H2; + digest[lane + 3*8] = SHA512_H3; + digest[lane + 4*8] = SHA512_H4; + digest[lane + 5*8] = SHA512_H5; + digest[lane + 6*8] = SHA512_H6; + digest[lane + 7*8] = SHA512_H7; +} + +__forceinline +void +sha_mb_generic_init(void *digest, const int sha_type, const unsigned lane) +{ + if (sha_type == 1) + sha1_mb_init_digest(digest, lane); + else if (sha_type == 224) + sha224_mb_init_digest(digest, lane); + else if (sha_type == 256) + sha256_mb_init_digest(digest, lane); + else if (sha_type == 384) + sha384_mb_init_digest(digest, lane); + else /* sha_type == 512 */ + sha512_mb_init_digest(digest, lane); +} + +__forceinline +void +sha_ni_mb_generic_init(void *digest, const int sha_type, const unsigned lane) +{ + if (sha_type == 1) + sha1_ni_mb_init_digest(digest, lane); + else if (sha_type == 224) + sha224_ni_mb_init_digest(digest, lane); + else if (sha_type == 256) + sha256_ni_mb_init_digest(digest, lane); +} + +__forceinline +void sha_mb_generic_write_digest(void *dst, const void *src, + const int sha_type, const size_t offset, + const unsigned lane) +{ + if (sha_type == 1) + copy_bswap4_array_mb(dst, src, NUM_SHA_DIGEST_WORDS, offset, + lane); + else if (sha_type == 224) + copy_bswap4_array_mb(dst, src, NUM_SHA_224_DIGEST_WORDS, offset, + lane); + else if (sha_type == 256) + copy_bswap4_array_mb(dst, src, NUM_SHA_256_DIGEST_WORDS, offset, + lane); + else if (sha_type == 384) + copy_bswap8_array_mb(dst, src, NUM_SHA_384_DIGEST_WORDS, offset, + lane); + else /* sha_type == 512 */ + copy_bswap8_array_mb(dst, src, NUM_SHA_512_DIGEST_WORDS, offset, + lane); +} + +__forceinline +void sha_ni_mb_generic_write_digest(void *dst, const void *src, + const int sha_type, const unsigned lane) +{ + if (sha_type == 1) + copy_bswap4_array_mb_ni(dst, src, NUM_SHA_DIGEST_WORDS, + lane, 5); + else if (sha_type == 224) + copy_bswap4_array_mb_ni(dst, src, NUM_SHA_224_DIGEST_WORDS, + lane, 8); + else if (sha_type == 256) + copy_bswap4_array_mb_ni(dst, src, NUM_SHA_256_DIGEST_WORDS, + lane, 8); +} + +__forceinline +void sha1_create_extra_blocks(MB_MGR_SHA_1_OOO *state, + const uint64_t blk_size, const uint64_t r, + const unsigned min_idx) +{ + HMAC_SHA1_LANE_DATA *ld = &state->ldata[min_idx]; + const uint64_t xblk_size = blk_size*state->ldata[min_idx].extra_blocks; + + memset(ld->extra_block, 0, sizeof(ld->extra_block)); + + var_memcpy(ld->extra_block, state->args.data_ptr[min_idx], r); + ld->extra_block[r] = 0x80; + + store8_be(&ld->extra_block[xblk_size - 8], + ld->job_in_lane->msg_len_to_hash_in_bytes * 8); + + state->args.data_ptr[min_idx] = &ld->extra_block[0]; + + state->lens[min_idx] = (uint16_t)xblk_size; + + state->ldata[min_idx].extra_blocks = 0; +} + +__forceinline +void sha256_create_extra_blocks(MB_MGR_SHA_256_OOO *state, + const uint64_t blk_size, const uint64_t r, + const unsigned min_idx) +{ + HMAC_SHA1_LANE_DATA *ld = &state->ldata[min_idx]; + const uint64_t xblk_size = blk_size*state->ldata[min_idx].extra_blocks; + + memset(ld->extra_block, 0, sizeof(ld->extra_block)); + + var_memcpy(ld->extra_block, state->args.data_ptr[min_idx], r); + ld->extra_block[r] = 0x80; + + store8_be(&ld->extra_block[xblk_size - 8], + ld->job_in_lane->msg_len_to_hash_in_bytes * 8); + + state->args.data_ptr[min_idx] = &ld->extra_block[0]; + + state->lens[min_idx] = (uint16_t)xblk_size; + + 
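/*
 * Worked example (illustrative; assumes the block-processing routine advances
 * args.data_ptr past the blocks it consumed): for a 70-byte message and a
 * 64-byte block size, one full block is hashed straight from the source and
 * r = 70 % 64 = 6 bytes remain. Those 6 bytes are copied above, the 0x80
 * padding byte lands at extra_block[6], the bit length (70 * 8 = 560) is
 * stored big-endian in the last 8 bytes of the 64-byte block, and the lane
 * length is set to 64 so the padded block is hashed on the next pass. When
 * the remainder leaves no room for the 0x80 byte plus the 8-byte length,
 * the caller bumps extra_blocks to 2 and the same logic fills a 128-byte
 * double block instead.
 */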
state->ldata[min_idx].extra_blocks = 0; +} + +__forceinline +void sha512_create_extra_blocks(MB_MGR_SHA_512_OOO *state, + const uint64_t blk_size, const uint64_t r, + const unsigned min_idx) +{ + HMAC_SHA512_LANE_DATA *ld = &state->ldata[min_idx]; + const uint64_t xblk_size = blk_size*state->ldata[min_idx].extra_blocks; + + memset(ld->extra_block, 0, sizeof(ld->extra_block)); + + var_memcpy(ld->extra_block, state->args.data_ptr[min_idx], r); + ld->extra_block[r] = 0x80; + + store8_be(&ld->extra_block[xblk_size - 8], + ld->job_in_lane->msg_len_to_hash_in_bytes * 8); + + state->args.data_ptr[min_idx] = &ld->extra_block[0]; + + state->lens[min_idx] = (uint16_t)xblk_size; + + state->ldata[min_idx].extra_blocks = 0; +} + +__forceinline +IMB_JOB * +submit_flush_job_sha_1(MB_MGR_SHA_1_OOO *state, IMB_JOB *job, + const unsigned max_jobs, const int is_submit, + const int sha_type, const uint64_t blk_size, + const uint64_t pad_size, + void (*fn)(SHA1_ARGS *, uint32_t), const int shani) +{ + unsigned lane, min_idx; + IMB_JOB *ret_job = NULL; + + if (is_submit) { + /* + * SUBMIT + * - get a free lane id + */ + + lane = state->unused_lanes & 15; + state->unused_lanes >>= 4; + state->num_lanes_inuse++; + state->args.data_ptr[lane] = + job->src + job->hash_start_src_offset_in_bytes; + + if (shani) + sha_ni_mb_generic_init(state->args.digest, sha_type, + lane); + else + sha_mb_generic_init(state->args.digest, sha_type, + lane); + + /* copy job data in and set up initial blocks */ + state->ldata[lane].job_in_lane = job; + state->lens[lane] = job->msg_len_to_hash_in_bytes; + state->ldata[lane].extra_blocks = 1; + + /* enough jobs to start processing? */ + if (state->num_lanes_inuse != max_jobs) + return NULL; + } else { + /* + * FLUSH + * - find 1st non null job + */ + for (lane = 0; lane < max_jobs; lane++) + if (state->ldata[lane].job_in_lane != NULL) + break; + if (lane >= max_jobs) + return NULL; /* no not null job */ + } + + do { + uint64_t min_len; + unsigned i; + + if (is_submit) { + /* + * SUBMIT + * - find min common length to process + */ + min_idx = 0; + min_len = state->lens[0]; + + for (i = 1; i < max_jobs; i++) { + if (min_len > state->lens[i]) { + min_idx = i; + min_len = state->lens[i]; + } + } + } else { + /* + * FLUSH + * - copy good (not null) lane onto empty lanes + * - find min common length to process across + * - not null lanes + */ + min_idx = lane; + min_len = state->lens[lane]; + + for (i = 0; i < max_jobs; i++) { + if (i == lane) + continue; + + if (state->ldata[i].job_in_lane != NULL) { + if (min_len > state->lens[i]) { + min_idx = i; + min_len = state->lens[i]; + } + } else { + state->args.data_ptr[i] = + state->args.data_ptr[lane]; + state->lens[i] = UINT64_MAX; + } + } + } + + /* subtract min len from all lanes */ + const uint64_t min_len_blk = min_len & (~(blk_size - 1)); + + for (i = 0; i < max_jobs; i++) + state->lens[i] -= min_len_blk; + + const uint64_t r = min_len % blk_size; + + if (r >= (blk_size - pad_size)) + state->ldata[min_idx].extra_blocks = 2; + + /* run the algorithmic code on full selected blocks */ + if(min_len >= blk_size) + (*fn)(&state->args, + (uint32_t)(min_len/blk_size)); + + /* create extra blocks */ + if (state->ldata[min_idx].extra_blocks != 0) + sha1_create_extra_blocks(state, blk_size, r, min_idx); + + } while(state->lens[min_idx] != 0); + + ret_job = state->ldata[min_idx].job_in_lane; +#ifdef SAFE_DATA + if (ret_job->msg_len_to_hash_in_bytes % blk_size) + memset(state->ldata[min_idx].extra_block, 0, blk_size); +#endif + /* put back processed 
packet into unused lanes, set job as complete */ + state->unused_lanes = (state->unused_lanes << 4) | min_idx; + state->num_lanes_inuse--; + if (shani) + sha_ni_mb_generic_write_digest(ret_job->auth_tag_output, + state->args.digest, sha_type, + min_idx); + else + sha_mb_generic_write_digest(ret_job->auth_tag_output, + state->args.digest, sha_type, 16, + min_idx); + ret_job->status |= IMB_STATUS_COMPLETED_AUTH; + state->ldata[min_idx].job_in_lane = NULL; + return ret_job; +} + +__forceinline +IMB_JOB * +submit_flush_job_sha_256(MB_MGR_SHA_256_OOO *state, IMB_JOB *job, + const unsigned max_jobs, const int is_submit, + const int sha_type, const uint64_t blk_size, + const uint64_t pad_size, + void (*fn)(SHA256_ARGS *, uint32_t), const int shani) +{ + unsigned lane, min_idx; + IMB_JOB *ret_job = NULL; + + if (is_submit) { + /* + * SUBMIT + * - get a free lane id + */ + + lane = state->unused_lanes & 15; + state->unused_lanes >>= 4; + state->num_lanes_inuse++; + state->args.data_ptr[lane] = + job->src + job->hash_start_src_offset_in_bytes; + + if (shani) + sha_ni_mb_generic_init(state->args.digest, sha_type, + lane); + else + sha_mb_generic_init(state->args.digest, sha_type, + lane); + + /* copy job data in and set up initial blocks */ + state->ldata[lane].job_in_lane = job; + state->lens[lane] = job->msg_len_to_hash_in_bytes; + state->ldata[lane].extra_blocks = 1; + + /* enough jobs to start processing? */ + if (state->num_lanes_inuse != max_jobs) + return NULL; + } else { + /* + * FLUSH + * - find 1st non null job + */ + for (lane = 0; lane < max_jobs; lane++) + if (state->ldata[lane].job_in_lane != NULL) + break; + if (lane >= max_jobs) + return NULL; /* no not null job */ + } + + do { + uint64_t min_len; + unsigned i; + + if (is_submit) { + /* + * SUBMIT + * - find min common length to process + */ + min_idx = 0; + min_len = state->lens[0]; + + for (i = 1; i < max_jobs; i++) { + if (min_len > state->lens[i]) { + min_idx = i; + min_len = state->lens[i]; + } + } + } else { + /* + * FLUSH + * - copy good (not null) lane onto empty lanes + * - find min common length to process across + * - not null lanes + */ + min_idx = lane; + min_len = state->lens[lane]; + + for (i = 0; i < max_jobs; i++) { + if (i == lane) + continue; + + if (state->ldata[i].job_in_lane != NULL) { + if (min_len > state->lens[i]) { + min_idx = i; + min_len = state->lens[i]; + } + } else { + state->args.data_ptr[i] = + state->args.data_ptr[lane]; + state->lens[i] = UINT64_MAX; + } + } + } + + /* subtract min len from all lanes */ + const uint64_t min_len_blk = min_len & (~(blk_size - 1)); + + for (i = 0; i < max_jobs; i++) + state->lens[i] -= min_len_blk; + + const uint64_t r = min_len % blk_size; + + if (r >= (blk_size - pad_size)) + state->ldata[min_idx].extra_blocks = 2; + + /* run the algorithmic code on full selected blocks */ + if(min_len >= blk_size) + (*fn)(&state->args, + (uint32_t)(min_len/blk_size)); + + /* create extra blocks */ + if (state->ldata[min_idx].extra_blocks != 0) + sha256_create_extra_blocks(state, blk_size, r, min_idx); + + } while(state->lens[min_idx] != 0); + + ret_job = state->ldata[min_idx].job_in_lane; +#ifdef SAFE_DATA + if (ret_job->msg_len_to_hash_in_bytes % blk_size) + memset(state->ldata[min_idx].extra_block, 0, blk_size); +#endif + /* put back processed packet into unused lanes, set job as complete */ + state->unused_lanes = (state->unused_lanes << 4) | min_idx; + state->num_lanes_inuse--; + if (shani) + sha_ni_mb_generic_write_digest(ret_job->auth_tag_output, + state->args.digest, sha_type, 
+ min_idx); + else + sha_mb_generic_write_digest(ret_job->auth_tag_output, + state->args.digest, sha_type, 16, + min_idx); + + ret_job->status |= IMB_STATUS_COMPLETED_AUTH; + state->ldata[min_idx].job_in_lane = NULL; + return ret_job; +} + +__forceinline +IMB_JOB * +submit_flush_job_sha_512(MB_MGR_SHA_512_OOO *state, IMB_JOB *job, + const unsigned max_jobs, const int is_submit, + const int sha_type, const uint64_t blk_size, + const uint64_t pad_size, + void (*fn)(SHA512_ARGS *, uint64_t)) +{ + unsigned lane, min_idx; + IMB_JOB *ret_job = NULL; + + if (is_submit) { + /* + * SUBMIT + * - get a free lane id + */ + + lane = state->unused_lanes & 15; + state->unused_lanes >>= 4; + state->num_lanes_inuse++; + state->args.data_ptr[lane] = + job->src + job->hash_start_src_offset_in_bytes; + + sha_mb_generic_init(state->args.digest, sha_type, lane); + + /* copy job data in and set up initial blocks */ + state->ldata[lane].job_in_lane = job; + state->lens[lane] = (uint16_t)job->msg_len_to_hash_in_bytes; + state->ldata[lane].extra_blocks = 1; + + /* enough jobs to start processing? */ + if (state->num_lanes_inuse != max_jobs) + return NULL; + } else { + /* + * FLUSH + * - find 1st non null job + */ + for (lane = 0; lane < max_jobs; lane++) + if (state->ldata[lane].job_in_lane != NULL) + break; + if (lane >= max_jobs) + return NULL; /* no not null job */ + } + + do { + uint64_t min_len; + unsigned i; + + if (is_submit) { + /* + * SUBMIT + * - find min common length to process + */ + min_idx = 0; + min_len = state->lens[0]; + + for (i = 1; i < max_jobs; i++) { + if (min_len > state->lens[i]) { + min_idx = i; + min_len = state->lens[i]; + } + } + } else { + /* + * FLUSH + * - copy good (not null) lane onto empty lanes + * - find min common length to process across + * - not null lanes + */ + min_idx = lane; + min_len = state->lens[lane]; + + for (i = 0; i < max_jobs; i++) { + if (i == lane) + continue; + + if (state->ldata[i].job_in_lane != NULL) { + if (min_len > state->lens[i]) { + min_idx = i; + min_len = state->lens[i]; + } + } else { + state->args.data_ptr[i] = + state->args.data_ptr[lane]; + state->lens[i] = UINT64_MAX; + } + } + } + + /* subtract min len from all lanes */ + const uint64_t min_len_blk = min_len & (~(blk_size - 1)); + + for (i = 0; i < max_jobs; i++) + state->lens[i] -= min_len_blk; + + const uint64_t r = min_len % blk_size; + + if (r >= (blk_size - pad_size)) + state->ldata[min_idx].extra_blocks = 2; + + /* run the algorithmic code on full selected blocks */ + if(min_len >= blk_size) + (*fn)(&state->args, + (uint64_t)(min_len/blk_size)); + + /* create extra blocks */ + if (state->ldata[min_idx].extra_blocks != 0) + sha512_create_extra_blocks(state, blk_size, r, min_idx); + + } while(state->lens[min_idx] != 0); + + ret_job = state->ldata[min_idx].job_in_lane; +#ifdef SAFE_DATA + if (ret_job->msg_len_to_hash_in_bytes % blk_size) + memset(state->ldata[min_idx].extra_block, 0, blk_size); +#endif + /* put back processed packet into unused lanes, set job as complete */ + state->unused_lanes = (state->unused_lanes << 4) | min_idx; + state->num_lanes_inuse--; + sha_mb_generic_write_digest(ret_job->auth_tag_output, + state->args.digest, sha_type, 8, min_idx); + ret_job->status |= IMB_STATUS_COMPLETED_AUTH; + state->ldata[min_idx].job_in_lane = NULL; + return ret_job; +} diff --git a/lib/include/snow3g_common.h b/lib/include/snow3g_common.h index e1994a4083bb9727769697e50b42cf48327fbf5a..75fd952cd4497d24bdf5e5b34740e8fa6247f1d8 100644 --- a/lib/include/snow3g_common.h +++ 
b/lib/include/snow3g_common.h
@@ -1689,6 +1689,63 @@ static inline __m256i snow3g_keystream_8_4(snow3gKeyState8_t *pCtx)
         return keyStream;
 }
 
+/*
+ * @brief 8x8 uint32_t matrix transpose.
+ *
+ * @param[in/clobbered] in   Array of rows to transpose
+ * @param[out]          out  Array of transposed columns
+ */
+static inline void
+transpose8xu32_avx2(__m256i in[8], __m256i out[8])
+{
+        __m256i tmp[2];
+
+        tmp[0] = (__m256i) _mm256_shuffle_ps((__m256)in[0], (__m256)in[1],
+                                             0x44);
+        in[0] = (__m256i) _mm256_shuffle_ps((__m256)in[0], (__m256)in[1],
+                                            0xEE);
+        tmp[1] = (__m256i) _mm256_shuffle_ps((__m256)in[2], (__m256)in[3],
+                                             0x44);
+        in[2] = (__m256i) _mm256_shuffle_ps((__m256)in[2], (__m256)in[3],
+                                            0xEE);
+
+        in[3] = (__m256i) _mm256_shuffle_ps((__m256)tmp[0], (__m256)tmp[1],
+                                            0xDD);
+        in[1] = (__m256i) _mm256_shuffle_ps((__m256)in[0], (__m256)in[2],
+                                            0x88);
+        in[0] = (__m256i) _mm256_shuffle_ps((__m256)in[0], (__m256)in[2],
+                                            0xDD);
+        tmp[0] = (__m256i) _mm256_shuffle_ps((__m256)tmp[0], (__m256)tmp[1],
+                                             0x88);
+
+        in[2] = (__m256i) _mm256_shuffle_ps((__m256)in[4], (__m256)in[5],
+                                            0x44);
+        in[4] = (__m256i) _mm256_shuffle_ps((__m256)in[4], (__m256)in[5],
+                                            0xEE);
+        tmp[1] = (__m256i) _mm256_shuffle_ps((__m256)in[6], (__m256)in[7],
+                                             0x44);
+        in[6] = (__m256i) _mm256_shuffle_ps((__m256)in[6], (__m256)in[7],
+                                            0xEE);
+
+        in[7] = (__m256i) _mm256_shuffle_ps((__m256)in[2], (__m256)tmp[1],
+                                            0xDD);
+        in[5] = (__m256i) _mm256_shuffle_ps((__m256)in[4], (__m256)in[6],
+                                            0x88);
+        in[4] = (__m256i) _mm256_shuffle_ps((__m256)in[4], (__m256)in[6],
+                                            0xDD);
+        tmp[1] = (__m256i) _mm256_shuffle_ps((__m256)in[2], (__m256)tmp[1],
+                                             0x88);
+
+        out[6] = _mm256_permute2f128_si256(in[5], in[1], 0x13);
+        out[2] = _mm256_permute2f128_si256(in[5], in[1], 0x02);
+        out[5] = _mm256_permute2f128_si256(in[7], in[3], 0x13);
+        out[1] = _mm256_permute2f128_si256(in[7], in[3], 0x02);
+        out[7] = _mm256_permute2f128_si256(in[4], in[0], 0x13);
+        out[3] = _mm256_permute2f128_si256(in[4], in[0], 0x02);
+        out[4] = _mm256_permute2f128_si256(tmp[1], tmp[0], 0x13);
+        out[0] = _mm256_permute2f128_si256(tmp[1], tmp[0], 0x02);
+}
+
 /**
  * @brief Generates 32 bytes of key stream 8 buffers at a time
  *
@@ -1698,81 +1755,24 @@ static inline __m256i snow3g_keystream_8_4(snow3gKeyState8_t *pCtx)
 static inline void
 snow3g_keystream_8_32(snow3gKeyState8_t *pCtx, __m256i *pKeyStream)
 {
+        __m256i in[8];
+        unsigned int i;
 
-        __m256i temp[8];
+        /** Byte reversal on each KS */
+        static const __m256i mask = {
+                0x0405060700010203ULL, 0x0c0d0e0f08090a0bULL,
+                0x0405060700010203ULL, 0x0c0d0e0f08090a0bULL
+        };
 
         /** produces the next 4 bytes for each buffer */
-        int i;
+        for (i = 0; i < 8; i++)
+                in[i] = _mm256_shuffle_epi8(snow3g_keystream_8_4(pCtx), mask);
 
-        /** Byte reversal on each KS */
-        static const __m256i mask1 = {
-                0x0001020304050607ULL, 0x08090a0b0c0d0e0fULL,
-                0x0001020304050607ULL, 0x08090a0b0c0d0e0fULL
-        };
-        /** Reversal, shifted 4 bytes right */
-        static const __m256i mask2 = {
-                0x0405060708090a0bULL, 0x0c0d0e0f00010203ULL,
-                0x0405060708090a0bULL, 0x0c0d0e0f00010203ULL
-        };
-        /** Reversal, shifted 8 bytes right */
-        static const __m256i mask3 = {
-                0x08090a0b0c0d0e0fULL, 0x0001020304050607ULL,
-                0x08090a0b0c0d0e0fULL, 0x0001020304050607ULL
-        };
-        /** Reversal, shifted 12 bytes right */
-        static const __m256i mask4 = {
-                0x0c0d0e0f00010203ULL, 0x0405060708090a0bULL,
-                0x0c0d0e0f00010203ULL, 0x0405060708090a0bULL
-        };
+        /* Transposes the dwords of KS for all buffers into
+         * 32 consecutive KS bytes for each buffer */
+
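/*
 * Conceptual scalar equivalent of the documented transpose contract above
 * (illustrative sketch only): viewing in[] and out[] as an 8x8 matrix of
 * uint32_t lanes, element j of out[i] is element i of in[j], roughly
 *
 *     for (i = 0; i < 8; i++)
 *             for (j = 0; j < 8; j++)
 *                     ((uint32_t *)&out[i])[j] = ((const uint32_t *)&in[j])[i];
 *
 * which is what turns one 4-byte keystream word per buffer per iteration
 * into 32 contiguous keystream bytes per buffer.
 */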
transpose8xu32_avx2(in, pKeyStream); - temp[0] = _mm256_shuffle_epi8(snow3g_keystream_8_4(pCtx), mask1); - temp[1] = _mm256_shuffle_epi8(snow3g_keystream_8_4(pCtx), mask2); - temp[2] = _mm256_shuffle_epi8(snow3g_keystream_8_4(pCtx), mask3); - temp[3] = _mm256_shuffle_epi8(snow3g_keystream_8_4(pCtx), mask4); - temp[4] = _mm256_shuffle_epi8(snow3g_keystream_8_4(pCtx), mask1); - temp[5] = _mm256_shuffle_epi8(snow3g_keystream_8_4(pCtx), mask2); - temp[6] = _mm256_shuffle_epi8(snow3g_keystream_8_4(pCtx), mask3); - temp[7] = _mm256_shuffle_epi8(snow3g_keystream_8_4(pCtx), mask4); - - __m256i blended[8]; - /* blends KS together: 128bit slice consists - of 4 32-bit words for one packet */ - blended[0] = _mm256_blend_epi32(temp[0], temp[1], 0xaa); - blended[1] = _mm256_blend_epi32(temp[0], temp[1], 0x55); - blended[2] = _mm256_blend_epi32(temp[2], temp[3], 0xaa); - blended[3] = _mm256_blend_epi32(temp[2], temp[3], 0x55); - blended[4] = _mm256_blend_epi32(temp[4], temp[5], 0xaa); - blended[5] = _mm256_blend_epi32(temp[4], temp[5], 0x55); - blended[6] = _mm256_blend_epi32(temp[6], temp[7], 0xaa); - blended[7] = _mm256_blend_epi32(temp[6], temp[7], 0x55); - - temp[0] = _mm256_blend_epi32(blended[0], blended[2], 0xcc); - temp[1] = _mm256_blend_epi32(blended[1], blended[3], 0x99); - temp[2] = _mm256_blend_epi32(blended[0], blended[2], 0x33); - temp[3] = _mm256_blend_epi32(blended[1], blended[3], 0x66); - temp[4] = _mm256_blend_epi32(blended[4], blended[6], 0xcc); - temp[5] = _mm256_blend_epi32(blended[5], blended[7], 0x99); - temp[6] = _mm256_blend_epi32(blended[4], blended[6], 0x33); - temp[7] = _mm256_blend_epi32(blended[5], blended[7], 0x66); - - /** sorts 32 bit words back into order */ - blended[0] = temp[0]; - blended[1] = _mm256_shuffle_epi32(temp[1], 0x39); - blended[2] = _mm256_shuffle_epi32(temp[2], 0x4e); - blended[3] = _mm256_shuffle_epi32(temp[3], 0x93); - blended[4] = temp[4]; - blended[5] = _mm256_shuffle_epi32(temp[5], 0x39); - blended[6] = _mm256_shuffle_epi32(temp[6], 0x4e); - blended[7] = _mm256_shuffle_epi32(temp[7], 0x93); - for (i = 0; i < 4; i++) { - pKeyStream[i] = - _mm256_permute2x128_si256(blended[i], - blended[i + 4], 0x20); - pKeyStream[i + 4] = - _mm256_permute2x128_si256(blended[i], - blended[i + 4], 0x31); - } } #endif /* AVX2 */ diff --git a/lib/include/transpose_sse.asm b/lib/include/transpose_sse.asm new file mode 100644 index 0000000000000000000000000000000000000000..74797788aa359f9f8c9caa69650afc09c233db30 --- /dev/null +++ b/lib/include/transpose_sse.asm @@ -0,0 +1,70 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. 
+;;
+;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+;;
+
+%ifndef _TRANSPOSE_SSE_ASM_
+%define _TRANSPOSE_SSE_ASM_
+
+;; transpose r0, r1, r2, r3, t0, t1
+;; "transpose" data in {r0..r3} using temps {t0..t1}
+;; Input looks like: {r0 r1 r2 r3}
+;; r0 = {a3 a2 a1 a0}
+;; r1 = {b3 b2 b1 b0}
+;; r2 = {c3 c2 c1 c0}
+;; r3 = {d3 d2 d1 d0}
+;;
+;; output looks like: {t0 r1 r0 r3}
+;; t0 = {d0 c0 b0 a0}
+;; r1 = {d1 c1 b1 a1}
+;; r0 = {d2 c2 b2 a2}
+;; r3 = {d3 c3 b3 a3}
+
+%macro TRANSPOSE4_U32 6
+%define %%r0 %1
+%define %%r1 %2
+%define %%r2 %3
+%define %%r3 %4
+%define %%t0 %5
+%define %%t1 %6
+        movdqa  %%t0, %%r0
+        shufps  %%t0, %%r1, 0x44        ; t0 = {b1 b0 a1 a0}
+        shufps  %%r0, %%r1, 0xEE        ; r0 = {b3 b2 a3 a2}
+
+        movdqa  %%t1, %%r2
+        shufps  %%t1, %%r3, 0x44        ; t1 = {d1 d0 c1 c0}
+        shufps  %%r2, %%r3, 0xEE        ; r2 = {d3 d2 c3 c2}
+
+        movdqa  %%r1, %%t0
+        shufps  %%r1, %%t1, 0xDD        ; r1 = {d1 c1 b1 a1}
+
+        movdqa  %%r3, %%r0
+        shufps  %%r3, %%r2, 0xDD        ; r3 = {d3 c3 b3 a3}
+
+        shufps  %%r0, %%r2, 0x88        ; r0 = {d2 c2 b2 a2}
+        shufps  %%t0, %%t1, 0x88        ; t0 = {d0 c0 b0 a0}
+%endmacro
+
+%endif ;; _TRANSPOSE_SSE_ASM_
\ No newline at end of file
diff --git a/lib/include/zuc_internal.h b/lib/include/zuc_internal.h
index cad67d6f72a9552df2350acfcc3a01b665bcf6de..8c5bad213583033a5469b587b4bea357e96bd36d 100755
--- a/lib/include/zuc_internal.h
+++ b/lib/include/zuc_internal.h
@@ -316,6 +316,7 @@ IMB_DLL_LOCAL void asm_Zuc256Initialization_4_gfni_sse(ZucKey4_t *pKeys,
 IMB_DLL_LOCAL void asm_Zuc256Initialization_4_avx(ZucKey4_t *pKeys,
                                                   const uint8_t *ivs,
                                                   ZucState4_t *pState,
+                                                  void *tags,
                                                   const uint64_t tag_sz);
@@ -362,6 +363,12 @@ IMB_DLL_LOCAL void asm_ZucInitialization_8_avx2(ZucKey8_t *pKeys,
  * @param[in,out] pState    Pointer to a ZUC state structure of type
  *                          @ref ZucState8_t that will be populated
  *                          with the initialized ZUC state.
+ * @param[in,out] tags      Array of 4 pointers to authentication tags
+ *                          (up to 16 bytes each, only for ZUC-EIA3)
+ * @param[in] tag_sz        Tag size (0, 4, 8 or 16), to select the
+ *                          constants used to initialize the LFSR
+ *                          registers (0 is used in case of
+ *                          encryption).
* * @pre * None @@ -370,7 +377,8 @@ IMB_DLL_LOCAL void asm_ZucInitialization_8_avx2(ZucKey8_t *pKeys, IMB_DLL_LOCAL void asm_Zuc256Initialization_8_avx2(ZucKey8_t *pKeys, const uint8_t *ivs, ZucState8_t *pState, - const unsigned tag_sz); + void *tags, + const uint64_t tag_sz); /** ****************************************************************************** @@ -657,6 +665,44 @@ IMB_DLL_LOCAL void asm_ZucGenKeystream64B_16_avx512(ZucState16_t *pState, IMB_DLL_LOCAL void asm_ZucGenKeystream64B_16_gfni_avx512(ZucState16_t *pState, uint32_t *pKeyStr, const unsigned key_off); +/** + ****************************************************************************** + * + * @description + * Definition of the external function that implements the working + * stage of the ZUC algorithm. The function will generate 64 bytes of + * keystream for 16 packets in parallel, except for selected lanes, + * which will have 48 bytes of keystream generated instead. + * + * @param[in] pState Pointer to a ZUC state structure of type + * @ref ZucState16_t + * + * @param[in,out] pKeyStr Array of pointers to 16 input buffers + * that will contain the generated keystream + * for these 16 packets. + * + * @param[in] key_off Starting offset for writing KS. + * + * @param[in] lane_mask Mask containing lanes which will have 64 + * bytes of KS generated (16 bytes less for + * the rest) + * + * @pre + * A successful call to @ref asm_ZucInitialization_16_avx512 to initialize + * the ZUC state. + * + *****************************************************************************/ +IMB_DLL_LOCAL void +asm_ZucGenKeystream64B_16_skip16_avx512(ZucState16_t *pState, + uint32_t *pKeyStr, + const unsigned key_off, + const uint16_t lane_mask); + +IMB_DLL_LOCAL void +asm_ZucGenKeystream64B_16_skip16_gfni_avx512(ZucState16_t *pState, + uint32_t *pKeyStr, + const unsigned key_off, + const uint16_t lane_mask); /** ****************************************************************************** * @@ -684,16 +730,55 @@ IMB_DLL_LOCAL void asm_ZucGenKeystream64B_16_gfni_avx512(ZucState16_t *pState, * the ZUC state. * *****************************************************************************/ -IMB_DLL_LOCAL void asm_ZucGenKeystream64B_16_skip8_avx512(ZucState16_t *pState, - uint32_t *pKeyStr, - const unsigned key_off, - const uint16_t lane_mask); +IMB_DLL_LOCAL void +asm_ZucGenKeystream64B_16_skip8_avx512(ZucState16_t *pState, + uint32_t *pKeyStr, + const unsigned key_off, + const uint16_t lane_mask); IMB_DLL_LOCAL void asm_ZucGenKeystream64B_16_skip8_gfni_avx512(ZucState16_t *pState, uint32_t *pKeyStr, const unsigned key_off, const uint16_t lane_mask); +/** + ****************************************************************************** + * + * @description + * Definition of the external function that implements the working + * stage of the ZUC algorithm. The function will generate 64 bytes of + * keystream for 16 packets in parallel, except for selected lanes, + * which will have 60 bytes of keystream generated instead. + * + * @param[in] pState Pointer to a ZUC state structure of type + * @ref ZucState16_t + * + * @param[in,out] pKeyStr Array of pointers to 16 input buffers + * that will contain the generated keystream + * for these 16 packets. + * + * @param[in] key_off Starting offset for writing KS. + * + * @param[in] lane_mask Mask containing lanes which will have 64 + * bytes of KS generated (4 bytes less for + * the rest) + * + * @pre + * A successful call to @ref asm_ZucInitialization_16_avx512 to initialize + * the ZUC state. 
+ * + *****************************************************************************/ +IMB_DLL_LOCAL void +asm_ZucGenKeystream64B_16_skip4_avx512(ZucState16_t *pState, + uint32_t *pKeyStr, + const unsigned key_off, + const uint16_t lane_mask); + +IMB_DLL_LOCAL void +asm_ZucGenKeystream64B_16_skip4_gfni_avx512(ZucState16_t *pState, + uint32_t *pKeyStr, + const unsigned key_off, + const uint16_t lane_mask); /** ****************************************************************************** * @@ -758,6 +843,29 @@ IMB_DLL_LOCAL void asm_ZucGenKeystream4B_4_gfni_sse(ZucState4_t *pState, IMB_DLL_LOCAL void asm_ZucGenKeystream4B_4_avx(ZucState4_t *pState, uint32_t *pKeyStr[4]); +/** + ****************************************************************************** + * + * @description + * Definition of the external function that implements the working + * stage of the ZUC algorithm. The function will generate 16 bytes of + * keystream for eight packets in parallel. + * + * @param[in] pState Pointer to a ZUC state structure of type + * @ref ZucState8_t + * + * @param[in,out] pKeyStr Array of pointers to 8 input buffers that + * will contain the generated keystream for + * these 8 packets. + * + * @pre + * A successful call to @ref asm_ZucInitialization_8 to initialize the ZUC + * state. + * + *****************************************************************************/ +IMB_DLL_LOCAL void asm_ZucGenKeystream16B_8_avx2(ZucState8_t *pState, + uint32_t *pKeyStr[8]); + /** ****************************************************************************** * @@ -839,31 +947,36 @@ IMB_DLL_LOCAL void asm_ZucGenKeystream8B_16_gfni_avx512(ZucState16_t *pState, * * @description * Definition of the external function that implements the working - * stage of the ZUC algorithm. The function will generate 4 bytes of + * stage of the ZUC algorithm. The function will generate N*4 bytes of * keystream for sixteen packets in parallel. * * @param[in] pState Pointer to a ZUC state structure of type * @ref ZucState16_t * - * @param[in,out] pKeyStr Pointer to buffer to write consecutively 4 - * bytes of keystream for the 16 input buffers + * @param[in,out] pKeyStr Array of pointers to 16 input buffers + * that will contain the generated keystream + * for these 16 packets. + * + * @param[in] key_off Starting offset for writing KS. + * + * @param[in] numRounds Number of 4-byte rounds (1 to 16 rounds) * - * @param[in] lane_mask Mask containing lanes which will have 4 - * bytes of KS generated (no bytes generated - * for the rest) * @pre - * A successful call to @ref asm_ZucInitialization_16 to initialize the ZUC + * A successful call to @ref asm_ZucInitialization to initialize the ZUC * state. 
* *****************************************************************************/ -IMB_DLL_LOCAL void asm_ZucGenKeystream4B_16_avx512(ZucState16_t *pState, - uint32_t pKeyStr[16], - const uint32_t lane_mask); - -IMB_DLL_LOCAL void asm_ZucGenKeystream4B_16_gfni_avx512(ZucState16_t *pState, - uint32_t pKeyStr[16], - const uint32_t lane_mask); +IMB_DLL_LOCAL void +asm_ZucGenKeystream_16_avx512(ZucState16_t *pState, + uint32_t *pKstr, + const unsigned key_off, + const uint32_t numRounds); +IMB_DLL_LOCAL void +asm_ZucGenKeystream_16_gfni_avx512(ZucState16_t *pState, + uint32_t *pKstr, + const unsigned key_off, + const uint32_t numRounds); /** ****************************************************************************** * @@ -881,24 +994,30 @@ IMB_DLL_LOCAL void asm_ZucGenKeystream4B_16_gfni_avx512(ZucState16_t *pState, * * @param[in] key_off Starting offset for writing KS. * + * @param[in] lane_mask Mask containing lanes which will have N*4 + * bytes of KS generated (16 bytes less for + * the rest) + * * @param[in] numRounds Number of 4-byte rounds (1 to 16 rounds) * * @pre - * A successful call to @ref asm_ZucInitialization to initialize the ZUC - * state. + * A successful call to @ref asm_ZucInitialization_16_avx512 to initialize + * ZUC state. * *****************************************************************************/ IMB_DLL_LOCAL void -asm_ZucGenKeystream_16_avx512(ZucState16_t *pState, - uint32_t *pKstr, - const unsigned key_off, - const uint32_t numRounds); +asm_ZucGenKeystream_16_skip16_avx512(ZucState16_t *pState, + uint32_t *pKstr, + const unsigned key_off, + const uint16_t lane_mask, + const uint32_t numRounds); IMB_DLL_LOCAL void -asm_ZucGenKeystream_16_gfni_avx512(ZucState16_t *pState, - uint32_t *pKstr, - const unsigned key_off, - const uint32_t numRounds); +asm_ZucGenKeystream_16_skip16_gfni_avx512(ZucState16_t *pState, + uint32_t *pKstr, + const unsigned key_off, + const uint16_t lane_mask, + const uint32_t numRounds); /** ****************************************************************************** @@ -924,8 +1043,8 @@ asm_ZucGenKeystream_16_gfni_avx512(ZucState16_t *pState, * @param[in] numRounds Number of 4-byte rounds (1 to 16 rounds) * * @pre - * A successful call to @ref asm_ZucInitialization to initialize the ZUC - * state. + * A successful call to @ref asm_ZucInitialization_16_avx512 to initialize + * ZUC state. * *****************************************************************************/ IMB_DLL_LOCAL void @@ -941,6 +1060,48 @@ asm_ZucGenKeystream_16_skip8_gfni_avx512(ZucState16_t *pState, const unsigned key_off, const uint16_t lane_mask, const uint32_t numRounds); + +/** + ****************************************************************************** + * + * @description + * Definition of the external function that implements the working + * stage of the ZUC algorithm. The function will generate N*4 bytes of + * keystream for sixteen packets in parallel. + * + * @param[in] pState Pointer to a ZUC state structure of type + * @ref ZucState16_t + * + * @param[in,out] pKeyStr Array of pointers to 16 input buffers + * that will contain the generated keystream + * for these 16 packets. + * + * @param[in] key_off Starting offset for writing KS. + * + * @param[in] lane_mask Mask containing lanes which will have N*4 + * bytes of KS generated (4 bytes less for + * the rest) + * + * @param[in] numRounds Number of 4-byte rounds (1 to 16 rounds) + * + * @pre + * A successful call to @ref asm_ZucInitialization_16_avx512 to initialize + * ZUC state. 
+ * + *****************************************************************************/ +IMB_DLL_LOCAL void +asm_ZucGenKeystream_16_skip4_avx512(ZucState16_t *pState, + uint32_t *pKstr, + const unsigned key_off, + const uint16_t lane_mask, + const uint32_t numRounds); + +IMB_DLL_LOCAL void +asm_ZucGenKeystream_16_skip4_gfni_avx512(ZucState16_t *pState, + uint32_t *pKstr, + const unsigned key_off, + const uint16_t lane_mask, + const uint32_t numRounds); /** ****************************************************************************** * @@ -1088,10 +1249,12 @@ IMB_DLL_LOCAL void asm_Eia3Round16B_gfni_sse(void *T, const void *ks, const uint64_t tag_sz); IMB_DLL_LOCAL void asm_Eia3Round16B_avx(void *T, const void *ks, - const void *data); + const void *data, + const uint64_t tag_sz); IMB_DLL_LOCAL void asm_Eia3Round32B_avx(void *T, const void *ks, - const void *data); + const void *data, + const uint64_t tag_sz); /** ****************************************************************************** @@ -1127,56 +1290,48 @@ IMB_DLL_LOCAL void asm_Eia3Remainder_gfni_sse(void *T, const void *ks, const uint64_t key_size, const uint64_t tag_size); +IMB_DLL_LOCAL void asm_Eia3Remainder_avx(void *T, const void *ks, + const void *data, + const uint64_t n_bits, + const uint64_t key_size, + const uint64_t tag_size); + /** ****************************************************************************** * @description * Definition of the external function to update the authentication tag * based on keystream and data (AVX variant) * - * @param[in] T Authentication tag + * @param[in] T Array of authentication tags for 16 buffers * - * @param[in] ks Pointer to key stream + * @param[in] ks Array of keystreams for 16 buffers * - * @param[in] data Pointer to the data + * @param[in] data Array of pointers to the data for 16 buffers + * + * @param[in] len Array of remaining lengths for 16 buffers + * + * @param[in] tag_sz Tag size (4, 8 or 16 bytes) * * @pre * None * *****************************************************************************/ -IMB_DLL_LOCAL void asm_Eia3Round64BAVX512_16(uint32_t *T, +IMB_DLL_LOCAL void asm_Eia3Round64BAVX512_16(void *T, const uint32_t *ks, const void **data, - uint16_t *len); + uint16_t *len, + const uint64_t tag_sz); -IMB_DLL_LOCAL void asm_Eia3Round64B_16_VPCLMUL(uint32_t *T, +IMB_DLL_LOCAL void asm_Eia3Round64B_16_VPCLMUL(void *T, const uint32_t *ks, const void **data, - uint16_t *len); + uint16_t *len, + const uint64_t tag_sz); IMB_DLL_LOCAL void asm_Eia3Round64BAVX512(uint32_t *T, const void *ks, const void *data); -/** - ****************************************************************************** - * @description - * Definition of the external function to return the authentication - * update value to be XOR'ed with current authentication tag (AVX variant) - * - * @param[in] ks Pointer to key stream - * - * @param[in] data Pointer to the data - * - * @param[in] n_bits Number of data bits to be processed - * - * @pre - * None - * - *****************************************************************************/ -IMB_DLL_LOCAL void asm_Eia3Remainder_avx(void *T, const void *ks, - const void *data, - const uint64_t n_bits); - /** ****************************************************************************** * @description @@ -1209,7 +1364,8 @@ IMB_DLL_LOCAL uint32_t asm_Eia3_256_RemainderAVX512_16(uint32_t *T, const uint32_t *ks, const void **data, uint16_t *lens, - const uint32_t commonBits); + const uint64_t commonBits, + const uint64_t tag_size); IMB_DLL_LOCAL uint32_t 
asm_Eia3RemainderAVX512_16_VPCLMUL(uint32_t *T, const uint32_t *ks, @@ -1221,7 +1377,8 @@ IMB_DLL_LOCAL uint32_t asm_Eia3_256_RemainderAVX512_16_VPCLMUL(uint32_t *T, const uint32_t *ks, const void **data, uint16_t *lens, - const uint32_t commonBits); + const uint64_t commonBits, + const uint64_t tag_size); /** ****************************************************************************** @@ -1234,6 +1391,7 @@ IMB_DLL_LOCAL uint32_t asm_Eia3_256_RemainderAVX512_16_VPCLMUL(uint32_t *T, * @param[in] data Array of 16 pointers to data for 16 buffers * @param[in] len Array of lengths for 16 buffers * @param[in] numRounds Number of 64B rounds to perform + * @param[in] tagSize Tag size (4 or 8 bytes) * *****************************************************************************/ IMB_DLL_LOCAL @@ -1242,14 +1400,16 @@ void asm_Eia3_Nx64B_AVX512_16(ZucState16_t *pState, uint32_t *T, const void **data, uint16_t *len, - const uint32_t numRounds); + const uint64_t numRounds, + const uint64_t tagSize); IMB_DLL_LOCAL void asm_Eia3_Nx64B_AVX512_16_VPCLMUL(ZucState16_t *pState, uint32_t *pKeyStr, uint32_t *T, const void **data, uint16_t *len, - const uint32_t numRounds); + const uint64_t numRounds, + const uint64_t tagSize); IMB_DLL_LOCAL void zuc_eia3_4_buffer_job_gfni_sse(const void * const pKey[4], const uint8_t *ivs, @@ -1313,9 +1473,10 @@ IMB_DLL_LOCAL void zuc256_eia3_4_buffer_job_avx(const void * const pKey[4], const uint8_t *ivs, const void * const pBufferIn[4], - uint32_t *pMacI[4], + void *pMacI[4], const uint16_t lengthInBits[4], - const void * const job_in_lane[4]); + const void * const job_in_lane[4], + const uint64_t tag_size); IMB_DLL_LOCAL void zuc_eia3_8_buffer_job_avx2(const void * const pKey[8], @@ -1329,9 +1490,10 @@ IMB_DLL_LOCAL void zuc256_eia3_8_buffer_job_avx2(const void * const pKey[8], const uint8_t *ivs, const void * const pBufferIn[8], - uint32_t *pMacI[8], + void *pMacI[8], const uint16_t lengthInBits[8], - const void * const job_in_lane[8]); + const void * const job_in_lane[8], + const uint64_t tag_size); /* the s-boxes */ extern const uint8_t S0[256]; diff --git a/lib/ipsec-mb.h b/lib/ipsec-mb.h index acdca5afdb202cf5802433951f51d1c815c22da4..e1b8130d47c981e7b71332a926a2703463efdf90 100644 --- a/lib/ipsec-mb.h +++ b/lib/ipsec-mb.h @@ -91,8 +91,8 @@ typedef struct { /** * Library version */ -#define IMB_VERSION_STR "1.2.0" -#define IMB_VERSION_NUM 0x10200 +#define IMB_VERSION_STR "1.4.0-dev" +#define IMB_VERSION_NUM 0x10400 /** * Macro to translate version number @@ -178,7 +178,6 @@ typedef enum { */ typedef enum { IMB_ERR_MIN = 2000, - /* job api */ IMB_ERR_NULL_MBMGR, IMB_ERR_JOB_NULL_SRC, IMB_ERR_JOB_NULL_DST, @@ -200,7 +199,6 @@ typedef enum { IMB_ERR_JOB_NULL_SGL_CTX, IMB_ERR_JOB_NULL_NEXT_IV, IMB_ERR_JOB_PON_PLI, - /* direct api */ IMB_ERR_NULL_SRC, IMB_ERR_NULL_DST, IMB_ERR_NULL_KEY, @@ -223,6 +221,15 @@ typedef enum { IMB_ERR_JOB_NULL_XCBC_K1_EXP, IMB_ERR_JOB_NULL_XCBC_K2, IMB_ERR_JOB_NULL_XCBC_K3, + IMB_ERR_JOB_CIPH_DIR, + IMB_ERR_JOB_NULL_GHASH_INIT_TAG, + IMB_ERR_MISSING_CPUFLAGS_INIT_MGR, + IMB_ERR_NULL_JOB, + IMB_ERR_QUEUE_SPACE, + IMB_ERR_NULL_BURST, + IMB_ERR_BURST_SIZE, + IMB_ERR_BURST_OOO, + IMB_ERR_SELFTEST, /* add new error types above this comment */ IMB_ERR_MAX /* don't move this one */ } IMB_ERR; @@ -429,6 +436,7 @@ typedef enum { IMB_AUTH_CRC8_WIMAX_OFDMA_HCS, /**< CRC8-WIMAX-OFDMA-HCS */ IMB_AUTH_CRC7_FP_HEADER, /**< CRC7-FP-HEADER */ IMB_AUTH_CRC6_IUUP_HEADER, /**< CRC6-IUUP-HEADER */ + IMB_AUTH_GHASH, /**< GHASH */ IMB_AUTH_NUM } IMB_HASH_ALG; @@ 
-446,9 +454,19 @@ typedef enum { typedef enum { IMB_SGL_INIT = 0, IMB_SGL_UPDATE, - IMB_SGL_COMPLETE + IMB_SGL_COMPLETE, + IMB_SGL_ALL } IMB_SGL_STATE; +/** + * Input/output SGL segment structure. + */ +struct IMB_SGL_IOV { + const void *in; /**< Input segment */ + void *out; /**< Output segment */ + uint64_t len; /** Length of segment */ +}; + /** * Job structure. * @@ -472,10 +490,20 @@ typedef struct IMB_JOB { const void *enc_keys; /**< Encryption key pointer */ const void *dec_keys; /**< Decryption key pointer */ uint64_t key_len_in_bytes; /**< Key length in bytes */ - const uint8_t *src; /**< Input buffer. May be ciphertext or plaintext. - In-place ciphering allowed. */ - uint8_t *dst; /**< Output buffer. May be ciphertext or plaintext. - In-place ciphering allowed, i.e. dst = src. */ + union { + const uint8_t *src; /**< Input buffer. + May be ciphertext or plaintext. + In-place ciphering allowed. */ + const struct IMB_SGL_IOV *sgl_io_segs; + /**< Pointer to array of input/output SGL segments */ + }; + union { + uint8_t *dst; /**< Output buffer. + May be ciphertext or plaintext. + In-place ciphering allowed, i.e. dst = src. */ + uint64_t num_sgl_io_segs; + /**< Number of input/output SGL segments */ + }; union { uint64_t cipher_start_src_offset_in_bytes; /**< Offset into input buffer to start ciphering (in bytes) */ @@ -565,6 +593,11 @@ typedef struct IMB_JOB { uint64_t iv_len_in_bytes; /**< Authentication IV length in bytes */ } GMAC; /**< AES-GMAC specific fields */ + struct _GHASH_specific_fields { + const struct gcm_key_data *_key; + /**< Expanded GHASH key */ + const void *_init_tag; /**< initial tag value */ + } GHASH; /**< GHASH specific fields */ struct _POLY1305_specific_fields { const void *_key; /**< Poly1305 key */ @@ -603,7 +636,8 @@ typedef struct IMB_JOB { /**< Customer hash function */ IMB_SGL_STATE sgl_state; - /**< SGL state (IMB_SGL_INIT/IMB_SGL_UPDATE/IMB_SGL_COMPLETE) */ + /**< SGL state (IMB_SGL_INIT/IMB_SGL_UPDATE/IMB_SGL_COMPLETE/ + IMB_SGL_ALL) */ union { struct _CBCS_specific_fields { @@ -754,6 +788,19 @@ typedef IMB_JOB *(*submit_job_t)(struct IMB_MGR *); typedef IMB_JOB *(*get_completed_job_t)(struct IMB_MGR *); typedef IMB_JOB *(*flush_job_t)(struct IMB_MGR *); typedef uint32_t (*queue_size_t)(struct IMB_MGR *); +typedef uint32_t (*burst_fn_t)(struct IMB_MGR *, + const uint32_t, + struct IMB_JOB **); +typedef uint32_t (*submit_cipher_burst_t)(struct IMB_MGR *, + struct IMB_JOB *, + const uint32_t, + const IMB_CIPHER_MODE cipher, + const IMB_CIPHER_DIRECTION dir, + const IMB_KEY_SIZE_BYTES key_size); +typedef uint32_t (*submit_hash_burst_t)(struct IMB_MGR *, + struct IMB_JOB *, + const uint32_t, + const IMB_HASH_ALG hash); typedef void (*keyexp_t)(const void *, void *, void *); typedef void (*cmac_subkey_gen_t)(const void *, void *, void *); typedef void (*hash_one_block_t)(const void *, void *); @@ -807,7 +854,7 @@ typedef void (*chacha_poly_enc_dec_update_t)(const void *, void *, const void *, const uint64_t); typedef void (*chacha_poly_finalize_t)(struct chacha20_poly1305_context_data *, void *, const uint64_t); -typedef void (*ghash_t)(struct gcm_key_data *, const void *, +typedef void (*ghash_t)(const struct gcm_key_data *, const void *, const uint64_t, void *, const uint64_t); typedef void (*zuc_eea3_1_buffer_t)(const void *, const void *, const void *, @@ -958,6 +1005,7 @@ typedef uint32_t (*crc32_fn_t)(const void *, const uint64_t); #define IMB_FLAG_SHANI_OFF (1ULL << 0) /**< disable use of SHANI extension */ #define IMB_FLAG_AESNI_OFF (1ULL << 1) 
/**< disable use of AESNI extension */ +#define IMB_FLAG_GFNI_OFF (1ULL << 2) /**< disable use of GFNI extension */ /** * Multi-buffer manager detected features @@ -989,6 +1037,29 @@ typedef uint32_t (*crc32_fn_t)(const void *, const uint64_t); #define IMB_FEATURE_AVX512_IFMA (1ULL << 17) #define IMB_FEATURE_BMI2 (1ULL << 18) #define IMB_FEATURE_AESNI_EMU (1ULL << 19) +#define IMB_FEATURE_SELF_TEST (1ULL << 20) /* self-test feature present */ +#define IMB_FEATURE_SELF_TEST_PASS (1ULL << 21) /* self-test passed */ + +/** + * CPU flags needed for each implementation + */ +#define IMB_CPUFLAGS_NO_AESNI (IMB_FEATURE_SSE4_2 | IMB_FEATURE_CMOV) +#define IMB_CPUFLAGS_SSE (IMB_CPUFLAGS_NO_AESNI | IMB_FEATURE_AESNI | \ + IMB_FEATURE_PCLMULQDQ) +#define IMB_CPUFLAGS_SSE_T2 (IMB_CPUFLAGS_SSE | IMB_FEATURE_SHANI) +#define IMB_CPUFLAGS_SSE_T3 (IMB_CPUFLAGS_SSE_T2 | IMB_FEATURE_GFNI) +#define IMB_CPUFLAGS_AVX (IMB_CPUFLAGS_SSE | IMB_FEATURE_AVX) +#define IMB_CPUFLAGS_AVX2 (IMB_CPUFLAGS_AVX | IMB_FEATURE_AVX2 | \ + IMB_FEATURE_BMI2) +#define IMB_CPUFLAGS_AVX512 (IMB_CPUFLAGS_AVX2 | IMB_FEATURE_AVX512_SKX) +#define IMB_CPUFLAGS_AVX512_T2 (IMB_CPUFLAGS_AVX512 | IMB_FEATURE_VAES | \ + IMB_FEATURE_VPCLMULQDQ | IMB_FEATURE_GFNI | \ + IMB_FEATURE_AVX512_IFMA | IMB_FEATURE_SHANI) +#define IMB_CPUFLAGS_AVX2_T2 (IMB_CPUFLAGS_AVX2 | IMB_FEATURE_SHANI | \ + IMB_FEATURE_VAES | IMB_FEATURE_VPCLMULQDQ | \ + IMB_FEATURE_GFNI) +#define IMB_CPUFLAGS_AVX_T2 (IMB_CPUFLAGS_AVX | IMB_FEATURE_SHANI | \ + IMB_FEATURE_GFNI) #define IMB_FEATURE_AARCH64 (1ULL << 32) #define IMB_FEATURE_ASIMD (1ULL << 33) @@ -996,7 +1067,8 @@ typedef uint32_t (*crc32_fn_t)(const void *, const uint64_t); /* TOP LEVEL (IMB_MGR) Data structure fields */ -#define IMB_MAX_JOBS 128 +#define IMB_MAX_BURST_SIZE 128 +#define IMB_MAX_JOBS (IMB_MAX_BURST_SIZE * 2) typedef struct IMB_MGR { @@ -1135,6 +1207,15 @@ typedef struct IMB_MGR { chacha_poly_enc_dec_update_t chacha20_poly1305_dec_update; chacha_poly_finalize_t chacha20_poly1305_finalize; + burst_fn_t get_next_burst; + burst_fn_t submit_burst; + burst_fn_t submit_burst_nocheck; + burst_fn_t flush_burst; + submit_cipher_burst_t submit_cipher_burst; + submit_cipher_burst_t submit_cipher_burst_nocheck; + submit_hash_burst_t submit_hash_burst; + submit_hash_burst_t submit_hash_burst_nocheck; + /* in-order scheduler fields */ int earliest_job; /**< byte offset, -1 if none */ int next_job; /**< byte offset */ @@ -1173,6 +1254,12 @@ typedef struct IMB_MGR { void *aes256_cmac_ooo; void *snow3g_uea2_ooo; void *snow3g_uia2_ooo; + void *sha_1_ooo; + void *sha_224_ooo; + void *sha_256_ooo; + void *sha_384_ooo; + void *sha_512_ooo; + void *end_ooo; /* add new out-of-order managers above this line */ } IMB_MGR; /** @@ -1235,8 +1322,10 @@ IMB_DLL_EXPORT const char *imb_get_strerror(int errnum); * IMB_FLAG_SHANI_OFF - disable use (and detection) of SHA extensions, * currently SHANI is only available for SSE * IMB_FLAG_AESNI_OFF - disable use (and detection) of AES extensions. + * IMB_FLAG_GFNI_OFF - disable use (and detection) of + * Galois Field extensions. 
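+ *
+ * Minimal allocation sketch (illustrative only; error handling omitted,
+ * and it is assumed that the self-test result is reported through the
+ * manager's features field):
+ * @code
+ * IMB_MGR *mgr = alloc_mb_mgr(0);
+ * IMB_ARCH arch;
+ *
+ * init_mb_mgr_auto(mgr, &arch);
+ * if ((mgr->features & IMB_FEATURE_SELF_TEST_PASS) == 0) {
+ *         // startup self-test failed or is not available
+ * }
+ * @endcode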
* - * @return Pointer to allocated memory for MB_MGR structure + * @return Pointer to allocated memory for IMB_MGR structure * @retval NULL on allocation error */ IMB_DLL_EXPORT IMB_MGR *alloc_mb_mgr(uint64_t flags); @@ -1271,6 +1360,8 @@ IMB_DLL_EXPORT size_t imb_get_mb_mgr_size(void); * IMB_FLAG_SHANI_OFF - disable use (and detection) of SHA extensions, * currently SHANI is only available for SSE * IMB_FLAG_AESNI_OFF - disable use (and detection) of AES extensions. + * IMB_FLAG_GFNI_OFF - disable use (and detection) + * of Galois Field extensions. * * @param [in] reset_mgr if 0, IMB_MGR structure is not cleared, else it is. * @@ -1287,36 +1378,159 @@ IMB_DLL_EXPORT IMB_MGR *imb_set_pointers_mb_mgr(void *ptr, const uint64_t flags, */ IMB_DLL_EXPORT uint64_t imb_get_feature_flags(void); +/** + * @brief Initialize Multi-Buffer Manager structure. + * + * Must be called before calling JOB/BURST API. + * + * @param [in,out] state Pointer to IMB_MGR structure + * For binary compatibility between library versions, it + * is recommended to allocate the IMB_MGR structure using + * the alloc_mb_mgr() API + */ IMB_DLL_EXPORT void init_mb_mgr_avx(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *submit_job_avx(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *flush_job_avx(IMB_MGR *state); -IMB_DLL_EXPORT uint32_t queue_size_avx(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *get_next_job_avx(IMB_MGR *state); - +/** + * @copydoc init_mb_mgr_avx + */ IMB_DLL_EXPORT void init_mb_mgr_avx2(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *submit_job_avx2(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx2(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *flush_job_avx2(IMB_MGR *state); -IMB_DLL_EXPORT uint32_t queue_size_avx2(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx2(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *get_next_job_avx2(IMB_MGR *state); - +/** + * @copydoc init_mb_mgr_avx + */ IMB_DLL_EXPORT void init_mb_mgr_avx512(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *submit_job_avx512(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx512(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *flush_job_avx512(IMB_MGR *state); -IMB_DLL_EXPORT uint32_t queue_size_avx512(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx512(IMB_MGR *state); -IMB_DLL_EXPORT IMB_JOB *get_next_job_avx512(IMB_MGR *state); - +/** + * @copydoc init_mb_mgr_avx + */ IMB_DLL_EXPORT void init_mb_mgr_sse(IMB_MGR *state); + + +/** + * @brief Submit job for processing after validating. + * + * @param [in,out] state Pointer to initialized IMB_MGR structure + * + * @return Pointer to completed IMB_JOB or NULL if no job completed + * If NULL, imb_get_errno() can be used to check for potential + * error conditions + */ +IMB_DLL_EXPORT IMB_JOB *submit_job_avx(IMB_MGR *state); +/** + * @copydoc submit_job_avx + */ +IMB_DLL_EXPORT IMB_JOB *submit_job_avx2(IMB_MGR *state); +/** + * @copydoc submit_job_avx + */ +IMB_DLL_EXPORT IMB_JOB *submit_job_avx512(IMB_MGR *state); +/** + * @copydoc submit_job_avx + */ IMB_DLL_EXPORT IMB_JOB *submit_job_sse(IMB_MGR *state); + +/** + * @brief Submit job for processing without validating. 
+ * + * This is more performant but less secure than submit_job_xxx() + * + * @param [in,out] state Pointer to initialized IMB_MGR structure + * + * @return Pointer to completed IMB_JOB or NULL if no job completed + */ +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx(IMB_MGR *state); +/** + * @copydoc submit_job_nocheck_avx + */ +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx2(IMB_MGR *state); +/** + * @copydoc submit_job_nocheck_avx + */ +IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_avx512(IMB_MGR *state); +/** + * @copydoc submit_job_nocheck_avx + */ IMB_DLL_EXPORT IMB_JOB *submit_job_nocheck_sse(IMB_MGR *state); + +/** + * @brief Force processing until next job in queue is completed. + * + * @param [in,out] state Pointer to initialized IMB_MGR structure + * + * @return Pointer to completed IMB_JOB or NULL if no more jobs to process + */ +IMB_DLL_EXPORT IMB_JOB *flush_job_avx(IMB_MGR *state); +/** + * @copydoc flush_job_avx + */ +IMB_DLL_EXPORT IMB_JOB *flush_job_avx2(IMB_MGR *state); +/** + * @copydoc flush_job_avx + */ +IMB_DLL_EXPORT IMB_JOB *flush_job_avx512(IMB_MGR *state); +/** + * @copydoc flush_job_avx + */ IMB_DLL_EXPORT IMB_JOB *flush_job_sse(IMB_MGR *state); + +/** + * @brief Get number of jobs queued to be processed. + * + * @param [in,out] state Pointer to initialized IMB_MGR structure + * + * @return Number of jobs in the queue + */ +IMB_DLL_EXPORT uint32_t queue_size_avx(IMB_MGR *state); +/** + * @copydoc queue_size_avx + */ +IMB_DLL_EXPORT uint32_t queue_size_avx2(IMB_MGR *state); +/** + * @copydoc queue_size_avx + */ +IMB_DLL_EXPORT uint32_t queue_size_avx512(IMB_MGR *state); +/** + * @copydoc queue_size_avx + */ IMB_DLL_EXPORT uint32_t queue_size_sse(IMB_MGR *state); + +/** + * @brief Get next completed job. + * + * @param [in,out] state Pointer to initialized IMB_MGR structure + * + * @return Pointer to completed IMB_JOB or NULL if next job not complete + */ +IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx(IMB_MGR *state); +/** + * @copydoc get_completed_job_avx + */ +IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx2(IMB_MGR *state); +/** + * @copydoc get_completed_job_avx + */ +IMB_DLL_EXPORT IMB_JOB *get_completed_job_avx512(IMB_MGR *state); +/** + * @copydoc get_completed_job_avx + */ IMB_DLL_EXPORT IMB_JOB *get_completed_job_sse(IMB_MGR *state); + +/** + * @brief Get next available job. + * + * @param [in,out] state Pointer to initialized IMB_MGR structure + * + * @return Pointer to next free IMB_JOB in the queue + */ +IMB_DLL_EXPORT IMB_JOB *get_next_job_avx(IMB_MGR *state); +/** + * @copydoc get_next_job_avx + */ +IMB_DLL_EXPORT IMB_JOB *get_next_job_avx2(IMB_MGR *state); +/** + * @copydoc get_next_job_avx + */ +IMB_DLL_EXPORT IMB_JOB *get_next_job_avx512(IMB_MGR *state); +/** + * @copydoc get_next_job_avx + */ IMB_DLL_EXPORT IMB_JOB *get_next_job_sse(IMB_MGR *state); IMB_DLL_EXPORT void init_mb_mgr_aarch64(IMB_MGR *state); @@ -1337,7 +1551,7 @@ IMB_DLL_EXPORT IMB_JOB *get_next_job_aarch64(IMB_MGR *state); */ IMB_DLL_EXPORT void init_mb_mgr_auto(IMB_MGR *state, IMB_ARCH *arch); -/** +/* * Wrapper macros to call arch API's set up * at init phase of multi-buffer manager. * @@ -1359,27 +1573,246 @@ IMB_DLL_EXPORT void init_mb_mgr_auto(IMB_MGR *state, IMB_ARCH *arch); * it can simplify application implementation. * The test app provides example of using the indirect interface. */ -#define IMB_GET_NEXT_JOB(_mgr) ((_mgr)->get_next_job((_mgr))) -#define IMB_SUBMIT_JOB(_mgr) ((_mgr)->submit_job((_mgr))) + +/** + * @brief Get next available job. 
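+ *
+ * Typical use together with IMB_SUBMIT_JOB(), IMB_GET_COMPLETED_JOB() and
+ * IMB_FLUSH_JOB() documented below (illustrative sketch; job field setup
+ * and error checks omitted):
+ * @code
+ * IMB_JOB *job = IMB_GET_NEXT_JOB(mgr);
+ *
+ * // ... fill the cipher/hash fields of *job here ...
+ * job = IMB_SUBMIT_JOB(mgr);
+ * while (job != NULL) {
+ *         // consume the completed job
+ *         job = IMB_GET_COMPLETED_JOB(mgr);
+ * }
+ * // at the end of a batch, drain whatever is still queued:
+ * while ((job = IMB_FLUSH_JOB(mgr)) != NULL)
+ *         ;  // consume the flushed job
+ * @endcode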
+ * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * + * @return Pointer to next free IMB_JOB in the queue + */ +#define IMB_GET_NEXT_JOB(_mgr) ((_mgr)->get_next_job((_mgr))) + +/** + * @brief Submit job for processing after validating. + * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * + * @return Pointer to completed IMB_JOB or NULL if no job completed + * If NULL, imb_get_errno() can be used to check for potential + * error conditions + */ +#define IMB_SUBMIT_JOB(_mgr) ((_mgr)->submit_job((_mgr))) + +/** + * @brief Submit job for processing without validating. + * + * This is more performant but less secure than submit_job_xxx() + * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * + * @return Pointer to completed IMB_JOB or NULL if no job completed + */ #define IMB_SUBMIT_JOB_NOCHECK(_mgr) ((_mgr)->submit_job_nocheck((_mgr))) + +/** + * @brief Get next completed job. + * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * + * @return Pointer to completed IMB_JOB or NULL if next job not complete + */ #define IMB_GET_COMPLETED_JOB(_mgr) ((_mgr)->get_completed_job((_mgr))) + +/** + * @brief Force processing until next job in queue is completed. + * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * + * @return Pointer to completed IMB_JOB or NULL if no more jobs to process + */ #define IMB_FLUSH_JOB(_mgr) ((_mgr)->flush_job((_mgr))) + +/** + * @brief Get number of jobs queued to be processed. + * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * + * @return Number of jobs in the queue + */ #define IMB_QUEUE_SIZE(_mgr) ((_mgr)->queue_size((_mgr))) +/** + * @brief Get next available burst + * (list of pointers to available IMB_JOB structures). + * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * @param [in] _n_jobs Requested number of burst jobs + * @param [out] _jobs List of pointers to returned jobs + * + * @return Number of returned jobs. + * May be less than number of requested jobs if not enough space in + * queue. IMB_FLUSH_BURST() can be used to free up space. + */ +#define IMB_GET_NEXT_BURST(_mgr, _n_jobs, _jobs) \ + ((_mgr)->get_next_burst((_mgr), (_n_jobs), (_jobs))) + +/** + * Submit multiple jobs to be processed after validating. + * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * @param [in] _n_jobs Number of jobs to submit for processing + * @param [in,out] _jobs In: List of pointers to jobs for submission + * Out: List of pointers to completed jobs + * + * @return Number of completed jobs or zero on error. + * If zero, imb_get_errno() can be used to check for potential + * error conditions and _jobs[0] contains pointer to invalid job + */ +#define IMB_SUBMIT_BURST(_mgr, _n_jobs, _jobs) \ + ((_mgr)->submit_burst((_mgr), (_n_jobs), (_jobs))) + +/** + * Submit multiple jobs to be processed without validating. + * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * @param [in] _n_jobs Number of jobs to submit for processing + * @param [in,out] _jobs In: List of pointers to jobs for submission + * Out: List of pointers to completed jobs + * + * @return Number of completed jobs or zero on error + */ +#define IMB_SUBMIT_BURST_NOCHECK(_mgr, _n_jobs, _jobs) \ + ((_mgr)->submit_burst_nocheck((_mgr), (_n_jobs), (_jobs))) + +/** + * @brief Force up to \a max_jobs outstanding jobs to completion. 
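+ *
+ * Typical burst flow together with IMB_GET_NEXT_BURST() and
+ * IMB_SUBMIT_BURST() above (illustrative sketch; the request size of 32
+ * is arbitrary and error paths are omitted):
+ * @code
+ * IMB_JOB *jobs[IMB_MAX_BURST_SIZE];
+ * uint32_t n_jobs = IMB_GET_NEXT_BURST(mgr, 32, jobs);
+ *
+ * // ... fill the n_jobs returned jobs here ...
+ * uint32_t n_done = IMB_SUBMIT_BURST(mgr, n_jobs, jobs);
+ *
+ * // later, force any jobs still in flight to complete
+ * n_done = IMB_FLUSH_BURST(mgr, IMB_MAX_BURST_SIZE, jobs);
+ * @endcode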
+ * + * @param [in,out] _mgr Pointer to initialized IMB_MGR structure + * @param [in] _max_jobs Maximum number of jobs to flush + * @param [out] _jobs List of pointers to completed jobs + * + * @return Number of completed jobs + */ +#define IMB_FLUSH_BURST(_mgr, _max_jobs, _jobs) \ + ((_mgr)->flush_burst((_mgr), (_max_jobs), (_jobs))) + +/** + * Submit multiple cipher jobs to be processed synchronously after validating. + * + * @param [in] _mgr Pointer to initialized IMB_MGR structure + * @param [in,out] _jobs Pointer to array of IMB_JOB structures + * @param [in] _n_jobs Number of jobs to process + * @param [in] _cipher Cipher algorithm of type #IMB_CIPHER_MODE + * @param [in] _dir Cipher direction of type #IMB_CIPHER_DIRECTION + * @param [in] _key_size Key size in bytes of type #IMB_KEY_SIZE_BYTES + * + * @return Number of completed jobs + */ +#define IMB_SUBMIT_CIPHER_BURST(_mgr, _jobs, _n_jobs, _cipher, \ + _dir, _key_size) \ + ((_mgr)->submit_cipher_burst((_mgr), (_jobs), (_n_jobs), \ + (_cipher), (_dir), (_key_size))) +/** + * Submit multiple cipher jobs to be processed synchronously without validating. + * + * This is more performant but less secure than IMB_SUBMIT_CIPHER_BURST(). + * + * @param [in] _mgr Pointer to initialized IMB_MGR structure + * @param [in,out] _jobs Pointer to array of IMB_JOB structures + * @param [in] _n_jobs Number of jobs to process + * @param [in] _cipher Cipher algorithm of type #IMB_CIPHER_MODE + * @param [in] _dir Cipher direction of type #IMB_CIPHER_DIRECTION + * @param [in] _key_size Key size in bytes of type #IMB_KEY_SIZE_BYTES + * + * @return Number of completed jobs + */ +#define IMB_SUBMIT_CIPHER_BURST_NOCHECK(_mgr, _jobs, _n_jobs, _cipher, \ + _dir, _key_size) \ + ((_mgr)->submit_cipher_burst_nocheck((_mgr), (_jobs), (_n_jobs),\ + (_cipher), (_dir), (_key_size))) +/** + * Submit multiple hash jobs to be processed synchronously after validating. + * + * @param [in] _mgr Pointer to initialized IMB_MGR structure + * @param [in,out] _jobs Pointer to array of IMB_JOB structures + * @param [in] _n_jobs Number of jobs to process + * @param [in] _hash Hash algorithm of type #IMB_HASH_ALG + * + * @return Number of completed jobs + */ +#define IMB_SUBMIT_HASH_BURST(_mgr, _jobs, _n_jobs, _hash) \ + ((_mgr)->submit_hash_burst((_mgr), (_jobs), (_n_jobs), (_hash))) + +/** + * Submit multiple hash jobs to be processed synchronously without validating. + * + * This is more performant but less secure than IMB_SUBMIT_HASH_BURST(). + * + * @param [in] _mgr Pointer to initialized IMB_MGR structure + * @param [in,out] _jobs Pointer to array of IMB_JOB structures + * @param [in] _n_jobs Number of jobs to process + * @param [in] _hash Hash algorithm of type #IMB_HASH_ALG + * + * @return Number of completed jobs + */ +#define IMB_SUBMIT_HASH_BURST_NOCHECK(_mgr, _jobs, _n_jobs, _hash) \ + ((_mgr)->submit_hash_burst_nocheck((_mgr), (_jobs), (_n_jobs), (_hash))) + /* Key expansion and generation API's */ + +/** + * Generate encryption/decryption AES-128 expansion keys. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _key AES-128 key + * @param[out] _enc_exp_key AES-128 encryption expansion key + * @param[out] _dec_exp_key AES-128 decryption expansion key + */ #define IMB_AES_KEYEXP_128(_mgr, _key, _enc_exp_key, _dec_exp_key) \ ((_mgr)->keyexp_128((_key), (_enc_exp_key), (_dec_exp_key))) +/** + * Generate encryption/decryption AES-192 expansion keys. 
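+ *
+ * Usage sketch (illustrative; the 128- and 256-bit macros follow the same
+ * pattern), assuming the key schedule buffers are sized for up to 15
+ * round keys of 16 bytes each:
+ * @code
+ * uint32_t enc_exp_key[15 * 4];
+ * uint32_t dec_exp_key[15 * 4];
+ *
+ * IMB_AES_KEYEXP_192(mgr, key, enc_exp_key, dec_exp_key);
+ * @endcode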
+ * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _key AES-192 key + * @param[out] _enc_exp_key AES-192 encryption expansion key + * @param[out] _dec_exp_key AES-192 decryption expansion key + */ #define IMB_AES_KEYEXP_192(_mgr, _key, _enc_exp_key, _dec_exp_key) \ ((_mgr)->keyexp_192((_key), (_enc_exp_key), (_dec_exp_key))) +/** + * Generate encryption/decryption AES-256 expansion keys. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _key AES-256 key + * @param[out] _enc_exp_key AES-256 encryption expansion key + * @param[out] _dec_exp_key AES-256 decryption expansion key + */ #define IMB_AES_KEYEXP_256(_mgr, _key, _enc_exp_key, _dec_exp_key) \ ((_mgr)->keyexp_256((_key), (_enc_exp_key), (_dec_exp_key))) +/** + * Generate AES-128-CMAC subkeys. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _exp_key Input expanded AES-128-CMAC key + * @param[out] _key1 Subkey 1 + * @param[out] _key2 Subkey 2 + */ #define IMB_AES_CMAC_SUBKEY_GEN_128(_mgr, _exp_key, _key1, _key2) \ ((_mgr)->cmac_subkey_gen_128((_exp_key), (_key1), (_key2))) +/** + * Generate AES-256-CMAC subkeys. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _exp_key Input expanded AES-256-CMAC key + * @param[out] _key1 Subkey 1 + * @param[out] _key2 Subkey 2 + */ #define IMB_AES_CMAC_SUBKEY_GEN_256(_mgr, _exp_key, _key1, _key2) \ ((_mgr)->cmac_subkey_gen_256((_exp_key), (_key1), (_key2))) +/** + * Generate AES-128-XCBC expansion keys. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _key AES-128-XCBC key + * @param[out] _exp_key k1 expansion key + * @param[out] _exp_key2 k2 expansion key + * @param[out] _exp_key3 k3 expansion key + */ #define IMB_AES_XCBC_KEYEXP(_mgr, _key, _exp_key, _exp_key2, _exp_key3) \ ((_mgr)->xcbc_keyexp((_key), (_exp_key), (_exp_key2), (_exp_key3))) @@ -1387,33 +1820,130 @@ IMB_DLL_EXPORT void init_mb_mgr_auto(IMB_MGR *state, IMB_ARCH *arch); ((_mgr)->des_key_sched((_exp_key), (_key))) /* Hash API's */ + +/** + * Authenticate 64-byte data buffer with SHA1. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src 64-byte data buffer + * @param[out] _tag Digest output (20 bytes) + */ #define IMB_SHA1_ONE_BLOCK(_mgr, _src, _tag) \ ((_mgr)->sha1_one_block((_src), (_tag))) + +/** + * Authenticate variable sized data with SHA1. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src Data buffer + * @param[in] _length Length of data in bytes for authentication. + * @param[out] _tag Digest output (20 bytes) + */ #define IMB_SHA1(_mgr, _src, _length, _tag) \ ((_mgr)->sha1((_src), (_length), (_tag))) +/** + * Authenticate 64-byte data buffer with SHA224. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src 64-byte data buffer + * @param[out] _tag Digest output (28 bytes) + */ #define IMB_SHA224_ONE_BLOCK(_mgr, _src, _tag) \ ((_mgr)->sha224_one_block((_src), (_tag))) + +/** + * Authenticate variable sized data with SHA224. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src Data buffer + * @param[in] _length Length of data in bytes for authentication. + * @param[out] _tag Digest output (28 bytes) + */ #define IMB_SHA224(_mgr, _src, _length, _tag) \ ((_mgr)->sha224((_src), (_length), (_tag))) +/** + * Authenticate 64-byte data buffer with SHA256. 
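+ *
+ * Usage sketch (illustrative; block and digest are caller-provided
+ * buffers):
+ * @code
+ * uint8_t block[64];   // single 64-byte input block
+ * uint8_t digest[32];  // 32-byte SHA256 digest output
+ *
+ * IMB_SHA256_ONE_BLOCK(mgr, block, digest);
+ * @endcode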
+ * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src 64-byte data buffer + * @param[out] _tag Digest output (32 bytes) + */ #define IMB_SHA256_ONE_BLOCK(_mgr, _src, _tag) \ ((_mgr)->sha256_one_block((_src), (_tag))) +/** + * Authenticate variable sized data with SHA256. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src Data buffer + * @param[in] _length Length of data in bytes for authentication. + * @param[out] _tag Digest output (32 bytes) + */ #define IMB_SHA256(_mgr, _src, _length, _tag) \ ((_mgr)->sha256((_src), (_length), (_tag))) +/** + * Authenticate 128-byte data buffer with SHA384. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src 128-byte data buffer + * @param[out] _tag Digest output (48 bytes) + */ #define IMB_SHA384_ONE_BLOCK(_mgr, _src, _tag) \ ((_mgr)->sha384_one_block((_src), (_tag))) +/** + * Authenticate variable sized data with SHA384. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src Data buffer + * @param[in] _length Length of data in bytes for authentication. + * @param[out] _tag Digest output (48 bytes) + */ #define IMB_SHA384(_mgr, _src, _length, _tag) \ ((_mgr)->sha384((_src), (_length), (_tag))) +/** + * Authenticate 128-byte data buffer with SHA512. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src 128-byte data buffer + * @param[out] _tag Digest output (64 bytes) + */ #define IMB_SHA512_ONE_BLOCK(_mgr, _src, _tag) \ ((_mgr)->sha512_one_block((_src), (_tag))) +/** + * Authenticate variable sized data with SHA512. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src Data buffer + * @param[in] _length Length of data in bytes for authentication. + * @param[out] _tag Digest output (64 bytes) + */ #define IMB_SHA512(_mgr, _src, _length, _tag) \ ((_mgr)->sha512((_src), (_length), (_tag))) +/** + * Authenticate 64-byte data buffer with MD5. + * + * @param[in] _mgr Pointer to multi-buffer structure + * @param[in] _src 64-byte data buffer + * @param[out] _tag Digest output (16 bytes) + */ #define IMB_MD5_ONE_BLOCK(_mgr, _src, _tag) \ ((_mgr)->md5_one_block((_src), (_tag))) -/* AES-CFB API */ -#define IMB_AES128_CFB_ONE(_mgr, _dst, _src, _iv, _enc_exp_key, _len) \ - ((_mgr)->aes128_cfb_one((_dst), (_src), (_iv), (_enc_exp_key), (_len))) - +/** + * @brief AES-CFB-128 Encrypt/Decrypt up to one block. + * + * Processes only one buffer at a time. + * Designed to manage partial blocks of DOCSIS 3.1 SEC BPI. + * + * @param [in] _mgr Pointer to multi-buffer structure + * @param [out] _dst Plaintext/Ciphertext output + * @param [in] _src Plaintext/Ciphertext input + * @param [in] _iv Pointer to 16 byte IV + * @param [in] _exp_key Pointer to expanded AES keys + * @param [in] _len Length of data in bytes + */ +#define IMB_AES128_CFB_ONE(_mgr, _dst, _src, _iv, _exp_key, _len) \ + ((_mgr)->aes128_cfb_one((_dst), (_src), (_iv), (_exp_key), (_len))) + /* AES-GCM API's */ #define IMB_AES128_GCM_ENC(_mgr, _exp_key, _ctx, _dst, _src, _len, _iv, _aad, \ _aadl, _tag, _tagl) \ @@ -1777,8 +2307,8 @@ IMB_DLL_EXPORT void init_mb_mgr_auto(IMB_MGR *state, IMB_ARCH *arch); * KASUMI F9 key schedule init function.
* * @param[in] _mgr Pointer to multi-buffer structure - * @param[in] _exp_key Integrity key (expected in LE format) - * @param[out] _ctx Key schedule context to be initialised + * @param[in] _key Integrity key (expected in LE format) + * @param[out] _exp_key Key schedule context to be initialised * @return 0 on success, -1 on failure * ******************************************************************************/ @@ -2133,12 +2663,12 @@ IMB_DLL_EXPORT void init_mb_mgr_auto(IMB_MGR *state, IMB_ARCH *arch); /* Auxiliary functions */ /** - * @brief DES key schedule set up + * @brief DES key schedule set up. * * \a ks buffer needs to accommodate \a DES_KEY_SCHED_SIZE (128) bytes of data. * - * @param ks destination buffer to accommodate DES key schedule - * @param key a pointer to an 8 byte DES key + * @param[out] ks Destination buffer to accommodate DES key schedule + * @param[in] key Pointer to an 8 byte DES key * * @return Operation status * @retval 0 success @@ -2147,181 +2677,516 @@ IMB_DLL_EXPORT void init_mb_mgr_auto(IMB_MGR *state, IMB_ARCH *arch); IMB_DLL_EXPORT int des_key_schedule(uint64_t *ks, const void *key); -/* SSE */ +/** + * Authenticate variable sized data with SHA1. + * + * @param[in] data Data buffer + * @param[in] length Length of data in bytes for authentication. + * @param[out] digest Digest output (20 bytes) + */ IMB_DLL_EXPORT void sha1_sse(const void *data, const uint64_t length, void *digest); + +/** + * @copydoc sha1_sse + */ +IMB_DLL_EXPORT void sha1_avx(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha1_sse + */ +IMB_DLL_EXPORT void sha1_avx2(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha1_sse + */ +IMB_DLL_EXPORT void sha1_avx512(const void *data, const uint64_t length, + void *digest); + +/** + * Authenticate 64-byte data buffer with SHA1. + * + * @param[in] data 64-byte data buffer + * @param[out] digest Digest output (20 bytes) + */ IMB_DLL_EXPORT void sha1_one_block_sse(const void *data, void *digest); +/** + * @copydoc sha1_one_block_sse + */ +IMB_DLL_EXPORT void sha1_one_block_avx(const void *data, void *digest); +/** + * @copydoc sha1_one_block_sse + */ +IMB_DLL_EXPORT void sha1_one_block_avx2(const void *data, void *digest); +/** + * @copydoc sha1_one_block_sse + */ +IMB_DLL_EXPORT void sha1_one_block_avx512(const void *data, void *digest); + +/** + * Authenticate variable sized data with SHA224. + * + * @param[in] data Data buffer + * @param[in] length Length of data in bytes for authentication. + * @param[out] digest Digest output (28 bytes) + */ IMB_DLL_EXPORT void sha224_sse(const void *data, const uint64_t length, void *digest); +/** + * @copydoc sha224_sse + */ +IMB_DLL_EXPORT void sha224_avx(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha224_sse + */ +IMB_DLL_EXPORT void sha224_avx2(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha224_sse + */ +IMB_DLL_EXPORT void sha224_avx512(const void *data, const uint64_t length, + void *digest); + +/** + * Authenticate 64-byte data buffer with SHA224. 
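+ *
+ * Direct single-buffer usage sketch (illustrative; the other architecture
+ * variants below take the same arguments):
+ * @code
+ * uint8_t block[64];   // single 64-byte input block
+ * uint8_t digest[28];  // 28-byte SHA224 digest output
+ *
+ * sha224_one_block_sse(block, digest);
+ * @endcode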
+ * + * @param[in] data 64-byte data buffer + * @param[out] digest Digest output (28 bytes) + */ IMB_DLL_EXPORT void sha224_one_block_sse(const void *data, void *digest); +/** + * @copydoc sha224_one_block_sse + */ +IMB_DLL_EXPORT void sha224_one_block_avx(const void *data, void *digest); +/** + * @copydoc sha224_one_block_sse + */ +IMB_DLL_EXPORT void sha224_one_block_avx2(const void *data, void *digest); +/** + * @copydoc sha224_one_block_sse + */ +IMB_DLL_EXPORT void sha224_one_block_avx512(const void *data, void *digest); + +/** + * Authenticate variable sized data with SHA256. + * + * @param[in] data Data buffer + * @param[in] length Length of data in bytes for authentication. + * @param[out] digest Digest output (32 bytes) + */ IMB_DLL_EXPORT void sha256_sse(const void *data, const uint64_t length, void *digest); +/** + * @copydoc sha256_sse + */ +IMB_DLL_EXPORT void sha256_avx(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha256_sse + */ +IMB_DLL_EXPORT void sha256_avx2(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha256_sse + */ +IMB_DLL_EXPORT void sha256_avx512(const void *data, const uint64_t length, + void *digest); + +/** + * Authenticate 64-byte data buffer with SHA256. + * + * @param[in] data 64-byte data buffer + * @param[out] digest Digest output (32 bytes) + */ IMB_DLL_EXPORT void sha256_one_block_sse(const void *data, void *digest); +/** + * @copydoc sha256_one_block_sse + */ +IMB_DLL_EXPORT void sha256_one_block_avx(const void *data, void *digest); +/** + * @copydoc sha256_one_block_sse + */ +IMB_DLL_EXPORT void sha256_one_block_avx2(const void *data, void *digest); +/** + * @copydoc sha256_one_block_sse + */ +IMB_DLL_EXPORT void sha256_one_block_avx512(const void *data, void *digest); + +/** + * Authenticate variable sized data with SHA384. + * + * @param[in] data Data buffer + * @param[in] length Length of data in bytes for authentication. + * @param[out] digest Digest output (48 bytes) + */ IMB_DLL_EXPORT void sha384_sse(const void *data, const uint64_t length, void *digest); +/** + * @copydoc sha384_sse + */ +IMB_DLL_EXPORT void sha384_avx(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha384_sse + */ +IMB_DLL_EXPORT void sha384_avx2(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha384_sse + */ +IMB_DLL_EXPORT void sha384_avx512(const void *data, const uint64_t length, + void *digest); + +/** + * Authenticate 128-byte data buffer with SHA384. + * + * @param[in] data 128-byte data buffer + * @param[out] digest Digest output (48 bytes) + */ IMB_DLL_EXPORT void sha384_one_block_sse(const void *data, void *digest); +/** + * @copydoc sha384_one_block_sse + */ +IMB_DLL_EXPORT void sha384_one_block_avx(const void *data, void *digest); +/** + * @copydoc sha384_one_block_sse + */ +IMB_DLL_EXPORT void sha384_one_block_avx2(const void *data, void *digest); +/** + * @copydoc sha384_one_block_sse + */ +IMB_DLL_EXPORT void sha384_one_block_avx512(const void *data, void *digest); + +/** + * Authenticate variable sized data with SHA512. + * + * @param[in] data Data buffer + * @param[in] length Length of data in bytes for authentication.
+ * @param[out] digest Digest output (64 bytes) + */ IMB_DLL_EXPORT void sha512_sse(const void *data, const uint64_t length, void *digest); +/** + * @copydoc sha512_sse + */ +IMB_DLL_EXPORT void sha512_avx(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha512_sse + */ +IMB_DLL_EXPORT void sha512_avx2(const void *data, const uint64_t length, + void *digest); +/** + * @copydoc sha512_sse + */ +IMB_DLL_EXPORT void sha512_avx512(const void *data, const uint64_t length, + void *digest); + +/** + * Authenticate 128-byte data buffer with SHA512. + * + * @param[in] data 128-byte data buffer + * @param[out] digest Digest output (64 bytes) + */ IMB_DLL_EXPORT void sha512_one_block_sse(const void *data, void *digest); +/** + * @copydoc sha512_one_block_sse + */ +IMB_DLL_EXPORT void sha512_one_block_avx(const void *data, void *digest); +/** + * @copydoc sha512_one_block_sse + */ +IMB_DLL_EXPORT void sha512_one_block_avx2(const void *data, void *digest); +/** + * @copydoc sha512_one_block_sse + */ +IMB_DLL_EXPORT void sha512_one_block_avx512(const void *data, void *digest); + +/** + * Authenticate 64-byte data buffer with MD5. + * + * @param[in] data 64-byte data buffer + * @param[out] digest Digest output (16 bytes) + */ IMB_DLL_EXPORT void md5_one_block_sse(const void *data, void *digest); +/** + * @copydoc md5_one_block_sse + */ +IMB_DLL_EXPORT void md5_one_block_avx(const void *data, void *digest); +/** + * @copydoc md5_one_block_sse + */ +IMB_DLL_EXPORT void md5_one_block_avx2(const void *data, void *digest); +/** + * @copydoc md5_one_block_sse + */ +IMB_DLL_EXPORT void md5_one_block_avx512(const void *data, void *digest); + + +/** + * Generate encryption/decryption AES-128 expansion keys. + * + * @param[in] key AES-128 key + * @param[out] enc_exp_keys AES-128 encryption expansion key + * @param[out] dec_exp_keys AES-128 decryption expansion key + */ IMB_DLL_EXPORT void aes_keyexp_128_sse(const void *key, void *enc_exp_keys, void *dec_exp_keys); +/** + * @copydoc aes_keyexp_128_sse + */ +IMB_DLL_EXPORT void aes_keyexp_128_avx(const void *key, void *enc_exp_keys, + void *dec_exp_keys); +/** + * @copydoc aes_keyexp_128_sse + */ +IMB_DLL_EXPORT void aes_keyexp_128_avx2(const void *key, void *enc_exp_keys, + void *dec_exp_keys); +/** + * @copydoc aes_keyexp_128_sse + */ +IMB_DLL_EXPORT void aes_keyexp_128_avx512(const void *key, void *enc_exp_keys, + void *dec_exp_keys); + +/** + * Generate encryption/decryption AES-192 expansion keys. + * + * @param[in] key AES-192 key + * @param[out] enc_exp_keys AES-192 encryption expansion key + * @param[out] dec_exp_keys AES-192 decryption expansion key + */ IMB_DLL_EXPORT void aes_keyexp_192_sse(const void *key, void *enc_exp_keys, void *dec_exp_keys); +/** + * @copydoc aes_keyexp_192_sse + */ +IMB_DLL_EXPORT void aes_keyexp_192_avx(const void *key, void *enc_exp_keys, + void *dec_exp_keys); +/** + * @copydoc aes_keyexp_192_sse + */ +IMB_DLL_EXPORT void aes_keyexp_192_avx2(const void *key, void *enc_exp_keys, + void *dec_exp_keys); +/** + * @copydoc aes_keyexp_192_sse + */ +IMB_DLL_EXPORT void aes_keyexp_192_avx512(const void *key, void *enc_exp_keys, + void *dec_exp_keys); + +/** + * Generate encryption/decryption AES-256 expansion keys.
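+ *
+ * Usage sketch (illustrative; the AVX/AVX2/AVX512 variants below take the
+ * same arguments):
+ * @code
+ * uint32_t enc_exp_keys[15 * 4];
+ * uint32_t dec_exp_keys[15 * 4];
+ *
+ * aes_keyexp_256_sse(key, enc_exp_keys, dec_exp_keys);
+ * @endcode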
+ * + * @param[in] key AES-256 key + * @param[out] enc_exp_keys AES-256 encryption expansion key + * @param[out] dec_exp_keys AES-256 decryption expansion key + */ IMB_DLL_EXPORT void aes_keyexp_256_sse(const void *key, void *enc_exp_keys, void *dec_exp_keys); -IMB_DLL_EXPORT void aes_xcbc_expand_key_sse(const void *key, void *k1_exp, - void *k2, void *k3); +/** + * @copydoc aes_keyexp_256_sse + */ +IMB_DLL_EXPORT void aes_keyexp_256_avx(const void *key, void *enc_exp_keys, + void *dec_exp_keys); +/** + * @copydoc aes_keyexp_256_sse + */ +IMB_DLL_EXPORT void aes_keyexp_256_avx2(const void *key, void *enc_exp_keys, + void *dec_exp_keys); +/** + * @copydoc aes_keyexp_256_sse + */ +IMB_DLL_EXPORT void aes_keyexp_256_avx512(const void *key, void *enc_exp_keys, + void *dec_exp_keys); + +/** + * Generate encryption AES-128 expansion keys. + * + * @param[in] key AES-128 key + * @param[out] enc_exp_keys AES-128 encryption expansion key + */ IMB_DLL_EXPORT void aes_keyexp_128_enc_sse(const void *key, void *enc_exp_keys); +/** + * @copydoc aes_keyexp_128_enc_sse + */ +IMB_DLL_EXPORT void aes_keyexp_128_enc_avx(const void *key, + void *enc_exp_keys); +/** + * @copydoc aes_keyexp_128_enc_sse + */ +IMB_DLL_EXPORT void aes_keyexp_128_enc_avx2(const void *key, + void *enc_exp_keys); +/** + * @copydoc aes_keyexp_128_enc_sse + */ +IMB_DLL_EXPORT void aes_keyexp_128_enc_avx512(const void *key, + void *enc_exp_keys); + +/** + * Generate encryption AES-192 expansion keys. + * + * @param[in] key AES-192 key + * @param[out] enc_exp_keys AES-192 encryption expansion key + */ IMB_DLL_EXPORT void aes_keyexp_192_enc_sse(const void *key, void *enc_exp_keys); +/** + * @copydoc aes_keyexp_192_enc_sse + */ +IMB_DLL_EXPORT void aes_keyexp_192_enc_avx(const void *key, + void *enc_exp_keys); +/** + * @copydoc aes_keyexp_192_enc_sse + */ +IMB_DLL_EXPORT void aes_keyexp_192_enc_avx2(const void *key, + void *enc_exp_keys); +/** + * @copydoc aes_keyexp_192_enc_sse + */ +IMB_DLL_EXPORT void aes_keyexp_192_enc_avx512(const void *key, + void *enc_exp_keys); + +/** + * Generate encryption AES-256 expansion keys. 
+ * + * @param[in] key AES-256 key + * @param[out] enc_exp_keys AES-256 encryption expansion key + */ IMB_DLL_EXPORT void aes_keyexp_256_enc_sse(const void *key, void *enc_exp_keys); -IMB_DLL_EXPORT void aes_cmac_subkey_gen_sse(const void *key_exp, void *key1, - void *key2); -IMB_DLL_EXPORT void aes_cfb_128_one_sse(void *out, const void *in, - const void *iv, const void *keys, - uint64_t len); -/* AVX */ -IMB_DLL_EXPORT void sha1_avx(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha1_one_block_avx(const void *data, void *digest); -IMB_DLL_EXPORT void sha224_avx(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha224_one_block_avx(const void *data, void *digest); -IMB_DLL_EXPORT void sha256_avx(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha256_one_block_avx(const void *data, void *digest); -IMB_DLL_EXPORT void sha384_avx(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha384_one_block_avx(const void *data, void *digest); -IMB_DLL_EXPORT void sha512_avx(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha512_one_block_avx(const void *data, void *digest); -IMB_DLL_EXPORT void md5_one_block_avx(const void *data, void *digest); -IMB_DLL_EXPORT void aes_keyexp_128_avx(const void *key, void *enc_exp_keys, - void *dec_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_192_avx(const void *key, void *enc_exp_keys, - void *dec_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_256_avx(const void *key, void *enc_exp_keys, - void *dec_exp_keys); -IMB_DLL_EXPORT void aes_xcbc_expand_key_avx(const void *key, void *k1_exp, - void *k2, void *k3); -IMB_DLL_EXPORT void aes_keyexp_128_enc_avx(const void *key, - void *enc_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_192_enc_avx(const void *key, - void *enc_exp_keys); +/** + * @copydoc aes_keyexp_256_enc_sse + */ IMB_DLL_EXPORT void aes_keyexp_256_enc_avx(const void *key, void *enc_exp_keys); +/** + * @copydoc aes_keyexp_256_enc_sse + */ +IMB_DLL_EXPORT void aes_keyexp_256_enc_avx2(const void *key, + void *enc_exp_keys); +/** + * @copydoc aes_keyexp_256_enc_sse + */ +IMB_DLL_EXPORT void aes_keyexp_256_enc_avx512(const void *key, + void *enc_exp_keys); + +/** + * Generate AES-128-XCBC expansion keys. + * + * @param[in] key Input AES-128-XCBC key + * @param[out] k1_exp k1 expansion key + * @param[out] k2 k2 key + * @param[out] k3 k3 key + */ +IMB_DLL_EXPORT void aes_xcbc_expand_key_sse(const void *key, void *k1_exp, + void *k2, void *k3); +/** + * @copydoc aes_xcbc_expand_key_sse + */ +IMB_DLL_EXPORT void aes_xcbc_expand_key_avx(const void *key, void *k1_exp, + void *k2, void *k3); +/** + * @copydoc aes_xcbc_expand_key_sse + */ +IMB_DLL_EXPORT void aes_xcbc_expand_key_avx2(const void *key, void *k1_exp, + void *k2, void *k3); +/** + * @copydoc aes_xcbc_expand_key_sse + */ +IMB_DLL_EXPORT void aes_xcbc_expand_key_avx512(const void *key, void *k1_exp, + void *k2, void *k3); + +/** + * Generate AES-128-CMAC subkeys. 
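+ *
+ * Illustrative sketch (assumes the expanded key comes from the AES-128
+ * encryption key expansion declared above):
+ * @code
+ * uint32_t key_exp[11 * 4];
+ * uint8_t k1[16], k2[16];
+ *
+ * aes_keyexp_128_enc_sse(key, key_exp);
+ * aes_cmac_subkey_gen_sse(key_exp, k1, k2);
+ * @endcode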
+ * + * @param[in] key_exp Input expanded AES-128-CMAC key + * @param[out] key1 Subkey 1 + * @param[out] key2 Subkey 2 + */ +IMB_DLL_EXPORT void aes_cmac_subkey_gen_sse(const void *key_exp, void *key1, + void *key2); +/** + * @copydoc aes_cmac_subkey_gen_sse + */ IMB_DLL_EXPORT void aes_cmac_subkey_gen_avx(const void *key_exp, void *key1, void *key2); +/** + * @copydoc aes_cmac_subkey_gen_sse + */ +IMB_DLL_EXPORT void aes_cmac_subkey_gen_avx2(const void *key_exp, void *key1, + void *key2); +/** + * @copydoc aes_cmac_subkey_gen_sse + */ +IMB_DLL_EXPORT void aes_cmac_subkey_gen_avx512(const void *key_exp, void *key1, + void *key2); +/** + * @brief AES-CFB-128 Encrypt/Decrypt up to one block. + * + * Processes only one buffer at a time. + * Designed to manage partial blocks of DOCSIS 3.1 SEC BPI. + * + * @param [out] out Plaintext/Ciphertext output + * @param [in] in Plaintext/Ciphertext input + * @param [in] iv Pointer to 16 byte IV + * @param [in] keys Pointer to expanded AES keys + * @param [in] len Length of data in bytes + */ +IMB_DLL_EXPORT void aes_cfb_128_one_sse(void *out, const void *in, + const void *iv, const void *keys, + uint64_t len); +/** + * @copydoc aes_cfb_128_one_sse + */ IMB_DLL_EXPORT void aes_cfb_128_one_avx(void *out, const void *in, const void *iv, const void *keys, uint64_t len); -/* AVX2 */ -IMB_DLL_EXPORT void sha1_avx2(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha1_one_block_avx2(const void *data, void *digest); -IMB_DLL_EXPORT void sha224_avx2(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha224_one_block_avx2(const void *data, void *digest); -IMB_DLL_EXPORT void sha256_avx2(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha256_one_block_avx2(const void *data, void *digest); -IMB_DLL_EXPORT void sha384_avx2(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha384_one_block_avx2(const void *data, void *digest); -IMB_DLL_EXPORT void sha512_avx2(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha512_one_block_avx2(const void *data, void *digest); -IMB_DLL_EXPORT void md5_one_block_avx2(const void *data, void *digest); -IMB_DLL_EXPORT void aes_keyexp_128_avx2(const void *key, void *enc_exp_keys, - void *dec_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_192_avx2(const void *key, void *enc_exp_keys, - void *dec_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_256_avx2(const void *key, void *enc_exp_keys, - void *dec_exp_keys); -IMB_DLL_EXPORT void aes_xcbc_expand_key_avx2(const void *key, void *k1_exp, - void *k2, void *k3); -IMB_DLL_EXPORT void aes_keyexp_128_enc_avx2(const void *key, - void *enc_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_192_enc_avx2(const void *key, - void *enc_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_256_enc_avx2(const void *key, - void *enc_exp_keys); -IMB_DLL_EXPORT void aes_cmac_subkey_gen_avx2(const void *key_exp, void *key1, - void *key2); +/** + * @copydoc aes_cfb_128_one_sse + */ IMB_DLL_EXPORT void aes_cfb_128_one_avx2(void *out, const void *in, const void *iv, const void *keys, uint64_t len); - -/* AVX512 */ -IMB_DLL_EXPORT void sha1_avx512(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha1_one_block_avx512(const void *data, void *digest); -IMB_DLL_EXPORT void sha224_avx512(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha224_one_block_avx512(const void *data, void *digest); -IMB_DLL_EXPORT void sha256_avx512(const void 
*data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha256_one_block_avx512(const void *data, void *digest); -IMB_DLL_EXPORT void sha384_avx512(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha384_one_block_avx512(const void *data, void *digest); -IMB_DLL_EXPORT void sha512_avx512(const void *data, const uint64_t length, - void *digest); -IMB_DLL_EXPORT void sha512_one_block_avx512(const void *data, void *digest); -IMB_DLL_EXPORT void md5_one_block_avx512(const void *data, void *digest); -IMB_DLL_EXPORT void aes_keyexp_128_avx512(const void *key, void *enc_exp_keys, - void *dec_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_192_avx512(const void *key, void *enc_exp_keys, - void *dec_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_256_avx512(const void *key, void *enc_exp_keys, - void *dec_exp_keys); -IMB_DLL_EXPORT void aes_xcbc_expand_key_avx512(const void *key, void *k1_exp, - void *k2, void *k3); -IMB_DLL_EXPORT void aes_keyexp_128_enc_avx512(const void *key, - void *enc_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_192_enc_avx512(const void *key, - void *enc_exp_keys); -IMB_DLL_EXPORT void aes_keyexp_256_enc_avx512(const void *key, - void *enc_exp_keys); -IMB_DLL_EXPORT void aes_cmac_subkey_gen_avx512(const void *key_exp, void *key1, - void *key2); +/** + * @copydoc aes_cfb_128_one_sse + */ IMB_DLL_EXPORT void aes_cfb_128_one_avx512(void *out, const void *in, const void *iv, const void *keys, uint64_t len); -/** +/* * Direct GCM API. * Note that GCM is also available through job API. */ /** - * @brief GCM-AES Encryption + * @brief AES-GCM-128 Encryption. * - * @param key_data GCM expanded key data - * @param context_data GCM operation context data - * @param out Ciphertext output. Encrypt in-place is allowed. - * @param in Plaintext input. - * @param len Length of data in Bytes for encryption. - * @param iv pointer to 12 byte IV structure. Internally, library - * concates 0x00000001 value to it. - * @param aad Additional Authentication Data (AAD). - * @param aad_len Length of AAD. - * @param auth_tag Authenticated Tag output. - * @param auth_tag_len Authenticated Tag Length in bytes (must be - * a multiple of 4 bytes). Valid values are - * 16 (most likely), 12 or 8. + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Ciphertext output. Encrypt in-place is allowed + * @param [in] in Plaintext input + * @param [in] len Length of data in bytes for encryption + * @param [in] iv Pointer to 12 byte IV structure + * Internally, the library concatenates 0x00000001 + * to the IV + * @param [in] aad Additional Authentication Data (AAD) + * @param [in] aad_len Length of AAD in bytes + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). 
Valid values are 16 + * (most likely), 12 or 8 */ - IMB_DLL_EXPORT void aes_gcm_enc_128_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, uint8_t const *in, uint64_t len, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_128_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_128_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2329,6 +3194,9 @@ aes_gcm_enc_128_avx_gen2(const struct gcm_key_data *key_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_128_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_128_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2336,13 +3204,33 @@ aes_gcm_enc_128_avx_gen4(const struct gcm_key_data *key_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); - +/** + * @brief AES-GCM-192 Encryption. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Ciphertext output. Encrypt in-place is allowed + * @param [in] in Plaintext input + * @param [in] len Length of data in bytes for encryption + * @param [in] iv Pointer to 12 byte IV structure + * Internally, the library concatenates 0x00000001 + * to the IV + * @param [in] aad Additional Authentication Data (AAD) + * @param [in] aad_len Length of AAD in bytes + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are 16 + * (most likely), 12 or 8 + */ IMB_DLL_EXPORT void aes_gcm_enc_192_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, uint8_t const *in, uint64_t len, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_192_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_192_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2350,6 +3238,9 @@ aes_gcm_enc_192_avx_gen2(const struct gcm_key_data *key_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_192_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_192_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2358,6 +3249,24 @@ aes_gcm_enc_192_avx_gen4(const struct gcm_key_data *key_data, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @brief AES-GCM-256 Encryption. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Ciphertext output. Encrypt in-place is allowed + * @param [in] in Plaintext input + * @param [in] len Length of data in bytes for encryption + * @param [in] iv Pointer to 12 byte IV structure + * Internally, the library concatenates 0x00000001 + * to the IV + * @param [in] aad Additional Authentication Data (AAD) + * @param [in] aad_len Length of AAD in bytes + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). 
Valid values are 16 + * (most likely), 12 or 8 + */ IMB_DLL_EXPORT void aes_gcm_enc_256_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2365,6 +3274,9 @@ aes_gcm_enc_256_sse(const struct gcm_key_data *key_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_256_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_256_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2372,6 +3284,9 @@ aes_gcm_enc_256_avx_gen2(const struct gcm_key_data *key_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_256_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_256_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2381,21 +3296,22 @@ aes_gcm_enc_256_avx_gen4(const struct gcm_key_data *key_data, uint8_t *auth_tag, uint64_t auth_tag_len); /** - * @brief GCM-AES Decryption + * @brief AES-GCM-128 Decryption. * - * @param key_data GCM expanded keys data - * @param context_data GCM operation context data - * @param out Plaintext output. Decrypt in-place is allowed. - * @param in Ciphertext input. - * @param len Length of data in Bytes for decryption. - * @param iv pointer to 12 byte IV structure. Internally, library - * concates 0x00000001 value to it. - * @param aad Additional Authentication Data (AAD). - * @param aad_len Length of AAD. - * @param auth_tag Authenticated Tag output. - * @param auth_tag_len Authenticated Tag Length in bytes (must be - * a multiple of 4 bytes). Valid values are - * 16 (most likely), 12 or 8. + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Plaintext output. Decrypt in-place is allowed + * @param [in] in Ciphertext input + * @param [in] len Length of data in bytes for decryption + * @param [in] iv Pointer to 12 byte IV structure + * Internally, the library concatenates 0x00000001 + * to the IV + * @param [in] aad Additional Authentication Data (AAD) + * @param [in] aad_len Length of AAD in bytes + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are 16 + * (most likely), 12 or 8 */ IMB_DLL_EXPORT void aes_gcm_dec_128_sse(const struct gcm_key_data *key_data, @@ -2403,6 +3319,9 @@ aes_gcm_dec_128_sse(const struct gcm_key_data *key_data, uint8_t *out, uint8_t const *in, uint64_t len, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_128_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_128_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2410,6 +3329,9 @@ aes_gcm_dec_128_avx_gen2(const struct gcm_key_data *key_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_128_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_128_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2418,12 +3340,33 @@ aes_gcm_dec_128_avx_gen4(const struct gcm_key_data *key_data, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @brief AES-GCM-192 Decryption. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Plaintext output. 
Decrypt in-place is allowed + * @param [in] in Ciphertext input + * @param [in] len Length of data in bytes for decryption + * @param [in] iv Pointer to 12 byte IV structure + * Internally, the library concatenates 0x00000001 + * to the IV + * @param [in] aad Additional Authentication Data (AAD) + * @param [in] aad_len Length of AAD in bytes + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are 16 + * (most likely), 12 or 8 + */ IMB_DLL_EXPORT void aes_gcm_dec_192_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, uint8_t const *in, uint64_t len, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_192_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_192_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2431,6 +3374,9 @@ aes_gcm_dec_192_avx_gen2(const struct gcm_key_data *key_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_192_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_192_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2439,12 +3385,33 @@ aes_gcm_dec_192_avx_gen4(const struct gcm_key_data *key_data, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @brief AES-GCM-256 Decryption. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Plaintext output. Decrypt in-place is allowed + * @param [in] in Ciphertext input + * @param [in] len Length of data in bytes for decryption + * @param [in] iv Pointer to 12 byte IV structure + * Internally, the library concatenates 0x00000001 + * to the IV + * @param [in] aad Additional Authentication Data (AAD) + * @param [in] aad_len Length of AAD in bytes + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are 16 + * (most likely), 12 or 8 + */ IMB_DLL_EXPORT void aes_gcm_dec_256_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, uint8_t const *in, uint64_t len, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_256_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_256_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2452,6 +3419,9 @@ aes_gcm_dec_256_avx_gen2(const struct gcm_key_data *key_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_256_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_256_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2461,55 +3431,96 @@ aes_gcm_dec_256_avx_gen4(const struct gcm_key_data *key_data, uint8_t *auth_tag, uint64_t auth_tag_len); /** - * @brief Start a AES-GCM Encryption message - * - * @param key_data GCM expanded key data - * @param context_data GCM operation context data - * @param iv pointer to 12 byte IV structure. Internally, library - * concates 0x00000001 value to it. - * @param aad Additional Authentication Data (AAD). - * @param aad_len Length of AAD. 
+ * @brief Initialize a gcm_context_data structure to prepare for + * AES-GCM-128 Encryption. * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [in] iv Pointer to 12 byte IV structure + * Internally, the library concatenates 0x00000001 + * to the IV + * @param [in] aad Additional Authenticated Data (AAD) + * @param [in] aad_len Length of AAD in bytes */ IMB_DLL_EXPORT void aes_gcm_init_128_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len); +/** + * @copydoc aes_gcm_init_128_sse + */ IMB_DLL_EXPORT void aes_gcm_init_128_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len); +/** + * @copydoc aes_gcm_init_128_sse + */ IMB_DLL_EXPORT void aes_gcm_init_128_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len); - +/** + * @brief Initialize a gcm_context_data structure to prepare for + * AES-GCM-192 Encryption. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [in] iv Pointer to 12 byte IV structure + * Internally, the library concatenates 0x00000001 + * to the IV + * @param [in] aad Additional Authenticated Data (AAD) + * @param [in] aad_len Length of AAD in bytes + */ IMB_DLL_EXPORT void aes_gcm_init_192_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len); +/** + * @copydoc aes_gcm_init_192_sse + */ IMB_DLL_EXPORT void aes_gcm_init_192_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len); +/** + * @copydoc aes_gcm_init_192_sse + */ IMB_DLL_EXPORT void aes_gcm_init_192_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len); - +/** + * @brief Initialize a gcm_context_data structure to prepare for + * AES-GCM-256 Encryption. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [in] iv Pointer to 12 byte IV structure + * Internally, the library concatenates 0x00000001 + * to the IV + * @param [in] aad Additional Authenticated Data (AAD) + * @param [in] aad_len Length of AAD in bytes + */ IMB_DLL_EXPORT void aes_gcm_init_256_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len); +/** + * @copydoc aes_gcm_init_256_sse + */ IMB_DLL_EXPORT void aes_gcm_init_256_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, const uint8_t *iv, uint8_t const *aad, uint64_t aad_len); +/** + * @copydoc aes_gcm_init_256_sse + */ IMB_DLL_EXPORT void aes_gcm_init_256_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, @@ -2517,333 +3528,568 @@ aes_gcm_init_256_avx_gen4(const struct gcm_key_data *key_data, uint8_t const *aad, uint64_t aad_len); /** - * @brief encrypt a block of a AES-GCM Encryption message + * @brief Encrypt a block of a AES-GCM-128 encryption message. * - * @param key_data GCM expanded key data - * @param context_data GCM operation context data - * @param out Ciphertext output. Encrypt in-place is allowed. - * @param in Plaintext input. 
- * @param len Length of data in Bytes for decryption. + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Ciphertext output. Encrypt in-place is allowed + * @param [in] in Plaintext input + * @param [in] len Length of data in bytes for encryption */ IMB_DLL_EXPORT void aes_gcm_enc_128_update_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_enc_128_update_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_128_update_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_enc_128_update_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_128_update_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @brief Encrypt a block of a AES-GCM-192 encryption message. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Ciphertext output. Encrypt in-place is allowed + * @param [in] in Plaintext input + * @param [in] len Length of data in bytes for encryption + */ IMB_DLL_EXPORT void aes_gcm_enc_192_update_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_enc_192_update_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_192_update_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_enc_192_update_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_192_update_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @brief Encrypt a block of a AES-GCM-256 encryption message. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Ciphertext output. Encrypt in-place is allowed + * @param [in] in Plaintext input + * @param [in] len Length of data in bytes for encryption + */ IMB_DLL_EXPORT void aes_gcm_enc_256_update_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_enc_256_update_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_256_update_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_enc_256_update_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_256_update_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); /** - * @brief decrypt a block of a AES-GCM Encryption message + * @brief Decrypt a block of a AES-GCM-128 encryption message. * - * @param key_data GCM expanded key data - * @param context_data GCM operation context data - * @param out Plaintext output. Decrypt in-place is allowed. - * @param in Ciphertext input. - * @param len Length of data in Bytes for decryption. + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Plaintext output. 
Decrypt in-place is allowed + * @param [in] in Ciphertext input + * @param [in] len Length of data in bytes for decryption */ IMB_DLL_EXPORT void aes_gcm_dec_128_update_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_dec_128_update_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_128_update_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_dec_128_update_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_128_update_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @brief Decrypt a block of a AES-GCM-192 encryption message. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Plaintext output. Decrypt in-place is allowed + * @param [in] in Ciphertext input + * @param [in] len Length of data in bytes for decryption + */ IMB_DLL_EXPORT void aes_gcm_dec_192_update_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_dec_192_update_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_192_update_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_dec_192_update_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_192_update_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @brief Decrypt a block of a AES-GCM-256 encryption message. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] out Plaintext output. Decrypt in-place is allowed + * @param [in] in Ciphertext input + * @param [in] len Length of data in bytes for decryption + */ IMB_DLL_EXPORT void aes_gcm_dec_256_update_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_dec_256_update_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_256_update_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); +/** + * @copydoc aes_gcm_dec_256_update_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_256_update_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *out, const uint8_t *in, uint64_t len); /** - * @brief End encryption of a AES-GCM Encryption message + * @brief End encryption of a AES-GCM-128 encryption message. * - * @param key_data GCM expanded key data - * @param context_data GCM operation context data - * @param auth_tag Authenticated Tag output. - * @param auth_tag_len Authenticated Tag Length in bytes (must be - * a multiple of 4 bytes). Valid values are - * 16 (most likely), 12 or 8. + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are + * 16 (most likely), 12 or 8. 
*/ IMB_DLL_EXPORT void aes_gcm_enc_128_finalize_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_128_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_128_finalize_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_128_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_128_finalize_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @brief End encryption of a AES-GCM-192 encryption message. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are + * 16 (most likely), 12 or 8. + */ IMB_DLL_EXPORT void aes_gcm_enc_192_finalize_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_192_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_192_finalize_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_192_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_192_finalize_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @brief End encryption of a AES-GCM-256 encryption message. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are + * 16 (most likely), 12 or 8. + */ IMB_DLL_EXPORT void aes_gcm_enc_256_finalize_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_256_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_256_finalize_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_enc_256_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_enc_256_finalize_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); /** - * @brief End decryption of a AES-GCM Encryption message + * @brief End decryption of a AES-GCM-128 encryption message. * - * @param key_data GCM expanded key data - * @param context_data GCM operation context data - * @param auth_tag Authenticated Tag output. - * @param auth_tag_len Authenticated Tag Length in bytes (must be - * a multiple of 4 bytes). Valid values are - * 16 (most likely), 12 or 8. + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are + * 16 (most likely), 12 or 8. 
*/ IMB_DLL_EXPORT void aes_gcm_dec_128_finalize_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_128_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_128_finalize_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_128_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_128_finalize_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @brief End decryption of a AES-GCM-192 encryption message. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are + * 16 (most likely), 12 or 8. + */ IMB_DLL_EXPORT void aes_gcm_dec_192_finalize_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_192_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_192_finalize_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_192_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_192_finalize_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @brief End decryption of a AES-GCM-256 encryption message. + * + * @param [in] key_data GCM expanded key data + * @param [in,out] context_data GCM operation context data + * @param [out] auth_tag Authenticated Tag output + * @param [in] auth_tag_len Authenticated Tag Length in bytes (must be + * a multiple of 4 bytes). Valid values are + * 16 (most likely), 12 or 8. + */ IMB_DLL_EXPORT void aes_gcm_dec_256_finalize_sse(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_256_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_256_finalize_avx_gen2(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); +/** + * @copydoc aes_gcm_dec_256_finalize_sse + */ IMB_DLL_EXPORT void aes_gcm_dec_256_finalize_avx_gen4(const struct gcm_key_data *key_data, struct gcm_context_data *context_data, uint8_t *auth_tag, uint64_t auth_tag_len); /** - * @brief Precomputation of HashKey constants + * @brief Precomputation of AES-GCM-128 HashKey constants. * * Precomputation of HashKey<<1 mod poly constants (shifted_hkey_X and * shifted_hkey_X_k). * - * @param [in] key_data GCM key data + * @param [in,out] key_data GCM key data */ IMB_DLL_EXPORT void aes_gcm_precomp_128_sse(struct gcm_key_data *key_data); + +/** + * @copydoc aes_gcm_precomp_128_sse + */ IMB_DLL_EXPORT void aes_gcm_precomp_128_avx_gen2(struct gcm_key_data *key_data); + +/** + * @copydoc aes_gcm_precomp_128_sse + */ IMB_DLL_EXPORT void aes_gcm_precomp_128_avx_gen4(struct gcm_key_data *key_data); +/** + * @brief Precomputation of AES-GCM-192 HashKey constants. + * + * Precomputation of HashKey<<1 mod poly constants (shifted_hkey_X and + * shifted_hkey_X_k). 
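The GCM entry points documented above are used as a pre/init/update/finalize sequence. The sketch below is illustrative only and not part of this patch: it shows a one-shot AES-GCM-128 encryption through the direct SSE symbols whose prototypes appear in this header. The buffer names are placeholders, alignment handling is omitted, and applications would normally dispatch through the multi-buffer manager rather than call the per-architecture functions directly.

```
#include <stdint.h>
#include "ipsec-mb.h" /* library header, as included elsewhere in this patch */

/*
 * Illustrative only: one-shot AES-GCM-128 encryption via the direct SSE API.
 * key is a 16-byte AES key, iv is the 12-byte IV, aad/aad_len describe the
 * additional authenticated data and tag receives a 16-byte auth tag.
 */
static void
gcm128_encrypt_sse_sketch(const uint8_t key[16], const uint8_t iv[12],
                          const uint8_t *aad, const uint64_t aad_len,
                          const uint8_t *plain, const uint64_t len,
                          uint8_t *cipher, uint8_t tag[16])
{
        struct gcm_key_data key_data;
        struct gcm_context_data ctx;

        /* expand the AES round keys and precompute the hash key constants */
        aes_gcm_pre_128_sse(key, &key_data);

        /* start the operation: 12-byte IV plus AAD */
        aes_gcm_init_128_sse(&key_data, &ctx, iv, aad, aad_len);

        /* cipher the message; update calls can be repeated on further chunks */
        aes_gcm_enc_128_update_sse(&key_data, &ctx, cipher, plain, len);

        /* produce the authentication tag (16, 12 or 8 bytes are valid) */
        aes_gcm_enc_128_finalize_sse(&key_data, &ctx, tag, 16);
}
```

The 192- and 256-bit variants and the AVX implementations follow the same calling pattern, only the function suffix changes.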
+ * + * @param [in,out] key_data GCM key data + */ IMB_DLL_EXPORT void aes_gcm_precomp_192_sse(struct gcm_key_data *key_data); + +/** + * @copydoc aes_gcm_precomp_192_sse + */ IMB_DLL_EXPORT void aes_gcm_precomp_192_avx_gen2(struct gcm_key_data *key_data); + +/** + * @copydoc aes_gcm_precomp_192_sse + */ IMB_DLL_EXPORT void aes_gcm_precomp_192_avx_gen4(struct gcm_key_data *key_data); +/** + * @brief Precomputation of AES-GCM-256 HashKey constants. + * + * Precomputation of HashKey<<1 mod poly constants (shifted_hkey_X and + * shifted_hkey_X_k). + * + * @param [in,out] key_data GCM key data + */ IMB_DLL_EXPORT void aes_gcm_precomp_256_sse(struct gcm_key_data *key_data); + +/** + * @copydoc aes_gcm_precomp_256_sse + */ IMB_DLL_EXPORT void aes_gcm_precomp_256_avx_gen2(struct gcm_key_data *key_data); + +/** + * @copydoc aes_gcm_precomp_256_sse + */ IMB_DLL_EXPORT void aes_gcm_precomp_256_avx_gen4(struct gcm_key_data *key_data); /** - * @brief Pre-processes GCM key data + * @brief Pre-processes AES-GCM-128 key data. * * Prefills the gcm key data with key values for each round and * the initial sub hash key for tag encoding * - * @param key pointer to key data - * @param key_data GCM expanded key data - * + * @param [in] key Pointer to key data + * @param [out] key_data GCM expanded key data */ IMB_DLL_EXPORT void aes_gcm_pre_128_sse(const void *key, struct gcm_key_data *key_data); +/** + * @copydoc aes_gcm_pre_128_sse + */ IMB_DLL_EXPORT void aes_gcm_pre_128_avx_gen2(const void *key, struct gcm_key_data *key_data); +/** + * @copydoc aes_gcm_pre_128_sse + */ IMB_DLL_EXPORT void aes_gcm_pre_128_avx_gen4(const void *key, struct gcm_key_data *key_data); +/** + * @brief Pre-processes AES-GCM-192 key data. + * + * Prefills the gcm key data with key values for each round and + * the initial sub hash key for tag encoding + * + * @param [in] key Pointer to key data + * @param [out] key_data GCM expanded key data + */ IMB_DLL_EXPORT void aes_gcm_pre_192_sse(const void *key, struct gcm_key_data *key_data); +/** + * @copydoc aes_gcm_pre_192_sse + */ IMB_DLL_EXPORT void aes_gcm_pre_192_avx_gen2(const void *key, struct gcm_key_data *key_data); +/** + * @copydoc aes_gcm_pre_192_sse + */ IMB_DLL_EXPORT void aes_gcm_pre_192_avx_gen4(const void *key, struct gcm_key_data *key_data); +/** + * @brief Pre-processes AES-GCM-256 key data. + * + * Prefills the gcm key data with key values for each round and + * the initial sub hash key for tag encoding + * + * @param [in] key Pointer to key data + * @param [out] key_data GCM expanded key data + */ IMB_DLL_EXPORT void aes_gcm_pre_256_sse(const void *key, struct gcm_key_data *key_data); +/** + * @copydoc aes_gcm_pre_256_sse + */ IMB_DLL_EXPORT void aes_gcm_pre_256_avx_gen2(const void *key, struct gcm_key_data *key_data); +/** + * @copydoc aes_gcm_pre_256_sse + */ IMB_DLL_EXPORT void aes_gcm_pre_256_avx_gen4(const void *key, struct gcm_key_data *key_data); /** - * @brief Generation of ZUC Initialization Vectors (for EEA3 and EIA3) + * @brief Generation of ZUC-EEA3 Initialization Vector. 
* - * @param [in] count COUNT (4 bytes in Little Endian) - * @param [in] bearer BEARER (5 bits) - * @param [in] dir DIRECTION (1 bit) + * @param [in] count COUNT (4 bytes in Little Endian) + * @param [in] bearer BEARER (5 bits) + * @param [in] dir DIRECTION (1 bit) * @param [out] iv_ptr Pointer to generated IV (16 bytes) * - * @return - * - 0 if success - * - 1 if one or more parameters are wrong + * @return Operation status + * @retval 0 success + * @retval -1 if one or more parameters are invalid */ IMB_DLL_EXPORT int zuc_eea3_iv_gen(const uint32_t count, const uint8_t bearer, const uint8_t dir, void *iv_ptr); +/** + * @brief Generation of ZUC-EIA3 Initialization Vector. + * + * @param [in] count COUNT (4 bytes in Little Endian) + * @param [in] bearer BEARER (5 bits) + * @param [in] dir DIRECTION (1 bit) + * @param [out] iv_ptr Pointer to generated IV (16 bytes) + * + * @return Operation status + * @retval 0 success + * @retval -1 if one or more parameters are invalid + */ IMB_DLL_EXPORT int zuc_eia3_iv_gen(const uint32_t count, const uint8_t bearer, const uint8_t dir, void *iv_ptr); /** - * @brief Generation of KASUMI F8 Initialization Vector + * @brief Generation of KASUMI F8 Initialization Vector. * - * @param [in] count COUNT (4 bytes in Little Endian) - * @param [in] bearer BEARER (5 bits) - * @param [in] dir DIRECTION (1 bit) + * @param [in] count COUNT (4 bytes in Little Endian) + * @param [in] bearer BEARER (5 bits) + * @param [in] dir DIRECTION (1 bit) * @param [out] iv_ptr Pointer to generated IV (16 bytes) * - * @return - * - 0 if success - * - 1 if one or more parameters are wrong + * @return Operation status + * @retval 0 success + * @retval -1 if one or more parameters are invalid */ IMB_DLL_EXPORT int kasumi_f8_iv_gen(const uint32_t count, const uint8_t bearer, const uint8_t dir, void *iv_ptr); /** - * @brief Generation of KASUMI F9 Initialization Vector + * @brief Generation of KASUMI F9 Initialization Vector. * - * @param [in] count COUNT (4 bytes in Little Endian) - * @param [in] fresh FRESH (4 bytes in Little Endian) + * @param [in] count COUNT (4 bytes in Little Endian) + * @param [in] fresh FRESH (4 bytes in Little Endian) * @param [out] iv_ptr Pointer to generated IV (16 bytes) * - * @return - * - 0 if success - * - 1 if one or more parameters are wrong + * @return Operation status + * @retval 0 success + * @retval -1 if one or more parameters are invalid */ IMB_DLL_EXPORT int kasumi_f9_iv_gen(const uint32_t count, const uint32_t fresh, void *iv_ptr); /** - * @brief Generation of SNOW3G F8 Initialization Vector + * @brief Generation of SNOW3G F8 Initialization Vector. * * Parameters are passed in Little Endian format and - * used to generate the IV in Big Endian format + * used to generate the IV in Big Endian format. * - * @param [in] count COUNT (4 bytes in Little Endian) - * @param [in] bearer BEARER (5 bits) - * @param [in] dir DIRECTION (1 bit) + * @param [in] count COUNT (4 bytes in Little Endian) + * @param [in] bearer BEARER (5 bits) + * @param [in] dir DIRECTION (1 bit) * @param [out] iv_ptr Pointer to generated IV (16 bytes) in Big Endian format * - * @return - * - 0 if success - * - 1 if one or more parameters are wrong + * @return Operation status + * @retval 0 success + * @retval -1 if one or more parameters are invalid */ IMB_DLL_EXPORT int snow3g_f8_iv_gen(const uint32_t count, const uint8_t bearer, const uint8_t dir, void *iv_ptr); /** - * @brief Generation of SNOW3G F9 Initialization Vector + * @brief Generation of SNOW3G F9 Initialization Vector. 
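The IV-generation helpers above are standalone exported functions. A minimal, illustrative call, not part of this patch, that derives ZUC-EEA3 and SNOW3G-F8 IVs from the 3GPP COUNT/BEARER/DIRECTION inputs could look like the sketch below; the wrapper name and buffers are placeholders, and a non-zero return is treated as "invalid parameters" per the documentation above.

```
#include <stdint.h>
#include "ipsec-mb.h" /* library header, as included elsewhere in this patch */

/*
 * Illustrative only: build 16-byte ZUC-EEA3 and SNOW3G-F8 IVs from the
 * 3GPP COUNT/BEARER/DIRECTION inputs. BEARER is 5 bits, DIRECTION is 1 bit.
 */
static int
build_lte_ivs(const uint32_t count, const uint8_t bearer, const uint8_t dir,
              uint8_t zuc_iv[16], uint8_t snow3g_iv[16])
{
        if (zuc_eea3_iv_gen(count, bearer, dir, zuc_iv) != 0)
                return -1; /* invalid COUNT/BEARER/DIRECTION combination */

        if (snow3g_f8_iv_gen(count, bearer, dir, snow3g_iv) != 0)
                return -1;

        return 0;
}
```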
* * Parameters are passed in Little Endian format and - * used to generate the IV in Big Endian format + * used to generate the IV in Big Endian format. * - * @param [in] count COUNT (4 bytes in Little Endian) - * @param [in] fresh FRESH (4 bytes in Little Endian) - * @param [in] dir DIRECTION (1 bit) + * @param [in] count COUNT (4 bytes in Little Endian) + * @param [in] fresh FRESH (4 bytes in Little Endian) + * @param [in] dir DIRECTION (1 bit) * @param [out] iv_ptr Pointer to generated IV (16 bytes) in Big Endian format * - * @return - * - 0 if success - * - 1 if one or more parameters are wrong + * @return Operation status + * @retval 0 success + * @retval -1 if one or more parameters are invalid */ IMB_DLL_EXPORT int snow3g_f9_iv_gen(const uint32_t count, const uint32_t fresh, diff --git a/lib/libIPSec_MB.def b/lib/libIPSec_MB.def index 258cf8cf700b26b2dad6d5208c43ad894d1b2aac..903b3368826d3574ba38393a8a7a6c29627c6fe3 100644 --- a/lib/libIPSec_MB.def +++ b/lib/libIPSec_MB.def @@ -538,9 +538,143 @@ EXPORTS crc7_fp_header_sse_no_aesni @512 crc7_fp_header_avx @513 crc7_fp_header_avx512 @514 - imb_get_errno @515 - imb_get_strerror @516 - init_mb_mgr_auto @517 + imb_get_errno @515 + imb_get_strerror @516 + init_mb_mgr_auto @517 imb_get_mb_mgr_size @518 imb_set_pointers_mb_mgr @519 imb_get_feature_flags @520 + get_next_burst_sse_t1 @521 + get_next_burst_sse_t2 @522 + get_next_burst_sse_t3 @523 + get_next_burst_sse_no_aesni @524 + get_next_burst_avx_t1 @525 + get_next_burst_avx_t2 @526 + get_next_burst_avx2_t1 @527 + get_next_burst_avx2_t2 @528 + get_next_burst_avx512_t1 @529 + get_next_burst_avx512_t2 @530 + submit_burst_sse_t1 @531 + submit_burst_sse_t2 @532 + submit_burst_sse_t3 @533 + submit_burst_sse_no_aesni @534 + submit_burst_avx_t1 @535 + submit_burst_avx_t2 @536 + submit_burst_avx2_t1 @537 + submit_burst_avx2_t2 @538 + submit_burst_avx512_t1 @539 + submit_burst_avx512_t2 @540 + submit_burst_nocheck_sse_t1 @541 + submit_burst_nocheck_sse_t2 @542 + submit_burst_nocheck_sse_t3 @543 + submit_burst_nocheck_sse_no_aesni @544 + submit_burst_nocheck_avx_t1 @545 + submit_burst_nocheck_avx_t2 @546 + submit_burst_nocheck_avx2_t1 @547 + submit_burst_nocheck_avx2_t2 @548 + submit_burst_nocheck_avx512_t1 @549 + submit_burst_nocheck_avx512_t2 @550 + flush_burst_sse_t1 @551 + flush_burst_sse_t2 @552 + flush_burst_sse_t3 @553 + flush_burst_sse_no_aesni @554 + flush_burst_avx_t1 @555 + flush_burst_avx_t2 @556 + flush_burst_avx2_t1 @557 + flush_burst_avx2_t2 @558 + flush_burst_avx512_t1 @559 + flush_burst_avx512_t2 @560 + submit_cipher_burst_sse_t1 @561 + submit_cipher_burst_sse_t2 @562 + submit_cipher_burst_sse_t3 @563 + submit_cipher_burst_sse_no_aesni @564 + submit_cipher_burst_avx_t1 @565 + submit_cipher_burst_avx_t2 @566 + submit_cipher_burst_avx2_t1 @567 + submit_cipher_burst_avx2_t2 @568 + submit_cipher_burst_avx512_t1 @569 + submit_cipher_burst_avx512_t2 @570 + submit_cipher_burst_nocheck_sse_t1 @571 + submit_cipher_burst_nocheck_sse_t2 @572 + submit_cipher_burst_nocheck_sse_t3 @573 + submit_cipher_burst_nocheck_sse_no_aesni @574 + submit_cipher_burst_nocheck_avx_t1 @575 + submit_cipher_burst_nocheck_avx_t2 @576 + submit_cipher_burst_nocheck_avx2_t1 @577 + submit_cipher_burst_nocheck_avx2_t2 @578 + submit_cipher_burst_nocheck_avx512_t1 @579 + submit_cipher_burst_nocheck_avx512_t2 @580 + submit_hash_burst_sse_t1 @581 + submit_hash_burst_sse_t2 @582 + submit_hash_burst_sse_t3 @583 + submit_hash_burst_sse_no_aesni @584 + submit_hash_burst_avx_t1 @585 + submit_hash_burst_avx_t2 @586 + 
submit_hash_burst_avx2_t1 @587 + submit_hash_burst_avx2_t2 @588 + submit_hash_burst_avx512_t1 @589 + submit_hash_burst_avx512_t2 @590 + submit_hash_burst_nocheck_sse_t1 @591 + submit_hash_burst_nocheck_sse_t2 @592 + submit_hash_burst_nocheck_sse_t3 @593 + submit_hash_burst_nocheck_sse_no_aesni @594 + submit_hash_burst_nocheck_avx_t1 @595 + submit_hash_burst_nocheck_avx_t2 @596 + submit_hash_burst_nocheck_avx2_t1 @597 + submit_hash_burst_nocheck_avx2_t2 @598 + submit_hash_burst_nocheck_avx512_t1 @599 + submit_hash_burst_nocheck_avx512_t2 @600 + flush_job_sse_t1 @601 + flush_job_sse_t2 @602 + flush_job_sse_t3 @603 + flush_job_avx_t1 @604 + flush_job_avx_t2 @605 + flush_job_avx2_t1 @606 + flush_job_avx2_t2 @607 + flush_job_avx512_t1 @608 + flush_job_avx512_t2 @609 + queue_size_sse_t1 @610 + queue_size_sse_t2 @611 + queue_size_sse_t3 @612 + queue_size_avx_t1 @613 + queue_size_avx_t2 @614 + queue_size_avx2_t1 @615 + queue_size_avx2_t2 @616 + queue_size_avx512_t1 @617 + queue_size_avx512_t2 @618 + submit_job_sse_t1 @619 + submit_job_sse_t2 @620 + submit_job_sse_t3 @621 + submit_job_avx_t1 @622 + submit_job_avx_t2 @623 + submit_job_avx2_t1 @624 + submit_job_avx2_t2 @625 + submit_job_avx512_t1 @626 + submit_job_avx512_t2 @627 + submit_job_nocheck_sse_t1 @628 + submit_job_nocheck_sse_t2 @629 + submit_job_nocheck_sse_t3 @630 + submit_job_nocheck_avx_t1 @631 + submit_job_nocheck_avx_t2 @632 + submit_job_nocheck_avx2_t1 @633 + submit_job_nocheck_avx2_t2 @634 + submit_job_nocheck_avx512_t1 @635 + submit_job_nocheck_avx512_t2 @636 + get_next_job_sse_t1 @637 + get_next_job_sse_t2 @638 + get_next_job_sse_t3 @639 + get_next_job_avx_t1 @640 + get_next_job_avx_t2 @641 + get_next_job_avx2_t1 @642 + get_next_job_avx2_t2 @643 + get_next_job_avx512_t1 @644 + get_next_job_avx512_t2 @645 + get_completed_job_sse_t1 @646 + get_completed_job_sse_t2 @647 + get_completed_job_sse_t3 @648 + get_completed_job_avx_t1 @649 + get_completed_job_avx_t2 @650 + get_completed_job_avx2_t1 @651 + get_completed_job_avx2_t2 @652 + get_completed_job_avx512_t1 @653 + get_completed_job_avx512_t2 @654 \ No newline at end of file diff --git a/lib/no-aesni/aes128_cbc_dec_by4_sse_no_aesni.asm b/lib/no-aesni/aes128_cbc_dec_by4_sse_no_aesni.asm index 6e710bfef1f540d9a46cac899dad0cd7e429a130..7fef27fe1373ff517de35d2e5f36ef55dbef0765 100644 --- a/lib/no-aesni/aes128_cbc_dec_by4_sse_no_aesni.asm +++ b/lib/no-aesni/aes128_cbc_dec_by4_sse_no_aesni.asm @@ -27,4 +27,4 @@ %include "include/aesni_emu.inc" %define AES_CBC_DEC_128 aes_cbc_dec_128_sse_no_aesni -%include "sse/aes128_cbc_dec_by4_sse.asm" +%include "sse_t1/aes128_cbc_dec_by4_sse.asm" diff --git a/lib/no-aesni/aes128_cbc_enc_x4_sse_no_aesni.asm b/lib/no-aesni/aes128_cbc_enc_x4_sse_no_aesni.asm index c36f1b1bdbc6082d00e9564fb5b400e0aeda2428..c925419ac617026f57a2f28e8c2dfb4b89764477 100644 --- a/lib/no-aesni/aes128_cbc_enc_x4_sse_no_aesni.asm +++ b/lib/no-aesni/aes128_cbc_enc_x4_sse_no_aesni.asm @@ -35,4 +35,4 @@ %define ARG_KEYS _aesarg_keys %define ARG_IV _aesarg_IV -%include "sse/aes128_cbc_enc_x4_sse.asm" +%include "sse_t1/aes128_cbc_enc_x4_sse.asm" diff --git a/lib/no-aesni/aes128_cbc_mac_x4_sse_no_aesni.asm b/lib/no-aesni/aes128_cbc_mac_x4_sse_no_aesni.asm index df586a7bb86a1a7d14e720115f81ad93704dc415..999176ea6252541ea3f27a15b468949f7c065b77 100644 --- a/lib/no-aesni/aes128_cbc_mac_x4_sse_no_aesni.asm +++ b/lib/no-aesni/aes128_cbc_mac_x4_sse_no_aesni.asm @@ -36,4 +36,4 @@ %define ARG_KEYS _aesarg_keys %define ARG_IV _aesarg_IV -%include "sse/aes128_cbc_enc_x4_sse.asm" +%include 
"sse_t1/aes128_cbc_enc_x4_sse.asm" diff --git a/lib/no-aesni/aes128_cbcs_1_9_dec_by4_sse_no_aesni.asm b/lib/no-aesni/aes128_cbcs_1_9_dec_by4_sse_no_aesni.asm index 2a3286a8829658d71e9b7b4cbc96ca23d208fbe6..c30441adda3ad7277811e730fe0d9a01abdc1e24 100644 --- a/lib/no-aesni/aes128_cbcs_1_9_dec_by4_sse_no_aesni.asm +++ b/lib/no-aesni/aes128_cbcs_1_9_dec_by4_sse_no_aesni.asm @@ -32,4 +32,4 @@ %define CBCS %endif -%include "sse/aes128_cbc_dec_by4_sse.asm" +%include "sse_t1/aes128_cbc_dec_by4_sse.asm" diff --git a/lib/no-aesni/aes128_cbcs_1_9_enc_x4_sse_no_aesni.asm b/lib/no-aesni/aes128_cbcs_1_9_enc_x4_sse_no_aesni.asm index 91a001c14ec694e6a1d16ea907641e48aa61aeed..ce0f0cb11d634d10c563228a40200b2e92be1d36 100644 --- a/lib/no-aesni/aes128_cbcs_1_9_enc_x4_sse_no_aesni.asm +++ b/lib/no-aesni/aes128_cbcs_1_9_enc_x4_sse_no_aesni.asm @@ -51,4 +51,4 @@ %define ARG_KEYS _aesarg_keys %define ARG_IV _aesarg_IV -%include "sse/aes128_cbc_enc_x4_sse.asm" +%include "sse_t1/aes128_cbc_enc_x4_sse.asm" diff --git a/lib/no-aesni/aes128_cntr_by8_sse_no_aesni.asm b/lib/no-aesni/aes128_cntr_by8_sse_no_aesni.asm index 257ba64b76365c6842b82229f0ae2e01a28f3feb..5e001722cfefbd38edcd6b617bae3a65baf6a255 100644 --- a/lib/no-aesni/aes128_cntr_by8_sse_no_aesni.asm +++ b/lib/no-aesni/aes128_cntr_by8_sse_no_aesni.asm @@ -28,4 +28,4 @@ %include "include/aesni_emu.inc" %define AES_CNTR_128 aes_cntr_128_sse_no_aesni %define AES_CNTR_BIT_128 aes_cntr_bit_128_sse_no_aesni -%include "sse/aes128_cntr_by8_sse.asm" +%include "sse_t1/aes128_cntr_by8_sse.asm" diff --git a/lib/no-aesni/aes128_cntr_ccm_by8_sse_no_aesni.asm b/lib/no-aesni/aes128_cntr_ccm_by8_sse_no_aesni.asm index 4702c4bb592ca2a4e399458753e0e032abaf258f..e3e5d0f4989551c536c6c51ca3ba4da2b8c8d995 100644 --- a/lib/no-aesni/aes128_cntr_ccm_by8_sse_no_aesni.asm +++ b/lib/no-aesni/aes128_cntr_ccm_by8_sse_no_aesni.asm @@ -27,4 +27,4 @@ %include "include/aesni_emu.inc" %define AES_CNTR_CCM_128 aes_cntr_ccm_128_sse_no_aesni -%include "sse/aes128_cntr_ccm_by8_sse.asm" +%include "sse_t1/aes128_cntr_ccm_by8_sse.asm" diff --git a/lib/no-aesni/aes128_xcbc_mac_x4_sse_no_aesni.asm b/lib/no-aesni/aes128_xcbc_mac_x4_sse_no_aesni.asm index 4c109a51f805551f2a892a338ed4205f96556def..20c4ff6c04a671605f04cd70f7c2d923fc7fd741 100644 --- a/lib/no-aesni/aes128_xcbc_mac_x4_sse_no_aesni.asm +++ b/lib/no-aesni/aes128_xcbc_mac_x4_sse_no_aesni.asm @@ -34,4 +34,4 @@ %define ARG_KEYS _aesxcbcarg_keys %define ARG_IV _aesxcbcarg_ICV -%include "sse/aes128_cbc_enc_x4_sse.asm" +%include "sse_t1/aes128_cbc_enc_x4_sse.asm" diff --git a/lib/no-aesni/aes192_cbc_dec_by4_sse_no_aesni.asm b/lib/no-aesni/aes192_cbc_dec_by4_sse_no_aesni.asm index 349e06bffc0e2ddf90801196abe12ba4a8941291..b4d9c7aed3538e24ae5eea468b52d38094126366 100644 --- a/lib/no-aesni/aes192_cbc_dec_by4_sse_no_aesni.asm +++ b/lib/no-aesni/aes192_cbc_dec_by4_sse_no_aesni.asm @@ -27,4 +27,4 @@ %include "include/aesni_emu.inc" %define AES_CBC_DEC_192 aes_cbc_dec_192_sse_no_aesni -%include "sse/aes192_cbc_dec_by4_sse.asm" +%include "sse_t1/aes192_cbc_dec_by4_sse.asm" diff --git a/lib/no-aesni/aes192_cbc_enc_x4_sse_no_aesni.asm b/lib/no-aesni/aes192_cbc_enc_x4_sse_no_aesni.asm index 0d74ea19cff9e7215dd616bf626132920fb5ff12..720ccb507515a457001b095ba16bd9c9a55da1d9 100644 --- a/lib/no-aesni/aes192_cbc_enc_x4_sse_no_aesni.asm +++ b/lib/no-aesni/aes192_cbc_enc_x4_sse_no_aesni.asm @@ -27,4 +27,4 @@ %include "include/aesni_emu.inc" %define AES_CBC_ENC_X4 aes_cbc_enc_192_x4_no_aesni -%include "sse/aes192_cbc_enc_x4_sse.asm" +%include 
"sse_t1/aes192_cbc_enc_x4_sse.asm" diff --git a/lib/no-aesni/aes192_cntr_by8_sse_no_aesni.asm b/lib/no-aesni/aes192_cntr_by8_sse_no_aesni.asm index 6707acee5653ee8eb6ddbb9dc167ef2699a208b9..4f9cb9adb34df53ef3785dff8cbcdc97f8c7ad01 100644 --- a/lib/no-aesni/aes192_cntr_by8_sse_no_aesni.asm +++ b/lib/no-aesni/aes192_cntr_by8_sse_no_aesni.asm @@ -28,4 +28,4 @@ %include "include/aesni_emu.inc" %define AES_CNTR_192 aes_cntr_192_sse_no_aesni %define AES_CNTR_BIT_192 aes_cntr_bit_192_sse_no_aesni -%include "sse/aes192_cntr_by8_sse.asm" +%include "sse_t1/aes192_cntr_by8_sse.asm" diff --git a/lib/no-aesni/aes256_cbc_dec_by4_sse_no_aesni.asm b/lib/no-aesni/aes256_cbc_dec_by4_sse_no_aesni.asm index 57b7b5a3d37076c6935e36d7ee9787b4612b3dd3..a4f1d010295af8841e6ef49c1e5f0ae266f68492 100644 --- a/lib/no-aesni/aes256_cbc_dec_by4_sse_no_aesni.asm +++ b/lib/no-aesni/aes256_cbc_dec_by4_sse_no_aesni.asm @@ -27,4 +27,4 @@ %include "include/aesni_emu.inc" %define AES_CBC_DEC_256 aes_cbc_dec_256_sse_no_aesni -%include "sse/aes256_cbc_dec_by4_sse.asm" +%include "sse_t1/aes256_cbc_dec_by4_sse.asm" diff --git a/lib/no-aesni/aes256_cbc_enc_x4_sse_no_aesni.asm b/lib/no-aesni/aes256_cbc_enc_x4_sse_no_aesni.asm index cc6a9479d5dd2387e1674762006c58dc814cf50c..4c6ae009cba2417d87e66f270dd792352c6994f6 100644 --- a/lib/no-aesni/aes256_cbc_enc_x4_sse_no_aesni.asm +++ b/lib/no-aesni/aes256_cbc_enc_x4_sse_no_aesni.asm @@ -27,4 +27,4 @@ %include "include/aesni_emu.inc" %define AES_CBC_ENC_X4 aes_cbc_enc_256_x4_no_aesni -%include "sse/aes256_cbc_enc_x4_sse.asm" +%include "sse_t1/aes256_cbc_enc_x4_sse.asm" diff --git a/lib/no-aesni/aes256_cbc_mac_x4_sse_no_aesni.asm b/lib/no-aesni/aes256_cbc_mac_x4_sse_no_aesni.asm index a7bf17ade0531c5aec521026ce8c64a17beb12cc..ee31c351202497111dbef0731542d2c5519995a6 100644 --- a/lib/no-aesni/aes256_cbc_mac_x4_sse_no_aesni.asm +++ b/lib/no-aesni/aes256_cbc_mac_x4_sse_no_aesni.asm @@ -30,4 +30,4 @@ %include "include/aesni_emu.inc" %define AES_CBC_ENC_X4 %define CBC_MAC -%include "sse/aes256_cbc_enc_x4_sse.asm" +%include "sse_t1/aes256_cbc_enc_x4_sse.asm" diff --git a/lib/no-aesni/aes256_cntr_by8_sse_no_aesni.asm b/lib/no-aesni/aes256_cntr_by8_sse_no_aesni.asm index 403b1b82c123799b6f392c3a296cfe30def1529d..00811ff12f21244b698278bbc227cbf2116ee1bd 100644 --- a/lib/no-aesni/aes256_cntr_by8_sse_no_aesni.asm +++ b/lib/no-aesni/aes256_cntr_by8_sse_no_aesni.asm @@ -28,4 +28,4 @@ %include "include/aesni_emu.inc" %define AES_CNTR_256 aes_cntr_256_sse_no_aesni %define AES_CNTR_BIT_256 aes_cntr_bit_256_sse_no_aesni -%include "sse/aes256_cntr_by8_sse.asm" +%include "sse_t1/aes256_cntr_by8_sse.asm" diff --git a/lib/no-aesni/aes256_cntr_ccm_by8_sse_no_aesni.asm b/lib/no-aesni/aes256_cntr_ccm_by8_sse_no_aesni.asm index 0d4f3ef8eac26b9ff95772ed3a4232793f346b46..8057968fe7b7dd56bacce40ad7de77b8360ac75e 100644 --- a/lib/no-aesni/aes256_cntr_ccm_by8_sse_no_aesni.asm +++ b/lib/no-aesni/aes256_cntr_ccm_by8_sse_no_aesni.asm @@ -27,4 +27,4 @@ %include "include/aesni_emu.inc" %define AES_CNTR_CCM_256 aes_cntr_ccm_256_sse_no_aesni -%include "sse/aes256_cntr_ccm_by8_sse.asm" +%include "sse_t1/aes256_cntr_ccm_by8_sse.asm" diff --git a/lib/no-aesni/aes_cfb_sse_no_aesni.asm b/lib/no-aesni/aes_cfb_sse_no_aesni.asm index 9444682eba4483ac32e87b78e4eb340e684b4cb6..8c033ce44a96c63cfcdc010eb7ca819743374373 100644 --- a/lib/no-aesni/aes_cfb_sse_no_aesni.asm +++ b/lib/no-aesni/aes_cfb_sse_no_aesni.asm @@ -28,4 +28,4 @@ %include "include/aesni_emu.inc" %define AES_CFB_128_ONE aes_cfb_128_one_sse_no_aesni %define AES_CFB_256_ONE 
aes_cfb_256_one_sse_no_aesni -%include "sse/aes_cfb_sse.asm" +%include "sse_t1/aes_cfb_sse.asm" diff --git a/lib/no-aesni/aes_ecb_by4_sse_no_aesni.asm b/lib/no-aesni/aes_ecb_by4_sse_no_aesni.asm index 63c6300faf1d5dd8053f471342679c151d63ed3b..f5072f67fc5eb6dcf74b20494b0defb1c053f95b 100644 --- a/lib/no-aesni/aes_ecb_by4_sse_no_aesni.asm +++ b/lib/no-aesni/aes_ecb_by4_sse_no_aesni.asm @@ -32,4 +32,4 @@ %define AES_ECB_DEC_128 aes_ecb_dec_128_sse_no_aesni %define AES_ECB_DEC_192 aes_ecb_dec_192_sse_no_aesni %define AES_ECB_DEC_256 aes_ecb_dec_256_sse_no_aesni -%include "sse/aes128_ecb_by4_sse.asm" +%include "sse_t1/aes128_ecb_by4_sse.asm" diff --git a/lib/no-aesni/crc16_x25_sse_no_aesni.asm b/lib/no-aesni/crc16_x25_sse_no_aesni.asm index 4fff62114103efa329e4b34290f0b40e731ece06..9c46b805608070de418e999a53469d59daa37cf8 100644 --- a/lib/no-aesni/crc16_x25_sse_no_aesni.asm +++ b/lib/no-aesni/crc16_x25_sse_no_aesni.asm @@ -28,4 +28,4 @@ %include "include/aesni_emu.inc" %define CRC16_X25_FN crc16_x25_sse_no_aesni %define CRC32_REFL_FN crc32_refl_by8_sse_no_aesni -%include "sse/crc16_x25_sse.asm" +%include "sse_t1/crc16_x25_sse.asm" diff --git a/lib/no-aesni/crc32_by8_sse_no_aesni.asm b/lib/no-aesni/crc32_by8_sse_no_aesni.asm index 9076e0f98d097d688dc944b16bc3016d006f59f2..141c626bbefa177b2bf5e8b52563641585f01361 100644 --- a/lib/no-aesni/crc32_by8_sse_no_aesni.asm +++ b/lib/no-aesni/crc32_by8_sse_no_aesni.asm @@ -27,4 +27,4 @@ %include "include/aesni_emu.inc" %define CRC32_FN crc32_by8_sse_no_aesni -%include "sse/crc32_by8_sse.asm" +%include "sse_t1/crc32_by8_sse.asm" diff --git a/lib/no-aesni/crc32_fp_sse_no_aesni.asm b/lib/no-aesni/crc32_fp_sse_no_aesni.asm index 20b40ad0af851ad32e941105f1330988cb6ab5d4..4dccb4e9c94d0ecdc20e54f2aa6ea4b0688c2c95 100644 --- a/lib/no-aesni/crc32_fp_sse_no_aesni.asm +++ b/lib/no-aesni/crc32_fp_sse_no_aesni.asm @@ -30,4 +30,4 @@ %define CRC11_FP_HEADER_FN crc11_fp_header_sse_no_aesni %define CRC7_FP_HEADER_FN crc7_fp_header_sse_no_aesni %define CRC32_FN crc32_by8_sse_no_aesni -%include "sse/crc32_fp_sse.asm" +%include "sse_t1/crc32_fp_sse.asm" diff --git a/lib/no-aesni/crc32_iuup_sse_no_aesni.asm b/lib/no-aesni/crc32_iuup_sse_no_aesni.asm index 9aa730477ca9325b4cd255230a97aa306709246b..663acc2b7f0067ead491117c5156efb9d29e3092 100644 --- a/lib/no-aesni/crc32_iuup_sse_no_aesni.asm +++ b/lib/no-aesni/crc32_iuup_sse_no_aesni.asm @@ -29,4 +29,4 @@ %define CRC10_IUUP_DATA_FN crc10_iuup_data_sse_no_aesni %define CRC6_IUUP_HEADER_FN crc6_iuup_header_sse_no_aesni %define CRC32_FN crc32_by8_sse_no_aesni -%include "sse/crc32_iuup_sse.asm" +%include "sse_t1/crc32_iuup_sse.asm" diff --git a/lib/no-aesni/crc32_lte_sse_no_aesni.asm b/lib/no-aesni/crc32_lte_sse_no_aesni.asm index 8e7fac8c32900ff9ce05d3669fb8cfafe42d7d9d..9b21b49e0901ebc3184c8b30928b2e8bf3cb3069 100644 --- a/lib/no-aesni/crc32_lte_sse_no_aesni.asm +++ b/lib/no-aesni/crc32_lte_sse_no_aesni.asm @@ -29,4 +29,4 @@ %define CRC32_LTE24A_FN crc24_lte_a_sse_no_aesni %define CRC32_LTE24B_FN crc24_lte_b_sse_no_aesni %define CRC32_FN crc32_by8_sse_no_aesni -%include "sse/crc32_lte_sse.asm" +%include "sse_t1/crc32_lte_sse.asm" diff --git a/lib/no-aesni/crc32_refl_by8_sse_no_aesni.asm b/lib/no-aesni/crc32_refl_by8_sse_no_aesni.asm index 1d7c3c6802048fd01f0ad4921f123ac2fb600e41..53027208dfbfb42238a89cb2748ce0c5fe8b6b0e 100644 --- a/lib/no-aesni/crc32_refl_by8_sse_no_aesni.asm +++ b/lib/no-aesni/crc32_refl_by8_sse_no_aesni.asm @@ -27,4 +27,4 @@ %include "include/aesni_emu.inc" %define CRC32_REFL_FN crc32_refl_by8_sse_no_aesni 
-%include "sse/crc32_refl_by8_sse.asm" +%include "sse_t1/crc32_refl_by8_sse.asm" diff --git a/lib/no-aesni/crc32_sctp_sse_no_aesni.asm b/lib/no-aesni/crc32_sctp_sse_no_aesni.asm index 3b438b5516936affcb344459c85cad33dd5c5036..2f7d9b6b3f4573bc24926aa9811e43fc4e9e5593 100644 --- a/lib/no-aesni/crc32_sctp_sse_no_aesni.asm +++ b/lib/no-aesni/crc32_sctp_sse_no_aesni.asm @@ -28,4 +28,4 @@ %include "include/aesni_emu.inc" %define CRC32_SCTP_FN crc32_sctp_sse_no_aesni %define CRC32_FN crc32_by8_sse_no_aesni -%include "sse/crc32_sctp_sse.asm" +%include "sse_t1/crc32_sctp_sse.asm" diff --git a/lib/no-aesni/crc32_wimax_sse_no_aesni.asm b/lib/no-aesni/crc32_wimax_sse_no_aesni.asm index 3c503913a337c6c91d195794b6c51bf9a7b3a479..0c6948296e881cee74666e5c947d292fa138d94c 100644 --- a/lib/no-aesni/crc32_wimax_sse_no_aesni.asm +++ b/lib/no-aesni/crc32_wimax_sse_no_aesni.asm @@ -29,4 +29,4 @@ %define CRC32_WIMAX_DATA_FN crc32_wimax_ofdma_data_sse_no_aesni %define CRC8_WIMAX_HCS_FN crc8_wimax_ofdma_hcs_sse_no_aesni %define CRC32_FN crc32_by8_sse_no_aesni -%include "sse/crc32_wimax_sse.asm" +%include "sse_t1/crc32_wimax_sse.asm" diff --git a/lib/no-aesni/ethernet_fcs_sse_no_aesni.asm b/lib/no-aesni/ethernet_fcs_sse_no_aesni.asm index 8e5bbcdbbd725b60678796c36b6302ff5f81c4a5..c0f9f0fcbdea421ef0ec691807ad599f787dcab1 100644 --- a/lib/no-aesni/ethernet_fcs_sse_no_aesni.asm +++ b/lib/no-aesni/ethernet_fcs_sse_no_aesni.asm @@ -29,4 +29,4 @@ %define ETHERNET_FCS_FN ethernet_fcs_sse_no_aesni %define ETHERNET_FCS_FN_LOCAL ethernet_fcs_sse_no_aesni_local %define CRC32_REFL_FN crc32_refl_by8_sse_no_aesni -%include "sse/ethernet_fcs_sse.asm" +%include "sse_t1/ethernet_fcs_sse.asm" diff --git a/lib/no-aesni/gcm128_api_sse_no_aesni.asm b/lib/no-aesni/gcm128_api_sse_no_aesni.asm index 4d15cc6a207eb32e95fa2c0fa64671cc93a040bc..e116bcf6c4671fc13acdb35f4c191b3f1eca4798 100644 --- a/lib/no-aesni/gcm128_api_sse_no_aesni.asm +++ b/lib/no-aesni/gcm128_api_sse_no_aesni.asm @@ -29,4 +29,4 @@ %include "include/aesni_emu.inc" %define NO_AESNI -%include "sse/gcm128_api_by8_sse.asm" +%include "sse_t1/gcm128_api_by8_sse.asm" diff --git a/lib/no-aesni/gcm128_gmac_api_sse_no_aesni.asm b/lib/no-aesni/gcm128_gmac_api_sse_no_aesni.asm index 4ad41f80d8f7b06c316b36b353bc52a7c23d6f96..8fa5c283519de20a6a1e783729c8cf68bd328a81 100644 --- a/lib/no-aesni/gcm128_gmac_api_sse_no_aesni.asm +++ b/lib/no-aesni/gcm128_gmac_api_sse_no_aesni.asm @@ -29,4 +29,4 @@ %include "include/aesni_emu.inc" %define NO_AESNI -%include "sse/gcm128_gmac_api_by8_sse.asm" +%include "sse_t1/gcm128_gmac_api_by8_sse.asm" diff --git a/lib/no-aesni/gcm128_sgl_api_sse_no_aesni.asm b/lib/no-aesni/gcm128_sgl_api_sse_no_aesni.asm index 40c336a2e2d0b7f2e236d18d7d4613a370c4fec6..a6750c2fb39d4539eb56cf511b864782ce7a8253 100644 --- a/lib/no-aesni/gcm128_sgl_api_sse_no_aesni.asm +++ b/lib/no-aesni/gcm128_sgl_api_sse_no_aesni.asm @@ -29,4 +29,4 @@ %include "include/aesni_emu.inc" %define NO_AESNI -%include "sse/gcm128_sgl_api_by8_sse.asm" +%include "sse_t1/gcm128_sgl_api_by8_sse.asm" diff --git a/lib/no-aesni/gcm192_api_sse_no_aesni.asm b/lib/no-aesni/gcm192_api_sse_no_aesni.asm index 0be37a4df3f854c8e619dc36cbfe4498b0703c21..efe16366de1e49c461c6f10d450a48281426a27c 100644 --- a/lib/no-aesni/gcm192_api_sse_no_aesni.asm +++ b/lib/no-aesni/gcm192_api_sse_no_aesni.asm @@ -29,4 +29,4 @@ %include "include/aesni_emu.inc" %define NO_AESNI -%include "sse/gcm192_api_by8_sse.asm" +%include "sse_t1/gcm192_api_by8_sse.asm" diff --git a/lib/no-aesni/gcm192_gmac_api_sse_no_aesni.asm 
b/lib/no-aesni/gcm192_gmac_api_sse_no_aesni.asm index e6cce7c7e68bda1fff9cc175a3e7119e8589bc89..8c6345beeed76c4283a73113a4e9cd875ebc5112 100644 --- a/lib/no-aesni/gcm192_gmac_api_sse_no_aesni.asm +++ b/lib/no-aesni/gcm192_gmac_api_sse_no_aesni.asm @@ -29,4 +29,4 @@ %include "include/aesni_emu.inc" %define NO_AESNI -%include "sse/gcm192_gmac_api_by8_sse.asm" +%include "sse_t1/gcm192_gmac_api_by8_sse.asm" diff --git a/lib/no-aesni/gcm192_sgl_api_sse_no_aesni.asm b/lib/no-aesni/gcm192_sgl_api_sse_no_aesni.asm index d1bf7f07ef6a3f6dba381a6cfec5c7ece0505461..7a0a962ecc4491a6e9f5fbf626160c1378d3d1e0 100644 --- a/lib/no-aesni/gcm192_sgl_api_sse_no_aesni.asm +++ b/lib/no-aesni/gcm192_sgl_api_sse_no_aesni.asm @@ -29,4 +29,4 @@ %include "include/aesni_emu.inc" %define NO_AESNI -%include "sse/gcm192_sgl_api_by8_sse.asm" +%include "sse_t1/gcm192_sgl_api_by8_sse.asm" diff --git a/lib/no-aesni/gcm256_api_sse_no_aesni.asm b/lib/no-aesni/gcm256_api_sse_no_aesni.asm index 7aeae36931d94262f272ec95abed199cc6a40010..320ec91e99e13406ee75232963d4e7b5e4e9f666 100644 --- a/lib/no-aesni/gcm256_api_sse_no_aesni.asm +++ b/lib/no-aesni/gcm256_api_sse_no_aesni.asm @@ -29,4 +29,4 @@ %include "include/aesni_emu.inc" %define NO_AESNI -%include "sse/gcm256_api_by8_sse.asm" +%include "sse_t1/gcm256_api_by8_sse.asm" diff --git a/lib/no-aesni/gcm256_gmac_api_sse_no_aesni.asm b/lib/no-aesni/gcm256_gmac_api_sse_no_aesni.asm index a484dfd44082b237922133dc453a369f8f790ad4..2507431982774c2903ace466720d988c6a447baa 100644 --- a/lib/no-aesni/gcm256_gmac_api_sse_no_aesni.asm +++ b/lib/no-aesni/gcm256_gmac_api_sse_no_aesni.asm @@ -29,4 +29,4 @@ %include "include/aesni_emu.inc" %define NO_AESNI -%include "sse/gcm256_gmac_api_by8_sse.asm" +%include "sse_t1/gcm256_gmac_api_by8_sse.asm" diff --git a/lib/no-aesni/gcm256_sgl_api_sse_no_aesni.asm b/lib/no-aesni/gcm256_sgl_api_sse_no_aesni.asm index 6d9cac6e942c517fa6b60d54b13abdb7e9f7a143..713924e701782435c13c5109c893bdc09b942550 100644 --- a/lib/no-aesni/gcm256_sgl_api_sse_no_aesni.asm +++ b/lib/no-aesni/gcm256_sgl_api_sse_no_aesni.asm @@ -29,4 +29,4 @@ %include "include/aesni_emu.inc" %define NO_AESNI -%include "sse/gcm256_sgl_api_by8_sse.asm" +%include "sse_t1/gcm256_sgl_api_by8_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes128_cbc_enc_flush_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes128_cbc_enc_flush_sse_no_aesni.asm index 7e7bc17b79e1f670f902b17c112701f2229366d6..f0cc5e804a91ecf807be299d9f342b28982c3616 100644 --- a/lib/no-aesni/mb_mgr_aes128_cbc_enc_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes128_cbc_enc_flush_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_128_x4_no_aesni %define FLUSH_JOB_AES_ENC flush_job_aes128_enc_sse_no_aesni -%include "sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes128_cbc_enc_submit_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes128_cbc_enc_submit_sse_no_aesni.asm index 2e53cf8ed226b8f38cba93b163f3d36f16375579..76332f3788d7f904d81bb09bf46226eeea3086c7 100644 --- a/lib/no-aesni/mb_mgr_aes128_cbc_enc_submit_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes128_cbc_enc_submit_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_128_x4_no_aesni %define SUBMIT_JOB_AES_ENC submit_job_aes128_enc_sse_no_aesni -%include "sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes128_cbcs_1_9_flush_sse_no_aesni.asm 
b/lib/no-aesni/mb_mgr_aes128_cbcs_1_9_flush_sse_no_aesni.asm index fe489abba2b9796ceb617b94b0dd08a190cc9aa6..a6bca7a0385cb8f583dcec8b2ab2ec50bcf583dd 100644 --- a/lib/no-aesni/mb_mgr_aes128_cbcs_1_9_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes128_cbcs_1_9_flush_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define AES_CBCS_ENC_X4 aes_cbcs_1_9_enc_128_x4_no_aesni %define FLUSH_JOB_AES_CBCS_ENC flush_job_aes128_cbcs_1_9_enc_sse_no_aesni -%include "sse/mb_mgr_aes128_cbcs_1_9_flush_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbcs_1_9_flush_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes128_cbcs_1_9_submit_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes128_cbcs_1_9_submit_sse_no_aesni.asm index 341c0bfceb4c0d686a533f192431378fb0ccb49f..1793d305e0546f19b383631a68ed1ca0fd5573f7 100644 --- a/lib/no-aesni/mb_mgr_aes128_cbcs_1_9_submit_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes128_cbcs_1_9_submit_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define AES_CBCS_ENC_X4 aes_cbcs_1_9_enc_128_x4_no_aesni %define SUBMIT_JOB_AES_CBCS_ENC submit_job_aes128_cbcs_1_9_enc_sse_no_aesni -%include "sse/mb_mgr_aes128_cbcs_1_9_submit_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbcs_1_9_submit_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes128_ccm_auth_submit_flush_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes128_ccm_auth_submit_flush_sse_no_aesni.asm index a12ce6b501ea13cd7aa2ad7b2c0b6efc616c9658..e88628397f31d2eb1d293b8670b0e93eeb911c9d 100644 --- a/lib/no-aesni/mb_mgr_aes128_ccm_auth_submit_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes128_ccm_auth_submit_flush_sse_no_aesni.asm @@ -29,4 +29,4 @@ %define AES_CBC_MAC aes128_cbc_mac_x4_no_aesni %define SUBMIT_JOB_AES_CCM_AUTH submit_job_aes128_ccm_auth_sse_no_aesni %define FLUSH_JOB_AES_CCM_AUTH flush_job_aes128_ccm_auth_sse_no_aesni -%include "sse/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes128_cmac_submit_flush_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes128_cmac_submit_flush_sse_no_aesni.asm index 419fa21b3fef97d4172c949894349a27045a547b..dfd49155bfd851d5b528d8c901ef8022b99aac7f 100644 --- a/lib/no-aesni/mb_mgr_aes128_cmac_submit_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes128_cmac_submit_flush_sse_no_aesni.asm @@ -28,4 +28,4 @@ %define AES_CBC_MAC aes128_cbc_mac_x4_no_aesni %define SUBMIT_JOB_AES_CMAC_AUTH submit_job_aes128_cmac_auth_sse_no_aesni %define FLUSH_JOB_AES_CMAC_AUTH flush_job_aes128_cmac_auth_sse_no_aesni -%include "sse/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes128_xcbc_flush_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes128_xcbc_flush_sse_no_aesni.asm index 92db2710820ea4ce227120921b1b25b2e80b804e..116f0ab154ab49ed8e22df06095a7374d9407a20 100644 --- a/lib/no-aesni/mb_mgr_aes128_xcbc_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes128_xcbc_flush_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define AES_XCBC_X4 aes_xcbc_mac_128_x4_no_aesni %define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_sse_no_aesni -%include "sse/mb_mgr_aes128_xcbc_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_xcbc_flush_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes128_xcbc_submit_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes128_xcbc_submit_sse_no_aesni.asm index 4da294b778ee280cb96dadc4ce4b58ff9844ccd9..20b036ea2dcab9965e13dac38d6d3033df7a184d 100644 --- a/lib/no-aesni/mb_mgr_aes128_xcbc_submit_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes128_xcbc_submit_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define 
AES_XCBC_X4 aes_xcbc_mac_128_x4_no_aesni %define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_sse_no_aesni -%include "sse/mb_mgr_aes128_xcbc_submit_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_xcbc_submit_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes192_cbc_enc_flush_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes192_cbc_enc_flush_sse_no_aesni.asm index 4a84bef5a912eba6a0998d55ae2998c551eb1759..e700ca6225a6acd412985735e9206d0179e20f7b 100644 --- a/lib/no-aesni/mb_mgr_aes192_cbc_enc_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes192_cbc_enc_flush_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_192_x4_no_aesni %define FLUSH_JOB_AES_ENC flush_job_aes192_enc_sse_no_aesni -%include "sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes192_cbc_enc_submit_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes192_cbc_enc_submit_sse_no_aesni.asm index cf864cc2ef0ea25f12572fe387591ebfc73dc522..50c86fe776fe4fea31edb1ed49e4fccedaed9017 100644 --- a/lib/no-aesni/mb_mgr_aes192_cbc_enc_submit_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes192_cbc_enc_submit_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_192_x4_no_aesni %define SUBMIT_JOB_AES_ENC submit_job_aes192_enc_sse_no_aesni -%include "sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes256_cbc_enc_flush_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes256_cbc_enc_flush_sse_no_aesni.asm index c784aca309fa3bbb5c8e4b7f56f9950be327b813..4256da5f7b5232927b300933b3af9cebf7040046 100644 --- a/lib/no-aesni/mb_mgr_aes256_cbc_enc_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes256_cbc_enc_flush_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_256_x4_no_aesni %define FLUSH_JOB_AES_ENC flush_job_aes256_enc_sse_no_aesni -%include "sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes256_cbc_enc_submit_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes256_cbc_enc_submit_sse_no_aesni.asm index aee87cb152f291d890e80ae33a4c226c95799b50..46de48dad4a2260eaf854613c9ef82832e1c9ec2 100644 --- a/lib/no-aesni/mb_mgr_aes256_cbc_enc_submit_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes256_cbc_enc_submit_sse_no_aesni.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_256_x4_no_aesni %define SUBMIT_JOB_AES_ENC submit_job_aes256_enc_sse_no_aesni -%include "sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes256_ccm_auth_submit_flush_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes256_ccm_auth_submit_flush_sse_no_aesni.asm index 52e3fe8530fc7350e8910db6c040229943e5751a..02af37da7e59956f9526fd0475d185a744636f3c 100644 --- a/lib/no-aesni/mb_mgr_aes256_ccm_auth_submit_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes256_ccm_auth_submit_flush_sse_no_aesni.asm @@ -30,4 +30,4 @@ %define AES_CBC_MAC aes256_cbc_mac_x4_no_aesni %define SUBMIT_JOB_AES_CCM_AUTH submit_job_aes256_ccm_auth_sse_no_aesni %define FLUSH_JOB_AES_CCM_AUTH flush_job_aes256_ccm_auth_sse_no_aesni -%include "sse/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_aes256_cmac_submit_flush_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_aes256_cmac_submit_flush_sse_no_aesni.asm index 
0aa7fedd67a8539ce660f12ab5bdeb2bf5aa220e..7d543d0ea3b216b1c5b86844e458caacaaa5f923 100644 --- a/lib/no-aesni/mb_mgr_aes256_cmac_submit_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_aes256_cmac_submit_flush_sse_no_aesni.asm @@ -28,4 +28,4 @@ %define AES_CBC_MAC aes256_cbc_mac_x4_no_aesni %define SUBMIT_JOB_AES_CMAC_AUTH submit_job_aes256_cmac_auth_sse_no_aesni %define FLUSH_JOB_AES_CMAC_AUTH flush_job_aes256_cmac_auth_sse_no_aesni -%include "sse/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" diff --git a/lib/no-aesni/mb_mgr_sse_no_aesni.c b/lib/no-aesni/mb_mgr_sse_no_aesni.c index b84eb36599d293969ea6625c95b4acbbd5c2794f..0b3a15865d1c90482c05c0951260dae9c1a0a877 100644 --- a/lib/no-aesni/mb_mgr_sse_no_aesni.c +++ b/lib/no-aesni/mb_mgr_sse_no_aesni.c @@ -29,130 +29,40 @@ #include #include -#define CLEAR_SCRATCH_SIMD_REGS clear_scratch_xmms_sse +#define SSE_AESNI_EMU #include "ipsec-mb.h" #include "include/ipsec_ooo_mgr.h" -#include "include/kasumi_internal.h" +#include "include/kasumi_interface.h" #include "include/zuc_internal.h" #include "include/snow3g.h" #include "include/chacha20_poly1305.h" #include "include/save_xmms.h" -#include "include/asm.h" #include "include/des.h" #include "include/gcm.h" #include "include/noaesni.h" #include "include/error.h" +#include "include/arch_x86_64.h" /* self-test */ +#include "include/arch_noaesni.h" +#include "include/arch_sse_type1.h" -/* ====================================================================== */ - -IMB_JOB *submit_job_aes128_enc_sse_no_aesni(MB_MGR_AES_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_aes128_enc_sse_no_aesni(MB_MGR_AES_OOO *state); - -IMB_JOB *submit_job_aes192_enc_sse_no_aesni(MB_MGR_AES_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_aes192_enc_sse_no_aesni(MB_MGR_AES_OOO *state); - -IMB_JOB *submit_job_aes256_enc_sse_no_aesni(MB_MGR_AES_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_aes256_enc_sse_no_aesni(MB_MGR_AES_OOO *state); - -IMB_JOB *submit_job_hmac_sse(MB_MGR_HMAC_SHA_1_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_hmac_sse(MB_MGR_HMAC_SHA_1_OOO *state); - -IMB_JOB *submit_job_hmac_sha_224_sse(MB_MGR_HMAC_SHA_256_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_hmac_sha_224_sse(MB_MGR_HMAC_SHA_256_OOO *state); - -IMB_JOB *submit_job_hmac_sha_256_sse(MB_MGR_HMAC_SHA_256_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_hmac_sha_256_sse(MB_MGR_HMAC_SHA_256_OOO *state); - -IMB_JOB *submit_job_hmac_sha_384_sse(MB_MGR_HMAC_SHA_512_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_hmac_sha_384_sse(MB_MGR_HMAC_SHA_512_OOO *state); - -IMB_JOB *submit_job_hmac_sha_512_sse(MB_MGR_HMAC_SHA_512_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_hmac_sha_512_sse(MB_MGR_HMAC_SHA_512_OOO *state); - -IMB_JOB *submit_job_hmac_md5_sse(MB_MGR_HMAC_MD5_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_hmac_md5_sse(MB_MGR_HMAC_MD5_OOO *state); - -IMB_JOB *submit_job_aes_xcbc_sse_no_aesni(MB_MGR_AES_XCBC_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_aes_xcbc_sse_no_aesni(MB_MGR_AES_XCBC_OOO *state); - -IMB_JOB *submit_job_aes128_cmac_auth_sse_no_aesni(MB_MGR_CMAC_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_aes128_cmac_auth_sse_no_aesni(MB_MGR_CMAC_OOO *state); - -IMB_JOB *submit_job_aes256_cmac_auth_sse_no_aesni(MB_MGR_CMAC_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_aes256_cmac_auth_sse_no_aesni(MB_MGR_CMAC_OOO *state); - -IMB_JOB *submit_job_aes128_ccm_auth_sse_no_aesni(MB_MGR_CCM_OOO *state, - IMB_JOB *job); - -IMB_JOB 
*flush_job_aes128_ccm_auth_sse_no_aesni(MB_MGR_CCM_OOO *state); - -IMB_JOB *submit_job_aes256_ccm_auth_sse_no_aesni(MB_MGR_CCM_OOO *state, - IMB_JOB *job); - -IMB_JOB *flush_job_aes256_ccm_auth_sse_no_aesni(MB_MGR_CCM_OOO *state); - -IMB_JOB *submit_job_aes_cntr_sse_no_aesni(IMB_JOB *job); - -IMB_JOB *submit_job_aes_cntr_bit_sse_no_aesni(IMB_JOB *job); +#include "include/ooo_mgr_reset.h" -IMB_JOB *submit_job_zuc_eea3_sse_no_aesni(MB_MGR_ZUC_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_zuc_eea3_sse_no_aesni(MB_MGR_ZUC_OOO *state); - -IMB_JOB *submit_job_zuc256_eea3_sse_no_aesni(MB_MGR_ZUC_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_zuc256_eea3_sse_no_aesni(MB_MGR_ZUC_OOO *state); - -IMB_JOB *submit_job_zuc_eia3_sse_no_aesni(MB_MGR_ZUC_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_zuc_eia3_sse_no_aesni(MB_MGR_ZUC_OOO *state); - -IMB_JOB *submit_job_zuc256_eia3_sse_no_aesni(MB_MGR_ZUC_OOO *state, - IMB_JOB *job, - const uint64_t tag_sz); -IMB_JOB *flush_job_zuc256_eia3_sse_no_aesni(MB_MGR_ZUC_OOO *state, - const uint64_t tag_sz); - -uint32_t hec_32_sse_no_aesni(const uint8_t *in); -uint64_t hec_64_sse_no_aesni(const uint8_t *in); - -IMB_JOB *submit_job_aes128_cbcs_1_9_enc_sse_no_aesni(MB_MGR_AES_OOO *state, - IMB_JOB *job); -IMB_JOB *flush_job_aes128_cbcs_1_9_enc_sse_no_aesni(MB_MGR_AES_OOO *state); - -IMB_JOB *submit_job_chacha20_enc_dec_sse(IMB_JOB *job); - -void *poly1305_mac_scalar(IMB_JOB *job); - -IMB_JOB *snow_v_sse_no_aesni(IMB_JOB *job); -IMB_JOB *snow_v_aead_init_sse_no_aesni(IMB_JOB *job); +/* ====================================================================== */ #define SAVE_XMMS save_xmms #define RESTORE_XMMS restore_xmms -#define SUBMIT_JOB_AES128_ENC submit_job_aes128_enc_sse_no_aesni -#define SUBMIT_JOB_AES128_DEC submit_job_aes128_dec_sse_no_aesni -#define FLUSH_JOB_AES128_ENC flush_job_aes128_enc_sse_no_aesni -#define SUBMIT_JOB_AES192_ENC submit_job_aes192_enc_sse_no_aesni -#define SUBMIT_JOB_AES192_DEC submit_job_aes192_dec_sse_no_aesni -#define FLUSH_JOB_AES192_ENC flush_job_aes192_enc_sse_no_aesni -#define SUBMIT_JOB_AES256_ENC submit_job_aes256_enc_sse_no_aesni -#define SUBMIT_JOB_AES256_DEC submit_job_aes256_dec_sse_no_aesni -#define FLUSH_JOB_AES256_ENC flush_job_aes256_enc_sse_no_aesni +#define SUBMIT_JOB_AES_CBC_128_ENC submit_job_aes128_enc_sse_no_aesni +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_sse_no_aesni +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_sse_no_aesni +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_sse_no_aesni +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_sse_no_aesni +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_sse_no_aesni +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_sse_no_aesni +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_sse_no_aesni +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_sse_no_aesni #define SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_sse_no_aesni #define SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_sse_no_aesni #define SUBMIT_JOB_AES_ECB_192_ENC submit_job_aes_ecb_192_enc_sse_no_aesni @@ -179,9 +89,22 @@ IMB_JOB *snow_v_aead_init_sse_no_aesni(IMB_JOB *job); #define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_sse #define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_sse_no_aesni #define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_sse_no_aesni - -#define SUBMIT_JOB_AES_CNTR submit_job_aes_cntr_sse_no_aesni -#define SUBMIT_JOB_AES_CNTR_BIT submit_job_aes_cntr_bit_sse_no_aesni +#define SUBMIT_JOB_SHA1 submit_job_sha1_sse +#define 
FLUSH_JOB_SHA1 flush_job_sha1_sse +#define SUBMIT_JOB_SHA224 submit_job_sha224_sse +#define FLUSH_JOB_SHA224 flush_job_sha224_sse +#define SUBMIT_JOB_SHA256 submit_job_sha256_sse +#define FLUSH_JOB_SHA256 flush_job_sha256_sse +#define SUBMIT_JOB_SHA1_NI submit_job_sha1_ni_sse +#define FLUSH_JOB_SHA1_NI flush_job_sha1_ni_sse +#define SUBMIT_JOB_SHA224_NI submit_job_sha224_ni_sse +#define FLUSH_JOB_SHA224_NI flush_job_sha224_ni_sse +#define SUBMIT_JOB_SHA256_NI submit_job_sha256_ni_sse +#define FLUSH_JOB_SHA256_NI flush_job_sha256_ni_sse +#define SUBMIT_JOB_SHA384 submit_job_sha384_sse +#define FLUSH_JOB_SHA384 flush_job_sha384_sse +#define SUBMIT_JOB_SHA512 submit_job_sha512_sse +#define FLUSH_JOB_SHA512 flush_job_sha512_sse #define SUBMIT_JOB_ZUC_EEA3 submit_job_zuc_eea3_sse_no_aesni #define FLUSH_JOB_ZUC_EEA3 flush_job_zuc_eea3_sse_no_aesni @@ -196,9 +119,12 @@ IMB_JOB *snow_v_aead_init_sse_no_aesni(IMB_JOB *job); #define AES_CBC_DEC_192 aes_cbc_dec_192_sse_no_aesni #define AES_CBC_DEC_256 aes_cbc_dec_256_sse_no_aesni -#define AES_CNTR_128 aes_cntr_128_sse_no_aesni -#define AES_CNTR_192 aes_cntr_192_sse_no_aesni -#define AES_CNTR_256 aes_cntr_256_sse_no_aesni +#define AES_CTR_128 aes_cntr_128_sse_no_aesni +#define AES_CTR_192 aes_cntr_192_sse_no_aesni +#define AES_CTR_256 aes_cntr_256_sse_no_aesni +#define AES_CTR_128_BIT aes_cntr_bit_128_sse_no_aesni +#define AES_CTR_192_BIT aes_cntr_bit_192_sse_no_aesni +#define AES_CTR_256_BIT aes_cntr_bit_256_sse_no_aesni #define AES_CNTR_CCM_128 aes_cntr_ccm_128_sse_no_aesni #define AES_CNTR_CCM_256 aes_cntr_ccm_256_sse_no_aesni @@ -215,13 +141,6 @@ IMB_JOB *snow_v_aead_init_sse_no_aesni(IMB_JOB *job); #define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_sse_no_aesni #define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_sse_no_aesni -#define AES_GCM_DEC_128 aes_gcm_dec_128_sse_no_aesni -#define AES_GCM_ENC_128 aes_gcm_enc_128_sse_no_aesni -#define AES_GCM_DEC_192 aes_gcm_dec_192_sse_no_aesni -#define AES_GCM_ENC_192 aes_gcm_enc_192_sse_no_aesni -#define AES_GCM_DEC_256 aes_gcm_dec_256_sse_no_aesni -#define AES_GCM_ENC_256 aes_gcm_enc_256_sse_no_aesni - #define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_sse_no_aesni #define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_sse_no_aesni #define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_sse_no_aesni @@ -239,6 +158,14 @@ IMB_JOB *snow_v_aead_init_sse_no_aesni(IMB_JOB *job); #define SUBMIT_JOB_NOCHECK submit_job_nocheck_sse_no_aesni #define GET_NEXT_JOB get_next_job_sse_no_aesni #define GET_COMPLETED_JOB get_completed_job_sse_no_aesni +#define GET_NEXT_BURST get_next_burst_sse_no_aesni +#define SUBMIT_BURST submit_burst_sse_no_aesni +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_sse_no_aesni +#define FLUSH_BURST flush_burst_sse_no_aesni +#define SUBMIT_CIPHER_BURST submit_cipher_burst_sse_no_aesni +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_sse_no_aesni +#define SUBMIT_HASH_BURST submit_hash_burst_sse_no_aesni +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_sse_no_aesni #define SUBMIT_JOB_AES128_DEC submit_job_aes128_dec_sse_no_aesni #define SUBMIT_JOB_AES192_DEC submit_job_aes192_dec_sse_no_aesni @@ -247,9 +174,9 @@ IMB_JOB *snow_v_aead_init_sse_no_aesni(IMB_JOB *job); /* ====================================================================== */ -#define SUBMIT_JOB_AES_ENC SUBMIT_JOB_AES_ENC_SSE -#define FLUSH_JOB_AES_ENC FLUSH_JOB_AES_ENC_SSE -#define SUBMIT_JOB_AES_DEC SUBMIT_JOB_AES_DEC_SSE +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_SSE +#define 
FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_SSE +#define SUBMIT_JOB_CIPHER_DEC SUBMIT_JOB_CIPHER_DEC_SSE #define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_SSE #define FLUSH_JOB_HASH FLUSH_JOB_HASH_SSE @@ -258,7 +185,6 @@ IMB_JOB *snow_v_aead_init_sse_no_aesni(IMB_JOB *job); #define AES_CFB_128_ONE aes_cfb_128_one_sse_no_aesni #define AES_CFB_256_ONE aes_cfb_256_one_sse_no_aesni -void aes128_cbc_mac_x4_no_aesni(AES_ARGS *args, uint64_t len); #define AES128_CBC_MAC aes128_cbc_mac_x4_no_aesni @@ -294,573 +220,86 @@ void aes128_cbc_mac_x4_no_aesni(AES_ARGS *args, uint64_t len); /* ====================================================================== */ -uint32_t -ethernet_fcs_sse_no_aesni_local(const void *msg, const uint64_t len, - const void *tag_ouput); - #define ETHERNET_FCS ethernet_fcs_sse_no_aesni_local -uint32_t ethernet_fcs_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc16_x25_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc32_sctp_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc24_lte_a_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc24_lte_b_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc16_fp_data_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc11_fp_header_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc7_fp_header_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc10_iuup_data_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc6_iuup_header_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t -crc32_wimax_ofdma_data_sse_no_aesni(const void *msg, const uint64_t len); -uint32_t crc8_wimax_ofdma_hcs_sse_no_aesni(const void *msg, const uint64_t len); - -/* ====================================================================== */ - -/* - * GCM submit / flush API for SSE arch without AESNI - */ -static IMB_JOB * -submit_job_aes_gcm_dec_sse_no_aesni(IMB_MGR *state, IMB_JOB *job) -{ - DECLARE_ALIGNED(struct gcm_context_data ctx, 16); - (void) state; - - if (16 == job->key_len_in_bytes) { - AES_GCM_DEC_IV_128(job->dec_keys, - &ctx, job->dst, - job->src + - job->cipher_start_src_offset_in_bytes, - job->msg_len_to_cipher_in_bytes, - job->iv, job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - } else if (24 == job->key_len_in_bytes) { - AES_GCM_DEC_IV_192(job->dec_keys, - &ctx, job->dst, - job->src + - job->cipher_start_src_offset_in_bytes, - job->msg_len_to_cipher_in_bytes, - job->iv, job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - } else { /* assume 32 bytes */ - AES_GCM_DEC_IV_256(job->dec_keys, - &ctx, job->dst, - job->src + - job->cipher_start_src_offset_in_bytes, - job->msg_len_to_cipher_in_bytes, - job->iv, job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - } - - job->status = IMB_STATUS_COMPLETED; - return job; -} - -static IMB_JOB * -submit_job_aes_gcm_enc_sse_no_aesni(IMB_MGR *state, IMB_JOB *job) -{ - DECLARE_ALIGNED(struct gcm_context_data ctx, 16); - (void) state; - - if (16 == job->key_len_in_bytes) { - AES_GCM_ENC_IV_128(job->enc_keys, - &ctx, job->dst, - job->src + - job->cipher_start_src_offset_in_bytes, - job->msg_len_to_cipher_in_bytes, - job->iv, job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - } else if 
(24 == job->key_len_in_bytes) { - AES_GCM_ENC_IV_192(job->enc_keys, - &ctx, job->dst, - job->src + - job->cipher_start_src_offset_in_bytes, - job->msg_len_to_cipher_in_bytes, - job->iv, job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - } else { /* assume 32 bytes */ - AES_GCM_ENC_IV_256(job->enc_keys, - &ctx, job->dst, - job->src + - job->cipher_start_src_offset_in_bytes, - job->msg_len_to_cipher_in_bytes, - job->iv, job->iv_len_in_bytes, - job->u.GCM.aad, - job->u.GCM.aad_len_in_bytes, - job->auth_tag_output, - job->auth_tag_output_len_in_bytes); - } - - job->status = IMB_STATUS_COMPLETED; - return job; -} - -IMB_DLL_LOCAL IMB_JOB * -submit_job_aes_cntr_sse_no_aesni(IMB_JOB *job) -{ - if (16 == job->key_len_in_bytes) - AES_CNTR_128(job->src + job->cipher_start_src_offset_in_bytes, - job->iv, - job->enc_keys, - job->dst, - job->msg_len_to_cipher_in_bytes, - job->iv_len_in_bytes); - else if (24 == job->key_len_in_bytes) - AES_CNTR_192(job->src + job->cipher_start_src_offset_in_bytes, - job->iv, - job->enc_keys, - job->dst, - job->msg_len_to_cipher_in_bytes, - job->iv_len_in_bytes); - else /* assume 32 bytes */ - AES_CNTR_256(job->src + job->cipher_start_src_offset_in_bytes, - job->iv, - job->enc_keys, - job->dst, - job->msg_len_to_cipher_in_bytes, - job->iv_len_in_bytes); - - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - -IMB_DLL_LOCAL IMB_JOB * -submit_job_aes_cntr_bit_sse_no_aesni(IMB_JOB *job) -{ - const uint64_t offset = job->cipher_start_src_offset_in_bytes; - - if (16 == job->key_len_in_bytes) - aes_cntr_bit_128_sse_no_aesni(job->src + offset, - job->iv, - job->enc_keys, - job->dst, - job->msg_len_to_cipher_in_bits, - job->iv_len_in_bytes); - else if (24 == job->key_len_in_bytes) - aes_cntr_bit_192_sse_no_aesni(job->src + offset, - job->iv, - job->enc_keys, - job->dst, - job->msg_len_to_cipher_in_bits, - job->iv_len_in_bytes); - else /* assume 32 bytes */ - aes_cntr_bit_256_sse_no_aesni(job->src + offset, - job->iv, - job->enc_keys, - job->dst, - job->msg_len_to_cipher_in_bits, - job->iv_len_in_bytes); - - job->status |= IMB_STATUS_COMPLETED_CIPHER; - return job; -} - /* ====================================================================== */ static void reset_ooo_mgrs(IMB_MGR *state) { - unsigned int j; - uint8_t *p; - size_t size; - MB_MGR_AES_OOO *aes128_ooo = state->aes128_ooo; - MB_MGR_AES_OOO *aes192_ooo = state->aes192_ooo; - MB_MGR_AES_OOO *aes256_ooo = state->aes256_ooo; - MB_MGR_DOCSIS_AES_OOO *docsis128_sec_ooo = state->docsis128_sec_ooo; - MB_MGR_DOCSIS_AES_OOO *docsis128_crc32_sec_ooo = - state->docsis128_crc32_sec_ooo; - MB_MGR_DOCSIS_AES_OOO *docsis256_sec_ooo = state->docsis256_sec_ooo; - MB_MGR_DOCSIS_AES_OOO *docsis256_crc32_sec_ooo = - state->docsis256_crc32_sec_ooo; - MB_MGR_HMAC_SHA_1_OOO *hmac_sha_1_ooo = state->hmac_sha_1_ooo; - MB_MGR_HMAC_SHA_256_OOO *hmac_sha_224_ooo = state->hmac_sha_224_ooo; - MB_MGR_HMAC_SHA_256_OOO *hmac_sha_256_ooo = state->hmac_sha_256_ooo; - MB_MGR_HMAC_SHA_512_OOO *hmac_sha_384_ooo = state->hmac_sha_384_ooo; - MB_MGR_HMAC_SHA_512_OOO *hmac_sha_512_ooo = state->hmac_sha_512_ooo; - MB_MGR_HMAC_MD5_OOO *hmac_md5_ooo = state->hmac_md5_ooo; - MB_MGR_AES_XCBC_OOO *aes_xcbc_ooo = state->aes_xcbc_ooo; - MB_MGR_CCM_OOO *aes_ccm_ooo = state->aes_ccm_ooo; - MB_MGR_CCM_OOO *aes256_ccm_ooo = state->aes256_ccm_ooo; - MB_MGR_CMAC_OOO *aes_cmac_ooo = state->aes_cmac_ooo; - MB_MGR_CMAC_OOO *aes256_cmac_ooo = state->aes256_cmac_ooo; - MB_MGR_ZUC_OOO 
*zuc_eea3_ooo = state->zuc_eea3_ooo; - MB_MGR_ZUC_OOO *zuc256_eea3_ooo = state->zuc256_eea3_ooo; - MB_MGR_ZUC_OOO *zuc_eia3_ooo = state->zuc_eia3_ooo; - MB_MGR_AES_OOO *aes128_cbcs_ooo = state->aes128_cbcs_ooo; - MB_MGR_ZUC_OOO *zuc256_eia3_ooo = state->zuc256_eia3_ooo; - /* Init AES out-of-order fields */ - memset(aes128_ooo->lens, 0xFF, - sizeof(aes128_ooo->lens)); - memset(&aes128_ooo->lens[0], 0, - sizeof(aes128_ooo->lens[0]) * 4); - memset(aes128_ooo->job_in_lane, 0, - sizeof(aes128_ooo->job_in_lane)); - aes128_ooo->unused_lanes = 0xF3210; - aes128_ooo->num_lanes_inuse = 0; - - - memset(aes192_ooo->lens, 0xFF, - sizeof(aes192_ooo->lens)); - memset(&aes192_ooo->lens[0], 0, - sizeof(aes192_ooo->lens[0]) * 4); - memset(aes192_ooo->job_in_lane, 0, - sizeof(aes192_ooo->job_in_lane)); - aes192_ooo->unused_lanes = 0xF3210; - aes192_ooo->num_lanes_inuse = 0; - - - memset(aes256_ooo->lens, 0xFF, - sizeof(aes256_ooo->lens)); - memset(&aes256_ooo->lens[0], 0, - sizeof(aes256_ooo->lens[0]) * 4); - memset(aes256_ooo->job_in_lane, 0, - sizeof(aes256_ooo->job_in_lane)); - aes256_ooo->unused_lanes = 0xF3210; - aes256_ooo->num_lanes_inuse = 0; - + ooo_mgr_aes_reset(state->aes128_ooo, 4); + ooo_mgr_aes_reset(state->aes192_ooo, 4); + ooo_mgr_aes_reset(state->aes256_ooo, 4); /* DOCSIS SEC BPI uses same settings as AES CBC */ - memset(docsis128_sec_ooo->lens, 0xFF, - sizeof(docsis128_sec_ooo->lens)); - memset(&docsis128_sec_ooo->lens[0], 0, - sizeof(docsis128_sec_ooo->lens[0]) * 4); - memset(docsis128_sec_ooo->job_in_lane, 0, - sizeof(docsis128_sec_ooo->job_in_lane)); - docsis128_sec_ooo->unused_lanes = 0xF3210; - docsis128_sec_ooo->num_lanes_inuse = 0; - - memset(docsis128_crc32_sec_ooo->lens, 0xFF, - sizeof(docsis128_crc32_sec_ooo->lens)); - memset(&docsis128_crc32_sec_ooo->lens[0], 0, - sizeof(docsis128_crc32_sec_ooo->lens[0]) * 4); - memset(docsis128_crc32_sec_ooo->job_in_lane, 0, - sizeof(docsis128_crc32_sec_ooo->job_in_lane)); - docsis128_crc32_sec_ooo->unused_lanes = 0xF3210; - docsis128_crc32_sec_ooo->num_lanes_inuse = 0; - - memset(docsis256_sec_ooo->lens, 0xFF, - sizeof(docsis256_sec_ooo->lens)); - memset(&docsis256_sec_ooo->lens[0], 0, - sizeof(docsis256_sec_ooo->lens[0]) * 4); - memset(docsis256_sec_ooo->job_in_lane, 0, - sizeof(docsis256_sec_ooo->job_in_lane)); - docsis256_sec_ooo->unused_lanes = 0xF3210; - docsis256_sec_ooo->num_lanes_inuse = 0; - - memset(docsis256_crc32_sec_ooo->lens, 0xFF, - sizeof(docsis256_crc32_sec_ooo->lens)); - memset(&docsis256_crc32_sec_ooo->lens[0], 0, - sizeof(docsis256_crc32_sec_ooo->lens[0]) * 4); - memset(docsis256_crc32_sec_ooo->job_in_lane, 0, - sizeof(docsis256_crc32_sec_ooo->job_in_lane)); - docsis256_crc32_sec_ooo->unused_lanes = 0xF3210; - docsis256_crc32_sec_ooo->num_lanes_inuse = 0; + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 4); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 4); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 4); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 4); /* Init ZUC out-of-order fields */ - memset(zuc_eea3_ooo->lens, 0, - sizeof(zuc_eea3_ooo->lens)); - memset(zuc_eea3_ooo->job_in_lane, 0, - sizeof(zuc_eea3_ooo->job_in_lane)); - zuc_eea3_ooo->unused_lanes = 0xFF03020100; - zuc_eea3_ooo->num_lanes_inuse = 0; - memset(&zuc_eea3_ooo->state, 0, - sizeof(zuc_eea3_ooo->state)); - zuc_eea3_ooo->init_not_done = 0; - zuc_eea3_ooo->unused_lane_bitmask = 0x0f; - - memset(zuc_eia3_ooo->lens, 0xFF, - sizeof(zuc_eia3_ooo->lens)); - memset(zuc_eia3_ooo->job_in_lane, 0, - 
sizeof(zuc_eia3_ooo->job_in_lane)); - zuc_eia3_ooo->unused_lanes = 0xFF03020100; - zuc_eia3_ooo->num_lanes_inuse = 0; - memset(&zuc_eia3_ooo->state, 0, - sizeof(zuc_eia3_ooo->state)); - zuc_eia3_ooo->init_not_done = 0; - zuc_eia3_ooo->unused_lane_bitmask = 0x0f; - - memset(zuc256_eea3_ooo->lens, 0, - sizeof(zuc256_eea3_ooo->lens)); - memset(zuc256_eea3_ooo->job_in_lane, 0, - sizeof(zuc256_eea3_ooo->job_in_lane)); - zuc256_eea3_ooo->unused_lanes = 0xFF03020100; - zuc256_eea3_ooo->num_lanes_inuse = 0; - memset(&zuc256_eea3_ooo->state, 0, - sizeof(zuc256_eea3_ooo->state)); - zuc256_eea3_ooo->init_not_done = 0; - zuc256_eea3_ooo->unused_lane_bitmask = 0x0f; - - memset(zuc256_eia3_ooo->lens, 0xFF, - sizeof(zuc256_eia3_ooo->lens)); - memset(zuc256_eia3_ooo->job_in_lane, 0, - sizeof(zuc256_eia3_ooo->job_in_lane)); - zuc256_eia3_ooo->unused_lanes = 0xFF03020100; - zuc256_eia3_ooo->num_lanes_inuse = 0; - memset(&zuc256_eia3_ooo->state, 0, - sizeof(zuc256_eia3_ooo->state)); - zuc256_eia3_ooo->init_not_done = 0; - zuc256_eia3_ooo->unused_lane_bitmask = 0x0f; + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 4); /* Init HMAC/SHA1 out-of-order fields */ - hmac_sha_1_ooo->lens[0] = 0; - hmac_sha_1_ooo->lens[1] = 0; - hmac_sha_1_ooo->lens[2] = 0; - hmac_sha_1_ooo->lens[3] = 0; - hmac_sha_1_ooo->lens[4] = 0xFFFF; - hmac_sha_1_ooo->lens[5] = 0xFFFF; - hmac_sha_1_ooo->lens[6] = 0xFFFF; - hmac_sha_1_ooo->lens[7] = 0xFFFF; - hmac_sha_1_ooo->unused_lanes = 0xFF03020100; - for (j = 0; j < SSE_NUM_SHA1_LANES; j++) { - hmac_sha_1_ooo->ldata[j].job_in_lane = NULL; - hmac_sha_1_ooo->ldata[j].extra_block[64] = 0x80; - memset(hmac_sha_1_ooo->ldata[j].extra_block + 65, - 0x00, - 64+7); - p = hmac_sha_1_ooo->ldata[j].outer_block; - memset(p + 5*4 + 1, - 0x00, - 64 - 5*4 - 1 - 2); - p[5*4] = 0x80; - p[64-2] = 0x02; - p[64-1] = 0xA0; - } + ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, SSE_NUM_SHA1_LANES); /* Init HMAC/SHA224 out-of-order fields */ - hmac_sha_224_ooo->lens[0] = 0; - hmac_sha_224_ooo->lens[1] = 0; - hmac_sha_224_ooo->lens[2] = 0; - hmac_sha_224_ooo->lens[3] = 0; - hmac_sha_224_ooo->lens[4] = 0xFFFF; - hmac_sha_224_ooo->lens[5] = 0xFFFF; - hmac_sha_224_ooo->lens[6] = 0xFFFF; - hmac_sha_224_ooo->lens[7] = 0xFFFF; - hmac_sha_224_ooo->unused_lanes = 0xFF03020100; - for (j = 0; j < SSE_NUM_SHA256_LANES; j++) { - hmac_sha_224_ooo->ldata[j].job_in_lane = NULL; - - p = hmac_sha_224_ooo->ldata[j].extra_block; - size = sizeof(hmac_sha_224_ooo->ldata[j].extra_block); - memset (p, 0x00, size); - p[64] = 0x80; - - p = hmac_sha_224_ooo->ldata[j].outer_block; - size = sizeof(hmac_sha_224_ooo->ldata[j].outer_block); - memset(p, 0x00, size); - p[7*4] = 0x80; /* digest 7 words long */ - p[64-2] = 0x02; /* length in little endian = 0x02E0 */ - p[64-1] = 0xE0; - } + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, + SSE_NUM_SHA256_LANES); /* Init HMAC/SHA_256 out-of-order fields */ - hmac_sha_256_ooo->lens[0] = 0; - hmac_sha_256_ooo->lens[1] = 0; - hmac_sha_256_ooo->lens[2] = 0; - hmac_sha_256_ooo->lens[3] = 0; - hmac_sha_256_ooo->lens[4] = 0xFFFF; - hmac_sha_256_ooo->lens[5] = 0xFFFF; - hmac_sha_256_ooo->lens[6] = 0xFFFF; - hmac_sha_256_ooo->lens[7] = 0xFFFF; - hmac_sha_256_ooo->unused_lanes = 0xFF03020100; - for (j = 0; j < SSE_NUM_SHA256_LANES; j++) { - hmac_sha_256_ooo->ldata[j].job_in_lane = NULL; - hmac_sha_256_ooo->ldata[j].extra_block[64] = 0x80; - 
memset(hmac_sha_256_ooo->ldata[j].extra_block + 65, - 0x00, - 64+7); - p = hmac_sha_256_ooo->ldata[j].outer_block; - memset(p + 8*4 + 1, - 0x00, - 64 - 8*4 - 1 - 2); /* digest is 8*4 bytes long */ - p[8*4] = 0x80; - p[64-2] = 0x03; /* length of (opad (64*8) bits + 256 bits) - * in hex is 0x300 */ - p[64-1] = 0x00; - } + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, + SSE_NUM_SHA256_LANES); /* Init HMAC/SHA384 out-of-order fields */ - hmac_sha_384_ooo->lens[0] = 0; - hmac_sha_384_ooo->lens[1] = 0; - hmac_sha_384_ooo->lens[2] = 0xFFFF; - hmac_sha_384_ooo->lens[3] = 0xFFFF; - hmac_sha_384_ooo->lens[4] = 0xFFFF; - hmac_sha_384_ooo->lens[5] = 0xFFFF; - hmac_sha_384_ooo->lens[6] = 0xFFFF; - hmac_sha_384_ooo->lens[7] = 0xFFFF; - hmac_sha_384_ooo->unused_lanes = 0xFF0100; - for (j = 0; j < SSE_NUM_SHA512_LANES; j++) { - MB_MGR_HMAC_SHA_512_OOO *ctx = hmac_sha_384_ooo; - - ctx->ldata[j].job_in_lane = NULL; - ctx->ldata[j].extra_block[IMB_SHA_384_BLOCK_SIZE] = 0x80; - memset(ctx->ldata[j].extra_block + (IMB_SHA_384_BLOCK_SIZE + 1), - 0x00, IMB_SHA_384_BLOCK_SIZE + 7); - - p = ctx->ldata[j].outer_block; - memset(p + IMB_SHA384_DIGEST_SIZE_IN_BYTES + 1, 0x00, - /* special end point because this length is constant */ - IMB_SHA_384_BLOCK_SIZE - - IMB_SHA384_DIGEST_SIZE_IN_BYTES - 1 - 2); - p[IMB_SHA384_DIGEST_SIZE_IN_BYTES] = 0x80; /* mark the end */ - /* - * hmac outer block length always of fixed size, it is OKey - * length, a whole message block length, 1024 bits, with padding - * plus the length of the inner digest, which is 384 bits - * 1408 bits == 0x0580. The input message block needs to be - * converted to big endian within the sha implementation - * before use. - */ - p[IMB_SHA_384_BLOCK_SIZE - 2] = 0x05; - p[IMB_SHA_384_BLOCK_SIZE - 1] = 0x80; - } + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + SSE_NUM_SHA512_LANES); /* Init HMAC/SHA512 out-of-order fields */ - hmac_sha_512_ooo->lens[0] = 0; - hmac_sha_512_ooo->lens[1] = 0; - hmac_sha_512_ooo->lens[2] = 0xFFFF; - hmac_sha_512_ooo->lens[3] = 0xFFFF; - hmac_sha_512_ooo->lens[4] = 0xFFFF; - hmac_sha_512_ooo->lens[5] = 0xFFFF; - hmac_sha_512_ooo->lens[6] = 0xFFFF; - hmac_sha_512_ooo->lens[7] = 0xFFFF; - hmac_sha_512_ooo->unused_lanes = 0xFF0100; - for (j = 0; j < SSE_NUM_SHA512_LANES; j++) { - MB_MGR_HMAC_SHA_512_OOO *ctx = hmac_sha_512_ooo; - - ctx->ldata[j].job_in_lane = NULL; - ctx->ldata[j].extra_block[IMB_SHA_512_BLOCK_SIZE] = 0x80; - memset(ctx->ldata[j].extra_block + (IMB_SHA_512_BLOCK_SIZE + 1), - 0x00, IMB_SHA_512_BLOCK_SIZE + 7); - - p = ctx->ldata[j].outer_block; - memset(p + IMB_SHA512_DIGEST_SIZE_IN_BYTES + 1, 0x00, - /* special end point because this length is constant */ - IMB_SHA_512_BLOCK_SIZE - - IMB_SHA512_DIGEST_SIZE_IN_BYTES - 1 - 2); - p[IMB_SHA512_DIGEST_SIZE_IN_BYTES] = 0x80; /* mark the end */ - /* - * hmac outer block length always of fixed size, it is OKey - * length, a whole message block length, 1024 bits, with padding - * plus the length of the inner digest, which is 512 bits - * 1536 bits == 0x600. The input message block needs to be - * converted to big endian within the sha implementation - * before use. 
- */ - p[IMB_SHA_512_BLOCK_SIZE - 2] = 0x06; - p[IMB_SHA_512_BLOCK_SIZE - 1] = 0x00; - } + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + SSE_NUM_SHA512_LANES); /* Init HMAC/MD5 out-of-order fields */ - hmac_md5_ooo->lens[0] = 0; - hmac_md5_ooo->lens[1] = 0; - hmac_md5_ooo->lens[2] = 0; - hmac_md5_ooo->lens[3] = 0; - hmac_md5_ooo->lens[4] = 0; - hmac_md5_ooo->lens[5] = 0; - hmac_md5_ooo->lens[6] = 0; - hmac_md5_ooo->lens[7] = 0; - hmac_md5_ooo->lens[8] = 0xFFFF; - hmac_md5_ooo->lens[9] = 0xFFFF; - hmac_md5_ooo->lens[10] = 0xFFFF; - hmac_md5_ooo->lens[11] = 0xFFFF; - hmac_md5_ooo->lens[12] = 0xFFFF; - hmac_md5_ooo->lens[13] = 0xFFFF; - hmac_md5_ooo->lens[14] = 0xFFFF; - hmac_md5_ooo->lens[15] = 0xFFFF; - hmac_md5_ooo->unused_lanes = 0xF76543210; - for (j = 0; j < SSE_NUM_MD5_LANES; j++) { - hmac_md5_ooo->ldata[j].job_in_lane = NULL; - - p = hmac_md5_ooo->ldata[j].extra_block; - size = sizeof(hmac_md5_ooo->ldata[j].extra_block); - memset (p, 0x00, size); - p[64] = 0x80; - - p = hmac_md5_ooo->ldata[j].outer_block; - size = sizeof(hmac_md5_ooo->ldata[j].outer_block); - memset(p, 0x00, size); - p[4*4] = 0x80; - p[64-7] = 0x02; - p[64-8] = 0x80; - } + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, SSE_NUM_MD5_LANES); /* Init AES/XCBC OOO fields */ - memset(aes_xcbc_ooo->lens, 0xff, - sizeof(aes_xcbc_ooo->lens)); - aes_xcbc_ooo->unused_lanes = 0xFF03020100; - for (j = 0; j < 4; j++) { - aes_xcbc_ooo->lens[j] = 0xFFFF; - aes_xcbc_ooo->ldata[j].job_in_lane = NULL; - aes_xcbc_ooo->ldata[j].final_block[16] = 0x80; - memset(aes_xcbc_ooo->ldata[j].final_block + 17, 0x00, 15); - } - aes_xcbc_ooo->num_lanes_inuse = 0; + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 4); /* Init AES-CCM auth out-of-order fields */ - memset(aes_ccm_ooo, 0, sizeof(MB_MGR_CCM_OOO)); - for (j = 4; j < 16; j++) - aes_ccm_ooo->lens[j] = 0xFFFF; - aes_ccm_ooo->unused_lanes = 0xF3210; - aes_ccm_ooo->num_lanes_inuse = 0; - - memset(aes256_ccm_ooo, 0, sizeof(MB_MGR_CCM_OOO)); - for (j = 4; j < 16; j++) - aes256_ccm_ooo->lens[j] = 0xFFFF; - aes256_ccm_ooo->unused_lanes = 0xF3210; - aes256_ccm_ooo->num_lanes_inuse = 0; + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 4); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 4); /* Init AES-CMAC auth out-of-order fields */ - aes_cmac_ooo->lens[0] = 0; - aes_cmac_ooo->lens[1] = 0; - aes_cmac_ooo->lens[2] = 0; - aes_cmac_ooo->lens[3] = 0; - aes_cmac_ooo->lens[4] = 0xFFFF; - aes_cmac_ooo->lens[5] = 0xFFFF; - aes_cmac_ooo->lens[6] = 0xFFFF; - aes_cmac_ooo->lens[7] = 0xFFFF; - for (j = 0; j < 4; j++) { - aes_cmac_ooo->init_done[j] = 0; - aes_cmac_ooo->job_in_lane[j] = NULL; - } - aes_cmac_ooo->unused_lanes = 0xF3210; - aes_cmac_ooo->num_lanes_inuse = 0; - - aes256_cmac_ooo->lens[0] = 0; - aes256_cmac_ooo->lens[1] = 0; - aes256_cmac_ooo->lens[2] = 0; - aes256_cmac_ooo->lens[3] = 0; - aes256_cmac_ooo->lens[4] = 0xFFFF; - aes256_cmac_ooo->lens[5] = 0xFFFF; - aes256_cmac_ooo->lens[6] = 0xFFFF; - aes256_cmac_ooo->lens[7] = 0xFFFF; - for (j = 0; j < 4; j++) { - aes256_cmac_ooo->init_done[j] = 0; - aes256_cmac_ooo->job_in_lane[j] = NULL; - } - aes256_cmac_ooo->unused_lanes = 0xF3210; - aes256_cmac_ooo->num_lanes_inuse = 0; + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 4); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 4); /* Init AES-CBCS out-of-order fields */ - memset(aes128_cbcs_ooo->lens, 0xFF, sizeof(aes128_cbcs_ooo->lens)); - memset(aes128_cbcs_ooo->job_in_lane, 0, - sizeof(aes128_cbcs_ooo->job_in_lane)); - aes128_cbcs_ooo->num_lanes_inuse = 0; - aes128_cbcs_ooo->unused_lanes = 0xF3210; + 
ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 4); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, SSE_NUM_SHA1_LANES); + + /* Init SHA224 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_224_ooo, SSE_NUM_SHA256_LANES); + + /* Init SHA256 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, SSE_NUM_SHA256_LANES); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, SSE_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, SSE_NUM_SHA512_LANES); + + /* Init SNOW3G-UEA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 4); + + /* Init SNOW3G-UIA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 4); } IMB_DLL_LOCAL void @@ -876,6 +315,14 @@ init_mb_mgr_sse_no_aesni_internal(IMB_MGR *state, const int reset_mgrs) /* reset error status */ imb_set_errno(state, 0); + + /* Check if CPU flags needed for NO_AESNI interface are present */ + if ((state->features & IMB_CPUFLAGS_NO_AESNI) != + IMB_CPUFLAGS_NO_AESNI) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + /* Set architecture for future checks */ state->used_arch = (uint32_t) IMB_ARCH_NOAESNI; @@ -890,6 +337,16 @@ init_mb_mgr_sse_no_aesni_internal(IMB_MGR *state, const int reset_mgrs) /* set SSE NO AESNI handlers */ state->get_next_job = get_next_job_sse_no_aesni; state->submit_job = submit_job_sse_no_aesni; + state->get_next_burst = get_next_burst_sse_no_aesni; + state->submit_burst = submit_burst_sse_no_aesni; + state->submit_burst_nocheck= submit_burst_nocheck_sse_no_aesni; + state->flush_burst = flush_burst_sse_no_aesni; + state->submit_cipher_burst = submit_cipher_burst_sse_no_aesni; + state->submit_cipher_burst_nocheck = + submit_cipher_burst_nocheck_sse_no_aesni; + state->submit_hash_burst = submit_hash_burst_sse_no_aesni; + state->submit_hash_burst_nocheck = + submit_hash_burst_nocheck_sse_no_aesni; state->submit_job_nocheck = submit_job_nocheck_sse_no_aesni; state->get_completed_job = get_completed_job_sse_no_aesni; state->flush_job = flush_job_sse_no_aesni; @@ -1014,6 +471,9 @@ void init_mb_mgr_sse_no_aesni(IMB_MGR *state) { init_mb_mgr_sse_no_aesni_internal(state, 1); + + if (!self_test(state)) + imb_set_errno(state, IMB_ERR_SELFTEST); } #include "mb_mgr_code.h" diff --git a/lib/no-aesni/mb_mgr_zuc_submit_flush_sse_no_aesni.asm b/lib/no-aesni/mb_mgr_zuc_submit_flush_sse_no_aesni.asm index 5f408376822e2b0fa2a996cfb959c1556bad0be7..2b3009c232262615bc368471afaef64c1457003b 100644 --- a/lib/no-aesni/mb_mgr_zuc_submit_flush_sse_no_aesni.asm +++ b/lib/no-aesni/mb_mgr_zuc_submit_flush_sse_no_aesni.asm @@ -38,4 +38,4 @@ %define ZUC128_INIT_4 asm_ZucInitialization_4_sse_no_aesni %define ZUC256_INIT_4 asm_Zuc256Initialization_4_sse_no_aesni %define ZUC_CIPHER_4 asm_ZucCipher_4_sse_no_aesni -%include "sse/mb_mgr_zuc_submit_flush_sse.asm" +%include "sse_t1/mb_mgr_zuc_submit_flush_sse.asm" diff --git a/lib/no-aesni/pon_sse_no_aesni.asm b/lib/no-aesni/pon_sse_no_aesni.asm index 9211eacc6226bd6c7a26b6a997cb4b4d66d6cef9..65170f1d7e05419129cea7e03cf1d95331bbdf9a 100644 --- a/lib/no-aesni/pon_sse_no_aesni.asm +++ b/lib/no-aesni/pon_sse_no_aesni.asm @@ -33,4 +33,4 @@ %define ENC_NO_CTR_FN_NAME submit_job_pon_enc_no_ctr_sse_no_aesni %define HEC_32 hec_32_sse_no_aesni %define HEC_64 hec_64_sse_no_aesni -%include "sse/pon_by8_sse.asm" +%include "sse_t1/pon_by8_sse.asm" diff --git a/lib/no-aesni/snow3g_uia2_sse_no_aesni.asm 
b/lib/no-aesni/snow3g_uia2_sse_no_aesni.asm index 65f1f51f25cce7d6508b2c6ebc1911703f194755..676912cfbade1b28c70fca95fcce66b79cd12e47 100644 --- a/lib/no-aesni/snow3g_uia2_sse_no_aesni.asm +++ b/lib/no-aesni/snow3g_uia2_sse_no_aesni.asm @@ -28,5 +28,5 @@ %include "include/aesni_emu.inc" %define NO_AESNI %define SNOW3G_F9_1_BUFFER_INTERNAL snow3g_f9_1_buffer_internal_sse_no_aesni -%include "sse/snow3g_uia2_by4_sse.asm" +%include "sse_t1/snow3g_uia2_by4_sse.asm" diff --git a/lib/no-aesni/snow_v_sse_no_aesni.asm b/lib/no-aesni/snow_v_sse_no_aesni.asm index 04a0ed60ad28887d729ae4181009e9757f7eb04f..65559065f64acd73ced56ebb0bcdc849c60a6360 100644 --- a/lib/no-aesni/snow_v_sse_no_aesni.asm +++ b/lib/no-aesni/snow_v_sse_no_aesni.asm @@ -28,4 +28,4 @@ %include "include/aesni_emu.inc" %define SNOW_V snow_v_sse_no_aesni %define SNOW_V_AEAD_INIT snow_v_aead_init_sse_no_aesni -%include "sse/snow_v_sse.asm" +%include "sse_t1/snow_v_sse.asm" diff --git a/lib/no-aesni/zuc_sse_no_aesni.asm b/lib/no-aesni/zuc_sse_no_aesni.asm index c2ccc00317f041923854f5376ad32becdcc1d849..d43e99d7e1b74e72f9572f1add35e141ad4d2a9d 100644 --- a/lib/no-aesni/zuc_sse_no_aesni.asm +++ b/lib/no-aesni/zuc_sse_no_aesni.asm @@ -35,4 +35,4 @@ %define ZUC_EIA3ROUND16B asm_Eia3Round16B_sse_no_aesni %define ZUC_EIA3REMAINDER asm_Eia3Remainder_sse_no_aesni %define USE_GFNI 0 -%include "sse/zuc_x4_sse.asm" +%include "sse_t1/zuc_x4_sse.asm" diff --git a/lib/no-aesni/zuc_top_sse_no_aesni.c b/lib/no-aesni/zuc_top_sse_no_aesni.c old mode 100755 new mode 100644 index aa21a973371cbd0533be2376e332e190b0cab7dd..15543a25b6f4995ebc9e27d77378cf6a3f55daa3 --- a/lib/no-aesni/zuc_top_sse_no_aesni.c +++ b/lib/no-aesni/zuc_top_sse_no_aesni.c @@ -972,7 +972,6 @@ zuc256_eia3_4_buffer_job_sse_no_aesni(const void * const pKey[NUM_SSE_BUFS], keys.pKeys[i] = pKey[i]; } - /* TODO: Handle 16-byte digest cases */ asm_Zuc256Initialization_4_sse_no_aesni(&keys, ivs, &state, T, tag_size); diff --git a/lib/sse_t1/README b/lib/sse_t1/README new file mode 100644 index 0000000000000000000000000000000000000000..b0ea1cff1d5f940afd6c452cf42d4959b77d2008 --- /dev/null +++ b/lib/sse_t1/README @@ -0,0 +1,2 @@ +SSE TYPE1: +- SSE4.2, AESNI, PCLMULQDQ, CMOV, BSWAP diff --git a/lib/sse/aes128_cbc_dec_by4_sse.asm b/lib/sse_t1/aes128_cbc_dec_by4_sse.asm similarity index 100% rename from lib/sse/aes128_cbc_dec_by4_sse.asm rename to lib/sse_t1/aes128_cbc_dec_by4_sse.asm diff --git a/lib/sse/aes128_cbc_enc_x4_sse.asm b/lib/sse_t1/aes128_cbc_enc_x4_sse.asm similarity index 99% rename from lib/sse/aes128_cbc_enc_x4_sse.asm rename to lib/sse_t1/aes128_cbc_enc_x4_sse.asm index d84d9164bf72bd7c2e902bb5478d791e0e62cf32..a2713c2c537552d3f2d6660964aa554679a0c02d 100644 --- a/lib/sse/aes128_cbc_enc_x4_sse.asm +++ b/lib/sse_t1/aes128_cbc_enc_x4_sse.asm @@ -32,7 +32,6 @@ %include "include/os.asm" %include "include/mb_mgr_datastruct.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" %define MOVDQ movdqu ;; assume buffers not aligned %macro pxor2 2 @@ -378,7 +377,6 @@ mksection .text MKGLOBAL(FUNC,function,internal) FUNC: - endbranch64 %ifdef ARG_OUT AES_CBC_X4 MODE, OFFSET, ARG_IV, ARG_KEYS, ARG_IN, ARG_OUT %else diff --git a/lib/sse/aes128_cbc_mac_x4_sse.asm b/lib/sse_t1/aes128_cbc_mac_x4_sse.asm similarity index 97% rename from lib/sse/aes128_cbc_mac_x4_sse.asm rename to lib/sse_t1/aes128_cbc_mac_x4_sse.asm index 02d526c85f9d204758de938014dc30c53f380663..52726f1b72beea68600b035a9c7d86340141f4f2 100644 --- a/lib/sse/aes128_cbc_mac_x4_sse.asm +++ 
b/lib/sse_t1/aes128_cbc_mac_x4_sse.asm @@ -34,4 +34,4 @@ %define ARG_KEYS _aesarg_keys %define ARG_IV _aesarg_IV -%include "sse/aes128_cbc_enc_x4_sse.asm" +%include "sse_t1/aes128_cbc_enc_x4_sse.asm" diff --git a/lib/sse/aes128_cbcs_1_9_dec_by4_sse.asm b/lib/sse_t1/aes128_cbcs_1_9_dec_by4_sse.asm similarity index 97% rename from lib/sse/aes128_cbcs_1_9_dec_by4_sse.asm rename to lib/sse_t1/aes128_cbcs_1_9_dec_by4_sse.asm index 875b7fe2c2b99d4139c164507134a98c9f06fd9a..fcef251c8945680d1bb5889e951457e538e6ecb1 100644 --- a/lib/sse/aes128_cbcs_1_9_dec_by4_sse.asm +++ b/lib/sse_t1/aes128_cbcs_1_9_dec_by4_sse.asm @@ -30,4 +30,4 @@ %define CBCS %endif -%include "sse/aes128_cbc_dec_by4_sse.asm" +%include "sse_t1/aes128_cbc_dec_by4_sse.asm" diff --git a/lib/sse/aes128_cbcs_1_9_enc_x4_sse.asm b/lib/sse_t1/aes128_cbcs_1_9_enc_x4_sse.asm similarity index 98% rename from lib/sse/aes128_cbcs_1_9_enc_x4_sse.asm rename to lib/sse_t1/aes128_cbcs_1_9_enc_x4_sse.asm index 1181b75fb90992c24bcbfc1de8cd9af8ca0c30f4..77e9a3a0d884fd42e38ec2db0995a904cab8da7e 100644 --- a/lib/sse/aes128_cbcs_1_9_enc_x4_sse.asm +++ b/lib/sse_t1/aes128_cbcs_1_9_enc_x4_sse.asm @@ -49,4 +49,4 @@ %define ARG_KEYS _aesarg_keys %define ARG_IV _aesarg_IV -%include "sse/aes128_cbc_enc_x4_sse.asm" +%include "sse_t1/aes128_cbc_enc_x4_sse.asm" diff --git a/lib/sse/aes128_cntr_by8_sse.asm b/lib/sse_t1/aes128_cntr_by8_sse.asm similarity index 99% rename from lib/sse/aes128_cntr_by8_sse.asm rename to lib/sse_t1/aes128_cntr_by8_sse.asm index d0ea42ba1ff97246fbdded79b6c470bdb1853dda..7005d5639ab89debb8c98a1b148acd582142561e 100644 --- a/lib/sse/aes128_cntr_by8_sse.asm +++ b/lib/sse_t1/aes128_cntr_by8_sse.asm @@ -560,20 +560,22 @@ align 32 jmp %%bswap_iv %endmacro -align 32 %ifdef CNTR_CCM_SSE ; IMB_JOB * aes_cntr_ccm_128_sse(IMB_JOB *job) ; arg 1 : job +align 32 MKGLOBAL(AES_CNTR_CCM_128,function,internal) AES_CNTR_CCM_128: DO_CNTR CCM %else ;; aes_cntr_128_sse(void *in, void *IV, void *keys, void *out, UINT64 num_bytes, UINT64 iv_len) +align 32 MKGLOBAL(AES_CNTR_128,function,internal) AES_CNTR_128: DO_CNTR CNTR ;; aes_cntr_bit_128_sse(void *in, void *IV, void *keys, void *out, UINT64 num_bits, UINT64 iv_len) +align 32 MKGLOBAL(AES_CNTR_BIT_128,function,internal) AES_CNTR_BIT_128: DO_CNTR CNTR_BIT diff --git a/lib/sse/aes128_cntr_ccm_by8_sse.asm b/lib/sse_t1/aes128_cntr_ccm_by8_sse.asm similarity index 97% rename from lib/sse/aes128_cntr_ccm_by8_sse.asm rename to lib/sse_t1/aes128_cntr_ccm_by8_sse.asm index bd3fc780c001146f272cf705c7410060880c534d..3b2e16c8d01503f86bf0ab811f1f630813ec2593 100644 --- a/lib/sse/aes128_cntr_ccm_by8_sse.asm +++ b/lib/sse_t1/aes128_cntr_ccm_by8_sse.asm @@ -29,4 +29,4 @@ %ifndef AES_CNTR_CCM_128 %define AES_CNTR_CCM_128 aes_cntr_ccm_128_sse %endif -%include "sse/aes128_cntr_by8_sse.asm" +%include "sse_t1/aes128_cntr_by8_sse.asm" diff --git a/lib/sse/aes128_ecb_by4_sse.asm b/lib/sse_t1/aes128_ecb_by4_sse.asm similarity index 98% rename from lib/sse/aes128_ecb_by4_sse.asm rename to lib/sse_t1/aes128_ecb_by4_sse.asm index 625673871a9e27c423fa8d6c0093f2960f771cb4..f1fafa2709a6f2c78d6b8fb80001d60b90253b32 100644 --- a/lib/sse/aes128_ecb_by4_sse.asm +++ b/lib/sse_t1/aes128_ecb_by4_sse.asm @@ -25,7 +25,7 @@ ;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;; -; routine to do AES ECB encrypt/decrypt on 16n bytes doing AES by 4 +; routine to do AES ECB 128 encrypt/decrypt on 16n bytes doing AES by 4 ; XMM registers are clobbered. 
Saving/restoring must be done at a higher level @@ -48,8 +48,8 @@ %ifndef AES_ECB_ENC_256 %ifndef AES_ECB_ENC_192 %ifndef AES_ECB_ENC_128 -%define AES_ECB_ENC_128 aes_ecb_enc_128_sse -%define AES_ECB_DEC_128 aes_ecb_dec_128_sse +%define AES_ECB_ENC_128 aes_ecb_enc_128_by4_sse +%define AES_ECB_DEC_128 aes_ecb_dec_128_by4_sse %endif %endif %endif diff --git a/lib/sse/aes128_xcbc_mac_x4_sse.asm b/lib/sse_t1/aes128_xcbc_mac_x4_sse.asm similarity index 98% rename from lib/sse/aes128_xcbc_mac_x4_sse.asm rename to lib/sse_t1/aes128_xcbc_mac_x4_sse.asm index 60df2b34e35fbe4d4d5d902fea38a81d6d20cafb..376327c31e14a818363948bcb3a931e4f417205b 100644 --- a/lib/sse/aes128_xcbc_mac_x4_sse.asm +++ b/lib/sse_t1/aes128_xcbc_mac_x4_sse.asm @@ -46,4 +46,4 @@ %define ARG_KEYS _aesxcbcarg_keys %define ARG_IV _aesxcbcarg_ICV -%include "sse/aes128_cbc_enc_x4_sse.asm" +%include "sse_t1/aes128_cbc_enc_x4_sse.asm" diff --git a/lib/sse/aes192_cbc_dec_by4_sse.asm b/lib/sse_t1/aes192_cbc_dec_by4_sse.asm similarity index 100% rename from lib/sse/aes192_cbc_dec_by4_sse.asm rename to lib/sse_t1/aes192_cbc_dec_by4_sse.asm diff --git a/lib/sse/aes192_cbc_enc_x4_sse.asm b/lib/sse_t1/aes192_cbc_enc_x4_sse.asm similarity index 100% rename from lib/sse/aes192_cbc_enc_x4_sse.asm rename to lib/sse_t1/aes192_cbc_enc_x4_sse.asm diff --git a/lib/sse/aes192_cntr_by8_sse.asm b/lib/sse_t1/aes192_cntr_by8_sse.asm similarity index 99% rename from lib/sse/aes192_cntr_by8_sse.asm rename to lib/sse_t1/aes192_cntr_by8_sse.asm index 7939aa1cdcc953683970150e806f6edb27726ec7..d0a861dd133cffe0001fdf893ce5e1ee68ebcfa4 100644 --- a/lib/sse/aes192_cntr_by8_sse.asm +++ b/lib/sse_t1/aes192_cntr_by8_sse.asm @@ -498,6 +498,7 @@ MKGLOBAL(AES_CNTR_192,function,internal) AES_CNTR_192: DO_CNTR CNTR +align 32 ;; aes_cntr_bit_192_sse(void *in, void *IV, void *keys, void *out, UINT64 num_bits, UINT64 iv_len) MKGLOBAL(AES_CNTR_BIT_192,function,internal) AES_CNTR_BIT_192: diff --git a/lib/sse/aes192_ecb_by4_sse.asm b/lib/sse_t1/aes192_ecb_by4_sse.asm similarity index 92% rename from lib/sse/aes192_ecb_by4_sse.asm rename to lib/sse_t1/aes192_ecb_by4_sse.asm index b66db01296f4e4a62ff6892cd83aca716eb9492f..fcad26e596cfc2b6423d5a2f2659956d66e959ec 100644 --- a/lib/sse/aes192_ecb_by4_sse.asm +++ b/lib/sse_t1/aes192_ecb_by4_sse.asm @@ -27,7 +27,7 @@ ; routine to do AES ECB 192 encrypt/decrypt on 16n bytes doing AES by 4 -%define AES_ECB_ENC_192 aes_ecb_enc_192_sse -%define AES_ECB_DEC_192 aes_ecb_dec_192_sse +%define AES_ECB_ENC_192 aes_ecb_enc_192_by4_sse +%define AES_ECB_DEC_192 aes_ecb_dec_192_by4_sse -%include "sse/aes128_ecb_by4_sse.asm" +%include "sse_t1/aes128_ecb_by4_sse.asm" diff --git a/lib/sse/aes256_cbc_dec_by4_sse.asm b/lib/sse_t1/aes256_cbc_dec_by4_sse.asm similarity index 100% rename from lib/sse/aes256_cbc_dec_by4_sse.asm rename to lib/sse_t1/aes256_cbc_dec_by4_sse.asm diff --git a/lib/sse/aes256_cbc_enc_x4_sse.asm b/lib/sse_t1/aes256_cbc_enc_x4_sse.asm similarity index 100% rename from lib/sse/aes256_cbc_enc_x4_sse.asm rename to lib/sse_t1/aes256_cbc_enc_x4_sse.asm diff --git a/lib/sse/aes256_cbc_mac_x4_sse.asm b/lib/sse_t1/aes256_cbc_mac_x4_sse.asm similarity index 97% rename from lib/sse/aes256_cbc_mac_x4_sse.asm rename to lib/sse_t1/aes256_cbc_mac_x4_sse.asm index ccff0e57baccff8f056681dbe14f872eb91b35df..998c8434eaea3b34ce0e4d291d7df080923ba786 100644 --- a/lib/sse/aes256_cbc_mac_x4_sse.asm +++ b/lib/sse_t1/aes256_cbc_mac_x4_sse.asm @@ -28,4 +28,4 @@ ;;; Routine to compute CBC-MAC based on 256 bit CBC AES encryption code %define CBC_MAC 
-%include "sse/aes256_cbc_enc_x4_sse.asm" +%include "sse_t1/aes256_cbc_enc_x4_sse.asm" diff --git a/lib/sse/aes256_cntr_by8_sse.asm b/lib/sse_t1/aes256_cntr_by8_sse.asm similarity index 99% rename from lib/sse/aes256_cntr_by8_sse.asm rename to lib/sse_t1/aes256_cntr_by8_sse.asm index ee9e7fe3ab5f8c61fef120455283b212699d525d..c9ca98770fd5b68d4afdadddbf676586026c3fc8 100644 --- a/lib/sse/aes256_cntr_by8_sse.asm +++ b/lib/sse_t1/aes256_cntr_by8_sse.asm @@ -588,20 +588,22 @@ align 32 jmp %%_bswap_iv %endmacro -align 32 %ifdef CNTR_CCM_SSE ; IMB_JOB * aes_cntr_ccm_256_(IMB_JOB *job) ; arg 1 : job +align 32 MKGLOBAL(AES_CNTR_CCM_256,function,internal) AES_CNTR_CCM_256: DO_CNTR CCM %else ;; aes_cntr_256_sse(void *in, void *IV, void *keys, void *out, UINT64 num_bytes, UINT64 iv_len) +align 32 MKGLOBAL(AES_CNTR_256,function,internal) AES_CNTR_256: DO_CNTR CNTR ;; aes_cntr_bit_256_sse(void *in, void *IV, void *keys, void *out, UINT64 num_bits, UINT64 iv_len) +align 32 MKGLOBAL(AES_CNTR_BIT_256,function,internal) AES_CNTR_BIT_256: DO_CNTR CNTR_BIT diff --git a/lib/sse/aes256_cntr_ccm_by8_sse.asm b/lib/sse_t1/aes256_cntr_ccm_by8_sse.asm similarity index 97% rename from lib/sse/aes256_cntr_ccm_by8_sse.asm rename to lib/sse_t1/aes256_cntr_ccm_by8_sse.asm index 946b42867c3106cf95a059aa3b1890be4a7c38d2..94c9c2d265e9d1c6a40c05e4f4e977d070005177 100644 --- a/lib/sse/aes256_cntr_ccm_by8_sse.asm +++ b/lib/sse_t1/aes256_cntr_ccm_by8_sse.asm @@ -29,4 +29,4 @@ %ifndef AES_CNTR_CCM_256 %define AES_CNTR_CCM_256 aes_cntr_ccm_256_sse %endif -%include "sse/aes256_cntr_by8_sse.asm" +%include "sse_t1/aes256_cntr_by8_sse.asm" diff --git a/lib/sse/aes256_ecb_by4_sse.asm b/lib/sse_t1/aes256_ecb_by4_sse.asm similarity index 92% rename from lib/sse/aes256_ecb_by4_sse.asm rename to lib/sse_t1/aes256_ecb_by4_sse.asm index 544f8dedc413698ed0b268b6a9ea28b82878fe63..703ede7355cf433c303642032960bc08ffd53982 100644 --- a/lib/sse/aes256_ecb_by4_sse.asm +++ b/lib/sse_t1/aes256_ecb_by4_sse.asm @@ -27,7 +27,7 @@ ; routine to do AES ECB 256 encrypt/decrypt on 16n bytes doing AES by 4 -%define AES_ECB_ENC_256 aes_ecb_enc_256_sse -%define AES_ECB_DEC_256 aes_ecb_dec_256_sse +%define AES_ECB_ENC_256 aes_ecb_enc_256_by4_sse +%define AES_ECB_DEC_256 aes_ecb_dec_256_by4_sse -%include "sse/aes128_ecb_by4_sse.asm" +%include "sse_t1/aes128_ecb_by4_sse.asm" diff --git a/lib/sse/aes_cfb_sse.asm b/lib/sse_t1/aes_cfb_sse.asm similarity index 100% rename from lib/sse/aes_cfb_sse.asm rename to lib/sse_t1/aes_cfb_sse.asm diff --git a/lib/sse/chacha20_sse.asm b/lib/sse_t1/chacha20_sse.asm similarity index 98% rename from lib/sse/chacha20_sse.asm rename to lib/sse_t1/chacha20_sse.asm index c222975550fc4ced5745b93b25811fcb8342c7a9..253b17dc021fd58b0c226fb43e6796631570ec70 100644 --- a/lib/sse/chacha20_sse.asm +++ b/lib/sse_t1/chacha20_sse.asm @@ -93,6 +93,7 @@ dq 0x0ffffffc0fffffff, 0x0ffffffc0ffffffc struc STACK _STATE: reso 16 ; Space to store first 4 states _XMM_SAVE: reso 2 ; Space to store up to 2 temporary XMM registers +_XMM_WIN_SAVE: reso 10 ; Space to store up to 10 XMM registers _GP_SAVE: resq 7 ; Space to store up to 7 GP registers _RSP_SAVE: resq 1 ; Space to store rsp pointer endstruc @@ -1109,7 +1110,15 @@ chacha20_enc_dec_ks_sse: mov [rsp + _GP_SAVE + 40], rbp %ifndef LINUX mov [rsp + _GP_SAVE + 48], rdi +%assign i 0 +%assign j 6 +%rep 10 + movdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif + mov [rsp + _RSP_SAVE], rax ; save RSP ; Check if there is nothing to encrypt @@ -1548,6 
+1557,13 @@ exit_ks: mov rbp, [rsp + _GP_SAVE + 40] %ifndef LINUX mov rdi, [rsp + _GP_SAVE + 48] +%assign i 0 +%assign j 6 +%rep 10 + movdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov rsp, [rsp + _RSP_SAVE]; restore RSP @@ -1558,6 +1574,16 @@ exit_ks: align 32 MKGLOBAL(poly1305_key_gen_sse,function,internal) poly1305_key_gen_sse: + +%ifndef LINUX + mov rax, rsp + sub rsp, 3*16 + 8 + and rsp, -16 + movdqa [rsp], xmm6 + movdqa [rsp + 16], xmm7 + movdqa [rsp + 16*2], xmm8 + mov [rsp + 16*3], rax +%endif ;; prepare chacha state from IV, key movdqa xmm0, [rel constants] movdqu xmm1, [arg1] ; Load key bytes 0-15 @@ -1580,6 +1606,12 @@ poly1305_key_gen_sse: clear_all_xmms_sse_asm %endif +%ifndef LINUX + movdqa xmm6, [rsp] + movdqa xmm7, [rsp + 16] + movdqa xmm8, [rsp + 16*2] + mov rsp, [rsp + 16*3] +%endif ret align 32 @@ -1601,6 +1633,15 @@ submit_job_chacha20_poly_enc_sse: mov [rsp + _GP_SAVE], r12 mov [rsp + _GP_SAVE + 8], r13 mov [rsp + _GP_SAVE + 16], r14 +;%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + movdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep +;%endif mov [rsp + _RSP_SAVE], rax ; save RSP mov added_len, 64 @@ -2160,6 +2201,15 @@ no_partial_block_poly: mov r12, [rsp + _GP_SAVE] mov r13, [rsp + _GP_SAVE + 8] mov r14, [rsp + _GP_SAVE + 16] +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + movdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep +%endif mov rsp, [rsp + _RSP_SAVE] mov rax, job @@ -2193,6 +2243,13 @@ submit_job_chacha20_poly_dec_sse: mov [rsp + _GP_SAVE + 8], r13 %ifndef LINUX mov [rsp + _GP_SAVE + 16], rsi +%assign i 0 +%assign j 6 +%rep 10 + movdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov [rsp + _RSP_SAVE], rax ; save RSP @@ -2631,6 +2688,15 @@ no_partial_block_dec: mov r13, [rsp + _GP_SAVE + 8] %ifndef LINUX mov rsi, [rsp + _GP_SAVE + 16] +%endif +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + movdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep %endif mov rsp, [rsp + _RSP_SAVE] ret @@ -2643,16 +2709,25 @@ gen_keystr_poly_key_sse: %define len arg3 %define ks arg4 - ; If less than or equal to 64*2 bytes, prepare directly states for - ; up to 2 blocks - cmp len, 64*2 - jbe check_1_or_2_blocks_left_gen - mov rax, rsp sub rsp, STACK_SIZE and rsp, -16 +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + movdqa [rsp + _XMM_WIN_SAVE + i*16], APPEND(xmm, j) +%assign i (i + 1) +%assign j (j + 1) +%endrep +%endif mov [rsp + _RSP_SAVE], rax ; save RSP + ; If less than or equal to 64*2 bytes, prepare directly states for + ; up to 2 blocks + cmp len, 64*2 + jbe check_1_or_2_blocks_left_gen + ; Prepare first 4 chacha states movdqa xmm0, [rel constants0] movdqa xmm1, [rel constants1] @@ -2758,6 +2833,16 @@ gen_keystr_poly_key_sse: movdqa [rsp + _XMM_SAVE + 16], xmm0 %endif +restore_gen_keystr: +%ifndef LINUX +%assign i 0 +%assign j 6 +%rep 10 + movdqa APPEND(xmm, j), [rsp + _XMM_WIN_SAVE + i*16] +%assign i (i + 1) +%assign j (j + 1) +%endrep +%endif mov rsp, [rsp + _RSP_SAVE] ret @@ -2833,6 +2918,6 @@ exit_gen: %ifdef SAFE_DATA clear_all_xmms_sse_asm %endif - ret + jmp restore_gen_keystr mksection stack-noexec diff --git a/lib/sse/crc16_x25_sse.asm b/lib/sse_t1/crc16_x25_sse.asm similarity index 100% rename from lib/sse/crc16_x25_sse.asm rename to lib/sse_t1/crc16_x25_sse.asm diff --git a/lib/sse/crc32_by8_sse.asm 
b/lib/sse_t1/crc32_by8_sse.asm similarity index 99% rename from lib/sse/crc32_by8_sse.asm rename to lib/sse_t1/crc32_by8_sse.asm index 4c7ba21255b02c0d0900ee57607faf07bd6a90de..3c4dc4012291f26e4b8085d7bcb849702a97ea79 100644 --- a/lib/sse/crc32_by8_sse.asm +++ b/lib/sse_t1/crc32_by8_sse.asm @@ -39,6 +39,7 @@ %include "include/memcpy.asm" %include "include/reg_sizes.asm" %include "include/crc32.inc" +%include "include/clear_regs.asm" %ifndef CRC32_FN %define CRC32_FN crc32_by8_sse @@ -333,6 +334,9 @@ CRC32_FN: pextrd eax, xmm7, 1 .cleanup: +%ifdef SAFE_DATA + clear_all_xmms_sse_asm +%endif ret align 32 diff --git a/lib/sse/crc32_fp_sse.asm b/lib/sse_t1/crc32_fp_sse.asm similarity index 100% rename from lib/sse/crc32_fp_sse.asm rename to lib/sse_t1/crc32_fp_sse.asm diff --git a/lib/sse/crc32_iuup_sse.asm b/lib/sse_t1/crc32_iuup_sse.asm similarity index 100% rename from lib/sse/crc32_iuup_sse.asm rename to lib/sse_t1/crc32_iuup_sse.asm diff --git a/lib/sse/crc32_lte_sse.asm b/lib/sse_t1/crc32_lte_sse.asm similarity index 100% rename from lib/sse/crc32_lte_sse.asm rename to lib/sse_t1/crc32_lte_sse.asm diff --git a/lib/sse/crc32_refl_by8_sse.asm b/lib/sse_t1/crc32_refl_by8_sse.asm similarity index 99% rename from lib/sse/crc32_refl_by8_sse.asm rename to lib/sse_t1/crc32_refl_by8_sse.asm index 1b0225062e8c12061337c374983027fdf18e5845..b5ab5860647a85526e3419be736d967f89ee2257 100644 --- a/lib/sse/crc32_refl_by8_sse.asm +++ b/lib/sse_t1/crc32_refl_by8_sse.asm @@ -39,6 +39,7 @@ %include "include/memcpy.asm" %include "include/reg_sizes.asm" %include "include/crc32_refl.inc" +%include "include/clear_regs.asm" %ifndef CRC32_REFL_FN %define CRC32_REFL_FN crc32_refl_by8_sse @@ -308,6 +309,9 @@ CRC32_REFL_FN: pextrd eax, xmm7, 2 .cleanup: +%ifdef SAFE_DATA + clear_all_xmms_sse_asm +%endif not eax ret diff --git a/lib/sse/crc32_sctp_sse.asm b/lib/sse_t1/crc32_sctp_sse.asm similarity index 100% rename from lib/sse/crc32_sctp_sse.asm rename to lib/sse_t1/crc32_sctp_sse.asm diff --git a/lib/sse/crc32_wimax_sse.asm b/lib/sse_t1/crc32_wimax_sse.asm similarity index 100% rename from lib/sse/crc32_wimax_sse.asm rename to lib/sse_t1/crc32_wimax_sse.asm diff --git a/lib/sse/ethernet_fcs_sse.asm b/lib/sse_t1/ethernet_fcs_sse.asm similarity index 100% rename from lib/sse/ethernet_fcs_sse.asm rename to lib/sse_t1/ethernet_fcs_sse.asm diff --git a/lib/sse/gcm128_api_by8_sse.asm b/lib/sse_t1/gcm128_api_by8_sse.asm similarity index 98% rename from lib/sse/gcm128_api_by8_sse.asm rename to lib/sse_t1/gcm128_api_by8_sse.asm index a142f498dc31e3b1ee56d88ff9d8ff1c9ba216ff..96e7a3409d5b9cdc3878c94e5d4ec6c517e588f8 100644 --- a/lib/sse/gcm128_api_by8_sse.asm +++ b/lib/sse_t1/gcm128_api_by8_sse.asm @@ -27,4 +27,4 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM128_MODE 1 -%include "sse/gcm_api_sse.inc" +%include "sse_t1/gcm_api_sse.inc" diff --git a/lib/sse/gcm128_gmac_api_by8_sse.asm b/lib/sse_t1/gcm128_gmac_api_by8_sse.asm similarity index 97% rename from lib/sse/gcm128_gmac_api_by8_sse.asm rename to lib/sse_t1/gcm128_gmac_api_by8_sse.asm index c8e1c9b34805007b89e612726eeb9392f8213094..e222cf4e297b756ec0c304f10eb101706d2dc246 100644 --- a/lib/sse/gcm128_gmac_api_by8_sse.asm +++ b/lib/sse_t1/gcm128_gmac_api_by8_sse.asm @@ -27,4 +27,4 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM128_MODE 1 -%include "sse/gcm_gmac_api_sse.inc" +%include "sse_t1/gcm_gmac_api_sse.inc" diff --git a/lib/sse/gcm128_sgl_api_by8_sse.asm b/lib/sse_t1/gcm128_sgl_api_by8_sse.asm similarity index 97% rename from lib/sse/gcm128_sgl_api_by8_sse.asm rename to lib/sse_t1/gcm128_sgl_api_by8_sse.asm index 2b98581025e54bb9e363729aafb8e498480c4788..f6df66f7557b35bea64f33ff88f7a180f254aa9c 100644 --- a/lib/sse/gcm128_sgl_api_by8_sse.asm +++ b/lib/sse_t1/gcm128_sgl_api_by8_sse.asm @@ -27,4 +27,4 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM128_MODE 1 -%include "sse/gcm_sgl_api_sse.inc" +%include "sse_t1/gcm_sgl_api_sse.inc" diff --git a/lib/sse/gcm192_api_by8_sse.asm b/lib/sse_t1/gcm192_api_by8_sse.asm similarity index 98% rename from lib/sse/gcm192_api_by8_sse.asm rename to lib/sse_t1/gcm192_api_by8_sse.asm index 5396935e4db33a51bc1f1a2bc4c636887efe6664..13cf48d4332040c4dc36edaed449a9b269b80f69 100644 --- a/lib/sse/gcm192_api_by8_sse.asm +++ b/lib/sse_t1/gcm192_api_by8_sse.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM192_MODE 1 -%include "sse/gcm_api_sse.inc" +%include "sse_t1/gcm_api_sse.inc" diff --git a/lib/sse/gcm192_gmac_api_by8_sse.asm b/lib/sse_t1/gcm192_gmac_api_by8_sse.asm similarity index 97% rename from lib/sse/gcm192_gmac_api_by8_sse.asm rename to lib/sse_t1/gcm192_gmac_api_by8_sse.asm index 57a218a9faaaf62a03b8bdf6601391d742e1d275..5c9967e0bbda3f81ed63f9d47e45998b4742f7e8 100644 --- a/lib/sse/gcm192_gmac_api_by8_sse.asm +++ b/lib/sse_t1/gcm192_gmac_api_by8_sse.asm @@ -27,4 +27,4 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM192_MODE 1 -%include "sse/gcm_gmac_api_sse.inc" +%include "sse_t1/gcm_gmac_api_sse.inc" diff --git a/lib/sse/gcm192_sgl_api_by8_sse.asm b/lib/sse_t1/gcm192_sgl_api_by8_sse.asm similarity index 97% rename from lib/sse/gcm192_sgl_api_by8_sse.asm rename to lib/sse_t1/gcm192_sgl_api_by8_sse.asm index 56959bfce6f93a2472663fcfdd95cb3b0b9c53f5..6b6a83572f0bfe07d574e18fcfbe99b719ff3852 100644 --- a/lib/sse/gcm192_sgl_api_by8_sse.asm +++ b/lib/sse_t1/gcm192_sgl_api_by8_sse.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM192_MODE 1 -%include "sse/gcm_sgl_api_sse.inc" +%include "sse_t1/gcm_sgl_api_sse.inc" diff --git a/lib/sse/gcm256_api_by8_sse.asm b/lib/sse_t1/gcm256_api_by8_sse.asm similarity index 98% rename from lib/sse/gcm256_api_by8_sse.asm rename to lib/sse_t1/gcm256_api_by8_sse.asm index d106086b7968d57f8e6f7cfddcf926d6d899037b..fdffe34b5e62e75dc66bd1aa23c44ee7458b7649 100644 --- a/lib/sse/gcm256_api_by8_sse.asm +++ b/lib/sse_t1/gcm256_api_by8_sse.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM256_MODE 1 -%include "sse/gcm_api_sse.inc" +%include "sse_t1/gcm_api_sse.inc" diff --git a/lib/sse/gcm256_gmac_api_by8_sse.asm b/lib/sse_t1/gcm256_gmac_api_by8_sse.asm similarity index 97% rename from lib/sse/gcm256_gmac_api_by8_sse.asm rename to lib/sse_t1/gcm256_gmac_api_by8_sse.asm index 5d151ec59799e885641ec3c35249498304db783d..5f8dc21e8bc8045ee71fddbfbe425a62ef849377 100644 --- a/lib/sse/gcm256_gmac_api_by8_sse.asm +++ b/lib/sse_t1/gcm256_gmac_api_by8_sse.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM256_MODE 1 -%include "sse/gcm_gmac_api_sse.inc" +%include "sse_t1/gcm_gmac_api_sse.inc" diff --git a/lib/sse/gcm256_sgl_api_by8_sse.asm b/lib/sse_t1/gcm256_sgl_api_by8_sse.asm similarity index 97% rename from lib/sse/gcm256_sgl_api_by8_sse.asm rename to lib/sse_t1/gcm256_sgl_api_by8_sse.asm index f0a22d3eda693ddb38ecdba1f6f4cc539d1eee5f..304b3cb1f70a6fb3e197edc0c2df1632b20c367d 100644 --- a/lib/sse/gcm256_sgl_api_by8_sse.asm +++ b/lib/sse_t1/gcm256_sgl_api_by8_sse.asm @@ -28,4 +28,4 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; %define GCM256_MODE 1 -%include "sse/gcm_sgl_api_sse.inc" +%include "sse_t1/gcm_sgl_api_sse.inc" diff --git a/lib/sse/gcm_api_sse.inc b/lib/sse_t1/gcm_api_sse.inc similarity index 99% rename from lib/sse/gcm_api_sse.inc rename to lib/sse_t1/gcm_api_sse.inc index d5f6e980396286508a624166079d392c53f43e8b..774286eb9cbcac88c82d16a705e43f350412523a 100644 --- a/lib/sse/gcm_api_sse.inc +++ b/lib/sse_t1/gcm_api_sse.inc @@ -29,7 +29,7 @@ ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ; -%include "sse/gcm_sse.inc" +%include "sse_t1/gcm_sse.inc" %ifndef GCM_API_SSE_INC %define GCM_API_SSE_INC diff --git a/lib/sse/gcm_gmac_api_sse.inc b/lib/sse_t1/gcm_gmac_api_sse.inc similarity index 99% rename from lib/sse/gcm_gmac_api_sse.inc rename to lib/sse_t1/gcm_gmac_api_sse.inc index 43bc863726b154dbe0645bf044dd15075e849544..da90159d2724c63b8848ff01fc5b379e13333ac1 100644 --- a/lib/sse/gcm_gmac_api_sse.inc +++ b/lib/sse_t1/gcm_gmac_api_sse.inc @@ -27,7 +27,7 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%include "sse/gcm_sse.inc" +%include "sse_t1/gcm_sse.inc" %include "include/gcm_common.inc" %ifndef GCM_GMAC_API_SSE_INC diff --git a/lib/sse/gcm_sgl_api_sse.inc b/lib/sse_t1/gcm_sgl_api_sse.inc similarity index 99% rename from lib/sse/gcm_sgl_api_sse.inc rename to lib/sse_t1/gcm_sgl_api_sse.inc index f3bedb585226d698b5736f7f3711be3e1461037f..82f7b2b794e8f6dbf0cfc9f7bcd199b9f5746a78 100644 --- a/lib/sse/gcm_sgl_api_sse.inc +++ b/lib/sse_t1/gcm_sgl_api_sse.inc @@ -27,7 +27,7 @@ ; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -%include "sse/gcm_sse.inc" +%include "sse_t1/gcm_sse.inc" %ifndef GCM_SGL_API_SSE_INC %define GCM_SGL_API_SSE_INC diff --git a/lib/sse/gcm_sse.inc b/lib/sse_t1/gcm_sse.inc similarity index 99% rename from lib/sse/gcm_sse.inc rename to lib/sse_t1/gcm_sse.inc index 8e671328d4e37cdce0ee013ba8ebecd7060c24b9..042ef0737143c0e73c8df829fef12f1c6adfbe0a 100644 --- a/lib/sse/gcm_sse.inc +++ b/lib/sse_t1/gcm_sse.inc @@ -565,7 +565,7 @@ %ifidn %%ENC_DEC, DEC movdqa xmm3, xmm1 - pxor xmm9, xmm1 ; Cyphertext XOR E(K, Yn) + pxor xmm9, xmm1 ; Ciphertext XOR E(K, Yn) mov r15, %%PLAIN_CYPH_LEN add r15, r13 diff --git a/lib/sse/kasumi_sse.c b/lib/sse_t1/kasumi_sse.c similarity index 100% rename from lib/sse/kasumi_sse.c rename to lib/sse_t1/kasumi_sse.c diff --git a/lib/sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm similarity index 98% rename from lib/sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm index 4c7a514672e1feb98efb33847224198cc3ff7483..35362ca7012979d82bceead67e300073f499d469 100644 --- a/lib/sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm @@ -30,7 +30,6 @@ %include "include/mb_mgr_datastruct.asm" %include "include/reg_sizes.asm" -%include "include/cet.inc" %ifndef NUM_LANES %define NUM_LANES 4 @@ -120,8 +119,6 @@ endstruc ; arg 2 : job MKGLOBAL(FLUSH_JOB_AES_ENC,function,internal) FLUSH_JOB_AES_ENC: - endbranch64 - mov rax, rsp sub rsp, STACK_size and rsp, -16 @@ -226,7 +223,6 @@ APPEND(skip_clear_,I): %endif return: - endbranch64 mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] diff --git a/lib/sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm b/lib/sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm similarity index 98% rename from lib/sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm index d692dedec2348b9b483a1d435bd27799e18086a7..0c67da092541157ff8889b224d734b315749c409 100644 --- a/lib/sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm @@ -31,7 +31,6 @@ %include "include/reg_sizes.asm" %include "include/const.inc" -%include "include/cet.inc" %ifndef NUM_LANES %define NUM_LANES 4 %endif @@ -91,7 +90,6 @@ mksection .text ; arg 2 : job MKGLOBAL(SUBMIT_JOB_AES_ENC,function,internal) SUBMIT_JOB_AES_ENC: - endbranch64 mov rax, rsp sub rsp, STACK_size and rsp, -16 @@ -176,7 +174,6 @@ len_is_0: %endif return: - endbranch64 mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] diff --git a/lib/sse/mb_mgr_aes128_cbcs_1_9_flush_sse.asm b/lib/sse_t1/mb_mgr_aes128_cbcs_1_9_flush_sse.asm similarity index 99% rename from lib/sse/mb_mgr_aes128_cbcs_1_9_flush_sse.asm rename to 
lib/sse_t1/mb_mgr_aes128_cbcs_1_9_flush_sse.asm index 60b96f3973a9cbb6f8fac557d231effa40baf1b6..54ec68b45c8bb878904f36005e00cb2799f4ebed 100644 --- a/lib/sse/mb_mgr_aes128_cbcs_1_9_flush_sse.asm +++ b/lib/sse_t1/mb_mgr_aes128_cbcs_1_9_flush_sse.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %define NUM_LANES 4 diff --git a/lib/sse/mb_mgr_aes128_cbcs_1_9_submit_sse.asm b/lib/sse_t1/mb_mgr_aes128_cbcs_1_9_submit_sse.asm similarity index 99% rename from lib/sse/mb_mgr_aes128_cbcs_1_9_submit_sse.asm rename to lib/sse_t1/mb_mgr_aes128_cbcs_1_9_submit_sse.asm index 79bc51c64d6e00c6808961ba3bf405026eb894da..396fb20222fbb6293d4b02d0fc7d5aca438ce171 100644 --- a/lib/sse/mb_mgr_aes128_cbcs_1_9_submit_sse.asm +++ b/lib/sse_t1/mb_mgr_aes128_cbcs_1_9_submit_sse.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" diff --git a/lib/sse/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm similarity index 95% rename from lib/sse/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm index 024e045a3f7dfd5be0ae490288d0ca71c2436405..7e12dedd6c903db6691f1fc6c11fd5c37b18ec85 100644 --- a/lib/sse/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/const.inc" %include "include/memcpy.asm" @@ -400,6 +399,23 @@ APPEND(skip_,I): movdqa [state + _aes_ccm_lens], ccm_lens ;; Find min length phminposuw min_len_idx, ccm_lens + jmp %%_ccm_round + +%%_ccm_round_flush: + ;; This is identical to the above block but optimized for + ;; a repeat flush operation when keys etc. 
are already set + ;; - vphminposuw was already executed + ;; - good_lane is already known + ;; - copy input pointer from good lane to empty lanes + mov tmp2, [state + _aes_ccm_args_in + good_lane*8] +%assign I 0 +%rep NUM_LANES + cmp qword [state + _aes_ccm_job_in_lane + I*8], 0 + jne APPEND(skip2_,I) + mov [state + _aes_ccm_args_in + I*8], tmp2 +APPEND(skip2_,I): +%assign I (I+1) +%endrep %endif ; end FLUSH @@ -543,12 +559,17 @@ APPEND(skip_clear_,I): ; Reset NULL lane lens to UINT16_MAX %ifidn %%SUBMIT_FLUSH, FLUSH SET_NULL_JOB_LENS_TO_MAX ccm_lens, xtmp0, xtmp1, xtmp2, xtmp3 + mov good_lane, min_idx %endif XPINSRW ccm_lens, xtmp0, tmp2, min_idx, tmp, scale_x16 phminposuw min_len_idx, ccm_lens movdqa [state + _aes_ccm_lens], ccm_lens +%ifidn %%SUBMIT_FLUSH, FLUSH + jmp %%_ccm_round_flush +%else jmp %%_ccm_round +%endif %%_prepare_partial_block_to_auth: ; Check if partial block needs to be hashed @@ -579,7 +600,12 @@ APPEND(skip_clear_,I): movdqa [init_block_addr], xtmp0 mov [state + _aes_ccm_args_in + min_idx * 8], init_block_addr +%ifidn %%SUBMIT_FLUSH, FLUSH + mov good_lane, min_idx + jmp %%_ccm_round_flush +%else jmp %%_ccm_round +%endif %endmacro align 64 @@ -588,14 +614,12 @@ align 64 ; arg 2 : job MKGLOBAL(SUBMIT_JOB_AES_CCM_AUTH,function,internal) SUBMIT_JOB_AES_CCM_AUTH: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_CCM_AUTH_SSE SUBMIT ; IMB_JOB * flush_job_aes_ccm_auth_sse(MB_MGR_CCM_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_AES_CCM_AUTH,function,internal) FLUSH_JOB_AES_CCM_AUTH: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_CCM_AUTH_SSE FLUSH mksection stack-noexec diff --git a/lib/sse/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm similarity index 95% rename from lib/sse/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm index 2b597f07564568a09c9b21c88e2a4a9c123fe37d..be832f5b29eb254ff980a00d60fa7b8802a1b9c9 100644 --- a/lib/sse/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm @@ -32,7 +32,6 @@ %include "include/reg_sizes.asm" %include "include/memcpy.asm" %include "include/const.inc" -%include "include/cet.inc" ;%define DO_DBGPRINT %include "include/dbgprint.asm" @@ -240,7 +239,7 @@ endstruc shl tmp3, 4 add tmp, tmp3 - memcpy_sse_16 m_last, tmp, r, tmp4, iv + memcpy_sse_16 m_last, tmp, r, tmp4, tmp3 ;; src + n + r mov tmp3, [job + _skey2] @@ -302,6 +301,22 @@ APPEND(skip_,I): %endrep ;; Find min length phminposuw xmm1, xmm0 + jmp %%_cmac_round + +%%_cmac_round_flush: + ;; - good lane already known + ;; - copy good_lane input pointer to empty lanes + ;; - lens updated and vphminposuw executed + mov tmp2, [state + _aes_cmac_args_in + good_lane*8] + xor tmp3, tmp3 +%assign I 0 +%rep NUM_LANES + cmp qword [state + _aes_cmac_job_in_lane + I*8], tmp3 + jne APPEND(skip2_,I) + mov [state + _aes_cmac_args_in + I*8], tmp2 +APPEND(skip2_,I): +%assign I (I+1) +%endrep %endif ; end FLUSH @@ -370,7 +385,12 @@ APPEND(skip_,I): lea m_last, [state + _aes_cmac_scratch + tmp3] mov [state + _aes_cmac_args_in + idx*8], m_last +%ifidn %%SUBMIT_FLUSH, SUBMIT jmp %%_cmac_round +%else + mov good_lane, idx + jmp %%_cmac_round_flush +%endif %%_copy_complete_digest: ; Job complete, copy digest to AT output @@ -552,14 +572,12 @@ align 64 ; arg 2 : job MKGLOBAL(SUBMIT_JOB_AES_CMAC_AUTH,function,internal) SUBMIT_JOB_AES_CMAC_AUTH: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_CMAC_SSE SUBMIT ; IMB_JOB * flush_job_aes_cmac_auth_sse(MB_MGR_CMAC_OOO 
*state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_AES_CMAC_AUTH,function,internal) FLUSH_JOB_AES_CMAC_AUTH: - endbranch64 GENERIC_SUBMIT_FLUSH_JOB_AES_CMAC_SSE FLUSH mksection stack-noexec diff --git a/lib/sse/mb_mgr_aes128_xcbc_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_aes128_xcbc_flush_x4_sse.asm similarity index 99% rename from lib/sse/mb_mgr_aes128_xcbc_flush_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes128_xcbc_flush_x4_sse.asm index 123593832f30e22aee54eb37ef91105cf0bfae9f..8d765e9980f3eea0ca492b39210151efaf63aeb3 100644 --- a/lib/sse/mb_mgr_aes128_xcbc_flush_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes128_xcbc_flush_x4_sse.asm @@ -28,7 +28,6 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %ifndef AES_XCBC_X4 diff --git a/lib/sse/mb_mgr_aes128_xcbc_submit_x4_sse.asm b/lib/sse_t1/mb_mgr_aes128_xcbc_submit_x4_sse.asm similarity index 99% rename from lib/sse/mb_mgr_aes128_xcbc_submit_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes128_xcbc_submit_x4_sse.asm index c6409d06c9fef5ef5fcb5233ed9b1fc75b1e84fc..73afe3e525a7675aff6050dbe708a11b9f4fb61b 100644 --- a/lib/sse/mb_mgr_aes128_xcbc_submit_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes128_xcbc_submit_x4_sse.asm @@ -29,7 +29,6 @@ %include "include/const.inc" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/reg_sizes.asm" %include "include/memcpy.asm" %ifndef AES_XCBC_X4 diff --git a/lib/sse/mb_mgr_aes192_cbc_enc_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_aes192_cbc_enc_flush_x4_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes192_cbc_enc_flush_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes192_cbc_enc_flush_x4_sse.asm index 6f0d70427eab85362970490e4af0caab76747e80..b0e89607fe4e77825f3af36d6a8178edb5d28852 100644 --- a/lib/sse/mb_mgr_aes192_cbc_enc_flush_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes192_cbc_enc_flush_x4_sse.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_192_x4 %define FLUSH_JOB_AES_ENC flush_job_aes192_enc_sse -%include "sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes192_cbc_enc_submit_x4_sse.asm b/lib/sse_t1/mb_mgr_aes192_cbc_enc_submit_x4_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes192_cbc_enc_submit_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes192_cbc_enc_submit_x4_sse.asm index 28770f84c3eabb0735b980107589aa01a11e9368..4ce3d952d4a50daeeecea6737749d18265aaa6a6 100644 --- a/lib/sse/mb_mgr_aes192_cbc_enc_submit_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes192_cbc_enc_submit_x4_sse.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_192_x4 %define SUBMIT_JOB_AES_ENC submit_job_aes192_enc_sse -%include "sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes256_cbc_enc_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_aes256_cbc_enc_flush_x4_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes256_cbc_enc_flush_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes256_cbc_enc_flush_x4_sse.asm index 777b796e4a67dcff72c1be2761aed3a5f80561c7..5c82dd21afc338b08483107d7ccc5f0b972947cd 100644 --- a/lib/sse/mb_mgr_aes256_cbc_enc_flush_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes256_cbc_enc_flush_x4_sse.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_256_x4 %define FLUSH_JOB_AES_ENC flush_job_aes256_enc_sse -%include "sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" diff 
--git a/lib/sse/mb_mgr_aes256_cbc_enc_submit_x4_sse.asm b/lib/sse_t1/mb_mgr_aes256_cbc_enc_submit_x4_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes256_cbc_enc_submit_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes256_cbc_enc_submit_x4_sse.asm index d8e0138fe1720aa54ff2915bf758cfd443d34418..3736b445053889c4bdf78514d0c0ad0041730ac9 100644 --- a/lib/sse/mb_mgr_aes256_cbc_enc_submit_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes256_cbc_enc_submit_x4_sse.asm @@ -27,4 +27,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_256_x4 %define SUBMIT_JOB_AES_ENC submit_job_aes256_enc_sse -%include "sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes256_ccm_auth_submit_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_aes256_ccm_auth_submit_flush_x4_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes256_ccm_auth_submit_flush_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes256_ccm_auth_submit_flush_x4_sse.asm index 113b1a8bb0c897438b3ccd2cfe83aff86dd3ef4f..6dce9dfa954312f2fa2cafc58ecf04664086b3ac 100644 --- a/lib/sse/mb_mgr_aes256_ccm_auth_submit_flush_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes256_ccm_auth_submit_flush_x4_sse.asm @@ -32,4 +32,4 @@ %define FLUSH_JOB_AES_CCM_AUTH flush_job_aes256_ccm_auth_sse %endif -%include "sse/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes256_cmac_submit_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_aes256_cmac_submit_flush_x4_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes256_cmac_submit_flush_x4_sse.asm rename to lib/sse_t1/mb_mgr_aes256_cmac_submit_flush_x4_sse.asm index e54c3a53d21eff4309e0a4c7a7e8d86d595c9582..c7c33d976bd7ebcf82db44dc71a1110bbfddc548 100644 --- a/lib/sse/mb_mgr_aes256_cmac_submit_flush_x4_sse.asm +++ b/lib/sse_t1/mb_mgr_aes256_cmac_submit_flush_x4_sse.asm @@ -30,4 +30,4 @@ %define SUBMIT_JOB_AES_CMAC_AUTH submit_job_aes256_cmac_auth_sse %define FLUSH_JOB_AES_CMAC_AUTH flush_job_aes256_cmac_auth_sse -%include "sse/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" diff --git a/lib/sse/mb_mgr_hmac_md5_flush_sse.asm b/lib/sse_t1/mb_mgr_hmac_md5_flush_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_md5_flush_sse.asm rename to lib/sse_t1/mb_mgr_hmac_md5_flush_sse.asm diff --git a/lib/sse/mb_mgr_hmac_md5_submit_sse.asm b/lib/sse_t1/mb_mgr_hmac_md5_submit_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_md5_submit_sse.asm rename to lib/sse_t1/mb_mgr_hmac_md5_submit_sse.asm diff --git a/lib/sse/mb_mgr_hmac_sha1_flush_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha1_flush_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_sha1_flush_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha1_flush_sse.asm diff --git a/lib/sse/mb_mgr_hmac_sha1_submit_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha1_submit_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_sha1_submit_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha1_submit_sse.asm diff --git a/lib/sse/mb_mgr_hmac_sha224_flush_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha224_flush_sse.asm similarity index 96% rename from lib/sse/mb_mgr_hmac_sha224_flush_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha224_flush_sse.asm index 69588c78b8d3aed1633a016220b0023f70ffb49a..bda204599baef127e0e6d4e8477e1c63f3f72081 100644 --- a/lib/sse/mb_mgr_hmac_sha224_flush_sse.asm +++ b/lib/sse_t1/mb_mgr_hmac_sha224_flush_sse.asm @@ -28,4 +28,4 @@ %define FUNC flush_job_hmac_sha_224_sse %define SHA224 -%include 
"sse/mb_mgr_hmac_sha256_flush_sse.asm" +%include "sse_t1/mb_mgr_hmac_sha256_flush_sse.asm" diff --git a/lib/sse/mb_mgr_hmac_sha224_submit_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha224_submit_sse.asm similarity index 96% rename from lib/sse/mb_mgr_hmac_sha224_submit_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha224_submit_sse.asm index 09f423ce6945757af42519657e80864ccd325444..bddf843ea43074ea99e0e34f1afe72def80efd60 100644 --- a/lib/sse/mb_mgr_hmac_sha224_submit_sse.asm +++ b/lib/sse_t1/mb_mgr_hmac_sha224_submit_sse.asm @@ -28,4 +28,4 @@ %define FUNC submit_job_hmac_sha_224_sse %define SHA224 -%include "sse/mb_mgr_hmac_sha256_submit_sse.asm" +%include "sse_t1/mb_mgr_hmac_sha256_submit_sse.asm" diff --git a/lib/sse/mb_mgr_hmac_sha256_flush_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha256_flush_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_sha256_flush_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha256_flush_sse.asm diff --git a/lib/sse/mb_mgr_hmac_sha256_submit_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha256_submit_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_sha256_submit_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha256_submit_sse.asm diff --git a/lib/sse/mb_mgr_hmac_sha384_flush_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha384_flush_sse.asm similarity index 97% rename from lib/sse/mb_mgr_hmac_sha384_flush_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha384_flush_sse.asm index 435cf5018e91f45be52402d4461ca0ee5c0126f5..8e95926a933a56a5b8cb8d21a8a716bb07399c88 100644 --- a/lib/sse/mb_mgr_hmac_sha384_flush_sse.asm +++ b/lib/sse_t1/mb_mgr_hmac_sha384_flush_sse.asm @@ -28,4 +28,4 @@ %define FUNC flush_job_hmac_sha_384_sse %define SHA_X_DIGEST_SIZE 384 -%include "sse/mb_mgr_hmac_sha512_flush_sse.asm" +%include "sse_t1/mb_mgr_hmac_sha512_flush_sse.asm" diff --git a/lib/sse/mb_mgr_hmac_sha384_submit_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha384_submit_sse.asm similarity index 96% rename from lib/sse/mb_mgr_hmac_sha384_submit_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha384_submit_sse.asm index ec2085f1dbea2ae2af531686f9d177c306200dd5..3734957189bae23edaf69307a43f6d0395e3b9be 100644 --- a/lib/sse/mb_mgr_hmac_sha384_submit_sse.asm +++ b/lib/sse_t1/mb_mgr_hmac_sha384_submit_sse.asm @@ -28,4 +28,4 @@ %define FUNC submit_job_hmac_sha_384_sse %define SHA_X_DIGEST_SIZE 384 -%include "sse/mb_mgr_hmac_sha512_submit_sse.asm" +%include "sse_t1/mb_mgr_hmac_sha512_submit_sse.asm" diff --git a/lib/sse/mb_mgr_hmac_sha512_flush_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha512_flush_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_sha512_flush_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha512_flush_sse.asm diff --git a/lib/sse/mb_mgr_hmac_sha512_submit_sse.asm b/lib/sse_t1/mb_mgr_hmac_sha512_submit_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_sha512_submit_sse.asm rename to lib/sse_t1/mb_mgr_hmac_sha512_submit_sse.asm diff --git a/lib/sse_t1/mb_mgr_snow3g_uea2_submit_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_snow3g_uea2_submit_flush_x4_sse.asm new file mode 100644 index 0000000000000000000000000000000000000000..d06c403ff3765a96fd5f3c5192178aac4419a8a4 --- /dev/null +++ b/lib/sse_t1/mb_mgr_snow3g_uea2_submit_flush_x4_sse.asm @@ -0,0 +1,427 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. 
+;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +%include "include/os.asm" +%include "include/imb_job.asm" +%include "include/mb_mgr_datastruct.asm" +%include "include/reg_sizes.asm" +%include "sse_t1/snow3g_uea2_by4_sse.asm" + +%define SUBMIT_JOB_SNOW3G_UEA2 submit_job_snow3g_uea2_sse +%define FLUSH_JOB_SNOW3G_UEA2 flush_job_snow3g_uea2_sse + +mksection .rodata +default rel + +align 64 +last_3_bytes: +dd 0x00000003, 0x00000003, 0x00000003, 0x00000003 +align 64 +zero_xmm: +dd 0x00000000, 0x00000000, 0x00000000, 0x00000000 + +mksection .text +%ifdef LINUX +%define arg1 rdi +%define arg2 rsi +%define tmp_gp1 rcx +%define tmp_gp2 rdx +%else +%define arg1 rcx +%define arg2 rdx +%define tmp_gp1 rdi +%define tmp_gp2 rsi +%endif + +%define tmp_gp3 rbx +%define tmp_gp4 rbp +%define tmp_gp5 r9 +%define tmp_gp6 r10 +%define tmp_gp7 r11 +%define tmp_gp8 r12 + +%define state arg1 +%define job arg2 + +%define job_rax rax + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Get lane nr from ptr to the list of unused lanes. +;; Remove returned lane nr from the list +;; Increase lanes in use. 
+;; Put job ptr in appropriate lane field in state (arg %3) +;; Assumptions: +;; In (arg %1) single lane nr takes 4 bits and 1st free lane nr is lowest 4 bits +;; Job ptr in (arg %3) takes 8 bytes +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro GET_UNUSED_LANE_SSE 6 +%define %%LANE_LIST %1 ;; [in] ptr to unused lane list +%define %%LANES_IN_USE %2 ;; [in] ptr to lanes in use count +%define %%JOB_LANES %3 ;; [in] ptr to list of jobs +%define %%JOB %4 ;; [in] ptr to job structure +%define %%LANE_NR %5 ;; [out] GP register to fill with unused lane nr +%define %%UNUSED_LANES %6 ;; [clobbered] GP register + + mov DWORD(%%UNUSED_LANES), dword [%%LANE_LIST] + mov %%LANE_NR, %%UNUSED_LANES + and %%LANE_NR, 0x3 + ;; remove picked lane nr from list of unused lanes + shr %%UNUSED_LANES, 4 + mov dword [%%LANE_LIST], DWORD(%%UNUSED_LANES) + + add word [%%LANES_IN_USE], 1 + mov [%%JOB_LANES + %%LANE_NR*8], %%JOB +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Find minimum value in table of 4 dwords +;; Outputs (%3) min value and (%4) position of that value in the table +;; Input (%1) is a ptr to the list of lane lengths; (%2) is a clobbered temp +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro GET_MIN_LENGTH_X4_SSE 4 +%define %%LANE_LENS_LIST_PTR %1 ;; [in] ptr to list of lane lengths +%define %%TEMP_64 %2 ;; [clobbered] tmp 64bit register +%define %%LENGTH %3 ;; [out] gp register to put min length in +%define %%INDEX %4 ;; [out] gp register to put index in + + mov DWORD(%%LENGTH), [%%LANE_LENS_LIST_PTR + 4*0] + xor %%INDEX, %%INDEX + mov %%TEMP_64, 1 + + cmp DWORD(%%LENGTH), [%%LANE_LENS_LIST_PTR + 4*1] + cmova DWORD(%%LENGTH), [%%LANE_LENS_LIST_PTR + 4*1] + cmova %%INDEX, %%TEMP_64 + inc %%TEMP_64 + + cmp DWORD(%%LENGTH), [%%LANE_LENS_LIST_PTR + 4*2] + cmova DWORD(%%LENGTH), [%%LANE_LENS_LIST_PTR + 4*2] + cmova %%INDEX, %%TEMP_64 + inc %%TEMP_64 + + cmp DWORD(%%LENGTH), [%%LANE_LENS_LIST_PTR + 4*3] + cmova DWORD(%%LENGTH), [%%LANE_LENS_LIST_PTR + 4*3] + cmova %%INDEX, %%TEMP_64 +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Depending on %1: +;; submit: Submit a single SNOW3G request to be processed later, set up masks and +;; the initial FSM/LFSR state. After that, if all 4 requests have been +;; submitted, proceed with the flush operation. +;; flush: Do SNOW3G encrypt/decrypt processing for 4 buffers until one of them +;; is fully processed. Return the job pointer corresponding to the finished +;; request.
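The two helper macros above are the core of the lane bookkeeping: the unused-lane list packs one lane number per nibble with the next free lane in the lowest 4 bits, and the minimum search walks the four per-lane length dwords with cmova so the smallest value and its index are found without branches. Below is a minimal C sketch of the same bookkeeping; the structure and function names are illustrative only and do not match the library's MB_MGR_SNOW3G_OOO layout.

```c
/*
 * Illustrative sketch only - field names below do not match the
 * library's MB_MGR_SNOW3G_OOO structure.
 */
#include <stdint.h>

struct demo_ooo_mgr {
        uint32_t unused_lanes;  /* one free lane number per nibble, next free in bits 0..3 */
        uint16_t lanes_in_use;
        void *job_in_lane[4];
        uint32_t lens[4];       /* per-lane length, UINT32_MAX for empty lanes */
};

/* Equivalent of GET_UNUSED_LANE_SSE: pop a free lane and attach a job to it */
static unsigned int get_unused_lane(struct demo_ooo_mgr *m, void *job)
{
        const unsigned int lane = m->unused_lanes & 0xf;

        m->unused_lanes >>= 4;  /* drop the lane from the free list */
        m->lanes_in_use++;
        m->job_in_lane[lane] = job;
        return lane;
}

/* Equivalent of GET_MIN_LENGTH_X4_SSE: smallest length and its lane index */
static uint32_t get_min_length_x4(const uint32_t lens[4], unsigned int *index)
{
        uint32_t min_len = lens[0];
        unsigned int i, min_idx = 0;

        for (i = 1; i < 4; i++)
                if (lens[i] < min_len) {
                        min_len = lens[i];
                        min_idx = i;
                }

        *index = min_idx;
        return min_len;
}
```

Keeping the free-lane list in one packed integer lets the assembly pop and push lanes with a shift and an OR instead of maintaining a separate array in memory.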
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SUBMIT_FLUSH_JOB_SNOW3G_UEA2_SSE 25 +%define %%SUBMIT_FLUSH %1 ;; [in] submit/flush selector +%define %%UNUSED_LANES %2 ;; [clobbered] GP register +%define %%LANE %3 ;; [clobbered] GP register +%define %%TGP0 %4 ;; [clobbered] GP register +%define %%TGP1 %5 ;; [clobbered] GP register +%define %%TGP2 %6 ;; [clobbered] GP register +%define %%TGP3 %7 ;; [clobbered] GP register +%define %%TGP4 %8 ;; [clobbered] GP register +%define %%TGP5 %9 ;; [clobbered] GP register +%define %%TMP_XMM_0 %10 ;; [clobbered] xmm register +%define %%TMP_XMM_1 %11 ;; [clobbered] xmm register +%define %%TMP_XMM_2 %12 ;; [clobbered] xmm register +%define %%TMP_XMM_3 %13 ;; [clobbered] xmm register +%define %%TMP_XMM_4 %14 ;; [clobbered] xmm register +%define %%TMP_XMM_5 %15 ;; [clobbered] xmm register +%define %%TMP_XMM_6 %16 ;; [clobbered] xmm register +%define %%TMP_XMM_7 %17 ;; [clobbered] xmm register +%define %%TMP_XMM_8 %18 ;; [clobbered] xmm register +%define %%TMP_XMM_9 %19 ;; [clobbered] xmm register +%define %%TMP_XMM_10 %20 ;; [clobbered] xmm register +%define %%TMP_XMM_11 %21 ;; [clobbered] xmm register +%define %%TMP_XMM_12 %22 ;; [clobbered] xmm register +%define %%TMP_XMM_13 %23 ;; [clobbered] xmm register +%define %%TMP_XMM_14 %24 ;; [clobbered] xmm register +%define %%TMP_XMM_15 %25 ;; [clobbered] xmm register + + SNOW3G_FUNC_START + xor job_rax, job_rax ;; assume NULL return job + +%ifidn %%SUBMIT_FLUSH, submit + GET_UNUSED_LANE_SSE state + _snow3g_unused_lanes, \ + state + _snow3g_lanes_in_use, \ + state + _snow3g_job_in_lane, \ + job, %%LANE, %%UNUSED_LANES + + mov %%TGP1, [job + _enc_keys] + mov %%TGP2, [job + _iv] + + ;; --------------------------------------------------------------------- + ;; Initialize LFSR and FSM registers + ;; [LD_ST_MASK + 4*%%LANE] = 0 + ;; [LD_ST_MASK + 4*4+ 4*%%LANE] = 0 + ;; LD_ST_MASK field from state is later used to determine if any data + ;; should be read from src and written to dst. + ;; When mask is set to 0 so no reads/writes occur. + SNOW3G_INIT_LANE_SSE state, %%LANE, %%TGP1, %%TGP2, %%TMP_XMM_0, \ + %%TMP_XMM_1, %%TMP_XMM_2 + + ;; 32 iterations in Init mode are required + ;; details of _snow3g_lens dw fields are in FLUSH section + mov dword [state + _snow3g_lens + 32 + 4*%%LANE], 32 + mov dword [state + _snow3g_lens + 4*%%LANE], 4 + + mov %%TGP0, [job + _msg_len_to_cipher_in_bits] + shr %%TGP0, 3 + + mov dword [state + _snow3g_args_byte_length + %%LANE*4], DWORD(%%TGP0) + + mov %%TGP0, [job + _cipher_start_offset_in_bits] + shr %%TGP0, 3 ;; convert from bits to bytes (src & dst) + mov %%TGP1, [job + _dst] + add %%TGP1, %%TGP0 + add %%TGP0, [job + _src] + + mov [state + _snow3g_args_in + %%LANE*8], %%TGP0 + mov [state + _snow3g_args_out + %%LANE*8], %%TGP1 + + cmp word [state + _snow3g_lanes_in_use], 4 + jne %%return_uea2 + ;; if all lanes are busy fall through to %%process_job_uea2 + +%else ;; FLUSH + ;; --------------------------------------------------------------------- + ;; All lanes are busy or flush is called - process used lanes until + ;; one job is done. + ;; --------------------------------------------------------------------- + ;; Each of the lanes can be in any stage: INIT1, INIT2, KEYGEN, FINISHED + ;; and they can be processed in parallel by the algorithmic code. 
+ ;; START -> INIT1 -> INIT2 -> KEYGEN -> COMPLETE + ;; --------------------------------------------------------------------- + ;; State of the job is identified with: + ;; _snow3g_args_LD_ST_MASK + ;; *dwords 4:7 - determines if INIT1 phase is done + ;; *dwords 0:3 - determines if lane is KEYGEN state + ;; -> yes: all bits set in dw per given lane + ;; -> no : set to 0 + ;; _snow3g_args_byte_length + ;; message lengths to be processed (bytes). Decreased appropriately + ;; if particular lane is in KEYGEN phase + ;; _snow3g_lens: + ;; *dwords 0:3 - indicating final 0-4 bytes to be outputted for the + ;; lane per SNOW3G_INIT_LANE_SSE macro call + ;; *dword 4 - common minimum length in double words (rounded up) + ;; *dwords 5:7 - unused + ;; *dwords 8:11 - length in dwords to be processed per lane in + ;; given processing phase(rounded up) + ;; --------------------------------------------------------------------- + cmp word [state + _snow3g_lanes_in_use], 0 + je %%return_uea2 +%endif + +%%_find_min: +%define ROUNDED_DW_LENS _snow3g_lens+32 +%define KEYGEN_STAGE _snow3g_args_LD_ST_MASK +%define INIT1_DONE _snow3g_args_LD_ST_MASK+16 + ;; Find minimum length. If lane is empty min length is set to 0xffffffff + GET_MIN_LENGTH_X4_SSE state + ROUNDED_DW_LENS, %%TGP1, %%TGP0, %%LANE + or %%TGP0, %%TGP0 + jz %%_len_is_0 + + ;; fill %%TMP_XMM_0 with common length values per initialized length + ;; to be subtracted from remaining byte lengths and rounded dw lengths + movd %%TMP_XMM_0, DWORD(%%TGP0) + pshufd %%TMP_XMM_0, %%TMP_XMM_0, 0 + + ;; Create mask with lanes in use + pxor %%TMP_XMM_2, %%TMP_XMM_2 + pxor %%TMP_XMM_3, %%TMP_XMM_3 + pcmpeqq %%TMP_XMM_2, [state + _snow3g_job_in_lane] + pcmpeqq %%TMP_XMM_3, [state + _snow3g_job_in_lane + 16] + pshufd %%TMP_XMM_2, %%TMP_XMM_2, 0x88 ;; lane order: 1,0,1,0 + pshufd %%TMP_XMM_3, %%TMP_XMM_3, 0x88 ;; lane order: 3,2,3,2 + pblendw %%TMP_XMM_2, %%TMP_XMM_3, 0xf0 + pandn %%TMP_XMM_2, %%TMP_XMM_0 + + ;; Decrease rouded dw lengths remaining for processing + movdqa %%TMP_XMM_5, [state + ROUNDED_DW_LENS] + psubd %%TMP_XMM_5, %%TMP_XMM_2 + movdqa [state + ROUNDED_DW_LENS], %%TMP_XMM_5 + + ;; Set all bits in dws where rounded dw length is bigger than original + ;; byte lengths and lane is initialized + pslld %%TMP_XMM_0, 2 ;; common length in bytes + pand %%TMP_XMM_0, [state + KEYGEN_STAGE] + movdqa %%TMP_XMM_1, %%TMP_XMM_0 + pcmpgtd %%TMP_XMM_1, [state + _snow3g_args_byte_length] + movdqa %%TMP_XMM_2, %%TMP_XMM_1 + pand %%TMP_XMM_1, [state + _snow3g_args_byte_length] + + pxor %%TMP_XMM_2, [rel all_fs] + pand %%TMP_XMM_0, %%TMP_XMM_2 + por %%TMP_XMM_0, %%TMP_XMM_1 + + ;; Write outstanding bytes to _snow3g_lens dwords [0:3] and adjust + ;; _snow3g_args_byte_length so after common dw length subtraction + ;; it is set to 0 + pand %%TMP_XMM_1, [rel last_3_bytes] + pand %%TMP_XMM_2, [state+_snow3g_lens] + por %%TMP_XMM_1, %%TMP_XMM_2 + movdqa [state + _snow3g_lens], %%TMP_XMM_1 + + ;; Subtract Common dw length from all byte lengths + movdqa %%TMP_XMM_4, [state+_snow3g_args_byte_length] + psubd %%TMP_XMM_4, %%TMP_XMM_0 + movdqa [state+_snow3g_args_byte_length], %%TMP_XMM_4 + + ;; Do cipher / clock operation for all lanes and given common length + SNOW3G_ENC_DEC state, %%TGP0, %%TGP1, %%TGP2, %%TGP3, %%TGP4, %%TGP5, \ + %%TMP_XMM_0, %%TMP_XMM_1, %%TMP_XMM_2, %%TMP_XMM_3, \ + %%TMP_XMM_4, %%TMP_XMM_5, %%TMP_XMM_6, %%TMP_XMM_7, \ + %%TMP_XMM_8, %%TMP_XMM_9, %%TMP_XMM_10, %%TMP_XMM_11, \ + %%TMP_XMM_12, %%TMP_XMM_13, %%TMP_XMM_14, %%TMP_XMM_15 + + jmp %%_find_min + 
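The %%_find_min loop above is the multi-buffer scheduler for the cipher path: each pass computes the common minimum of the per-lane rounded-up dword counts (empty lanes report 0xffffffff), subtracts that amount from every active lane, fixes up the remaining byte lengths and the 0-3 trailing bytes, and lets SNOW3G_ENC_DEC advance all four LFSR/FSM instances by that many keystream dwords; the lane whose count reaches zero falls through to %%_len_is_0. A reduced C sketch of just this scheduling loop, under illustrative names and without the cipher kernel, looks as follows.

```c
/*
 * Scheduling sketch only (illustrative names): the cipher kernel itself
 * corresponds to the SNOW3G_ENC_DEC macro above and is not shown.
 */
#include <stdint.h>

#define NUM_LANES 4

struct demo_snow3g_lane {
        uint32_t dw_len;        /* rounded-up dword count for the current phase */
        int active;             /* lane has a job attached */
};

/* advance all active lanes in lock step until one of them hits length 0 */
static unsigned int process_until_one_done(struct demo_snow3g_lane lane[NUM_LANES])
{
        for (;;) {
                uint32_t min_len = UINT32_MAX;
                unsigned int i, min_idx = NUM_LANES;

                /* common minimum across lanes (empty lanes report UINT32_MAX) */
                for (i = 0; i < NUM_LANES; i++)
                        if (lane[i].active && lane[i].dw_len < min_len) {
                                min_len = lane[i].dw_len;
                                min_idx = i;
                        }

                if (min_len == 0 || min_idx == NUM_LANES)
                        return min_idx; /* a lane finished its current phase */

                /*
                 * Advance every active lane by the common amount; this is
                 * where the assembly runs SNOW3G_ENC_DEC to clock the four
                 * LFSR/FSM instances for min_len keystream dwords.
                 */
                for (i = 0; i < NUM_LANES; i++)
                        if (lane[i].active)
                                lane[i].dw_len -= min_len;
        }
}
```

Processing by the common minimum lets all four lanes be clocked in lock step by SIMD code while each buffer still stops exactly at its own phase boundary.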
+%%_len_is_0: + ;; --------------------------------------------------------------------- + ;; 3 states are possible here for the lane with length 0: + ;; INIT1 done -> set DW length to 1 and update LD_ST_MASK + ;; INIT2 done -> set DW length to bytelength rounded up to dws and + ;; update LD_ST_MASK + ;; COMPLETED -> update length and return job + ;; check if the job is in one of INIT1 or INIT2 + ;; lane with len 0 index is %%LANE + ;; --------------------------------------------------------------------- + test dword [state + KEYGEN_STAGE + %%LANE*4], 0xffffffff + jne %%process_completed_job_submit_uea2 + + ;; check if INIT1 stage is done + test dword [state + INIT1_DONE + %%LANE*4], 0xffffffff + jne %%_init_done + + ;; mark INIT1 completed and set length to 1DW for INIT2 stage + mov dword [state + INIT1_DONE + %%LANE*4], 0xffffffff + mov dword [state + ROUNDED_DW_LENS + %%LANE*4], 1 + + jmp %%_find_min + +%%_init_done: + mov dword [state + KEYGEN_STAGE + %%LANE*4], 0xffffffff + + ;; length in double words = original length in bytes / 4 + ;; odd bytes are rounded up + mov DWORD(%%TGP0), [state + _snow3g_args_byte_length + %%LANE*4] + mov DWORD(%%TGP1), DWORD(%%TGP0) + shr %%TGP0, 2 + and %%TGP1, 3 + je %%_no_rounding_up + inc %%TGP0 +%%_no_rounding_up: + mov dword [state + ROUNDED_DW_LENS + %%LANE*4], DWORD (%%TGP0) + jmp %%_find_min + +%%process_completed_job_submit_uea2: + ;; COMPLETE: return job, change job dw length to UINT32_MAX, set masks + ;; to not initialized + mov dword [state + ROUNDED_DW_LENS + %%LANE*4], 0xffffffff + mov dword [state + KEYGEN_STAGE + %%LANE*4], 0 + mov dword [state + INIT1_DONE + %%LANE*4], 0 + + ;; decrement number of jobs in use + dec word [state + _snow3g_lanes_in_use] + + mov job_rax, [state + _snow3g_job_in_lane + %%LANE*8] + or qword [job_rax + _status], IMB_STATUS_COMPLETED_CIPHER + + mov %%UNUSED_LANES, [state + _snow3g_unused_lanes] + mov qword [state + _snow3g_job_in_lane + %%LANE*8], 0 + shl %%UNUSED_LANES, 4 + or %%UNUSED_LANES, %%LANE + mov [state + _snow3g_unused_lanes], %%UNUSED_LANES + +%ifdef SAFE_DATA + ;; clear finished job lane, %%LANE is an index of finished job + mov dword [state + _snow3g_args_LFSR_0 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_1 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_2 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_3 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_4 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_5 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_6 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_7 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_8 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_9 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_10 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_11 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_12 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_13 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_14 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_LFSR_15 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_FSM_1 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_FSM_2 + 4*%%LANE], 0 + mov dword [state + _snow3g_args_FSM_3 + 4*%%LANE], 0 + + ;; clear key stream stack frame + pxor %%TMP_XMM_0, %%TMP_XMM_0 + ;; _keystream clean is part of submit as well under return_uea2 label + movdqa [rsp + _keystream + 1 * 16], %%TMP_XMM_0 + movdqa [rsp + _keystream + 2 * 16], %%TMP_XMM_0 + movdqa [rsp + _keystream + 3 * 16], %%TMP_XMM_0 +%endif + +%%return_uea2: + +%ifdef SAFE_DATA + 
;; clear temporarily stored swapped IV (done inside of submit) + pxor %%TMP_XMM_0, %%TMP_XMM_0 + movdqa [rsp + _keystream], %%TMP_XMM_0 +%endif + + SNOW3G_FUNC_END + +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; JOB* SUBMIT_JOB_SNOW3G_UEA2(MB_MGR_SNOW3G_OOO *state, IMB_JOB *job) +;; arg 1 : state +;; arg 2 : job +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +MKGLOBAL(SUBMIT_JOB_SNOW3G_UEA2,function,internal) +SUBMIT_JOB_SNOW3G_UEA2: + SUBMIT_FLUSH_JOB_SNOW3G_UEA2_SSE submit, tmp_gp1, tmp_gp2, tmp_gp3, \ + tmp_gp4, tmp_gp5, tmp_gp6, tmp_gp7, \ + tmp_gp8, xmm0, xmm1, xmm2, xmm3, xmm4, \ + xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, \ + xmm11, xmm12, xmm13, xmm14, xmm15 + + ret + +MKGLOBAL(FLUSH_JOB_SNOW3G_UEA2,function,internal) +FLUSH_JOB_SNOW3G_UEA2: + SUBMIT_FLUSH_JOB_SNOW3G_UEA2_SSE flush, tmp_gp1, tmp_gp2, tmp_gp3, tmp_gp4,\ + tmp_gp5, tmp_gp6, tmp_gp7, tmp_gp8, xmm0, \ + xmm1, xmm2, xmm3, xmm4, xmm5, xmm6, xmm7, \ + xmm8, xmm9, xmm10, xmm11, xmm12, xmm13, \ + xmm14, xmm15 + ret +mksection stack-noexec diff --git a/lib/sse_t1/mb_mgr_snow3g_uia2_submit_flush_x4_sse.asm b/lib/sse_t1/mb_mgr_snow3g_uia2_submit_flush_x4_sse.asm new file mode 100644 index 0000000000000000000000000000000000000000..4490a7485d9f4fcbc3628b269e9d290816334024 --- /dev/null +++ b/lib/sse_t1/mb_mgr_snow3g_uia2_submit_flush_x4_sse.asm @@ -0,0 +1,307 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
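To summarize the UEA2 manager completed above: when a lane's dword count hits zero, %%_len_is_0 moves that lane through a small per-lane state machine - 32 init-mode clocks (INIT1), one extra initialization dword (INIT2), then keystream generation for the message length rounded up to dwords (KEYGEN) - and only a lane that has finished KEYGEN returns its job. A compact C sketch of these transitions, with illustrative names, is shown below.

```c
/*
 * Illustrative per-lane phase handling matching the %%_len_is_0 logic of
 * the UEA2 manager above; names are not the library's.
 */
#include <stdint.h>

enum lane_phase { PHASE_INIT1, PHASE_INIT2, PHASE_KEYGEN };

struct demo_lane_state {
        enum lane_phase phase;
        uint32_t dw_len;        /* rounded dword count for the current phase */
        uint32_t byte_len;      /* message length in bytes */
};

/* called when dw_len reaches 0; returns 1 when the job is complete */
static int on_phase_done(struct demo_lane_state *s)
{
        switch (s->phase) {
        case PHASE_INIT1:
                /* 32 init-mode clocks done: one more dword for INIT2 */
                s->phase = PHASE_INIT2;
                s->dw_len = 1;
                return 0;
        case PHASE_INIT2:
                /* keystream mode: message length in dwords, rounded up */
                s->phase = PHASE_KEYGEN;
                s->dw_len = (s->byte_len + 3) / 4;
                return 0;
        case PHASE_KEYGEN:
        default:
                /* keystream fully generated: job can be returned */
                return 1;
        }
}
```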
+;; + +%include "include/os.asm" +%include "include/imb_job.asm" +%include "include/mb_mgr_datastruct.asm" +%include "include/reg_sizes.asm" +%include "include/clear_regs.asm" +%include "sse_t1/snow3g_uea2_by4_sse.asm" + +%define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_sse +%define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_sse +%define SNOW3G_F9_1_BUFFER_INT snow3g_f9_1_buffer_internal_sse + +%define APPEND(a,b) a %+ b + +extern SNOW3G_F9_1_BUFFER_INT + +mksection .text +%ifdef LINUX +%define arg1 rdi +%define arg2 rsi +%define arg3 rdx +%else +%define arg1 rcx +%define arg2 rdx +%define arg3 r8 +%endif + +%define state arg1 +%define job arg2 +%define job_rax rax + +%define tmp_gp0 rbx +%define tmp_gp1 rbp +%define tmp_gp2 r9 +%define tmp_gp3 r10 +%define init_lanes r11 +%define tmp_state r12 +%define tmp_gp4 r13 +%define tmp_gp5 r14 +%define tmp_gp6 r15 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Get lane nr from ptr to the list of unused lanes. +;; Remove returned lane nr from the list +;; Increase lanes in use. +;; Put job ptr in appropriate lane field in state (arg %3) +;; Assumptions: +;; In (arg %1) single lane nr takes 4 bits and 1st free lane nr is lowest 4 bits +;; Job ptr in (arg %3) takes 8 bytes +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro GET_UNUSED_LANE_SSE 6 +%define %%LANE_LIST %1 ;; [in] ptr to unused lane list +%define %%LANES_IN_USE %2 ;; [in] ptr to lanes in use count +%define %%JOB_LANES %3 ;; [in] ptr to list of jobs +%define %%JOB %4 ;; [in] ptr to job structure +%define %%LANE_NR %5 ;; [out] GP register to fill with unused lane nr +%define %%UNUSED_LANES %6 ;; [clobbered] GP register + + mov DWORD(%%UNUSED_LANES), [%%LANE_LIST] + mov DWORD(%%LANE_NR), DWORD(%%UNUSED_LANES) + and DWORD(%%LANE_NR), 0x3 + ;; remove picked lane nr from list of unused lanes + shr DWORD(%%UNUSED_LANES), 4 + mov [%%LANE_LIST], DWORD(%%UNUSED_LANES) + + add qword [%%LANES_IN_USE], 1 + mov [%%JOB_LANES + %%LANE_NR*8], %%JOB +%endmacro + +%macro SUBMIT_FLUSH_JOB_SNOW3G_UIA2 24 +%define %%SUBMIT_FLUSH %1 ;; [in] submit/flush selector +%define %%UNUSED_LANES %2 ;; [clobbered] GP register +%define %%LANE %3 ;; [clobbered] GP register +%define %%TGP0 %4 ;; [clobbered] GP register +%define %%TGP1 %5 ;; [clobbered] GP register +%define %%TGP2 %6 ;; [clobbered] GP register +%define %%TGP3 %7 ;; [clobbered] GP register +%define %%TGP4 %8 ;; [clobbered] GP register +%define %%TMP_XMM_0 %9 ;; [clobbered] xmm register +%define %%TMP_XMM_1 %10 ;; [clobbered] xmm register +%define %%TMP_XMM_2 %11 ;; [clobbered] xmm register +%define %%TMP_XMM_3 %12 ;; [clobbered] xmm register +%define %%TMP_XMM_4 %13 ;; [clobbered] xmm register +%define %%TMP_XMM_5 %14 ;; [clobbered] xmm register +%define %%TMP_XMM_6 %15 ;; [clobbered] xmm register +%define %%TMP_XMM_7 %16 ;; [clobbered] xmm register +%define %%TMP_XMM_8 %17 ;; [clobbered] xmm register +%define %%TMP_XMM_9 %18 ;; [clobbered] xmm register +%define %%TMP_XMM_10 %19 ;; [clobbered] xmm register +%define %%TMP_XMM_11 %20 ;; [clobbered] xmm register +%define %%TMP_XMM_12 %21 ;; [clobbered] xmm register +%define %%TMP_XMM_13 %22 ;; [clobbered] xmm register +%define %%TMP_XMM_14 %23 ;; [clobbered] xmm register +%define %%TMP_XMM_15 %24 ;; [clobbered] xmm register + + SNOW3G_FUNC_START + xor job_rax, job_rax ;; assume NULL return job + +%ifidn %%SUBMIT_FLUSH, submit + GET_UNUSED_LANE_SSE state + _snow3g_unused_lanes, \ + state + _snow3g_lanes_in_use, \ + state + 
_snow3g_job_in_lane, \ + job, %%LANE, %%UNUSED_LANES + + ;; copy src, key, iv and len to OOO mgr + mov %%TGP0, [job + _hash_start_src_offset_in_bytes] + add %%TGP0, [job + _src] + mov [state + _snow3g_args_in + %%LANE*8], %%TGP0 + + mov %%TGP0, [job + _snow3g_uia2_key] + mov [state + _snow3g_args_keys + %%LANE*8], %%TGP0 + + mov %%TGP0, [job + _snow3g_uia2_iv] + mov [state + _snow3g_args_IV + %%LANE*8], %%TGP0 + + mov %%TGP0, [job + _msg_len_to_hash_in_bits] + mov [state + _snow3g_lens + %%LANE*4], DWORD(%%TGP0) + + cmp qword [state + _snow3g_lanes_in_use], 4 + jne %%return_null_uia2 + + ;; all lanes full but no jobs initialized - do init + ;; at least 1 job initialized - process next job + cmp word [state + _snow3g_init_done], 0 + jz %%init_all_lanes_uia2 + + ;; find next initialized job lane + xor DWORD(%%LANE), DWORD(%%LANE) + bsf WORD(%%LANE), [state + _snow3g_init_done] + +%else ;; FLUSH + + ;; check ooo mgr empty + cmp qword [state + _snow3g_lanes_in_use], 0 + jz %%return_null_uia2 + + ;; check for initialized jobs + xor %%LANE, %%LANE + movzx DWORD(%%TGP0), word [state + _snow3g_init_done] + bsf WORD(%%LANE), WORD(%%TGP0) + jnz %%process_job_uia2 + + ;; no initialized jobs found + ;; - find valid job + ;; - copy valid job fields to empty lanes + ;; - initialize all lanes + + ;; find a valid lane + xor init_lanes, init_lanes +%assign i 0 +%rep 4 + cmp qword [state + _snow3g_job_in_lane + (i*8)], 0 + je APPEND(skip_lane_,i) + mov WORD(%%LANE), i + bts WORD(init_lanes), i ;; build init lanes mask +APPEND(skip_lane_,i): +%assign i (i+1) +%endrep + + ;; copy valid lane pointers to empty lanes + mov %%TGP0, [state + _snow3g_args_in + %%LANE*8] + mov %%TGP1, [state + _snow3g_args_keys + %%LANE*8] + mov %%TGP2, [state + _snow3g_args_IV + %%LANE*8] + +%assign i 0 +%rep 4 + bt WORD(init_lanes), i + jc APPEND(skip_lane_copy_,i) ;; skip copy for valid lanes + ;; empty lane - copy good job pointers + mov [state + _snow3g_args_in + i*8], %%TGP0 + mov [state + _snow3g_args_keys + i*8], %%TGP1 + mov [state + _snow3g_args_IV + i*8], %%TGP2 +APPEND(skip_lane_copy_,i): +%assign i (i+1) +%endrep + jmp %%init_lanes_uia2 + +%endif ;;submit/flush + +%%process_job_uia2: + ;; preserve state for function call + mov tmp_state, state + + mov arg1, [tmp_state + _snow3g_args_in + %%LANE*8] + lea arg2, [%%LANE*8] + lea arg2, [tmp_state + _snow3g_ks + arg2*4] ;; arg2*4 = %%LANE*32 + mov DWORD(arg3), [tmp_state + _snow3g_lens + %%LANE*4] + + call SNOW3G_F9_1_BUFFER_INT + + ;; restore state + mov state, tmp_state + + ;; copy digest temporarily + mov DWORD(%%TGP0), eax + +%%process_completed_job_submit_uia2: + ; process completed job "%%LANE" + ;; - decrement number of jobs in use + sub qword [state + _snow3g_lanes_in_use], 1 + mov job_rax, [state + _snow3g_job_in_lane + %%LANE*8] + mov %%UNUSED_LANES, [state + _snow3g_unused_lanes] + mov qword [state + _snow3g_job_in_lane + %%LANE*8], 0 + or dword [job_rax + _status], IMB_STATUS_COMPLETED_AUTH + ; Copy digest to auth tag output + mov %%TGP1, [job_rax + _auth_tag_output] + mov [%%TGP1], DWORD(%%TGP0) + shl %%UNUSED_LANES, 4 + or %%UNUSED_LANES, %%LANE + mov [state + _snow3g_unused_lanes], %%UNUSED_LANES + btr [state + _snow3g_init_done], WORD(%%LANE) + +%ifdef SAFE_DATA + ;; clear keystream for processed job + pxor %%TMP_XMM_0, %%TMP_XMM_0 + shl WORD(%%LANE), 5 ;; ks stored at 32 byte offsets + movdqa [state + _snow3g_ks + %%LANE], %%TMP_XMM_0 + movdqa [state + _snow3g_ks + 16 + %%LANE], %%TMP_XMM_0 +%endif + + jmp %%return_uia2 + +%%init_all_lanes_uia2: + ;; set 
initialized lanes mask for all 4 lanes + ;; this is used to update OOO MGR after initialization + mov DWORD(init_lanes), 0xf + +%%init_lanes_uia2: + + ;; multi-buffer init + 5 dw of KS gen + lea %%TGP0, [state + _snow3g_ks] + + SNOW3G_AUTH_INIT_5_BY_4 {state + _snow3g_args_keys}, \ + {state + _snow3g_args_IV}, \ + %%TGP0, %%TGP1, %%TGP2, %%TGP3, %%TGP4, \ + %%TMP_XMM_0, %%TMP_XMM_1, %%TMP_XMM_2, \ + %%TMP_XMM_3, %%TMP_XMM_4, %%TMP_XMM_5, \ + %%TMP_XMM_6, %%TMP_XMM_7, %%TMP_XMM_8, \ + %%TMP_XMM_9, %%TMP_XMM_10, %%TMP_XMM_11, \ + %%TMP_XMM_12, %%TMP_XMM_13, %%TMP_XMM_14, \ + %%TMP_XMM_15, state + + ;; update init_done for valid initialized lanes + mov [state + _snow3g_init_done], WORD(init_lanes) + bsf WORD(%%LANE), WORD(init_lanes) + + ;; process first job + jmp %%process_job_uia2 + +%%return_null_uia2: + xor job_rax, job_rax + +%%return_uia2: + SNOW3G_FUNC_END + +%endmacro + +; JOB* SUBMIT_JOB_SNOW3G_UIA2(MB_MGR_SNOW3G_OOO *state, IMB_JOB *job) +; arg 1 : state +; arg 2 : job +MKGLOBAL(SUBMIT_JOB_SNOW3G_UIA2,function,internal) +SUBMIT_JOB_SNOW3G_UIA2: + SUBMIT_FLUSH_JOB_SNOW3G_UIA2 submit, tmp_gp0, tmp_gp1, \ + tmp_gp2, tmp_gp3, tmp_gp4, tmp_gp5, \ + tmp_gp6, xmm0, xmm1, xmm2, xmm3, xmm4, \ + xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, \ + xmm11, xmm12, xmm13, xmm14, xmm15 + ret + +; JOB* FLUSH_JOB_SNOW3G_UIA2(MB_MGR_SNOW3G_OOO *state) +; arg 1 : state +MKGLOBAL(FLUSH_JOB_SNOW3G_UIA2,function,internal) +FLUSH_JOB_SNOW3G_UIA2: + SUBMIT_FLUSH_JOB_SNOW3G_UIA2 flush, tmp_gp0, tmp_gp1, \ + tmp_gp2, tmp_gp3, tmp_gp4, tmp_gp5, \ + tmp_gp6, xmm0, xmm1, xmm2, xmm3, xmm4, \ + xmm5, xmm6, xmm7, xmm8, xmm9, xmm10, \ + xmm11, xmm12, xmm13, xmm14, xmm15 + ret + +mksection stack-noexec diff --git a/lib/sse_t1/mb_mgr_sse.c b/lib/sse_t1/mb_mgr_sse.c new file mode 100644 index 0000000000000000000000000000000000000000..fe09e2f423ffbb048bc33719af7689a5df885a4c --- /dev/null +++ b/lib/sse_t1/mb_mgr_sse.c @@ -0,0 +1,102 @@ +/******************************************************************************* + Copyright (c) 2012-2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
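A note on the UIA2 manager completed above: submitted jobs are only buffered until four lanes are occupied (or a flush arrives); all four lanes are then initialized in one pass by SNOW3G_AUTH_INIT_5_BY_4, and each digest is finished one at a time by snow3g_f9_1_buffer_internal_sse. On flush, any empty lane borrows the input/key/IV pointers of a valid lane so that the four-wide initialization never touches uninitialized memory. A C sketch of that flush preparation, with illustrative names, is:

```c
/*
 * Illustrative sketch of the UIA2 flush preparation above: duplicate a
 * valid lane's pointers into empty lanes so the 4-wide initialization
 * only reads valid memory; the returned mask marks lanes holding real
 * jobs (the "init_lanes" mask). Field names are illustrative.
 */
#include <stddef.h>

#define NUM_LANES 4

struct demo_uia2_mgr {
        void *job_in_lane[NUM_LANES];
        const void *in[NUM_LANES];
        const void *keys[NUM_LANES];
        const void *iv[NUM_LANES];
};

/* the caller guarantees at least one lane holds a job */
static unsigned int prepare_lanes_for_init(struct demo_uia2_mgr *m)
{
        unsigned int i, valid = 0, mask = 0;

        for (i = 0; i < NUM_LANES; i++)
                if (m->job_in_lane[i] != NULL) {
                        valid = i;              /* remember a valid lane */
                        mask |= 1u << i;        /* build the init mask */
                }

        for (i = 0; i < NUM_LANES; i++)
                if (!(mask & (1u << i))) {
                        /* empty lane: borrow the valid lane's pointers */
                        m->in[i] = m->in[valid];
                        m->keys[i] = m->keys[valid];
                        m->iv[i] = m->iv[valid];
                }

        return mask;
}
```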
+*******************************************************************************/ + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/error.h" +#include "include/cpu_feature.h" +#include "include/aesni_emu.h" +#include "include/arch_x86_64.h" + +IMB_DLL_LOCAL void +init_mb_mgr_sse_internal(IMB_MGR *state, const int reset_mgrs) +{ +#ifdef SAFE_PARAM + if (state == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_MBMGR); + return; + } +#endif + + if (!(state->features & IMB_FEATURE_AESNI)) { + fallback_no_aesni(state, 1); + return; + } + + state->features = cpu_feature_adjust(state->flags, + cpu_feature_detect()); + + /* reset error status */ + imb_set_errno(state, 0); + + if ((state->features & IMB_CPUFLAGS_SSE_T3) == IMB_CPUFLAGS_SSE_T3) + init_mb_mgr_sse_t3_internal(state, reset_mgrs); + else if ((state->features & IMB_CPUFLAGS_SSE_T2) == IMB_CPUFLAGS_SSE_T2) + init_mb_mgr_sse_t2_internal(state, reset_mgrs); + else + init_mb_mgr_sse_t1_internal(state, reset_mgrs); +} + +void +init_mb_mgr_sse(IMB_MGR *state) +{ + init_mb_mgr_sse_internal(state, 1); + + if (!self_test(state)) + imb_set_errno(state, IMB_ERR_SELFTEST); +} + +IMB_JOB *submit_job_sse(IMB_MGR *state) +{ + return IMB_SUBMIT_JOB(state); +} + +IMB_JOB *flush_job_sse(IMB_MGR *state) +{ + return IMB_FLUSH_JOB(state); +} + +uint32_t queue_size_sse(IMB_MGR *state) +{ + return IMB_QUEUE_SIZE(state); +} + +IMB_JOB *submit_job_nocheck_sse(IMB_MGR *state) +{ + return IMB_SUBMIT_JOB_NOCHECK(state); +} + +IMB_JOB *get_next_job_sse(IMB_MGR *state) +{ + return IMB_GET_NEXT_JOB(state); +} + +IMB_JOB *get_completed_job_sse(IMB_MGR *state) +{ + return IMB_GET_COMPLETED_JOB(state); +} + diff --git a/lib/sse_t1/mb_mgr_sse_t1.c b/lib/sse_t1/mb_mgr_sse_t1.c new file mode 100644 index 0000000000000000000000000000000000000000..afbaf667160c6d105e02c44fb80eefb60a8eed8b --- /dev/null +++ b/lib/sse_t1/mb_mgr_sse_t1.c @@ -0,0 +1,490 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
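The new mb_mgr_sse.c above acts as a thin run-time dispatcher: init_mb_mgr_sse_internal() inspects the detected CPU features and installs the SSE type-1, type-2 or type-3 code paths, while submit_job_sse(), flush_job_sse() and the other wrappers simply forward to the function pointers held in IMB_MGR. As a usage sketch (not part of the patch), an application drives this manager through the public job API roughly as follows; job field setup is elided.

```c
/*
 * Usage sketch (not part of the patch): driving the SSE manager through
 * the public job API. Job field setup is elided for brevity.
 */
#include <stdio.h>
#include "ipsec-mb.h"

static int run_jobs_example(void)
{
        IMB_MGR *mgr = alloc_mb_mgr(0);
        IMB_JOB *job;

        if (mgr == NULL)
                return -1;

        init_mb_mgr_sse(mgr);   /* selects the T1/T2/T3 SSE path internally */
        if (imb_get_errno(mgr) != 0) {
                fprintf(stderr, "init failed: %s\n",
                        imb_get_strerror(imb_get_errno(mgr)));
                free_mb_mgr(mgr);
                return -1;
        }

        job = IMB_GET_NEXT_JOB(mgr);
        /* ... fill in cipher/hash parameters of the job here ... */
        job = IMB_SUBMIT_JOB(mgr);

        /* jobs may complete out of order; collect and then flush the rest */
        while (job != NULL)
                job = IMB_GET_COMPLETED_JOB(mgr);
        while (IMB_FLUSH_JOB(mgr) != NULL)
                ;

        free_mb_mgr(mgr);
        return 0;
}
```

IMB_SUBMIT_JOB may return an earlier job (or NULL) because the out-of-order managers complete work lane by lane; IMB_FLUSH_JOB forces the remaining lanes to finish.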
+*******************************************************************************/ + +#include +#include +#include + +#define SSE + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/kasumi_interface.h" +#include "include/zuc_internal.h" +#include "include/snow3g.h" +#include "include/gcm.h" +#include "include/chacha20_poly1305.h" +#include "include/snow3g_submit.h" + +#include "include/save_xmms.h" +#include "include/des.h" +#include "include/cpu_feature.h" +#include "include/noaesni.h" +#include "include/aesni_emu.h" +#include "include/error.h" + +#include "include/arch_sse_type1.h" + +#include "include/ooo_mgr_reset.h" + +#define SAVE_XMMS save_xmms +#define RESTORE_XMMS restore_xmms + +/* JOB API */ +#define SUBMIT_JOB submit_job_sse_t1 +#define FLUSH_JOB flush_job_sse_t1 +#define QUEUE_SIZE queue_size_sse_t1 +#define SUBMIT_JOB_NOCHECK submit_job_nocheck_sse_t1 +#define GET_NEXT_JOB get_next_job_sse_t1 +#define GET_COMPLETED_JOB get_completed_job_sse_t1 +#define GET_NEXT_BURST get_next_burst_sse_t1 +#define SUBMIT_BURST submit_burst_sse_t1 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_sse_t1 +#define FLUSH_BURST flush_burst_sse_t1 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_sse_t1 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_sse_t1 +#define SUBMIT_HASH_BURST submit_hash_burst_sse_t1 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_sse_t1 + + +/* Hash */ +#define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_SSE +#define FLUSH_JOB_HASH FLUSH_JOB_HASH_SSE + +/* Cipher encrypt / decrypt */ +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_SSE +#define FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_SSE +#define SUBMIT_JOB_CIPHER_DEC SUBMIT_JOB_CIPHER_DEC_SSE + +/* AES-GCM */ +#define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_sse +#define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_sse +#define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_sse +#define AES_GCM_ENC_IV_192 aes_gcm_enc_var_iv_192_sse +#define AES_GCM_DEC_IV_256 aes_gcm_dec_var_iv_256_sse +#define AES_GCM_ENC_IV_256 aes_gcm_enc_var_iv_256_sse + +#define SUBMIT_JOB_AES_GCM_DEC submit_job_aes_gcm_dec_sse +#define SUBMIT_JOB_AES_GCM_ENC submit_job_aes_gcm_enc_sse + +/* AES-CBC */ +#define SUBMIT_JOB_AES_CBC_128_ENC submit_job_aes128_enc_sse +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_sse +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_sse + +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_sse +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_sse +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_sse + +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_sse +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_sse +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_sse + +#define AES_CBC_DEC_128 aes_cbc_dec_128_sse +#define AES_CBC_DEC_192 aes_cbc_dec_192_sse +#define AES_CBC_DEC_256 aes_cbc_dec_256_sse + +/* AES-CBCS */ +#define SUBMIT_JOB_AES128_CBCS_1_9_ENC submit_job_aes128_cbcs_1_9_enc_sse +#define FLUSH_JOB_AES128_CBCS_1_9_ENC flush_job_aes128_cbcs_1_9_enc_sse +#define SUBMIT_JOB_AES128_CBCS_1_9_DEC submit_job_aes128_cbcs_1_9_dec_sse +#define AES_CBCS_1_9_DEC_128 aes_cbcs_1_9_dec_128_sse + +/* AES-ECB */ +#define SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_sse +#define SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_sse +#define SUBMIT_JOB_AES_ECB_192_ENC submit_job_aes_ecb_192_enc_sse +#define SUBMIT_JOB_AES_ECB_192_DEC submit_job_aes_ecb_192_dec_sse +#define SUBMIT_JOB_AES_ECB_256_ENC 
submit_job_aes_ecb_256_enc_sse +#define SUBMIT_JOB_AES_ECB_256_DEC submit_job_aes_ecb_256_dec_sse + +#define AES_ECB_ENC_128 aes_ecb_enc_128_by4_sse +#define AES_ECB_ENC_192 aes_ecb_enc_192_by4_sse +#define AES_ECB_ENC_256 aes_ecb_enc_256_by4_sse +#define AES_ECB_DEC_128 aes_ecb_dec_128_by4_sse +#define AES_ECB_DEC_192 aes_ecb_dec_192_by4_sse +#define AES_ECB_DEC_256 aes_ecb_dec_256_by4_sse + +/* AES-CTR */ +#define AES_CTR_128 aes_cntr_128_sse +#define AES_CTR_192 aes_cntr_192_sse +#define AES_CTR_256 aes_cntr_256_sse +#define AES_CTR_128_BIT aes_cntr_bit_128_sse +#define AES_CTR_192_BIT aes_cntr_bit_192_sse +#define AES_CTR_256_BIT aes_cntr_bit_256_sse + +/* AES-CCM */ +#define AES_CNTR_CCM_128 aes_cntr_ccm_128_sse +#define AES_CNTR_CCM_256 aes_cntr_ccm_256_sse + +#define FLUSH_JOB_AES128_CCM_AUTH flush_job_aes128_ccm_auth_sse +#define SUBMIT_JOB_AES128_CCM_AUTH submit_job_aes128_ccm_auth_sse + +#define FLUSH_JOB_AES256_CCM_AUTH flush_job_aes256_ccm_auth_sse +#define SUBMIT_JOB_AES256_CCM_AUTH submit_job_aes256_ccm_auth_sse + +/* AES-CMAC */ +#define FLUSH_JOB_AES128_CMAC_AUTH flush_job_aes128_cmac_auth_sse +#define SUBMIT_JOB_AES128_CMAC_AUTH submit_job_aes128_cmac_auth_sse + +#define FLUSH_JOB_AES256_CMAC_AUTH flush_job_aes256_cmac_auth_sse +#define SUBMIT_JOB_AES256_CMAC_AUTH submit_job_aes256_cmac_auth_sse + +/* AES-CFB */ +#define AES_CFB_128_ONE aes_cfb_128_one_sse +#define AES_CFB_256_ONE aes_cfb_256_one_sse + +/* AES-XCBC */ +#define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_sse +#define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_sse + +/* PON */ +#define SUBMIT_JOB_PON_ENC submit_job_pon_enc_sse +#define SUBMIT_JOB_PON_DEC submit_job_pon_dec_sse +#define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_sse +#define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_sse + +/* SHA1/224/256/384/512 */ +#define SUBMIT_JOB_SHA1 submit_job_sha1_sse +#define FLUSH_JOB_SHA1 flush_job_sha1_sse +#define SUBMIT_JOB_SHA224 submit_job_sha224_sse +#define FLUSH_JOB_SHA224 flush_job_sha224_sse +#define SUBMIT_JOB_SHA256 submit_job_sha256_sse +#define FLUSH_JOB_SHA256 flush_job_sha256_sse +#define SUBMIT_JOB_SHA384 submit_job_sha384_sse +#define FLUSH_JOB_SHA384 flush_job_sha384_sse +#define SUBMIT_JOB_SHA512 submit_job_sha512_sse +#define FLUSH_JOB_SHA512 flush_job_sha512_sse + +/* HMAC-SHA1/224/256/384/512/MD5 */ +#define SUBMIT_JOB_HMAC submit_job_hmac_sse +#define FLUSH_JOB_HMAC flush_job_hmac_sse +#define SUBMIT_JOB_HMAC_SHA_224 submit_job_hmac_sha_224_sse +#define FLUSH_JOB_HMAC_SHA_224 flush_job_hmac_sha_224_sse +#define SUBMIT_JOB_HMAC_SHA_256 submit_job_hmac_sha_256_sse +#define FLUSH_JOB_HMAC_SHA_256 flush_job_hmac_sha_256_sse +#define SUBMIT_JOB_HMAC_SHA_384 submit_job_hmac_sha_384_sse +#define FLUSH_JOB_HMAC_SHA_384 flush_job_hmac_sha_384_sse +#define SUBMIT_JOB_HMAC_SHA_512 submit_job_hmac_sha_512_sse +#define FLUSH_JOB_HMAC_SHA_512 flush_job_hmac_sha_512_sse +#define SUBMIT_JOB_HMAC_MD5 submit_job_hmac_md5_sse +#define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_sse + +/* DES & 3DES */ + +/* - default x86-64 implementation */ + +/* DES-DOCSIS */ + +/* - default x86-64 implementation */ + +/* CHACHA20 & POLY1305 */ +#define SUBMIT_JOB_CHACHA20_ENC_DEC submit_job_chacha20_enc_dec_sse +#define SUBMIT_JOB_CHACHA20_POLY1305 aead_chacha20_poly1305_sse +#define SUBMIT_JOB_CHACHA20_POLY1305_SGL aead_chacha20_poly1305_sgl_sse +#define POLY1305_MAC poly1305_mac_scalar + +/* ZUC EEA3 & EIA3 */ +#define SUBMIT_JOB_ZUC_EEA3 submit_job_zuc_eea3_no_gfni_sse +#define FLUSH_JOB_ZUC_EEA3 
flush_job_zuc_eea3_no_gfni_sse +#define SUBMIT_JOB_ZUC_EIA3 submit_job_zuc_eia3_no_gfni_sse +#define FLUSH_JOB_ZUC_EIA3 flush_job_zuc_eia3_no_gfni_sse +#define SUBMIT_JOB_ZUC256_EEA3 submit_job_zuc256_eea3_no_gfni_sse +#define FLUSH_JOB_ZUC256_EEA3 flush_job_zuc256_eea3_no_gfni_sse +#define SUBMIT_JOB_ZUC256_EIA3 submit_job_zuc256_eia3_no_gfni_sse +#define FLUSH_JOB_ZUC256_EIA3 flush_job_zuc256_eia3_no_gfni_sse + +/* SNOW-V */ +#define SUBMIT_JOB_SNOW_V snow_v_sse +#define SUBMIT_JOB_SNOW_V_AEAD snow_v_aead_init_sse + +/* SNOW3G UE2 & UIA2 */ +static IMB_JOB * +submit_snow3g_uea2_job_sse(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + if ((job->msg_len_to_cipher_in_bits & 7) || + (job->cipher_start_offset_in_bits & 7)) + return def_submit_snow3g_uea2_job(state, job); + + return submit_job_snow3g_uea2_sse(snow3g_uea2_ooo, job); +} + +static IMB_JOB * +flush_snow3g_uea2_job_sse(IMB_MGR *state) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + return flush_job_snow3g_uea2_sse(snow3g_uea2_ooo); +} + +#define SUBMIT_JOB_SNOW3G_UEA2 submit_snow3g_uea2_job_sse +#define FLUSH_JOB_SNOW3G_UEA2 flush_snow3g_uea2_job_sse + +#define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_sse +#define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_sse + +/* AES-DOCSIS */ +#define ETHERNET_FCS ethernet_fcs_sse_local + +/* ====================================================================== */ + +static void reset_ooo_mgrs(IMB_MGR *state) +{ + /* Init AES out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_ooo, 4); + ooo_mgr_aes_reset(state->aes192_ooo, 4); + ooo_mgr_aes_reset(state->aes256_ooo, 4); + + /* DOCSIS SEC BPI uses same settings as AES CBC */ + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 4); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 4); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 4); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 4); + + /* Init ZUC out-of-order fields */ + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 4); + + /* Init HMAC/SHA1 out-of-order fields */ + ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, SSE_NUM_SHA1_LANES); + + /* Init HMAC/SHA224 out-of-order fields */ + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, + SSE_NUM_SHA256_LANES); + + /* Init HMAC/SHA_256 out-of-order fields */ + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, + SSE_NUM_SHA256_LANES); + + /* Init HMAC/SHA384 out-of-order fields */ + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + SSE_NUM_SHA512_LANES); + + /* Init HMAC/SHA512 out-of-order fields */ + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + SSE_NUM_SHA512_LANES); + + /* Init HMAC/MD5 out-of-order fields */ + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, SSE_NUM_MD5_LANES); + + /* Init AES/XCBC OOO fields */ + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 4); + + /* Init AES-CCM auth out-of-order fields */ + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 4); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 4); + + /* Init AES-CMAC auth out-of-order fields */ + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 4); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 4); + + /* Init AES-CBCS out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 4); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, SSE_NUM_SHA1_LANES); + + /* Init SHA224 out-of-order fields */ + 
ooo_mgr_sha256_reset(state->sha_224_ooo, SSE_NUM_SHA256_LANES); + + /* Init SHA256 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, SSE_NUM_SHA256_LANES); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, SSE_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, SSE_NUM_SHA512_LANES); + + /* Init SNOW3G-UEA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 4); + + /* Init SNOW3G-UIA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 4); +} + +IMB_DLL_LOCAL void +init_mb_mgr_sse_t1_internal(IMB_MGR *state, const int reset_mgrs) +{ + /* Check if CPU flags needed for SSE interface are present */ + if ((state->features & IMB_CPUFLAGS_SSE) != IMB_CPUFLAGS_SSE) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + + /* Set architecture for future checks */ + state->used_arch = (uint32_t) IMB_ARCH_SSE; + + if (reset_mgrs) { + reset_ooo_mgrs(state); + + /* Init "in order" components */ + state->next_job = 0; + state->earliest_job = -1; + } + + /* set handlers */ + state->get_next_job = GET_NEXT_JOB; + state->submit_job = SUBMIT_JOB; + state->submit_job_nocheck = SUBMIT_JOB_NOCHECK; + state->get_completed_job = GET_COMPLETED_JOB; + state->flush_job = FLUSH_JOB; + state->queue_size = QUEUE_SIZE; + state->get_next_burst = GET_NEXT_BURST; + state->submit_burst = SUBMIT_BURST; + state->submit_burst_nocheck= SUBMIT_BURST_NOCHECK; + state->flush_burst = FLUSH_BURST; + state->submit_cipher_burst = SUBMIT_CIPHER_BURST; + state->submit_cipher_burst_nocheck = SUBMIT_CIPHER_BURST_NOCHECK; + state->submit_hash_burst = SUBMIT_HASH_BURST; + state->submit_hash_burst_nocheck = SUBMIT_HASH_BURST_NOCHECK; + + state->keyexp_128 = aes_keyexp_128_sse; + state->keyexp_192 = aes_keyexp_192_sse; + state->keyexp_256 = aes_keyexp_256_sse; + + state->cmac_subkey_gen_128 = aes_cmac_subkey_gen_sse; + state->cmac_subkey_gen_256 = aes_cmac_256_subkey_gen_sse; + + state->xcbc_keyexp = aes_xcbc_expand_key_sse; + state->des_key_sched = des_key_schedule; + + state->sha1_one_block = sha1_one_block_sse; + state->sha1 = sha1_sse; + state->sha224_one_block = sha224_one_block_sse; + state->sha224 = sha224_sse; + state->sha256_one_block = sha256_one_block_sse; + state->sha256 = sha256_sse; + state->sha384_one_block = sha384_one_block_sse; + state->sha384 = sha384_sse; + state->sha512_one_block = sha512_one_block_sse; + state->sha512 = sha512_sse; + state->md5_one_block = md5_one_block_sse; + + state->aes128_cfb_one = aes_cfb_128_one_sse; + + state->eea3_1_buffer = zuc_eea3_1_buffer_sse; + state->eea3_4_buffer = zuc_eea3_4_buffer_sse; + state->eea3_n_buffer = zuc_eea3_n_buffer_sse; + state->eia3_n_buffer = zuc_eia3_n_buffer_sse; + state->eia3_1_buffer = zuc_eia3_1_buffer_sse; + + state->f8_1_buffer = kasumi_f8_1_buffer_sse; + state->f8_1_buffer_bit = kasumi_f8_1_buffer_bit_sse; + state->f8_2_buffer = kasumi_f8_2_buffer_sse; + state->f8_3_buffer = kasumi_f8_3_buffer_sse; + state->f8_4_buffer = kasumi_f8_4_buffer_sse; + state->f8_n_buffer = kasumi_f8_n_buffer_sse; + state->f9_1_buffer = kasumi_f9_1_buffer_sse; + state->f9_1_buffer_user = kasumi_f9_1_buffer_user_sse; + state->kasumi_init_f8_key_sched = kasumi_init_f8_key_sched_sse; + state->kasumi_init_f9_key_sched = kasumi_init_f9_key_sched_sse; + state->kasumi_key_sched_size = kasumi_key_sched_size_sse; + + state->snow3g_f8_1_buffer_bit = snow3g_f8_1_buffer_bit_sse; + state->snow3g_f8_1_buffer = snow3g_f8_1_buffer_sse; + 
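The pointers installed throughout init_mb_mgr_sse_t1_internal() are what the public job API macros resolve to at run time. As a rough illustration (not part of the patch; the helper name encrypt_one_buffer_cbc() is made up and error handling is trimmed), a caller would reach the SUBMIT_JOB/FLUSH_JOB handlers set in this function along these lines, with key schedules assumed to come from IMB_AES_KEYEXP_128():

    #include <stdint.h>
    #include <intel-ipsec-mb.h>

    /* Hypothetical helper: encrypt one buffer with AES-128-CBC via the job API.
     * enc_keys/dec_keys are schedules produced by IMB_AES_KEYEXP_128(). */
    static int encrypt_one_buffer_cbc(IMB_MGR *mb_mgr,
                                      const void *enc_keys, const void *dec_keys,
                                      const void *iv, const void *src, void *dst,
                                      const uint64_t len)
    {
            IMB_JOB *job = IMB_GET_NEXT_JOB(mb_mgr);

            job->cipher_direction = IMB_DIR_ENCRYPT;
            job->chain_order = IMB_ORDER_CIPHER_HASH;
            job->cipher_mode = IMB_CIPHER_CBC;
            job->hash_alg = IMB_AUTH_NULL;
            job->enc_keys = enc_keys;
            job->dec_keys = dec_keys;
            job->key_len_in_bytes = IMB_KEY_128_BYTES;
            job->iv = iv;
            job->iv_len_in_bytes = 16;
            job->src = src;
            job->dst = dst;
            job->cipher_start_src_offset_in_bytes = 0;
            job->msg_len_to_cipher_in_bytes = len;

            /* a non-NULL return is some job (possibly an earlier one) that completed */
            for (job = IMB_SUBMIT_JOB(mb_mgr); job != NULL;
                 job = IMB_GET_COMPLETED_JOB(mb_mgr))
                    if (job->status != IMB_STATUS_COMPLETED)
                            return -1;

            /* force out anything still parked in the out-of-order managers */
            while ((job = IMB_FLUSH_JOB(mb_mgr)) != NULL)
                    if (job->status != IMB_STATUS_COMPLETED)
                            return -1;

            return 0;
    }

The burst and cipher/hash-only burst handlers assigned in the same block follow the same idea, operating on arrays of jobs rather than one job at a time.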
state->snow3g_f8_2_buffer = snow3g_f8_2_buffer_sse; + state->snow3g_f8_4_buffer = snow3g_f8_4_buffer_sse; + state->snow3g_f8_8_buffer = snow3g_f8_8_buffer_sse; + state->snow3g_f8_n_buffer = snow3g_f8_n_buffer_sse; + state->snow3g_f8_8_buffer_multikey = snow3g_f8_8_buffer_multikey_sse; + state->snow3g_f8_n_buffer_multikey = snow3g_f8_n_buffer_multikey_sse; + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_sse; + state->snow3g_init_key_sched = snow3g_init_key_sched_sse; + state->snow3g_key_sched_size = snow3g_key_sched_size_sse; + + state->hec_32 = hec_32_sse; + state->hec_64 = hec_64_sse; + + state->crc32_ethernet_fcs = ethernet_fcs_sse; + state->crc16_x25 = crc16_x25_sse; + state->crc32_sctp = crc32_sctp_sse; + state->crc24_lte_a = crc24_lte_a_sse; + state->crc24_lte_b = crc24_lte_b_sse; + state->crc16_fp_data = crc16_fp_data_sse; + state->crc11_fp_header = crc11_fp_header_sse; + state->crc7_fp_header = crc7_fp_header_sse; + state->crc10_iuup_data = crc10_iuup_data_sse; + state->crc6_iuup_header = crc6_iuup_header_sse; + state->crc32_wimax_ofdma_data = crc32_wimax_ofdma_data_sse; + state->crc8_wimax_ofdma_hcs = crc8_wimax_ofdma_hcs_sse; + + state->chacha20_poly1305_init = init_chacha20_poly1305_sse; + state->chacha20_poly1305_enc_update = update_enc_chacha20_poly1305_sse; + state->chacha20_poly1305_dec_update = update_dec_chacha20_poly1305_sse; + state->chacha20_poly1305_finalize = finalize_chacha20_poly1305_sse; + + state->gcm128_enc = aes_gcm_enc_128_sse; + state->gcm192_enc = aes_gcm_enc_192_sse; + state->gcm256_enc = aes_gcm_enc_256_sse; + state->gcm128_dec = aes_gcm_dec_128_sse; + state->gcm192_dec = aes_gcm_dec_192_sse; + state->gcm256_dec = aes_gcm_dec_256_sse; + state->gcm128_init = aes_gcm_init_128_sse; + state->gcm192_init = aes_gcm_init_192_sse; + state->gcm256_init = aes_gcm_init_256_sse; + state->gcm128_init_var_iv = aes_gcm_init_var_iv_128_sse; + state->gcm192_init_var_iv = aes_gcm_init_var_iv_192_sse; + state->gcm256_init_var_iv = aes_gcm_init_var_iv_256_sse; + state->gcm128_enc_update = aes_gcm_enc_128_update_sse; + state->gcm192_enc_update = aes_gcm_enc_192_update_sse; + state->gcm256_enc_update = aes_gcm_enc_256_update_sse; + state->gcm128_dec_update = aes_gcm_dec_128_update_sse; + state->gcm192_dec_update = aes_gcm_dec_192_update_sse; + state->gcm256_dec_update = aes_gcm_dec_256_update_sse; + state->gcm128_enc_finalize = aes_gcm_enc_128_finalize_sse; + state->gcm192_enc_finalize = aes_gcm_enc_192_finalize_sse; + state->gcm256_enc_finalize = aes_gcm_enc_256_finalize_sse; + state->gcm128_dec_finalize = aes_gcm_dec_128_finalize_sse; + state->gcm192_dec_finalize = aes_gcm_dec_192_finalize_sse; + state->gcm256_dec_finalize = aes_gcm_dec_256_finalize_sse; + state->gcm128_precomp = aes_gcm_precomp_128_sse; + state->gcm192_precomp = aes_gcm_precomp_192_sse; + state->gcm256_precomp = aes_gcm_precomp_256_sse; + state->gcm128_pre = aes_gcm_pre_128_sse; + state->gcm192_pre = aes_gcm_pre_192_sse; + state->gcm256_pre = aes_gcm_pre_256_sse; + + state->ghash = ghash_sse; + state->ghash_pre = ghash_pre_sse; + + state->gmac128_init = imb_aes_gmac_init_128_sse; + state->gmac192_init = imb_aes_gmac_init_192_sse; + state->gmac256_init = imb_aes_gmac_init_256_sse; + state->gmac128_update = imb_aes_gmac_update_128_sse; + state->gmac192_update = imb_aes_gmac_update_192_sse; + state->gmac256_update = imb_aes_gmac_update_256_sse; + state->gmac128_finalize = imb_aes_gmac_finalize_128_sse; + state->gmac192_finalize = imb_aes_gmac_finalize_192_sse; + state->gmac256_finalize = 
imb_aes_gmac_finalize_256_sse; +} + +#include "mb_mgr_code.h" diff --git a/lib/sse/mb_mgr_zuc_submit_flush_sse.asm b/lib/sse_t1/mb_mgr_zuc_submit_flush_sse.asm similarity index 93% rename from lib/sse/mb_mgr_zuc_submit_flush_sse.asm rename to lib/sse_t1/mb_mgr_zuc_submit_flush_sse.asm index 0b060b27b1bba6858eab1d820cfbc4dc977592c2..efc6f9a713aaec8c891a05853b0d68fa606096e5 100644 --- a/lib/sse/mb_mgr_zuc_submit_flush_sse.asm +++ b/lib/sse_t1/mb_mgr_zuc_submit_flush_sse.asm @@ -28,10 +28,10 @@ %include "include/os.asm" %include "include/imb_job.asm" %include "include/mb_mgr_datastruct.asm" - %include "include/reg_sizes.asm" %include "include/const.inc" -%include "include/cet.inc" +%include "include/clear_regs.asm" + %ifndef SUBMIT_JOB_ZUC128_EEA3 %define SUBMIT_JOB_ZUC128_EEA3 submit_job_zuc_eea3_no_gfni_sse %define FLUSH_JOB_ZUC128_EEA3 flush_job_zuc_eea3_no_gfni_sse @@ -266,12 +266,14 @@ mksection .text ; Read and write next byte mov al, [tmp + 16] mov [state + _zuc_args_IV + lane + 16], al - ; Read next 6 bytes and write as 8 bytes - movzx DWORD(tmp2), word [tmp + 17] - mov DWORD(tmp3), [tmp + 19] - shl tmp2, 32 - or tmp2, tmp3 + ; Read last 8 bytes and keep only the last 6 bytes + mov tmp2, [tmp + 15] + mov tmp3, 0x0000ffffffffffff + bswap tmp2 + and tmp2, tmp3 ; last 6 bytes of IV + ; Expand 6 bytes to 8 bytes and write out EXPAND_FROM_6_TO_8_BYTES tmp2, tmp, tmp3 + bswap tmp2 mov [state + _zuc_args_IV + lane + 17], tmp2 jmp %%_iv_read @@ -334,11 +336,8 @@ mksection .text %assign I (I + 1) %endrep - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 8*5 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_args_keys] lea arg2, [r12 + _zuc_args_IV] lea arg3, [r12 + _zuc_state] @@ -349,9 +348,7 @@ mksection .text call ZUC256_INIT_4 %endif -%ifndef LINUX - add rsp, 8*5 -%endif + RESTORE_STACK_SPACE 5 cmp byte [r12 + _zuc_init_not_done], 0x0f ; Init done for all lanes je %%skip_submit_restoring_state @@ -392,11 +389,8 @@ mksection .text %endif mov byte [r12 + _zuc_init_not_done], 0 ; Init done for all lanes - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 40 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_state] lea arg2, [r12 + _zuc_args_in] lea arg3, [r12 + _zuc_args_out] @@ -405,9 +399,8 @@ mksection .text call ZUC_CIPHER_4 -%ifndef LINUX - add rsp, 40 -%endif + RESTORE_STACK_SPACE 5 + mov state, [rsp + _gpr_save + 8*8] mov job, [rsp + _gpr_save + 8*9] @@ -429,6 +422,9 @@ mksection .text %endif %%return_submit_eea3: +%ifdef SAFE_DATA + clear_all_xmms_sse_asm +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -542,11 +538,8 @@ APPEND(%%skip_eea3_,I): %assign I (I + 1) %endrep - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 bytes for 5 parameters - sub rsp, 8*5 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_args_keys] lea arg2, [r12 + _zuc_args_IV] lea arg3, [r12 + _zuc_state] @@ -558,9 +551,8 @@ APPEND(%%skip_eea3_,I): call ZUC256_INIT_4 %endif -%ifndef LINUX - add rsp, 8*5 -%endif + RESTORE_STACK_SPACE 5 + cmp word [r12 + _zuc_init_not_done], 0x0f ; Init done for all lanes je %%skip_flush_restoring_state @@ -619,11 +611,8 @@ APPEND3(%%skip_eea3_copy_,I,J): movdqa [r12 + _zuc_state + 16*I], xmm1 ; Save new state %assign I (I+1) %endrep - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 40 
bytes for 5 parameters - sub rsp, 40 -%endif + RESERVE_STACK_SPACE 5 + lea arg1, [r12 + _zuc_state] lea arg2, [r12 + _zuc_args_in] lea arg3, [r12 + _zuc_args_out] @@ -632,9 +621,8 @@ APPEND3(%%skip_eea3_copy_,I,J): call ZUC_CIPHER_4 -%ifndef LINUX - add rsp, 40 -%endif + RESTORE_STACK_SPACE 5 + mov state, [rsp + _gpr_save + 8*8] ; Clear ZUC state of the lane that is returned and NULL lanes @@ -667,7 +655,9 @@ APPEND3(%%skip_eea3_copy_,I,J): SHIFT_GP 1, idx, tmp3, tmp4, left or [state + _zuc_unused_lane_bitmask], BYTE(tmp3) %%return_flush_eea3: - +%ifdef SAFE_DATA + clear_all_xmms_sse_asm +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -692,7 +682,6 @@ APPEND3(%%skip_eea3_copy_,I,J): ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC128_EEA3,function,internal) SUBMIT_JOB_ZUC128_EEA3: - endbranch64 SUBMIT_JOB_ZUC_EEA3 128 ; JOB* SUBMIT_JOB_ZUC256_EEA3(MB_MGR_ZUC_OOO *state, IMB_JOB *job) @@ -700,21 +689,18 @@ SUBMIT_JOB_ZUC128_EEA3: ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC256_EEA3,function,internal) SUBMIT_JOB_ZUC256_EEA3: - endbranch64 SUBMIT_JOB_ZUC_EEA3 256 ; JOB* FLUSH_JOB_ZUC128_EEA3(MB_MGR_ZUC_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_ZUC128_EEA3,function,internal) FLUSH_JOB_ZUC128_EEA3: - endbranch64 FLUSH_JOB_ZUC_EEA3 128 ; JOB* FLUSH_JOB_ZUC256_EEA3(MB_MGR_ZUC_OOO *state) ; arg 1 : state MKGLOBAL(FLUSH_JOB_ZUC256_EEA3,function,internal) FLUSH_JOB_ZUC256_EEA3: - endbranch64 FLUSH_JOB_ZUC_EEA3 256 %macro SUBMIT_JOB_ZUC_EIA3 2 @@ -772,12 +758,14 @@ FLUSH_JOB_ZUC256_EEA3: ; Read and write next byte mov al, [tmp + 16] mov [state + _zuc_args_IV + lane + 16], al - ; Read next 6 bytes and write as 8 bytes - movzx DWORD(tmp2), word [tmp + 17] - mov DWORD(tmp3), [tmp + 19] - shl tmp2, 32 - or tmp2, tmp3 + ; Read last 8 bytes and keep only the last 6 bytes + mov tmp2, [tmp + 15] + mov tmp3, 0x0000ffffffffffff + bswap tmp2 + and tmp2, tmp3 ; last 6 bytes of IV + ; Expand 6 bytes to 8 bytes and write out EXPAND_FROM_6_TO_8_BYTES tmp2, tmp, tmp3 + bswap tmp2 mov [state + _zuc_args_IV + lane + 17], tmp2 jmp %%_iv_read @@ -825,20 +813,10 @@ FLUSH_JOB_ZUC256_EEA3: mov r11, state %if %%KEY_SIZE == 128 - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 48 bytes for 6 parameters (already aligned to 16 bytes) - sub rsp, 48 -%endif + RESERVE_STACK_SPACE 6 %else ; %%KEY_SIZE == 256 -%ifndef LINUX - ;; 56 bytes for 7 parameters - sub rsp, 8*7 -%else - ;; 8 bytes for one extra parameter (apart from first 6) - sub rsp, 8 + RESERVE_STACK_SPACE 7 %endif -%endif ;; %%KEY_SIZE lea arg1, [r11 + _zuc_args_keys] lea arg2, [r11 + _zuc_args_IV] @@ -864,16 +842,10 @@ FLUSH_JOB_ZUC256_EEA3: %endif %if %%KEY_SIZE == 128 -%ifndef LINUX - add rsp, 48 -%endif -%else ;; %%KEY_SIZE == 256 -%ifndef LINUX - add rsp, 8*7 -%else - add rsp, 8 + RESTORE_STACK_SPACE 6 +%else ; %%KEY_SIZE == 256 + RESTORE_STACK_SPACE 7 %endif -%endif ;; %%KEY_SIZE mov state, [rsp + _gpr_save + 8*8] mov job, [rsp + _gpr_save + 8*9] @@ -893,7 +865,9 @@ FLUSH_JOB_ZUC256_EEA3: mov [state + _zuc_unused_lanes], unused_lanes %%return_submit_eia3: - +%ifdef SAFE_DATA + clear_all_xmms_sse_asm +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -1005,20 +979,11 @@ APPEND(%%skip_eia3_,I): mov r11, state %if %%KEY_SIZE == 128 - ;; If Windows, reserve memory in stack for parameter transferring -%ifndef LINUX - ;; 48 bytes for 6 parameters (already aligned to 16 bytes) - sub rsp, 48 -%endif + RESERVE_STACK_SPACE 6 %else 
; %%KEY_SIZE == 256 -%ifndef LINUX - ;; 56 bytes for 7 parameters - sub rsp, 8*7 -%else - ;; 8 bytes for one extra parameter (apart from first 6) - sub rsp, 8 + RESERVE_STACK_SPACE 7 %endif -%endif ;; %%KEY_SIZE + lea arg1, [r11 + _zuc_args_keys] lea arg2, [r11 + _zuc_args_IV] lea arg3, [r11 + _zuc_args_in] @@ -1043,16 +1008,10 @@ APPEND(%%skip_eia3_,I): %endif %if %%KEY_SIZE == 128 -%ifndef LINUX - add rsp, 48 -%endif -%else ;; %%KEY_SIZE == 256 -%ifndef LINUX - add rsp, 8*7 -%else - add rsp, 8 + RESTORE_STACK_SPACE 6 +%else ; %%KEY_SIZE == 256 + RESTORE_STACK_SPACE 7 %endif -%endif ;; %%KEY_SIZE mov tmp5, [rsp + _null_len_save] mov state, [rsp + _gpr_save + 8*8] @@ -1073,6 +1032,9 @@ APPEND(%%skip_eia3_,I): mov [state + _zuc_unused_lanes], unused_lanes %%return_flush_eia3: +%ifdef SAFE_DATA + clear_all_xmms_sse_asm +%endif mov rbx, [rsp + _gpr_save + 8*0] mov rbp, [rsp + _gpr_save + 8*1] mov r12, [rsp + _gpr_save + 8*2] @@ -1098,7 +1060,6 @@ APPEND(%%skip_eia3_,I): ; arg 2 : job MKGLOBAL(SUBMIT_JOB_ZUC128_EIA3,function,internal) SUBMIT_JOB_ZUC128_EIA3: - endbranch64 SUBMIT_JOB_ZUC_EIA3 128, 4 ret @@ -1109,7 +1070,6 @@ SUBMIT_JOB_ZUC128_EIA3: ; arg 3 : tag size (4, 8 or 16 bytes) MKGLOBAL(SUBMIT_JOB_ZUC256_EIA3,function,internal) SUBMIT_JOB_ZUC256_EIA3: - endbranch64 cmp arg3, 8 je submit_tag_8B jb submit_tag_4B @@ -1130,7 +1090,6 @@ submit_tag_4B: ; arg 1 : state MKGLOBAL(FLUSH_JOB_ZUC128_EIA3,function,internal) FLUSH_JOB_ZUC128_EIA3: - endbranch64 FLUSH_JOB_ZUC_EIA3 128, 4 ret @@ -1140,7 +1099,6 @@ FLUSH_JOB_ZUC128_EIA3: ; arg 2 : tag size (4, 8 or 16 bytes) MKGLOBAL(FLUSH_JOB_ZUC256_EIA3,function,internal) FLUSH_JOB_ZUC256_EIA3: - endbranch64 cmp arg2, 8 je flush_tag_8B jb flush_tag_4B diff --git a/lib/sse/md5_x4x2_sse.asm b/lib/sse_t1/md5_x4x2_sse.asm similarity index 100% rename from lib/sse/md5_x4x2_sse.asm rename to lib/sse_t1/md5_x4x2_sse.asm diff --git a/lib/sse/memcpy_sse.asm b/lib/sse_t1/memcpy_sse.asm similarity index 87% rename from lib/sse/memcpy_sse.asm rename to lib/sse_t1/memcpy_sse.asm index 22a35b837953e2edab07a275e86947e473ceed02..b2433175f0b637dda31b38aca0396a2cbcab13f3 100644 --- a/lib/sse/memcpy_sse.asm +++ b/lib/sse_t1/memcpy_sse.asm @@ -68,4 +68,24 @@ memcpy_fn_sse_128: ret +MKGLOBAL(safe_memcpy,function,internal) +safe_memcpy: +%ifndef LINUX + ;; save rdi and rsi + mov rax, rdi + mov r9, rsi + + mov rdi, arg1 + mov rsi, arg2 +%endif + mov rcx, arg3 + rep movsb + +%ifndef LINUX + ;; restore rdi and rsi + mov rdi, rax + mov rsi, r9 +%endif + ret + mksection stack-noexec diff --git a/lib/sse/pon_by8_sse.asm b/lib/sse_t1/pon_by8_sse.asm similarity index 97% rename from lib/sse/pon_by8_sse.asm rename to lib/sse_t1/pon_by8_sse.asm index 6e067ba4c6831fd5904bf56f6de542361c673457..f0a4f0b52266f421d3e79446d14e18c23ab04c66 100644 --- a/lib/sse/pon_by8_sse.asm +++ b/lib/sse_t1/pon_by8_sse.asm @@ -207,7 +207,7 @@ mksection .text ;;; ============================================================================ ;;; PON stitched algorithm round on a single AES block (16 bytes): -;;; AES-CTR (optional, depending on %%CIPH) +;;; AES-CTR (optional, depending on %%CIPHER) ;;; - prepares counter blocks ;;; - encrypts counter blocks ;;; - loads text @@ -233,10 +233,10 @@ mksection .text %define %%TXMM2 %11 ; [clobbered] XMM temporary %define %%CRC_TYPE %12 ; [in] "first_crc" or "next_crc" or "no_crc" %define %%DIR %13 ; [in] "ENC" or "DEC" -%define %%CIPH %14 ; [in] "CTR" or "NO_CTR" +%define %%CIPHER %14 ; [in] "CTR" or "NO_CTR" %define %%CTR_CHECK %15 ; [in/out] GP with 64bit counter 
(to identify overflow) -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; prepare counter blocks for encryption movdqa %%TXMM0, %%CTR pshufb %%TXMM0, [rel byteswap_const] @@ -261,18 +261,18 @@ mksection .text movdqu %%TXMM1, [%%INP] %endif -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; AES rounds AES_ENC_ROUNDS %%KP, %%N_ROUNDS, %%TXMM0 ;; xor plaintext/ciphertext against encrypted counter blocks pxor %%TXMM0, %%TXMM1 -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR ;; if no encryption needs to be done, move from input to output reg movdqa %%TXMM0, %%TXMM1 -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR %ifidn %%DIR, ENC ;; CRC calculation for ENCRYPTION %ifidn %%CRC_TYPE, first_crc @@ -298,7 +298,7 @@ mksection .text pxor %%XCRC_IN_OUT, %%TXMM0 %endif %endif ; DECRYPT -%else ;; CIPH = NO_CTR +%else ;; CIPHER = NO_CTR ;; CRC calculation for DECRYPTION %ifidn %%CRC_TYPE, first_crc ;; in the first run just XOR initial CRC with the first block @@ -311,7 +311,7 @@ mksection .text pxor %%XCRC_IN_OUT, %%TXMM1 %endif -%endif ;; CIPH = CTR +%endif ;; CIPHER = CTR ;; store the result in the output buffer %ifnidn %%OUTP, no_store @@ -343,7 +343,7 @@ mksection .text %macro CIPHER_BIP_REST 14 %define %%NUM_BYTES %1 ; [in/clobbered] number of bytes to cipher %define %%DIR %2 ; [in] "ENC" or "DEC" -%define %%CIPH %3 ; [in] "CTR" or "NO_CTR" +%define %%CIPHER %3 ; [in] "CTR" or "NO_CTR" %define %%PTR_IN %4 ; [in/clobbered] GPR pointer to input buffer %define %%PTR_OUT %5 ; [in/clobbered] GPR pointer to output buffer %define %%PTR_KEYS %6 ; [in] GPR pointer to expanded keys @@ -361,7 +361,7 @@ mksection .text jb %%_partial_block_left DO_PON %%PTR_KEYS, NUM_AES_ROUNDS, %%XCTR_IN_OUT, %%PTR_IN, %%PTR_OUT, %%XBIP_IN_OUT, \ - no_crc, no_crc, %%XMMT1, %%XMMT2, %%XMMT3, no_crc, %%DIR, %%CIPH, %%CTR_CHECK + no_crc, no_crc, %%XMMT1, %%XMMT2, %%XMMT3, no_crc, %%DIR, %%CIPHER, %%CTR_CHECK sub %%NUM_BYTES, 16 jz %%_bip_done jmp %%_cipher_last_blocks @@ -373,7 +373,7 @@ mksection .text ;; XMMT2 = data in ;; XMMT1 = data out DO_PON %%PTR_KEYS, NUM_AES_ROUNDS, %%XCTR_IN_OUT, no_load, no_store, no_bip, \ - no_crc, no_crc, %%XMMT1, %%XMMT2, %%XMMT3, no_crc, %%DIR, %%CIPH, %%CTR_CHECK + no_crc, no_crc, %%XMMT1, %%XMMT2, %%XMMT3, no_crc, %%DIR, %%CIPHER, %%CTR_CHECK ;; BIP update for partial block (mask out bytes outside the message) lea %%GPT1, [rel mask_out_top_bytes + 16] @@ -608,8 +608,8 @@ mksection .text ;;; - calls other macros and directly uses registers ;;; defined at the top of the file %macro AES128_CTR_PON 2 -%define %%DIR %1 ; [in] direction "ENC" or "DEC" -%define %%CIPH %2 ; [in] cipher "CTR" or "NO_CTR" +%define %%DIR %1 ; [in] direction "ENC" or "DEC" +%define %%CIPHER %2 ; [in] cipher "CTR" or "NO_CTR" push r12 push r13 @@ -666,7 +666,7 @@ mksection .text %%_crc_not_zero: sub bytes_to_crc, 4 ; subtract size of the CRC itself -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; - read 16 bytes of IV ;; - convert to little endian format ;; - save least significant 8 bytes in GP register for overflow check @@ -683,7 +683,7 @@ mksection .text ;; get output buffer mov p_out, [job + _dst] -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR ;; get key pointers mov p_keys, [job + _enc_keys] %endif @@ -695,7 +695,7 @@ mksection .text movdqa xcrckey, [rel rk1] ; rk1 and rk2 in xcrckey ;; get number of bytes to cipher -%ifidn %%CIPH, CTR +%ifidn %%CIPHER, CTR mov num_bytes, [job + _msg_len_to_cipher_in_bytes] %else ;; Message length to cipher is 0 @@ -712,7 +712,7 @@ mksection .text %ifidn %%DIR, DEC 
;; decrypt the buffer first mov tmp, num_bytes - CIPHER_BIP_REST tmp, %%DIR, %%CIPH, p_in, p_out, p_keys, xbip, \ + CIPHER_BIP_REST tmp, %%DIR, %%CIPHER, p_in, p_out, p_keys, xbip, \ xcounter, xtmp1, xtmp2, xtmp3, ctr_check, tmp2, tmp3 ;; correct in/out pointers - go back to start of the buffers @@ -783,7 +783,7 @@ mksection .text %%_at_least_32_bytes: DO_PON p_keys, NUM_AES_ROUNDS, xcounter, p_in, p_out, xbip, \ - xcrc, xcrckey, xtmp1, xtmp2, xtmp3, first_crc, %%DIR, %%CIPH, ctr_check + xcrc, xcrckey, xtmp1, xtmp2, xtmp3, first_crc, %%DIR, %%CIPHER, ctr_check sub num_bytes, 16 sub bytes_to_crc, 16 @@ -791,7 +791,7 @@ mksection .text cmp bytes_to_crc, 16 jb %%_exit_loop DO_PON p_keys, NUM_AES_ROUNDS, xcounter, p_in, p_out, xbip, \ - xcrc, xcrckey, xtmp1, xtmp2, xtmp3, next_crc, %%DIR, %%CIPH, ctr_check + xcrc, xcrckey, xtmp1, xtmp2, xtmp3, next_crc, %%DIR, %%CIPHER, ctr_check sub num_bytes, 16 sub bytes_to_crc, 16 %ifidn %%DIR, ENC @@ -805,7 +805,7 @@ mksection .text ;; decrypt rest of the message including CRC and optional padding mov tmp, num_bytes - CIPHER_BIP_REST tmp, %%DIR, %%CIPH, p_in, p_out, p_keys, xbip, \ + CIPHER_BIP_REST tmp, %%DIR, %%CIPHER, p_in, p_out, p_keys, xbip, \ xcounter, xtmp1, xtmp2, xtmp3, ctr_check, tmp2, tmp3 mov tmp, num_bytes ; correct in/out pointers - to point before cipher & BIP @@ -869,7 +869,7 @@ mksection .text or DWORD(decrypt_not_done), DWORD(decrypt_not_done) jnz %%_do_not_cipher_the_rest %endif - CIPHER_BIP_REST num_bytes, %%DIR, %%CIPH, p_in, p_out, p_keys, xbip, \ + CIPHER_BIP_REST num_bytes, %%DIR, %%CIPHER, p_in, p_out, p_keys, xbip, \ xcounter, xtmp1, xtmp2, xtmp3, ctr_check, tmp2, tmp3 %%_do_not_cipher_the_rest: diff --git a/lib/sse/sha1_one_block_sse.asm b/lib/sse_t1/sha1_one_block_sse.asm similarity index 100% rename from lib/sse/sha1_one_block_sse.asm rename to lib/sse_t1/sha1_one_block_sse.asm diff --git a/lib/sse/sha1_x4_sse.asm b/lib/sse_t1/sha1_x4_sse.asm similarity index 79% rename from lib/sse/sha1_x4_sse.asm rename to lib/sse_t1/sha1_x4_sse.asm index f88c077c0d455fb9b202a4f7b342380285efc92f..7994bc1be0eb6e82ee34c83c6b0b0686ba0621e0 100644 --- a/lib/sse/sha1_x4_sse.asm +++ b/lib/sse_t1/sha1_x4_sse.asm @@ -29,7 +29,6 @@ ;%define DO_DBGPRINT %include "include/dbgprint.asm" -%include "include/cet.inc" %include "include/mb_mgr_datastruct.asm" %include "include/clear_regs.asm" @@ -78,6 +77,12 @@ mksection .text ; r0 = {d2 c2 b2 a2} ; r3 = {d3 c3 b3 a3} ; +%define XMM_STORAGE 16*10 +%define GP_STORAGE 8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + %macro TRANSPOSE 6 %define %%r0 %1 %define %%r1 %2 @@ -223,8 +228,7 @@ mksection .text ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; -;; FRAMESZ must be an odd multiple of 8 -%define FRAMESZ 16*16 + 8 +%define FRAMESZ 16*16 + 16*10 + 8 %define MOVPS movdqu @@ -288,6 +292,55 @@ mksection .text %xdefine W14 TMP_ %endm +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 ; align rsp to 16 bytes +%ifndef LINUX + movdqa [rsp + 0*16], xmm6 + movdqa [rsp + 1*16], xmm7 + movdqa [rsp + 2*16], xmm8 + movdqa [rsp + 3*16], xmm9 + movdqa [rsp + 4*16], xmm10 + movdqa [rsp + 5*16], xmm11 + movdqa [rsp + 6*16], xmm12 + movdqa [rsp + 7*16], xmm13 + movdqa [rsp + 8*16], xmm14 + movdqa [rsp + 9*16], xmm15 +%endif + mov [rsp + GP_OFFSET], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 +%ifndef LINUX + movdqa xmm6, [rsp + 0*16] + movdqa xmm7, [rsp + 
1*16] + movdqa xmm8, [rsp + 2*16] + movdqa xmm9, [rsp + 3*16] + movdqa xmm10, [rsp + 4*16] + movdqa xmm11, [rsp + 5*16] + movdqa xmm12, [rsp + 6*16] + movdqa xmm13, [rsp + 7*16] + movdqa xmm14, [rsp + 8*16] + movdqa xmm15, [rsp + 9*16] + +%ifdef SAFE_DATA + pxor xmm5, xmm5 + movdqa [rsp + 0*16], xmm5 + movdqa [rsp + 1*16], xmm5 + movdqa [rsp + 2*16], xmm5 + movdqa [rsp + 3*16], xmm5 + movdqa [rsp + 4*16], xmm5 + movdqa [rsp + 5*16], xmm5 + movdqa [rsp + 6*16], xmm5 + movdqa [rsp + 7*16], xmm5 + movdqa [rsp + 8*16], xmm5 + movdqa [rsp + 9*16], xmm5 +%endif +%endif + mov rsp, [rsp + GP_OFFSET] ;; rsp pointer +%endmacro + align 32 ; XMM registers are clobbered. Saving/restoring must be done at a higher level @@ -300,6 +353,18 @@ sha1_mult_sse: sub rsp, FRAMESZ +%ifndef LINUX + movdqa [rsp + 16*16 + 0*16], xmm6 + movdqa [rsp + 16*16 + 1*16], xmm7 + movdqa [rsp + 16*16 + 2*16], xmm8 + movdqa [rsp + 16*16 + 3*16], xmm9 + movdqa [rsp + 16*16 + 4*16], xmm10 + movdqa [rsp + 16*16 + 5*16], xmm11 + movdqa [rsp + 16*16 + 6*16], xmm12 + movdqa [rsp + 16*16 + 7*16], xmm13 + movdqa [rsp + 16*16 + 8*16], xmm14 + movdqa [rsp + 16*16 + 9*16], xmm15 +%endif ;; Initialize digests movdqa A, [arg1 + 0*SHA1_DIGEST_ROW_SIZE] movdqa B, [arg1 + 1*SHA1_DIGEST_ROW_SIZE] @@ -419,17 +484,44 @@ lloop: ;; Clear stack frame (16*16 bytes) %ifdef SAFE_DATA - pxor xmm0, xmm0 + clear_all_xmms_sse_asm %assign i 0 %rep 16 movdqa [rsp + i*16], xmm0 %assign i (i+1) %endrep - clear_all_xmms_sse_asm %endif - add rsp, FRAMESZ +%ifndef LINUX + movdqa xmm6, [rsp + 16*16 + 0*16] + movdqa xmm7, [rsp + 16*16 + 1*16] + movdqa xmm8, [rsp + 16*16 + 2*16] + movdqa xmm9, [rsp + 16*16 + 3*16] + movdqa xmm10, [rsp + 16*16 + 4*16] + movdqa xmm11, [rsp + 16*16 + 5*16] + movdqa xmm12, [rsp + 16*16 + 6*16] + movdqa xmm13, [rsp + 16*16 + 7*16] + movdqa xmm14, [rsp + 16*16 + 8*16] + movdqa xmm15, [rsp + 16*16 + 9*16] + +%ifdef SAFE_DATA + ; xmm0 already 0 +%assign i 0 +%rep 10 + movdqa [rsp + 16*16 + i*16], xmm0 +%assign i (i+1) +%endrep +%endif +%endif + add rsp, FRAMESZ + ret +; void call_sha1_mult_sse_from_c(SHA1_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha1_mult_sse_from_c,function,internal) +call_sha1_mult_sse_from_c: + FUNC_SAVE + call sha1_mult_sse + FUNC_RESTORE ret mksection stack-noexec diff --git a/lib/sse/sha224_one_block_sse.asm b/lib/sse_t1/sha224_one_block_sse.asm similarity index 97% rename from lib/sse/sha224_one_block_sse.asm rename to lib/sse_t1/sha224_one_block_sse.asm index b943ca507dc0082d390c33814cc6ad1d4664e50b..cebbe79f5bab45ec3e21eb891e7e500aa7a76f25 100644 --- a/lib/sse/sha224_one_block_sse.asm +++ b/lib/sse_t1/sha224_one_block_sse.asm @@ -30,4 +30,4 @@ %define FUNC sha224_block_sse -%include "sse/sha256_one_block_sse.asm" +%include "sse_t1/sha256_one_block_sse.asm" diff --git a/lib/sse/sha256_mult_sse.asm b/lib/sse_t1/sha256_mult_sse.asm similarity index 86% rename from lib/sse/sha256_mult_sse.asm rename to lib/sse_t1/sha256_mult_sse.asm index b69c9b47197042d4c05f213cd6742ed1d7bb4934..e77fcd27bf969b560c1265d23191a48cbbd0541f 100644 --- a/lib/sse/sha256_mult_sse.asm +++ b/lib/sse_t1/sha256_mult_sse.asm @@ -311,6 +311,72 @@ endstruc ROUND_00_15 %%T1, %%i %endm +%define XMM_STORAGE 10*16 +%define GP_STORAGE 6*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 ; align rsp to 16 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 +%ifndef LINUX + mov [rsp + 3*8], rsi + mov [rsp + 
4*8], rdi + movdqa [rsp + 3*16], xmm6 + movdqa [rsp + 4*16], xmm7 + movdqa [rsp + 5*16], xmm8 + movdqa [rsp + 6*16], xmm9 + movdqa [rsp + 7*16], xmm10 + movdqa [rsp + 8*16], xmm11 + movdqa [rsp + 9*16], xmm12 + movdqa [rsp + 10*16], xmm13 + movdqa [rsp + 11*16], xmm14 + movdqa [rsp + 12*16], xmm15 +%endif ; LINUX + mov [rsp + 5*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] +%ifndef LINUX + mov rsi, [rsp + 3*8] + mov rdi, [rsp + 4*8] + movdqa xmm6, [rsp + 3*16] + movdqa xmm7, [rsp + 4*16] + movdqa xmm8, [rsp + 5*16] + movdqa xmm9, [rsp + 6*16] + movdqa xmm10, [rsp + 7*16] + movdqa xmm11, [rsp + 8*16] + movdqa xmm12, [rsp + 9*16] + movdqa xmm13, [rsp + 10*16] + movdqa xmm14, [rsp + 11*16] + movdqa xmm15, [rsp + 12*16] + +%ifdef SAFE_DATA + pxor xmm5, xmm5 + movdqa xmm5, [rsp + 3*16] + movdqa xmm5, [rsp + 4*16] + movdqa xmm5, [rsp + 5*16] + movdqa xmm5, [rsp + 6*16] + movdqa xmm5, [rsp + 7*16] + movdqa xmm5, [rsp + 8*16] + movdqa xmm5, [rsp + 9*16] + movdqa xmm5, [rsp + 10*16] + movdqa xmm5, [rsp + 11*16] + movdqa xmm5, [rsp + 12*16] +%endif +%endif ; LINUX + mov rsp, [rsp + 5*8] ;; rsp pointer +%endmacro + ;; SHA256_ARGS: ;; UINT128 digest[8]; // transposed digests ;; UINT8 *data_ptr[4]; @@ -320,6 +386,7 @@ endstruc ;; arg 1 : STATE : pointer args ;; arg 2 : INP_SIZE : size of data in blocks (assumed >= 1) ;; + MKGLOBAL(sha_256_mult_sse,function,internal) align 32 sha_256_mult_sse: @@ -447,4 +514,12 @@ Lrounds_16_xx: ; outer calling routine restores XMM and other GP registers ret +; void call_sha_256_mult_sse_from_c(SHA256_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha_256_mult_sse_from_c,function,internal) +call_sha_256_mult_sse_from_c: + FUNC_SAVE + call sha_256_mult_sse + FUNC_RESTORE + ret + mksection stack-noexec diff --git a/lib/sse/sha256_one_block_sse.asm b/lib/sse_t1/sha256_one_block_sse.asm similarity index 100% rename from lib/sse/sha256_one_block_sse.asm rename to lib/sse_t1/sha256_one_block_sse.asm diff --git a/lib/sse/sha384_one_block_sse.asm b/lib/sse_t1/sha384_one_block_sse.asm similarity index 97% rename from lib/sse/sha384_one_block_sse.asm rename to lib/sse_t1/sha384_one_block_sse.asm index 8fd010d3645ddd476582d26fadcaba3c4d3773ae..9db43fd9268202649de28b59d422796574dc303d 100644 --- a/lib/sse/sha384_one_block_sse.asm +++ b/lib/sse_t1/sha384_one_block_sse.asm @@ -30,4 +30,4 @@ %define FUNC sha384_block_sse -%include "sse/sha512_one_block_sse.asm" +%include "sse_t1/sha512_one_block_sse.asm" diff --git a/lib/sse/sha512_one_block_sse.asm b/lib/sse_t1/sha512_one_block_sse.asm similarity index 100% rename from lib/sse/sha512_one_block_sse.asm rename to lib/sse_t1/sha512_one_block_sse.asm diff --git a/lib/sse/sha512_x2_sse.asm b/lib/sse_t1/sha512_x2_sse.asm similarity index 86% rename from lib/sse/sha512_x2_sse.asm rename to lib/sse_t1/sha512_x2_sse.asm index c9dd782f76e62805819895e40f8f9317d81bd67a..999b75a315f2234e0e26b6558b61deb4ed196ac1 100644 --- a/lib/sse/sha512_x2_sse.asm +++ b/lib/sse_t1/sha512_x2_sse.asm @@ -310,6 +310,72 @@ endstruc ROUND_00_15 %%T1, %%i %endm +%define XMM_STORAGE 10*16 +%define GP_STORAGE 6*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 ; align rsp to 16 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 +%ifndef LINUX + mov [rsp + 3*8], rsi + mov [rsp + 4*8], rdi + movdqa [rsp + 3*16], xmm6 + movdqa [rsp + 4*16], xmm7 + 
movdqa [rsp + 5*16], xmm8 + movdqa [rsp + 6*16], xmm9 + movdqa [rsp + 7*16], xmm10 + movdqa [rsp + 8*16], xmm11 + movdqa [rsp + 9*16], xmm12 + movdqa [rsp + 10*16], xmm13 + movdqa [rsp + 11*16], xmm14 + movdqa [rsp + 12*16], xmm15 +%endif ; LINUX + mov [rsp + 5*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] +%ifndef LINUX + mov rsi, [rsp + 3*8] + mov rdi, [rsp + 4*8] + movdqa xmm6, [rsp + 3*16] + movdqa xmm7, [rsp + 4*16] + movdqa xmm8, [rsp + 5*16] + movdqa xmm9, [rsp + 6*16] + movdqa xmm10, [rsp + 7*16] + movdqa xmm11, [rsp + 8*16] + movdqa xmm12, [rsp + 9*16] + movdqa xmm13, [rsp + 10*16] + movdqa xmm14, [rsp + 11*16] + movdqa xmm15, [rsp + 12*16] + +%ifdef SAFE_DATA + pxor xmm5, xmm5 + movdqa xmm5, [rsp + 3*16] + movdqa xmm5, [rsp + 4*16] + movdqa xmm5, [rsp + 5*16] + movdqa xmm5, [rsp + 6*16] + movdqa xmm5, [rsp + 7*16] + movdqa xmm5, [rsp + 8*16] + movdqa xmm5, [rsp + 9*16] + movdqa xmm5, [rsp + 10*16] + movdqa xmm5, [rsp + 11*16] + movdqa xmm5, [rsp + 12*16] +%endif +%endif ; LINUX + mov rsp, [rsp + 5*8] ;; rsp pointer +%endmacro + ;; SHA512_ARGS: ;; UINT128 digest[8]; // transposed digests ;; UINT8 *data_ptr[2]; @@ -437,4 +503,12 @@ Lrounds_16_xx: DBGPRINTL "====================== exit sha512_x2_sse code =====================\n" ret +; void call_sha512_x2_sse_from_c(SHA512_ARGS *args, UINT64 size_in_blocks); +MKGLOBAL(call_sha512_x2_sse_from_c,function,internal) +call_sha512_x2_sse_from_c: + FUNC_SAVE + call sha512_x2_sse + FUNC_RESTORE + ret + mksection stack-noexec diff --git a/lib/sse_t1/sha_mb_sse.c b/lib/sse_t1/sha_mb_sse.c new file mode 100644 index 0000000000000000000000000000000000000000..e528e6ff033c1b03e9ba219b7ed4df27edcf4669 --- /dev/null +++ b/lib/sse_t1/sha_mb_sse.c @@ -0,0 +1,134 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include "include/sha_mb_mgr.h" +#include "include/arch_sse_type1.h" + +/* ========================================================================== */ +/* + * SHA1 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha1_sse(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 4, 1, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_mult_sse_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha1_sse(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 4, 0, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_mult_sse_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA224 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha224_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 4, 1, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha_256_mult_sse_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha224_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 4, 0, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha_256_mult_sse_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA256 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha256_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 4, 1, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + call_sha_256_mult_sse_from_c, 0); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha256_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 4, 0, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + call_sha_256_mult_sse_from_c, 0); +} + +/* ========================================================================== */ +/* + * SHA384 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha384_sse(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 2, 1, 384, + IMB_SHA_512_BLOCK_SIZE, SHA384_PAD_SIZE, + call_sha512_x2_sse_from_c); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha384_sse(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 2, 0, 384, + IMB_SHA_512_BLOCK_SIZE, SHA384_PAD_SIZE, + call_sha512_x2_sse_from_c); +} + +/* ========================================================================== */ +/* + * SHA512 MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha512_sse(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 2, 1, 512, + IMB_SHA_512_BLOCK_SIZE, SHA512_PAD_SIZE, + call_sha512_x2_sse_from_c); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha512_sse(MB_MGR_SHA_512_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_512(state, job, 2, 0, 512, + IMB_SHA_512_BLOCK_SIZE, SHA512_PAD_SIZE, + call_sha512_x2_sse_from_c); +} diff --git a/lib/sse/sha_sse.c b/lib/sse_t1/sha_sse.c similarity index 100% rename from lib/sse/sha_sse.c rename to lib/sse_t1/sha_sse.c diff --git a/lib/sse/snow3g_sse.c b/lib/sse_t1/snow3g_sse.c similarity index 93% rename from lib/sse/snow3g_sse.c rename to lib/sse_t1/snow3g_sse.c index 4a32b3b1ab3fa073a9427a35fc040efbd456880a..664007b07e79ec5e127150e80efa00a3c8db183a 100644 --- a/lib/sse/snow3g_sse.c +++ b/lib/sse_t1/snow3g_sse.c @@ -40,3 +40,7 @@ #define CLEAR_SCRATCH_SIMD_REGS clear_scratch_xmms_sse #include "include/snow3g_common.h" +#include "include/ipsec_ooo_mgr.h" + 
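The sha_mb_sse.c wrappers above bind the generic submit_flush_job_sha_1/_256/_512() templates to the SSE lane counts (four SHA-1/SHA-224/SHA-256 lanes, two SHA-384/SHA-512 lanes), the block and pad sizes, and the C-callable assembly entry points added in this patch. A sketch of the kind of workload they serve (hypothetical helper name, status checks omitted) is a batch of plain SHA-256 hash jobs, where lanes fill up across submissions and a final flush drains lanes that never filled:

    #include <stdint.h>
    #include <intel-ipsec-mb.h>

    /* Hypothetical helper: hash 'count' buffers of equal length with SHA-256.
     * Completed jobs may be returned out of order by submit, hence the flush
     * loop at the end. Each digests[i] must provide at least 32 bytes. */
    static void sha256_hash_many(IMB_MGR *mb_mgr, const void **src,
                                 void **digests, const uint64_t len,
                                 const unsigned count)
    {
            unsigned i;

            for (i = 0; i < count; i++) {
                    IMB_JOB *job = IMB_GET_NEXT_JOB(mb_mgr);

                    job->cipher_mode = IMB_CIPHER_NULL;
                    job->cipher_direction = IMB_DIR_ENCRYPT;
                    job->chain_order = IMB_ORDER_HASH_CIPHER;
                    job->hash_alg = IMB_AUTH_SHA_256;
                    job->src = src[i];
                    job->hash_start_src_offset_in_bytes = 0;
                    job->msg_len_to_hash_in_bytes = len;
                    job->auth_tag_output = digests[i];
                    job->auth_tag_output_len_in_bytes = 32;

                    /* returns NULL until enough lanes are busy to retire a job */
                    (void) IMB_SUBMIT_JOB(mb_mgr);
            }

            /* flush forces the remaining, partially filled lanes to be processed */
            while (IMB_FLUSH_JOB(mb_mgr) != NULL)
                    ;
    }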
+IMB_DLL_LOCAL IMB_JOB *submit_job_snow3g_uea2_sse(MB_MGR_SNOW3G_OOO *state, IMB_JOB *job); +IMB_DLL_LOCAL IMB_JOB *flush_job_snow3g_uea2_sse(MB_MGR_SNOW3G_OOO *state); diff --git a/lib/sse_t1/snow3g_uea2_by4_sse.asm b/lib/sse_t1/snow3g_uea2_by4_sse.asm new file mode 100644 index 0000000000000000000000000000000000000000..5b3a3561b57abcfcb6d33bf9c149f7a2001f696c --- /dev/null +++ b/lib/sse_t1/snow3g_uea2_by4_sse.asm @@ -0,0 +1,1193 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
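snow3g_uea2_by4_sse.asm generates SNOW 3G UEA2 keystream for four buffers in parallel, keeping one 32-bit LFSR/FSM word per lane in each XMM register. For reference, a scalar model of a single keystream-mode clock per lane is sketched below, following the ETSI/SAGE SNOW 3G specification; MULalpha(), DIValpha(), S1() and S2() are stand-ins for the table- and AESENC-based primitives the assembly uses (snow3g_table_A_mul, snow3g_table_A_div, snow3g_table_S2 and the aesenc-based S1 box), and the function name is hypothetical:

    #include <stdint.h>
    #include <string.h>

    /* Stand-ins for the table/AESNI-based primitives used by the assembly. */
    uint32_t MULalpha(uint8_t c);
    uint32_t DIValpha(uint8_t c);
    uint32_t S1(uint32_t w);
    uint32_t S2(uint32_t w);

    /* One SNOW 3G clock in keystream mode for a single lane:
     * returns the keystream word and updates LFSR[0..15] and FSM R1/R2/R3. */
    static uint32_t snow3g_clock_keystream(uint32_t lfsr[16], uint32_t fsm[3])
    {
            /* FSM output and keystream word */
            const uint32_t F = (lfsr[15] + fsm[0]) ^ fsm[1];
            const uint32_t z = F ^ lfsr[0];

            /* FSM clock: R1 = R2 + (R3 ^ LFSR_5); R3 = S2(R2); R2 = S1(R1) */
            const uint32_t r = fsm[1] + (fsm[2] ^ lfsr[5]);

            fsm[2] = S2(fsm[1]);
            fsm[1] = S1(fsm[0]);
            fsm[0] = r;

            /* LFSR clock (keystream mode) */
            const uint32_t v = ((lfsr[0] << 8) & 0xffffff00) ^
                               MULalpha((uint8_t)(lfsr[0] >> 24)) ^
                               lfsr[2] ^
                               ((lfsr[11] >> 8) & 0x00ffffff) ^
                               DIValpha((uint8_t)(lfsr[11] & 0xff));

            memmove(&lfsr[0], &lfsr[1], 15 * sizeof(lfsr[0]));
            lfsr[15] = v;

            return z;
    }

The macros that follow (SSE_LOOKUP_16X8BIT, S2_BOX_SSE, SNOW3G_FSM_CLOCK, ALPHA_OP, SHIFT_LFSRS) are the four-lane SIMD counterparts of these steps.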
+;; + +%include "include/os.asm" +%include "include/reg_sizes.asm" +%include "include/memcpy.asm" +%include "include/imb_job.asm" +%include "include/clear_regs.asm" +%include "include/mb_mgr_datastruct.asm" +%include "include/memcpy.asm" +%include "include/transpose_sse.asm" + +extern snow3g_table_A_mul +extern snow3g_table_A_div +extern snow3g_table_S2 + +align 64 +const_fixup_mask: +times 2 dq 0x7272727272727272 + +align 64 +const_byte_mix_col_rev: +dd 0x00030201, 0x04070605, 0x080b0a09, 0x0c0f0e0d + +align 16 +snow3g_inv_SR_SQ: +db 0xC2, 0xA6, 0x8F, 0x0A, 0x0D, 0xBE, 0xA7, 0x08 +db 0x1D, 0x99, 0x45, 0x59, 0x13, 0xD2, 0x11, 0x9F +db 0xAE, 0xE6, 0xD4, 0xA4, 0x92, 0x8D, 0x58, 0xC1 +db 0xD0, 0x97, 0xC8, 0x84, 0x9D, 0x4F, 0xBC, 0x3B +db 0x2D, 0xEB, 0x27, 0x53, 0x72, 0x4E, 0xE3, 0xEE +db 0xDA, 0x7F, 0xAA, 0x4D, 0x5C, 0x2F, 0x44, 0xDB +db 0x3E, 0x3A, 0x67, 0xC5, 0xC3, 0x6A, 0x16, 0x4C +db 0x38, 0xCC, 0xD7, 0xDD, 0x70, 0x62, 0xF2, 0x19 +db 0x10, 0x09, 0x98, 0x4B, 0x61, 0xC9, 0x86, 0x03 +db 0xA8, 0x6B, 0x5A, 0x33, 0x6E, 0x54, 0x5D, 0x8C +db 0x41, 0x1A, 0xF7, 0xF6, 0x82, 0xC6, 0xF8, 0x80 +db 0xC0, 0xC7, 0xFE, 0xB3, 0x65, 0x2C, 0x7B, 0xBA +db 0xB4, 0xFC, 0x2A, 0x22, 0x0C, 0x73, 0xF5, 0x5F +db 0x64, 0x68, 0x2E, 0x94, 0xB2, 0x24, 0x35, 0x14 +db 0x78, 0xFB, 0xBF, 0x48, 0xDE, 0xED, 0x43, 0x07 +db 0xB6, 0x32, 0xE4, 0xBD, 0x74, 0x7D, 0x57, 0x46 +db 0x3C, 0x37, 0xC4, 0xB7, 0x51, 0x8A, 0xF3, 0x55 +db 0x6C, 0xCF, 0x79, 0xAB, 0x77, 0xA3, 0xE1, 0x93 +db 0xD5, 0x6D, 0x81, 0x5B, 0x2B, 0x9A, 0x7E, 0x8B +db 0x04, 0xB5, 0x85, 0xD3, 0x91, 0xA1, 0x47, 0x52 +db 0xA5, 0xEC, 0xD6, 0xBB, 0x20, 0x87, 0x26, 0xF0 +db 0xAF, 0x4A, 0x89, 0xF4, 0xCE, 0x25, 0xCB, 0x50 +db 0x00, 0x3F, 0xD9, 0x42, 0x90, 0x21, 0x3D, 0xA9 +db 0xE7, 0x29, 0x01, 0xF1, 0x36, 0x5E, 0xFA, 0xCD +db 0xE5, 0x31, 0x1B, 0x05, 0xFD, 0x9E, 0xA0, 0x76 +db 0x30, 0xB1, 0x75, 0xB0, 0x9B, 0x56, 0xEA, 0x1C +db 0xEF, 0x06, 0x69, 0x7A, 0x95, 0x88, 0x15, 0xFF +db 0xCA, 0xAC, 0x0E, 0x23, 0xD8, 0x0F, 0x28, 0x0B +db 0x18, 0xF9, 0x63, 0x1E, 0x83, 0x66, 0x39, 0x9C +db 0xE2, 0x49, 0x1F, 0xE8, 0xD1, 0x34, 0x7C, 0xA2 +db 0xB9, 0xE0, 0x02, 0x12, 0xE9, 0xDF, 0xAD, 0x71 +db 0x96, 0x8E, 0x6F, 0xB8, 0x40, 0x60, 0x17, 0xDC + +align 64 +xmm_bswap: +dd 0x00010203, 0x04050607, 0x08090a0b, 0x0c0d0e0f + +;; used for inverse of AESENC shift rows operation +align 64 +const_fixed_rotate_mask: +dq 0x0b0e0104070a0d00, 0x0306090c0f020508 + +align 64 +idx_rows_sse: +times 4 dd 0x00000000 +times 4 dd 0x10101010 +times 4 dd 0x20202020 +times 4 dd 0x30303030 +times 4 dd 0x40404040 +times 4 dd 0x50505050 +times 4 dd 0x60606060 +times 4 dd 0x70707070 +times 4 dd 0x80808080 +times 4 dd 0x90909090 +times 4 dd 0xa0a0a0a0 +times 4 dd 0xb0b0b0b0 +times 4 dd 0xc0c0c0c0 +times 4 dd 0xd0d0d0d0 +times 4 dd 0xe0e0e0e0 +times 4 dd 0xf0f0f0f0 + +align 64 +ms_byte_mask: +dd 0x0f0b0703 +dd 0x80808080 +dd 0x80808080 +dd 0x80808080 + +align 64 +ls_byte_mask: +dd 0x0c080400 +dd 0x80808080 +dd 0x80808080 +dd 0x80808080 + +align 64 +low_nibble_byte_mask: +times 4 dd 0x0f0f0f0f + +align 64 +mul_alpha: +db 0x00, 0x13, 0x26, 0x35, 0x4C, 0x5F, 0x6A, 0x79 +db 0x98, 0x8B, 0xBE, 0xAD, 0xD4, 0xC7, 0xF2, 0xE1 +db 0x00, 0xCF, 0x37, 0xF8, 0x6E, 0xA1, 0x59, 0x96 +db 0xDC, 0x13, 0xEB, 0x24, 0xB2, 0x7D, 0x85, 0x4A +db 0x00, 0x9F, 0x97, 0x08, 0x87, 0x18, 0x10, 0x8F +db 0xA7, 0x38, 0x30, 0xAF, 0x20, 0xBF, 0xB7, 0x28 +db 0x00, 0xE1, 0x6B, 0x8A, 0xD6, 0x37, 0xBD, 0x5C +db 0x05, 0xE4, 0x6E, 0x8F, 0xD3, 0x32, 0xB8, 0x59 +db 0x00, 0x99, 0x9B, 0x02, 0x9F, 0x06, 0x04, 0x9D +db 0x97, 0x0E, 0x0C, 0x95, 0x08, 0x91, 0x93, 0x0A +db 0x00, 0x11, 
0x22, 0x33, 0x44, 0x55, 0x66, 0x77 +db 0x88, 0x99, 0xAA, 0xBB, 0xCC, 0xDD, 0xEE, 0xFF +db 0x00, 0xE7, 0x67, 0x80, 0xCE, 0x29, 0xA9, 0x4E +db 0x35, 0xD2, 0x52, 0xB5, 0xFB, 0x1C, 0x9C, 0x7B +db 0x00, 0x0A, 0x14, 0x1E, 0x28, 0x22, 0x3C, 0x36 +db 0x50, 0x5A, 0x44, 0x4E, 0x78, 0x72, 0x6C, 0x66 + +align 64 +div_alpha: +db 0x00, 0xCD, 0x33, 0xFE, 0x66, 0xAB, 0x55, 0x98 +db 0xCC, 0x01, 0xFF, 0x32, 0xAA, 0x67, 0x99, 0x54 +db 0x00, 0x40, 0x80, 0xC0, 0xA9, 0xE9, 0x29, 0x69 +db 0xFB, 0xBB, 0x7B, 0x3B, 0x52, 0x12, 0xD2, 0x92 +db 0x00, 0x0F, 0x1E, 0x11, 0x3C, 0x33, 0x22, 0x2D +db 0x78, 0x77, 0x66, 0x69, 0x44, 0x4B, 0x5A, 0x55 +db 0x00, 0x18, 0x30, 0x28, 0x60, 0x78, 0x50, 0x48 +db 0xC0, 0xD8, 0xF0, 0xE8, 0xA0, 0xB8, 0x90, 0x88 +db 0x00, 0x31, 0x62, 0x53, 0xC4, 0xF5, 0xA6, 0x97 +db 0x21, 0x10, 0x43, 0x72, 0xE5, 0xD4, 0x87, 0xB6 +db 0x00, 0x5F, 0xBE, 0xE1, 0xD5, 0x8A, 0x6B, 0x34 +db 0x03, 0x5C, 0xBD, 0xE2, 0xD6, 0x89, 0x68, 0x37 +db 0x00, 0xF0, 0x49, 0xB9, 0x92, 0x62, 0xDB, 0x2B +db 0x8D, 0x7D, 0xC4, 0x34, 0x1F, 0xEF, 0x56, 0xA6 +db 0x00, 0x29, 0x52, 0x7B, 0xA4, 0x8D, 0xF6, 0xDF +db 0xE1, 0xC8, 0xB3, 0x9A, 0x45, 0x6C, 0x17, 0x3E + +align 64 +all_fs: +times 4 dd 0xffffffff + +mksection .text + +struc STACK +_keystream: resb (4 * 16) +_gpr_save: resq 10 +_rsp_save: resq 1 +endstruc + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Stores register contents and create the stack frame +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SNOW3G_FUNC_START 0 + mov rax, rsp + sub rsp, STACK_size + and rsp, ~63 + + mov [rsp + _gpr_save + 8 * 0], rbx + mov [rsp + _gpr_save + 8 * 1], rbp + mov [rsp + _gpr_save + 8 * 2], r12 + mov [rsp + _gpr_save + 8 * 3], rsi + mov [rsp + _gpr_save + 8 * 4], rdi + mov [rsp + _gpr_save + 8 * 5], r13 + mov [rsp + _gpr_save + 8 * 6], r14 + mov [rsp + _gpr_save + 8 * 7], r15 + +%ifdef LINUX + mov [rsp + _gpr_save + 8 * 8], r9 +%else + mov [rsp + _gpr_save + 8 * 8], rcx + mov [rsp + _gpr_save + 8 * 9], rdx +%endif + mov [rsp + _rsp_save], rax ;; original SP +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Restores register contents and removes the stack frame +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SNOW3G_FUNC_END 0 + mov rbx, [rsp + _gpr_save + 8 * 0] + mov rbp, [rsp + _gpr_save + 8 * 1] + mov r12, [rsp + _gpr_save + 8 * 2] + mov rsi, [rsp + _gpr_save + 8 * 3] + mov rdi, [rsp + _gpr_save + 8 * 4] + mov r13, [rsp + _gpr_save + 8 * 5] + mov r14, [rsp + _gpr_save + 8 * 6] + mov r15, [rsp + _gpr_save + 8 * 7] +%ifdef LINUX + mov r9, [rsp + _gpr_save + 8 * 8] +%else + mov rcx, [rsp + _gpr_save + 8 * 8] + mov rdx, [rsp + _gpr_save + 8 * 9] +%endif + mov rsp, [rsp + _rsp_save] ;; original SP +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; SSE_LOOKUP_16X8BIT: Search 16 8-bit values in lookup table +;; arg 1 [in] : xmm register with 16x8bit indexes to search +;; arg 2 [in] : memory with 16 8-bit indices to be looked up +;; arg3-15 [clobbered]: xmm registers used as temp variables +;; arg 16 [out]: xmm register to write 16 8-bit values from the table +;; in and out xmm register must be different registers +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SSE_LOOKUP_16X8BIT 15 +%define %%IN_INDEXES_H %1 ;; [in/out] xmm reg with indexes +%define %%TMP_INDEXES_L %2 ;; [clobbered] xmm register +%define %%TMP1 %3 ;; [clobbered] xmm register 
+%define %%TMP2 %4 ;; [clobbered] xmm register +%define %%TMP3 %5 ;; [clobbered] xmm register +%define %%TMP4 %6 ;; [clobbered] xmm register +%define %%TMP5 %7 ;; [clobbered] xmm register +%define %%TMP6 %8 ;; [clobbered] xmm register +%define %%TMPARG_TAB_VAL_0 %9 ;; [clobbered] xmm register +%define %%TMPARG_TAB_VAL_1 %10 ;; [clobbered] xmm register +%define %%TMPARG_TAB_VAL_2 %11 ;; [clobbered] xmm register +%define %%TMPARG_TAB_VAL_3 %12 ;; [clobbered] xmm register +%define %%TMPARG_TAB_VAL_4 %13 ;; [clobbered] xmm register +%define %%TMPARG_TAB_VAL_5 %14 ;; [clobbered] xmm register +%define %%OUT_SUBSTITUTE_VAL %15 ;; [out] xmm register + + + movdqa %%TMP1, [rel idx_rows_sse + (15 * 16)] ;; 4x0xf0f0f0f0 + movdqa %%TMP2, %%TMP1 + psrlq %%TMP2, 4 ;; 4x0x0f0f0f0f + movdqa %%TMP_INDEXES_L, %%IN_INDEXES_H + pand %%IN_INDEXES_H, %%TMP1 ;; index top nibble + pand %%TMP_INDEXES_L, %%TMP2 ;; index low nibble + + movdqa %%TMP1, %%IN_INDEXES_H + movdqa %%TMP3, %%IN_INDEXES_H + movdqa %%TMP4, %%IN_INDEXES_H + movdqa %%TMP5, %%IN_INDEXES_H + movdqa %%TMP6, %%IN_INDEXES_H + movdqa %%TMP2, %%IN_INDEXES_H + pcmpeqb %%TMP1, [rel idx_rows_sse + (0 * 16)] + movdqa %%TMPARG_TAB_VAL_0, [rel snow3g_inv_SR_SQ + (0 * 16)] + pcmpeqb %%TMP3, [rel idx_rows_sse + (1 * 16)] + movdqa %%TMPARG_TAB_VAL_1, [rel snow3g_inv_SR_SQ + (1 * 16)] + pcmpeqb %%TMP4, [rel idx_rows_sse + (2 * 16)] + movdqa %%TMPARG_TAB_VAL_2, [rel snow3g_inv_SR_SQ + (2 * 16)] + pcmpeqb %%TMP5, [rel idx_rows_sse + (3 * 16)] + movdqa %%TMPARG_TAB_VAL_3, [rel snow3g_inv_SR_SQ + (3 * 16)] + pcmpeqb %%TMP6, [rel idx_rows_sse + (4 * 16)] + movdqa %%TMPARG_TAB_VAL_4, [rel snow3g_inv_SR_SQ + (4 * 16)] + pcmpeqb %%TMP2, [rel idx_rows_sse + (5 * 16)] + movdqa %%TMPARG_TAB_VAL_5, [rel snow3g_inv_SR_SQ + (5 * 16)] + + pshufb %%TMPARG_TAB_VAL_0, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_1, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_2, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_3, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_4, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_5, %%TMP_INDEXES_L + + pand %%TMP1, %%TMPARG_TAB_VAL_0 + pand %%TMP3, %%TMPARG_TAB_VAL_1 + pand %%TMP4, %%TMPARG_TAB_VAL_2 + pand %%TMP5, %%TMPARG_TAB_VAL_3 + pand %%TMP6, %%TMPARG_TAB_VAL_4 + pand %%TMP2, %%TMPARG_TAB_VAL_5 + + por %%TMP1, %%TMP3 + por %%TMP4, %%TMP5 + por %%TMP2, %%TMP6 + movdqa %%OUT_SUBSTITUTE_VAL, %%TMP1 + por %%OUT_SUBSTITUTE_VAL, %%TMP4 + + ;; %%OUT_SUBSTITUTE_VAL & %%TMP2 carry current OR result. 
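+        ;; The remaining table rows (6 to 15) are processed below using the same
+        ;; pattern as rows 0 to 5 above: pcmpeqb of the high nibbles against the
+        ;; row constants in idx_rows_sse builds a per-byte row mask, pshufb with
+        ;; the low nibbles selects the byte within each 16-byte row, and the
+        ;; masked rows are OR-ed into the accumulators. Every row is read for
+        ;; every input, so the lookup avoids data-dependent table indexing.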
+ + movdqa %%TMP1, %%IN_INDEXES_H + movdqa %%TMP3, %%IN_INDEXES_H + movdqa %%TMP4, %%IN_INDEXES_H + movdqa %%TMP5, %%IN_INDEXES_H + movdqa %%TMP6, %%IN_INDEXES_H + + pcmpeqb %%TMP1, [rel idx_rows_sse + (6 * 16)] + movdqa %%TMPARG_TAB_VAL_0, [rel snow3g_inv_SR_SQ + (6 * 16)] + pcmpeqb %%TMP3, [rel idx_rows_sse + (7 * 16)] + movdqa %%TMPARG_TAB_VAL_1, [rel snow3g_inv_SR_SQ + (7 * 16)] + pcmpeqb %%TMP4, [rel idx_rows_sse + (8 * 16)] + movdqa %%TMPARG_TAB_VAL_2, [rel snow3g_inv_SR_SQ + (8 * 16)] + pcmpeqb %%TMP5, [rel idx_rows_sse + (9 * 16)] + movdqa %%TMPARG_TAB_VAL_3, [rel snow3g_inv_SR_SQ + (9 * 16)] + pcmpeqb %%TMP6, [rel idx_rows_sse + (10 * 16)] + movdqa %%TMPARG_TAB_VAL_4, [rel snow3g_inv_SR_SQ + (10 * 16)] + + pshufb %%TMPARG_TAB_VAL_0, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_1, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_2, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_3, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_4, %%TMP_INDEXES_L + + pand %%TMP1, %%TMPARG_TAB_VAL_0 + pand %%TMP3, %%TMPARG_TAB_VAL_1 + pand %%TMP4, %%TMPARG_TAB_VAL_2 + pand %%TMP5, %%TMPARG_TAB_VAL_3 + pand %%TMP6, %%TMPARG_TAB_VAL_4 + + por %%TMP1, %%TMP3 + por %%TMP4, %%TMP5 + por %%TMP2, %%TMP6 + por %%OUT_SUBSTITUTE_VAL, %%TMP1 + por %%TMP2, %%TMP4 + + ;; %%OUT_SUBSTITUTE_VAL & %%TMP1 carry current OR result + + movdqa %%TMP1, %%IN_INDEXES_H + movdqa %%TMP3, %%IN_INDEXES_H + movdqa %%TMP4, %%IN_INDEXES_H + movdqa %%TMP5, %%IN_INDEXES_H + movdqa %%TMP6, %%IN_INDEXES_H + + pcmpeqb %%TMP1, [rel idx_rows_sse + (11 * 16)] + movdqa %%TMPARG_TAB_VAL_0, [rel snow3g_inv_SR_SQ + (11 * 16)] + pcmpeqb %%TMP3, [rel idx_rows_sse + (12 * 16)] + movdqa %%TMPARG_TAB_VAL_1, [rel snow3g_inv_SR_SQ + (12 * 16)] + pcmpeqb %%TMP4, [rel idx_rows_sse + (13 * 16)] + movdqa %%TMPARG_TAB_VAL_2, [rel snow3g_inv_SR_SQ + (13 * 16)] + pcmpeqb %%TMP5, [rel idx_rows_sse + (14 * 16)] + movdqa %%TMPARG_TAB_VAL_3, [rel snow3g_inv_SR_SQ + (14 * 16)] + pcmpeqb %%TMP6, [rel idx_rows_sse + (15 * 16)] + movdqa %%TMPARG_TAB_VAL_4, [rel snow3g_inv_SR_SQ + (15 * 16)] + + pshufb %%TMPARG_TAB_VAL_0, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_1, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_2, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_3, %%TMP_INDEXES_L + pshufb %%TMPARG_TAB_VAL_4, %%TMP_INDEXES_L + + pand %%TMP1, %%TMPARG_TAB_VAL_0 + pand %%TMP3, %%TMPARG_TAB_VAL_1 + pand %%TMP4, %%TMPARG_TAB_VAL_2 + pand %%TMP5, %%TMPARG_TAB_VAL_3 + pand %%TMP6, %%TMPARG_TAB_VAL_4 + + por %%TMP1, %%TMP3 + por %%TMP4, %%TMP5 + por %%TMP2, %%TMP6 + por %%OUT_SUBSTITUTE_VAL, %%TMP1 + por %%TMP2, %%TMP4 + por %%OUT_SUBSTITUTE_VAL, %%TMP2 +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Search SNOW3G S2 box value per byte from FSM2 indicated in args. 
+;; Fill single dword in output depending on given lane nr +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro S2_BOX_BYTE_SEARCH 7 +%define %%OUT %1 ;; [out] xmm register (1 dword filled) +%define %%FSM_R2 %2 ;; [in] ptr to FSM2 values per 4 lanes +%define %%TABLE_PTR %3 ;; [in] address of table for search +%define %%LANE %4 ;; [in] lane nr +%define %%BYTE_NR %5 ;; [in] byte number for search (from FSM2) +%define %%BYTE_OFFSET %6 ;; [in] byte offset for output +%define %%TMP_64_1 %7 ;; [clobbered] temp gpr + + movzx %%TMP_64_1, byte[%%FSM_R2 + %%LANE*4 + %%BYTE_NR] + pinsrd %%OUT, [%%TABLE_PTR + %%TMP_64_1*8 + %%BYTE_OFFSET], %%LANE + +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; SNOW3G S2 box calculation for 4 32-bit values passed in 1st input parameter. +;; Clobbers all 15 input xmm registers +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro S2_BOX_SSE 15 +%define %%TMP1 %1 ;; [in/clobbered] xmm containing 4 dwords +%define %%TMP2 %2 ;; [clobbered] temp xmm register +%define %%TMP3 %3 ;; [clobbered] temp xmm register +%define %%TMP4 %4 ;; [clobbered] temp xmm register +%define %%TMP5 %5 ;; [clobbered] temp xmm register +%define %%TMP6 %6 ;; [clobbered] temp xmm register +%define %%TMP7 %7 ;; [clobbered] temp xmm register +%define %%TMP8 %8 ;; [clobbered] temp xmm register +%define %%TMP9 %9 ;; [clobbered] temp xmm register +%define %%TMP10 %10 ;; [clobbered] temp xmm register +%define %%TMP11 %11 ;; [clobbered] temp xmm register +%define %%TMP12 %12 ;; [clobbered] temp xmm register +%define %%TMP13 %13 ;; [clobbered] temp xmm register +%define %%TMP14 %14 ;; [clobbered] temp xmm register +%define %%TMPOUT %15 ;; [out] xmm containing S2 box for 4 input dwords + + ;; Perform invSR(SQ(x)) transform + SSE_LOOKUP_16X8BIT %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP8, %%TMP9, %%TMP10, \ + %%TMP11, %%TMP12, %%TMP13, %%TMP14, \ + %%TMPOUT + + pshufb %%TMPOUT, [rel const_fixed_rotate_mask] + pxor %%TMP1, %%TMP1 + movdqa %%TMP2, %%TMPOUT + + ;; aesenclast does not perform mix column operation and + ;; allows to determine the fix-up value to be applied + ;; on result of aesenc to produce correct result for SNOW3G + aesenclast %%TMP2, %%TMP1 + aesenc %%TMPOUT, %%TMP1 + + ;; Using signed compare to return 0xFF when the most significant bit of + ;; no_mixc is set + pcmpgtb %%TMP1, %%TMP2 + movdqa %%TMP5, %%TMP1 + pshufb %%TMP5, [rel const_byte_mix_col_rev] + pxor %%TMP1, %%TMP5 + pand %%TMP1, [rel const_fixup_mask] + pxor %%TMPOUT, %%TMP1 +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Perform SNOW3G FSM clock operation for 4 buffers. +;; Passed addresses for FSM_R1-FSM_R3 and LFSR_5 are interpreted as list of 4 +;; 32-bit values each. +;; Values under FSM_R1-FSM_R3 are updated as a result of this macro. 
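+;; Scalar equivalent per lane (SNOW 3G spec):
+;;     R = FSM_R2 + (FSM_R3 ^ LFSR_5); FSM_R3 = S2(FSM_R2);
+;;     FSM_R2 = S1(FSM_R1); FSM_R1 = R;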
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SNOW3G_FSM_CLOCK 11-20 +%define %%FSM_R1 %1 ;; [in] address of 4 FSM values R1 +%define %%FSM_R2 %2 ;; [in] address of 4 FSM values R2 +%define %%FSM_R3 %3 ;; [in] address of 4 FSM values R3 +%define %%TMP1 %4 ;; [clobbered] temp xmm register +%define %%TMP2 %5 ;; [clobbered] temp xmm register +%define %%TMP3 %6 ;; [clobbered] temp xmm register +%define %%TMP4 %7 ;; [clobbered] temp xmm register +%define %%TMP5 %8 ;; [clobbered] temp xmm register +%ifdef SAFE_LOOKUP +%define %%TMP6 %9 ;; [clobbered] temp xmm register +%define %%TMP7 %10 ;; [clobbered] temp xmm register +%define %%TMP8 %11 ;; [clobbered] temp xmm register +%define %%TMP9 %12 ;; [clobbered] temp xmm register +%define %%TMP10 %13 ;; [clobbered] temp xmm register +%define %%TMP11 %14 ;; [clobbered] temp xmm register +%define %%TMP12 %15 ;; [clobbered] temp xmm register +%define %%TMP13 %16 ;; [clobbered] temp xmm register +%define %%TMP14 %17 ;; [clobbered] temp xmm register +%define %%TMP15 %18 ;; [clobbered] temp xmm register +%define %%TMP16 %19 ;; [clobbered] temp xmm register +%define %%LFSR_5 %20 ;; [in] address of 4 LFSR 5 values +%else +%define %%TMP_64 %9 ;; [clobbered] temp gp register +%define %%TMP_64_1 %10 ;; [clobbered] temp gp register +%define %%LFSR_5 %11 ;; [in] address of 4 LFSR 5 values + +%endif ;; SAFE_LOOKUP + + ;; FSM_3 = S2_box(FSM_2) +%ifdef SAFE_LOOKUP + movdqa %%TMP15, [ %%FSM_R2 ] + S2_BOX_SSE %%TMP15, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP8, %%TMP9, %%TMP10, \ + %%TMP11, %%TMP12, %%TMP13, %%TMP14, %%TMP1 +%else + lea %%TMP_64, [rel snow3g_table_S2] + ;; w0= S2[(x >> 24) & 0xff]; + ;; w1= S2[(x >> 16) & 0xff]; + ;; w2= S2[(x >> 8) & 0xff]; + ;; w3= S2[x & 0xff]; + S2_BOX_BYTE_SEARCH %%TMP1, %%FSM_R2, %%TMP_64, 0, 3, 0, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP2, %%FSM_R2, %%TMP_64, 0, 2, 1, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP3, %%FSM_R2, %%TMP_64, 0, 1, 2, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP4, %%FSM_R2, %%TMP_64, 0, 0, 3, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP1, %%FSM_R2, %%TMP_64, 1, 3, 0, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP2, %%FSM_R2, %%TMP_64, 1, 2, 1, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP3, %%FSM_R2, %%TMP_64, 1, 1, 2, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP4, %%FSM_R2, %%TMP_64, 1, 0, 3, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP1, %%FSM_R2, %%TMP_64, 2, 3, 0, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP2, %%FSM_R2, %%TMP_64, 2, 2, 1, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP3, %%FSM_R2, %%TMP_64, 2, 1, 2, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP4, %%FSM_R2, %%TMP_64, 2, 0, 3, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP1, %%FSM_R2, %%TMP_64, 3, 3, 0, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP2, %%FSM_R2, %%TMP_64, 3, 2, 1, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP3, %%FSM_R2, %%TMP_64, 3, 1, 2, %%TMP_64_1 + S2_BOX_BYTE_SEARCH %%TMP4, %%FSM_R2, %%TMP_64, 3, 0, 3, %%TMP_64_1 + + pxor %%TMP4, %%TMP3 + pxor %%TMP1, %%TMP2 + pxor %%TMP1, %%TMP4 +%endif ;; SAFE_LOOKUP + + ;; R = (FSM_R3 ^ LFSR_5) + FSM_R2 + movdqa %%TMP5, [%%FSM_R3] + pxor %%TMP5, [%%LFSR_5] + paddd %%TMP5, [%%FSM_R2] + + ;; FSM_3 = S2_box(FSM_2) + movdqa [%%FSM_R3], %%TMP1 + + ;; FSM_R2 = S1_box(FSM_R1) + movdqa %%TMP3, [%%FSM_R1] + + ;; S1 box calculation + pshufb %%TMP3, [rel const_fixed_rotate_mask] + pxor %%TMP2, %%TMP2 + aesenc %%TMP3, %%TMP2 + + movdqa [%%FSM_R2], %%TMP3 + + ;; FSM_1 = R + movdqa [%%FSM_R1], %%TMP5 + +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Perform SNOW3G mul_alpha or div_alpha 
depending on the table passed in %2:
+;; (MULxPOW(c, 23, 0xA9) || MULxPOW(c, 245, 0xA9) || MULxPOW(c, 48, 0xA9)
+;; || MULxPOW(c, 239, 0xA9))
+;; c = %%LFSR_X
+;; Results of the mul_alpha and div_alpha operations are precalculated and
+;; expected under the %%OP_TABLE address. This macro searches those tables.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+%ifdef SAFE_LOOKUP
+%macro ALPHA_OP 7
+%define %%LFSR_X %1 ;; [in/clobbered] xmm with LFSR value
+%define %%OP_TABLE %2 ;; [in] address of mulalpha/divalpha val table
+%define %%TMP1 %3 ;; [out] temporary xmm register
+%define %%TMP2 %4 ;; [clobbered] temporary xmm register
+%define %%TMP3 %5 ;; [clobbered] temporary xmm register
+%define %%TMP4 %6 ;; [clobbered] temporary xmm register
+%define %%TMP5 %7 ;; [clobbered] temporary xmm register
+
+ movdqa %%TMP2, [rel low_nibble_byte_mask]
+ pand %%TMP2, %%LFSR_X ;; lower part of each byte of LFSR
+ movdqa %%TMP1, [rel %%OP_TABLE]
+ pshufb %%TMP1, %%TMP2
+ movdqa %%TMP3, [rel %%OP_TABLE + 16]
+ movdqa %%TMP4, [rel %%OP_TABLE + 32]
+ movdqa %%TMP5, [rel %%OP_TABLE + 48]
+
+ pshufb %%TMP3, %%TMP2
+ pshufb %%TMP4, %%TMP2
+ pshufb %%TMP5, %%TMP2
+
+ punpcklbw %%TMP1, %%TMP3
+ punpcklbw %%TMP4, %%TMP5
+ movdqa %%TMP2, %%TMP1
+ punpcklwd %%TMP2, %%TMP4
+
+ movdqa %%TMP1, [rel low_nibble_byte_mask]
+ psrld %%LFSR_X, 4
+ pand %%LFSR_X, %%TMP1
+
+ movdqa %%TMP1, [rel %%OP_TABLE + 64]
+ movdqa %%TMP3, [rel %%OP_TABLE + 80]
+ movdqa %%TMP4, [rel %%OP_TABLE + 96]
+ movdqa %%TMP5, [rel %%OP_TABLE + 112]
+
+ pshufb %%TMP1, %%LFSR_X
+ pshufb %%TMP3, %%LFSR_X
+ pshufb %%TMP4, %%LFSR_X
+ pshufb %%TMP5, %%LFSR_X
+
+ punpcklbw %%TMP1, %%TMP3
+ punpcklbw %%TMP4, %%TMP5
+ punpcklwd %%TMP1, %%TMP4
+ pxor %%TMP1, %%TMP2
+%endmacro
+
+%else ;; SAFE_LOOKUP
+
+%macro ALPHA_OP_NOT_SAFE 5
+%define %%LFSR_PTR %1 ;; [in] r64 with address of LFSR register
+ ;; for mulalpha pass LFSR 0
+ ;; for divalpha pass LFSR 11
+%define %%OP_TABLE %2 ;; [in] address of mulalpha/divalpha val table
+%define %%TMP1 %3 ;; [out] temporary xmm register
+%define %%TMP_64 %4 ;; [clobbered] temporary gp register
+%define %%TMP_64_1 %5 ;; [clobbered] temporary gp register
+ lea %%TMP_64, [rel %%OP_TABLE]
+
+%ifidn %%OP_TABLE, snow3g_table_A_div
+ movzx %%TMP_64_1, byte[%%LFSR_PTR]
+%else
+ movzx %%TMP_64_1, byte[%%LFSR_PTR+3]
+%endif
+ movd %%TMP1, [%%TMP_64 + %%TMP_64_1*4]
+
+%ifidn %%OP_TABLE, snow3g_table_A_div
+ movzx %%TMP_64_1, byte[%%LFSR_PTR+4]
+%else
+ movzx %%TMP_64_1, byte[%%LFSR_PTR+7]
+%endif
+ pinsrd %%TMP1, [%%TMP_64 + %%TMP_64_1*4], 1
+%ifidn %%OP_TABLE, snow3g_table_A_div
+ movzx %%TMP_64_1, byte[%%LFSR_PTR+8]
+%else
+ movzx %%TMP_64_1, byte[%%LFSR_PTR+11]
+%endif
+ pinsrd %%TMP1, [%%TMP_64 + %%TMP_64_1*4], 2
+%ifidn %%OP_TABLE, snow3g_table_A_div
+ movzx %%TMP_64_1, byte[%%LFSR_PTR+12]
+%else
+ movzx %%TMP_64_1, byte[%%LFSR_PTR+15]
+%endif
+ pinsrd %%TMP1, [%%TMP_64 + %%TMP_64_1*4], 3
+%endmacro
+
+%endif ;; SAFE_LOOKUP
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Perform SNOW3G LFSR shift operation.
+;; This operation is common to initialization mode and keystream mode; the only
+;; difference is that in init mode %2 = keystream, otherwise %2 needs to be set to 0.
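For reference, the ALPHA_OP / ALPHA_OP_NOT_SAFE macros above correspond to the MULalpha() and DIValpha() functions of the SNOW 3G specification, evaluated through precomputed 32-bit lookup tables; the LFSR feedback built in the keystream macro further below consumes their results. A minimal scalar C sketch of the non-SAFE_LOOKUP path is given here; it is illustrative only (not part of the patch), assumes the same 256-entry tables the macro references, and uses hypothetical helper names.

#include <stdint.h>

/* 256-entry tables holding MULalpha(c) and DIValpha(c) for every byte value c,
 * as referenced by ALPHA_OP_NOT_SAFE (snow3g_table_A_mul / snow3g_table_A_div). */
extern const uint32_t snow3g_table_A_mul[256];
extern const uint32_t snow3g_table_A_div[256];

/* MULalpha() is applied to the most significant byte of LFSR word s0
 * (byte offset 3 in the macro), DIValpha() to the least significant byte
 * of LFSR word s11 (byte offset 0 in the macro). */
static inline uint32_t snow3g_mul_alpha(const uint32_t s0)
{
        return snow3g_table_A_mul[s0 >> 24];
}

static inline uint32_t snow3g_div_alpha(const uint32_t s11)
{
        return snow3g_table_A_div[s11 & 0xff];
}

The SAFE_LOOKUP variant above computes the same values with pshufb-based nibble lookups over slices of the tables, avoiding data-dependent table addresses.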
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SHIFT_LFSRS 9 +%define %%STATE %1 ;; [in] state ptr +%define %%KEYSTREAM %2 ;; [in] in init mode keystream, else 0 +%define %%TMP1 %3 ;; [clobbered] temporary xmm register +%define %%TMP2 %4 ;; [clobbered] temporary xmm register +%define %%TMP3 %5 ;; [clobbered] temporary xmm register +%define %%TMP4 %6 ;; [clobbered] temporary xmm register +%define %%TMP5 %7 ;; [clobbered] temporary xmm register +%define %%TMP6 %8 ;; [clobbered] temporary xmm register +%define %%TMP7 %9 ;; [clobbered] temporary xmm register + + ;; LFSR_0:LFSR_15: LFSR_i = LFSR_(i + 1); + ;; LFSR_15 = keystream / 0 + movdqa %%TMP1, [%%STATE + _snow3g_args_LFSR_1] + movdqa %%TMP2, [%%STATE + _snow3g_args_LFSR_2] + movdqa %%TMP3, [%%STATE + _snow3g_args_LFSR_3] + movdqa %%TMP4, [%%STATE + _snow3g_args_LFSR_4] + movdqa %%TMP5, [%%STATE + _snow3g_args_LFSR_5] + movdqa %%TMP6, [%%STATE + _snow3g_args_LFSR_6] + movdqa [%%STATE + _snow3g_args_LFSR_0], %%TMP1 + movdqa [%%STATE + _snow3g_args_LFSR_1], %%TMP2 + movdqa [%%STATE + _snow3g_args_LFSR_2], %%TMP3 + movdqa [%%STATE + _snow3g_args_LFSR_3], %%TMP4 + movdqa [%%STATE + _snow3g_args_LFSR_4], %%TMP5 + movdqa [%%STATE + _snow3g_args_LFSR_5], %%TMP6 + + movdqa %%TMP1, [%%STATE + _snow3g_args_LFSR_7] + movdqa %%TMP2, [%%STATE + _snow3g_args_LFSR_8] + movdqa %%TMP3, [%%STATE + _snow3g_args_LFSR_9] + movdqa %%TMP4, [%%STATE + _snow3g_args_LFSR_10] + movdqa %%TMP5, [%%STATE + _snow3g_args_LFSR_11] + movdqa %%TMP6, [%%STATE + _snow3g_args_LFSR_12] + movdqa [%%STATE + _snow3g_args_LFSR_6], %%TMP1 + movdqa [%%STATE + _snow3g_args_LFSR_7], %%TMP2 + movdqa [%%STATE + _snow3g_args_LFSR_8], %%TMP3 + movdqa [%%STATE + _snow3g_args_LFSR_9 ], %%TMP4 + movdqa [%%STATE + _snow3g_args_LFSR_10], %%TMP5 + movdqa [%%STATE + _snow3g_args_LFSR_11], %%TMP6 + + movdqa %%TMP1, [%%STATE + _snow3g_args_LFSR_13] + movdqa %%TMP2, [%%STATE + _snow3g_args_LFSR_14] + movdqa %%TMP3, [%%STATE + _snow3g_args_LFSR_15] + + movdqa [%%STATE + _snow3g_args_LFSR_12], %%TMP1 + movdqa [%%STATE + _snow3g_args_LFSR_13], %%TMP2 + movdqa [%%STATE + _snow3g_args_LFSR_14], %%TMP3 + + movdqa [%%STATE + _snow3g_args_LFSR_15], %%KEYSTREAM +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Generate SNOW3G keystream per 4 buffers. Update LFSR/FSM state. +;; This macro is used both in initialization and keystream modes. +;; In initialization mode F is stored on stack. +;; In keystream mode keystream is stored on stack. 
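To relate the vectorized macro that follows to the scalar algorithm, one SNOW 3G clock tick per the specification computes F from LFSR s15 and the FSM, derives the keystream word (or feeds F back into the LFSR in initialization mode), clocks the FSM, then clocks and shifts the LFSR. The C sketch below is a reference only and not part of the patch; S1()/S2() stand for the SNOW 3G S-boxes and the alpha helpers are the ones sketched after ALPHA_OP above.

#include <stdint.h>

/* SNOW 3G S-boxes and alpha helpers (declarations only, for illustration) */
uint32_t S1(uint32_t w);
uint32_t S2(uint32_t w);
uint32_t snow3g_mul_alpha(uint32_t s0);
uint32_t snow3g_div_alpha(uint32_t s11);

struct snow3g_ref_state {
        uint32_t lfsr[16];   /* s0 .. s15 */
        uint32_t r1, r2, r3; /* FSM registers */
};

/* One clock tick; init != 0 selects initialization mode
 * (F is fed back into the LFSR and no keystream is produced). */
static uint32_t snow3g_clock(struct snow3g_ref_state *st, const int init)
{
        uint32_t *s = st->lfsr;

        /* F = (s15 + R1) ^ R2; keystream word z = F ^ s0 (keystream mode) */
        const uint32_t F = (s[15] + st->r1) ^ st->r2;
        const uint32_t z = F ^ s[0];

        /* FSM clock: R1 = R2 + (R3 ^ s5); R3 = S2(R2); R2 = S1(R1) */
        const uint32_t r = st->r2 + (st->r3 ^ s[5]);
        st->r3 = S2(st->r2);
        st->r2 = S1(st->r1);
        st->r1 = r;

        /* LFSR feedback word */
        uint32_t v = (s[0] << 8) ^ snow3g_mul_alpha(s[0]) ^ s[2] ^
                     (s[11] >> 8) ^ snow3g_div_alpha(s[11]);
        if (init)
                v ^= F; /* initialization mode only */

        /* shift: s(i) = s(i + 1), s15 = v */
        for (int i = 0; i < 15; i++)
                s[i] = s[i + 1];
        s[15] = v;

        return z; /* discarded during the 32 initialization ticks */
}

This follows the same order as the macro below: F and the keystream word are formed first, then the FSM is clocked, then the alpha-based LFSR feedback is computed and SHIFT_LFSRS installs the new s15.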
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SNOW3G_KEY_GEN_SSE 12-18 +%define %%STATE %1 ;; [in] ptr to LFSR/FSM struct +%define %%TMP1 %2 ;; [clobbered] temporary xmm register +%define %%TMP2 %3 ;; [clobbered] temporary xmm register +%define %%TMP3 %4 ;; [clobbered] temporary xmm register +%define %%TMP4 %5 ;; [clobbered] temporary xmm register +%define %%TMP5 %6 ;; [clobbered] temporary xmm register +%define %%TMP6 %7 ;; [clobbered] temporary xmm register +%define %%TMP7 %8 ;; [clobbered] temporary xmm register +%ifdef SAFE_LOOKUP +%define %%TMP8 %9 ;; [clobbered] temporary xmm register +%define %%TMP9 %10 ;; [clobbered] temporary xmm register +%define %%TMP10 %11 ;; [clobbered] temporary xmm register +%define %%TMP11 %12 ;; [clobbered] temporary xmm register +%define %%TMP12 %13 ;; [clobbered] temporary xmm register +%define %%TMP13 %14 ;; [clobbered] temporary xmm register +%define %%TMP14 %15 ;; [clobbered] temporary xmm register +%define %%TMP15 %16 ;; [clobbered] temporary xmm register +%define %%TMP16 %17 ;; [clobbered] temporary xmm register +%define %%DWORD_ITER %18 ;; [in] gp reg with offset for stack storing keystream +%else ;;SAFE_LOOKUP +%define %%TMP15 %9 ;; [clobbered] temporary xmm register +%define %%TMP_64_1 %10 ;; [clobbered] temp gpr +%define %%TMP_64_2 %11 ;; [clobbered] temp gpr +%define %%DWORD_ITER %12 ;; [in] gp reg with offset for stack storing keystream +%endif ;;SAFE_LOOKUP + + + ;; Calculate F = (LFSR_S[15] + FSM_R1) ^ FSM_R2; + movdqa %%TMP1, [ %%STATE + _snow3g_args_LFSR_15 ] + paddd %%TMP1, [ %%STATE + _snow3g_args_FSM_1 ] + pxor %%TMP1, [ %%STATE + _snow3g_args_FSM_2 ] + + ;; Store F/keystream on stack + movdqa %%TMP2, [%%STATE + _snow3g_args_LD_ST_MASK + 4*4] + pandn %%TMP2, %%TMP1 ;; zero in keystream mode + movdqa %%TMP3, [%%STATE + _snow3g_args_LD_ST_MASK + 4*4] + ;; keystream mode: ks = F xor LFSR_0 + pxor %%TMP1, [%%STATE + _snow3g_args_LFSR_0] + pand %%TMP3, %%TMP1 ;; zero in init mode + por %%TMP2, %%TMP3 + shl %%DWORD_ITER, 4 + movdqa [rsp + _keystream + %%DWORD_ITER], %%TMP2 + + ;; FSM Clock +%ifdef SAFE_LOOKUP + SNOW3G_FSM_CLOCK {%%STATE + _snow3g_args_FSM_1}, \ + {%%STATE + _snow3g_args_FSM_2}, \ + {%%STATE + _snow3g_args_FSM_3}, %%TMP1, %%TMP2, \ + %%TMP3, %%TMP4, %%TMP5, %%TMP6, %%TMP7, \ + %%TMP8, %%TMP9, %%TMP10, %%TMP11, %%TMP12, \ + %%TMP13, %%TMP14, %%TMP15, %%TMP16, \ + %%STATE + _snow3g_args_LFSR_5 + movdqa %%TMP15, [ %%STATE + _snow3g_args_LFSR_0 ] + movdqa %%TMP2, [rel ms_byte_mask] + pshufb %%TMP15, %%TMP2 + ALPHA_OP %%TMP15, mul_alpha, %%TMP2, %%TMP3, %%TMP4, \ + %%TMP5, %%TMP6 + ; LFSR clock: div alpha + + movdqa %%TMP15, [%%STATE + _snow3g_args_LFSR_11 ] + movdqa %%TMP7, [rel ls_byte_mask] + pshufb %%TMP15, %%TMP7 + ALPHA_OP %%TMP15, div_alpha, %%TMP7, %%TMP3, %%TMP4, \ + %%TMP5, %%TMP6 +%else + SNOW3G_FSM_CLOCK {%%STATE + _snow3g_args_FSM_1}, \ + {%%STATE + _snow3g_args_FSM_2}, \ + {%%STATE + _snow3g_args_FSM_3}, %%TMP1, %%TMP2, \ + %%TMP3, %%TMP4, %%TMP5, %%TMP_64_1, %%TMP_64_2, \ + %%STATE + _snow3g_args_LFSR_5 + ALPHA_OP_NOT_SAFE {%%STATE + _snow3g_args_LFSR_0}, snow3g_table_A_mul, \ + %%TMP2, %%TMP_64_1, %%TMP_64_2 + + + ALPHA_OP_NOT_SAFE {%%STATE + _snow3g_args_LFSR_11}, snow3g_table_A_div, \ + %%TMP7, %%TMP_64_1, %%TMP_64_2 +%endif ;; SAFE_LOOKUP + + movdqa %%TMP15, [%%STATE + _snow3g_args_LFSR_2 ] + pxor %%TMP15, %%TMP2 + pxor %%TMP15, %%TMP7 + + movdqa %%TMP3, [%%STATE + _snow3g_args_LFSR_0 ] + movdqa %%TMP4, [%%STATE + _snow3g_args_LFSR_11 ] + pslld %%TMP3, 8 + psrld %%TMP4, 
8
+ pxor %%TMP15, %%TMP3
+ pxor %%TMP15, %%TMP4
+
+ ;; in init mode the mask is 0, so this XOR applies only in init mode
+ movdqa %%TMP2, [%%STATE + _snow3g_args_LD_ST_MASK + 4*4]
+ pandn %%TMP2, [rsp + _keystream + %%DWORD_ITER]
+ pxor %%TMP15, %%TMP2
+
+ SHIFT_LFSRS %%STATE, %%TMP15, %%TMP1, %%TMP2, %%TMP3, \
+ %%TMP4, %%TMP5, %%TMP6, %%TMP7
+ ;; restore offset
+ shr %%DWORD_ITER, 4
+%endmacro
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Read and transpose keystreams from stack
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+%macro TRANSPOSE_4X32 6
+%define %%OUT_XMM_LANE_0 %1 ;; [out] 128bit keystream for lane 0
+%define %%OUT_XMM_LANE_1 %2 ;; [out] 128bit keystream for lane 1
+%define %%OUT_XMM_LANE_2 %3 ;; [out] 128bit keystream for lane 2
+%define %%OUT_XMM_LANE_3 %4 ;; [out] 128bit keystream for lane 3
+%define %%XTMP0 %5 ;; [clobbered] temporary xmm register
+%define %%XTMP1 %6 ;; [clobbered] temporary xmm register
+
+ movdqa %%OUT_XMM_LANE_2, [rsp + _keystream + 0 * 16]
+ movdqa %%OUT_XMM_LANE_1, [rsp + _keystream + 1 * 16]
+ movdqa %%XTMP1, [rsp + _keystream + 2 * 16]
+ movdqa %%OUT_XMM_LANE_3, [rsp + _keystream + 3 * 16]
+
+ ;; output looks like: {t0 r1 r0 r3}
+ TRANSPOSE4_U32 %%OUT_XMM_LANE_2, %%OUT_XMM_LANE_1, %%XTMP1, \
+ %%OUT_XMM_LANE_3, %%OUT_XMM_LANE_0, %%XTMP0
+
+%endmacro
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Checks the init/keystream phase mask for the given lane and, if the lane is in
+;; the keystream phase, outputs the keystream from the stack XORed with the input.
+;; Input arguments and the size of the output data are controlled by %%SIZE:
+;; - 16: 8 arguments, output full xmm value passed by %8
+;; - 4: 7 arguments, output exactly 1 DW
+;; - other: 7 arguments, output 1-4 bytes depending on %7
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+%macro SNOW3G_OUTPUT 7-10
+%define %%SIZE %1 ;; [in] size indicating nr of bytes to output
+%define %%STATE %2 ;; [in] ptr to LFSR/FSM struct
+%define %%LANE %3 ;; [in] lane nr
+%define %%TMP64 %4 ;; [clobbered] r64 gp reg temp
+%define %%IN_PTR %5 ;; [clobbered] r64 gp reg temp
+%define %%OUT_PTR %6 ;; [clobbered] r64 gp reg temp
+%define %%TMP %7 ;; [in] temp xmm, if size is 16
+%define %%LENGTH %7 ;; [clobbered] r64 gp reg temp, if size is not 16
+%define %%VALUE %8 ;; [in] xmm_val, if size is 16
+
+
+ ;; Check the phase for the given lane
+ mov DWORD(%%TMP64), \
+ [%%STATE + _snow3g_args_LD_ST_MASK + %%LANE*4]
+ or DWORD(%%TMP64),DWORD(%%TMP64)
+ je %%no_output ;; skip output if in init phase
+ ;; read in/out ptrs
+ mov %%IN_PTR, [%%STATE + _snow3g_args_in + %%LANE * 8]
+ mov %%OUT_PTR, [%%STATE + _snow3g_args_out + %%LANE * 8]
+
+ ;; output == input XOR keystream
+%ifidn %%SIZE, 16
+ movdqu %%TMP, [%%IN_PTR]
+ pshufb %%VALUE, [rel xmm_bswap]
+ pxor %%VALUE, %%TMP
+ movdqu [%%OUT_PTR], %%VALUE
+ add %%IN_PTR, %%SIZE
+ add %%OUT_PTR, %%SIZE
+%else
+ ;; there is always at least 1DW of keystream generated on stack
+ mov DWORD(%%TMP64), [rsp + _keystream + %%LANE*4]
+ bswap DWORD(%%TMP64)
+%ifidn %%SIZE, 4
+ xor DWORD(%%TMP64), [%%IN_PTR]
+ mov [%%OUT_PTR], DWORD(%%TMP64)
+%else ;; up to 4 bytes (defined by %%LENGTH)
+ mov DWORD(%%LENGTH), [%%STATE + _snow3g_lens + %%LANE * 4]
+ cmp %%LENGTH, 4
+ jne %%_not_dw
+ xor DWORD(%%TMP64), [%%IN_PTR]
+ mov dword [%%OUT_PTR], DWORD(%%TMP64)
+ jmp %%_write_done
+%%_not_dw:
+ and %%LENGTH, 3
+ cmp %%LENGTH, 2
+ jl %%_write_single_byte
+
+ ;; write 2 bytes
+ xor 
WORD(%%TMP64), [%%IN_PTR] + mov word [%%OUT_PTR], WORD(%%TMP64) + add %%IN_PTR, 2 + add %%OUT_PTR, 2 + and %%LENGTH, 1 + je %%_write_done + shr %%TMP64, 16 +%%_write_single_byte: + xor BYTE(%%TMP64), [%%IN_PTR] + mov byte [%%OUT_PTR], BYTE(%%TMP64) +%%_write_done: +%endif + ;; if %%LENGTH is less then 4 bytes per given lane that's the last bytes + ;; of total request so pointers are never used again + add %%IN_PTR, 4 + add %%OUT_PTR, 4 +%endif + ;; Update input/output pointers + mov [%%STATE + _snow3g_args_in + %%LANE*8], %%IN_PTR + mov [%%STATE + _snow3g_args_out + %%LANE*8], %%OUT_PTR + +%%no_output: +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Initialize LFSR, FSM registers and write mask for given lane +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SNOW3G_INIT_LANE_SSE 7 +%define %%STATE %1 ;; [in] ptr to MB_MGR_SNOW3G_OOO structure +%define %%LANE %2 ;; [in] nr of lane initialize data in +%define %%P_KEY %3 ;; [in] ptr to key +%define %%P_IV %4 ;; [in] ptr to IV +%define %%TMPXMM_1 %5 ;; [clobbered] temporary xmm reg +%define %%TMPXMM_2 %6 ;; [clobbered] temporary xmm reg +%define %%TMPXMM_3 %7 ;; [clobbered] temporary xmm reg + + movd %%TMPXMM_1, [%%P_KEY] ;; key + movdqa %%TMPXMM_2, %%TMPXMM_1 + pxor %%TMPXMM_2, [rel all_fs] ;; ~key + + movdqu %%TMPXMM_3, [%%P_IV] + pshufb %%TMPXMM_3, [rel xmm_bswap] + + ;; temporarily store swapped IV on stack + movdqu [rsp + _keystream], %%TMPXMM_3 + + ;; LFSR initialisation + movd [%%STATE + _snow3g_args_LFSR_0 + 4*%%LANE], %%TMPXMM_2 + movd [%%STATE + _snow3g_args_LFSR_8 + 4*%%LANE], %%TMPXMM_2 + movd [%%STATE + _snow3g_args_LFSR_4 + 4*%%LANE], %%TMPXMM_1 + movd %%TMPXMM_3, [rsp + _keystream + 8] + pxor %%TMPXMM_1, %%TMPXMM_3 ;; LFSR_12 ^= IV[2](swapped) + movd [%%STATE + _snow3g_args_LFSR_12 + 4*%%LANE], %%TMPXMM_1 + + movd %%TMPXMM_1, [%%P_KEY + 4] + movd %%TMPXMM_2, [%%P_KEY + 4] + pxor %%TMPXMM_2, [rel all_fs] + + movd [%%STATE + _snow3g_args_LFSR_1 + 4*%%LANE], %%TMPXMM_2 + movd %%TMPXMM_3, [rsp + _keystream] + pxor %%TMPXMM_2, %%TMPXMM_3 ;; LFSR_9 ^= IV[0](swapped) + movd [%%STATE + _snow3g_args_LFSR_9 + 4*%%LANE], %%TMPXMM_2 + movd [%%STATE + _snow3g_args_LFSR_5 + 4*%%LANE], %%TMPXMM_1 + movd [%%STATE + _snow3g_args_LFSR_13 + 4*%%LANE], %%TMPXMM_1 + + movd %%TMPXMM_1, [%%P_KEY + 8] + movd %%TMPXMM_2, [%%P_KEY + 8] + pxor %%TMPXMM_2, [rel all_fs] + + movd [%%STATE + _snow3g_args_LFSR_2 + 4*%%LANE], %%TMPXMM_2 + movd %%TMPXMM_3, [rsp + _keystream + 4] + pxor %%TMPXMM_2, %%TMPXMM_3 ;; LFSR_10 ^= IV[1](swapped) + movd [%%STATE + _snow3g_args_LFSR_10 + 4*%%LANE], %%TMPXMM_2 + movd [%%STATE + _snow3g_args_LFSR_6 + 4*%%LANE], %%TMPXMM_1 + movd [%%STATE + _snow3g_args_LFSR_14 + 4*%%LANE], %%TMPXMM_1 + + movd %%TMPXMM_1, [%%P_KEY + 12] + movdqa %%TMPXMM_2, %%TMPXMM_1 + pxor %%TMPXMM_2, [rel all_fs] + + movd [%%STATE + _snow3g_args_LFSR_3 + 4*%%LANE], %%TMPXMM_2 + movd [%%STATE + _snow3g_args_LFSR_11 + 4*%%LANE], %%TMPXMM_2 + movd [%%STATE + _snow3g_args_LFSR_7 + 4*%%LANE], %%TMPXMM_1 + movd %%TMPXMM_3, [rsp + _keystream + 12] + pxor %%TMPXMM_1, %%TMPXMM_3 ;; LFSR_15 ^= IV[3](swapped) + movd [%%STATE + _snow3g_args_LFSR_15 + 4*%%LANE], %%TMPXMM_1 + + ; FSM initialization: FSM_1 = FSM_2 = FSM_3 = 0 + pxor %%TMPXMM_1, %%TMPXMM_1 + movd [%%STATE + _snow3g_args_FSM_1 + 4*%%LANE], %%TMPXMM_1 + movd [%%STATE + _snow3g_args_FSM_2 + 4*%%LANE], %%TMPXMM_1 + movd [%%STATE + _snow3g_args_FSM_3 + 4*%%LANE], %%TMPXMM_1 + + movd [%%STATE + _snow3g_args_LD_ST_MASK + 
4*%%LANE], %%TMPXMM_1 + movd [%%STATE + _snow3g_args_LD_ST_MASK + 16+ 4*%%LANE], %%TMPXMM_1 +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Perform SNOW3G encrypt/decrypt operation steps for 4 buffers. Generate number +;; of dwords indicated by %%COMMON_LEN, update LFSR, FSM state. +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SNOW3G_ENC_DEC 23 +%define %%STATE %1 ;; [in] ptr to LFSR/FSM struct +%define %%COMMON_LEN %2 ;; [in/clobbered] dw aligned common length +%define %%IN %3 ;; [clobbered] r64 gp reg temp +%define %%OUT %4 ;; [clobbered] r64 gp reg temp +%define %%LENGTH %5 ;; [clobbered] r64 gp reg temp +%define %%TMP1_64 %6 ;; [clobbered] r64 gp reg temp +%define %%TMP2_64 %7 ;; [clobbered] r64 gp reg temp +%define %%TMP1 %8 ;; [clobbered] temporary xmm register +%define %%TMP2 %9 ;; [clobbered] temporary xmm register +%define %%TMP3 %10 ;; [clobbered] temporary xmm register +%define %%TMP4 %11 ;; [clobbered] temporary xmm register +%define %%TMP5 %12 ;; [clobbered] temporary xmm register +%define %%TMP6 %13 ;; [clobbered] temporary xmm register +%define %%TMP7 %14 ;; [clobbered] temporary xmm register +%define %%TMP8 %15 ;; [clobbered] temporary xmm register +%define %%TMP9 %16 ;; [clobbered] temporary xmm register +%define %%TMP10 %17 ;; [clobbered] temporary xmm register +%define %%TMP11 %18 ;; [clobbered] temporary xmm register +%define %%TMP12 %19 ;; [clobbered] temporary xmm register +%define %%TMP13 %20 ;; [clobbered] temporary xmm register +%define %%TMP14 %21 ;; [clobbered] temporary xmm register +%define %%TMP15 %22 ;; [clobbered] temporary xmm register +%define %%TMP16 %23 ;; [clobbered] temporary xmm register + + sub %%COMMON_LEN, 1 + mov %%LENGTH, %%COMMON_LEN + + shr %%LENGTH, 2 + je %%no_dqws + + +%%next_dqw: + xor %%TMP1_64, %%TMP1_64 + +%%next_dqw_round: +%ifdef SAFE_LOOKUP + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP8, %%TMP9, %%TMP10, %%TMP11, \ + %%TMP12, %%TMP13, %%TMP14, %%TMP15, %%TMP16, %%TMP1_64 +%else + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP15, %%TMP2_64, %%IN, %%TMP1_64 +%endif + + inc %%TMP1_64 + cmp %%TMP1_64, 4 + jb %%next_dqw_round + + TRANSPOSE_4X32 %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, %%TMP6 + SNOW3G_OUTPUT 16, %%STATE, 0, %%IN, %%OUT, %%TMP1_64, %%TMP5, %%TMP1 + SNOW3G_OUTPUT 16, %%STATE, 1, %%IN, %%OUT, %%TMP1_64, %%TMP5, %%TMP2 + SNOW3G_OUTPUT 16, %%STATE, 2, %%IN, %%OUT, %%TMP1_64, %%TMP5, %%TMP3 + SNOW3G_OUTPUT 16, %%STATE, 3, %%IN, %%OUT, %%TMP1_64, %%TMP5, %%TMP4 + + sub %%LENGTH, 1 + jne %%next_dqw + +%%no_dqws: + and %%COMMON_LEN, 0x3 + cmp %%COMMON_LEN, 0 + + je %%no_full_dws_to_write_out + +%%next_dw: + xor %%TMP1_64, %%TMP1_64 +%ifdef SAFE_LOOKUP + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP8, %%TMP9, %%TMP10, %%TMP11, \ + %%TMP12, %%TMP13, %%TMP14, %%TMP15, %%TMP16, %%TMP1_64 +%else + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP15, %%TMP2_64, %%IN, %%TMP1_64 +%endif + + SNOW3G_OUTPUT 4, %%STATE, 0, %%IN, %%OUT, %%TMP1_64, %%TMP2_64 + SNOW3G_OUTPUT 4, %%STATE, 1, %%IN, %%OUT, %%TMP1_64, %%TMP2_64 + SNOW3G_OUTPUT 4, %%STATE, 2, %%IN, %%OUT, %%TMP1_64, %%TMP2_64 + SNOW3G_OUTPUT 4, %%STATE, 3, %%IN, %%OUT, %%TMP1_64, %%TMP2_64 + + sub %%COMMON_LEN, 1 + jne %%next_dw + +%%no_full_dws_to_write_out: + ;; Process last dw/bytes: + xor %%TMP1_64, 
%%TMP1_64 +%ifdef SAFE_LOOKUP + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP8, %%TMP9, %%TMP10, %%TMP11, \ + %%TMP12, %%TMP13, %%TMP14, %%TMP15, %%TMP16, %%TMP1_64 +%else + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP15, %%TMP2_64, %%IN, %%TMP1_64 +%endif + + SNOW3G_OUTPUT 3, %%STATE, 0, %%IN, %%OUT, %%TMP1_64, %%LENGTH + SNOW3G_OUTPUT 3, %%STATE, 1, %%IN, %%OUT, %%TMP1_64, %%LENGTH + SNOW3G_OUTPUT 3, %%STATE, 2, %%IN, %%OUT, %%TMP1_64, %%LENGTH + SNOW3G_OUTPUT 3, %%STATE, 3, %%IN, %%OUT, %%TMP1_64, %%LENGTH + +%%zero_bytes: +%endmacro + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; Generate 5 double words of key stream for SNOW3G authentication +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +%macro SNOW3G_AUTH_INIT_5_BY_4 24 +%define %%KEY %1 ;; [in] array of pointers to 4 keys +%define %%IV %2 ;; [in] array of pointers to 4 IV's +%define %%DST_PTR %3 ;; [in] destination buffer to put 5DW of keystream for each lane +%define %%TMP1_64 %4 ;; [clobbered] r64 gp reg temp +%define %%TMP2_64 %5 ;; [clobbered] r64 gp reg temp +%define %%TMP3_64 %6 ;; [clobbered] r64 gp reg temp +%define %%TMP4_64 %7 ;; [clobbered] r64 gp reg temp +%define %%TMP1 %8 ;; [clobbered] temporary xmm register +%define %%TMP2 %9 ;; [clobbered] temporary xmm register +%define %%TMP3 %10 ;; [clobbered] temporary xmm register +%define %%TMP4 %11 ;; [clobbered] temporary xmm register +%define %%TMP5 %12 ;; [clobbered] temporary xmm register +%define %%TMP6 %13 ;; [clobbered] temporary xmm register +%define %%TMP7 %14 ;; [clobbered] temporary xmm register +%define %%TMP8 %15 ;; [clobbered] temporary xmm register +%define %%TMP9 %16 ;; [clobbered] temporary xmm register +%define %%TMP10 %17 ;; [clobbered] temporary xmm register +%define %%TMP11 %18 ;; [clobbered] temporary xmm register +%define %%TMP12 %19 ;; [clobbered] temporary xmm register +%define %%TMP13 %20 ;; [clobbered] temporary xmm register +%define %%TMP14 %21 ;; [clobbered] temporary xmm register +%define %%TMP15 %22 ;; [clobbered] temporary xmm register +%define %%TMP16 %23 ;; [clobbered] temporary xmm register +%define %%STATE %24 ;; [in] ptr to LFSR/FSM struct + +%define KEYGEN_STAGE _snow3g_args_LD_ST_MASK +%define INIT1_DONE _snow3g_args_LD_ST_MASK+16 + + ;; Initialize LFSR and FSM registers +%assign i 0 +%rep 4 + mov %%TMP1_64, [%%KEY + i*8] + mov %%TMP2_64, [%%IV + i*8] + SNOW3G_INIT_LANE_SSE %%STATE, i, %%TMP1_64, %%TMP2_64, %%TMP1, %%TMP2, %%TMP3 +%assign i (i+1) +%endrep + + ;; Run 32 iteration in INIT mode (reject keystreams) + mov %%TMP1_64, 32 + xor %%TMP2_64, %%TMP2_64 + +%%next_auth_round: +%ifdef SAFE_LOOKUP + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP8, %%TMP9, %%TMP10, %%TMP11, \ + %%TMP12, %%TMP13, %%TMP14, %%TMP15, %%TMP16, %%TMP2_64 +%else + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP15, %%TMP3_64, %%TMP4_64, %%TMP2_64 +%endif + dec %%TMP1_64 + jnz %%next_auth_round + + ;; Mark INIT1 phase done for all lanes + movdqa %%TMP1, [rel all_fs] + movdqa [state + INIT1_DONE], %%TMP1 + +%ifdef SAFE_LOOKUP + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP8, %%TMP9, %%TMP10, %%TMP11, \ + %%TMP12, %%TMP13, %%TMP14, %%TMP15, %%TMP16, %%TMP2_64 +%else + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, 
%%TMP15, %%TMP3_64, %%TMP4_64, %%TMP2_64 +%endif + + ;; Put all lanes in KEYGEN state + movdqa %%TMP1, [rel all_fs] + movdqa [state + KEYGEN_STAGE], %%TMP1 + + ;; Generate 4 dw of keystream for each lane + xor %%TMP1_64, %%TMP1_64 + +%%next_auth_round2: +%ifdef SAFE_LOOKUP + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP8, %%TMP9, %%TMP10, %%TMP11, \ + %%TMP12, %%TMP13, %%TMP14, %%TMP15, %%TMP16, %%TMP1_64 +%else + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP15, %%TMP3_64, %%TMP4_64, %%TMP1_64 +%endif + inc %%TMP1_64 + cmp %%TMP1_64, 4 + jb %%next_auth_round2 + + TRANSPOSE_4X32 %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, %%TMP6 + + ;; Store 4 dw of keystream for each lane + movdqu [%%DST_PTR + 0*32], %%TMP1 + movdqu [%%DST_PTR + 1*32], %%TMP2 + movdqu [%%DST_PTR + 2*32], %%TMP3 + movdqu [%%DST_PTR + 3*32], %%TMP4 + + ;; Generate final dw of keystream for each lane +%ifdef SAFE_LOOKUP + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP8, %%TMP9, %%TMP10, %%TMP11, \ + %%TMP12, %%TMP13, %%TMP14, %%TMP15, %%TMP16, %%TMP2_64 +%else + SNOW3G_KEY_GEN_SSE %%STATE, %%TMP1, %%TMP2, %%TMP3, %%TMP4, %%TMP5, \ + %%TMP6, %%TMP7, %%TMP15, %%TMP3_64, %%TMP4_64, %%TMP2_64 +%endif + + ;; Store final dw of keystream for each lane + mov DWORD(%%TMP1_64), [rsp + _keystream + 0*4] + mov [%%DST_PTR + 16 + (0*32)], DWORD(%%TMP1_64) + mov DWORD(%%TMP1_64), [rsp + _keystream + 1*4] + mov [%%DST_PTR + 16 + (1 * 32)], DWORD(%%TMP1_64) + mov DWORD(%%TMP1_64), [rsp + _keystream + 2*4] + mov [%%DST_PTR + 16 + (2*32)], DWORD(%%TMP1_64) + mov DWORD(%%TMP1_64), [rsp + _keystream + 3*4] + mov [%%DST_PTR + 16 + (3 * 32)], DWORD(%%TMP1_64) + +%endmacro + +mksection stack-noexec diff --git a/lib/sse/snow3g_uia2_by4_sse.asm b/lib/sse_t1/snow3g_uia2_by4_sse.asm similarity index 100% rename from lib/sse/snow3g_uia2_by4_sse.asm rename to lib/sse_t1/snow3g_uia2_by4_sse.asm diff --git a/lib/sse/snow_v_sse.asm b/lib/sse_t1/snow_v_sse.asm similarity index 100% rename from lib/sse/snow_v_sse.asm rename to lib/sse_t1/snow_v_sse.asm diff --git a/lib/sse/zuc_top_sse.c b/lib/sse_t1/zuc_top_sse.c old mode 100755 new mode 100644 similarity index 99% rename from lib/sse/zuc_top_sse.c rename to lib/sse_t1/zuc_top_sse.c index aa5099d4b916131fce6b3325cc7d30f2018deffb..3217db876cb8b8e3bb1608c8550f2f76d9e93c33 --- a/lib/sse/zuc_top_sse.c +++ b/lib/sse_t1/zuc_top_sse.c @@ -1091,7 +1091,7 @@ void _zuc256_eia3_4_buffer_job(const void * const pKey[NUM_SSE_BUFS], const uint8_t *pIn8[NUM_SSE_BUFS] = {NULL}; uint32_t remainCommonBits; uint32_t dataDigested = 0; - uint8_t T[NUM_SSE_BUFS*16] = {0}; + DECLARE_ALIGNED(uint8_t T[NUM_SSE_BUFS*16], 16) = {0}; const uint32_t keyStreamLengthInBits = KEYSTR_ROUND_LEN * 8; DECLARE_ALIGNED(uint32_t *pKeyStrArr[NUM_SSE_BUFS], 16) = {NULL}; unsigned int allCommonBits; diff --git a/lib/sse/zuc_x4_sse.asm b/lib/sse_t1/zuc_x4_sse.asm old mode 100755 new mode 100644 similarity index 97% rename from lib/sse/zuc_x4_sse.asm rename to lib/sse_t1/zuc_x4_sse.asm index 0b924bdb5bce2b02a7588d220f765b49c95fe527..c14105807c7df850004b85e51e57537cbb5a7ba9 --- a/lib/sse/zuc_x4_sse.asm +++ b/lib/sse_t1/zuc_x4_sse.asm @@ -30,7 +30,6 @@ %include "include/zuc_sbox.inc" %include "include/memcpy.asm" %include "include/mb_mgr_datastruct.asm" -%include "include/cet.inc" %include "include/const.inc" %ifndef ZUC_CIPHER_4 @@ -154,18 +153,10 @@ swap_mask: db 0x03, 0x02, 0x01, 0x00, 0x07, 0x06, 0x05, 0x04 db 0x0b, 
0x0a, 0x09, 0x08, 0x0f, 0x0e, 0x0d, 0x0c -align 16 -S1_S0_shuf: -db 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E, 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F - align 16 S0_S1_shuf: db 0x01, 0x03, 0x05, 0x07, 0x09, 0x0B, 0x0D, 0x0F, 0x00, 0x02, 0x04, 0x06, 0x08, 0x0A, 0x0C, 0x0E -align 16 -rev_S1_S0_shuf: -db 0x00, 0x08, 0x01, 0x09, 0x02, 0x0A, 0x03, 0x0B, 0x04, 0x0C, 0x05, 0x0D, 0x06, 0x0E, 0x07, 0x0F - align 16 rev_S0_S1_shuf: db 0x08, 0x00, 0x09, 0x01, 0x0A, 0x02, 0x0B, 0x03, 0x0C, 0x04, 0x0D, 0x05, 0x0E, 0x06, 0x0F, 0x07 @@ -534,11 +525,11 @@ mksection .text ; Compress all S0 and S1 input values in each register pshufb %%XTMP1, [rel S0_S1_shuf] ; S0: Bytes 0-7, S1: Bytes 8-15 - pshufb %%XTMP2, [rel S1_S0_shuf] ; S1: Bytes 0-7, S0: Bytes 8-15 + pshufb %%XTMP2, [rel S0_S1_shuf] ; S0: Bytes 0-7, S1: Bytes 8-15 movdqa %%XTMP3, %%XTMP1 - shufpd %%XTMP1, %%XTMP2, 0x2 ; All S0 input values - shufpd %%XTMP2, %%XTMP3, 0x2 ; All S1 input values + shufpd %%XTMP1, %%XTMP2, 0x0 ; All S0 input values + shufpd %%XTMP2, %%XTMP3, 0x3 ; All S1 input values ; Compute S0 and S1 values S0_comput_SSE %%XTMP1, %%XTMP3, %%XTMP4, USE_GFNI @@ -548,13 +539,13 @@ mksection .text ; (revert what was done before S0 and S1 computations) movdqa %%XTMP3, %%XTMP1 shufpd %%XTMP1, %%XTMP2, 0x2 ; All S0 input values - shufpd %%XTMP2, %%XTMP3, 0x2 ; All S1 input values + shufpd %%XTMP3, %%XTMP2, 0x1 ; All S1 input values pshufb %%XTMP1, [rel rev_S0_S1_shuf] - pshufb %%XTMP2, [rel rev_S1_S0_shuf] + pshufb %%XTMP3, [rel rev_S0_S1_shuf] movdqa [%%STATE + OFS_R1], %%XTMP1 - movdqa [%%STATE + OFS_R2], %%XTMP2 + movdqa [%%STATE + OFS_R2], %%XTMP3 %endmacro ; @@ -636,11 +627,11 @@ mksection .text %define %%STATE %1 ; [in] ZUC state %define %%ROUND_NUM %2 ; [in] Round number %define %%TMP %3 ; [clobbered] Temporary GP register (used when ROUND_NUM is a register) -%define %%LFSR_0 %4 ; [clobbered] LFSR_0 -%define %%LFSR_4 %5 ; [clobbered] LFSR_2 -%define %%LFSR_10 %6 ; [clobbered] LFSR_5 -%define %%LFSR_13 %7 ; [clobbered] LFSR_7 -%define %%LFSR_15 %8 ; [clobbered] LFSR_9 +%define %%LFSR_0 %4 ; [clobbered] LFSR_0 (XMM) +%define %%LFSR_4 %5 ; [clobbered] LFSR_4 (XMM) +%define %%LFSR_10 %6 ; [clobbered] LFSR_10 (XMM) +%define %%LFSR_13 %7 ; [clobbered] LFSR_13 (XMM) +%define %%LFSR_15 %8 ; [clobbered] LFSR_15 (XMM) %define %%XTMP %9 ; [clobbered] Temporary XMM register %define %%MASK_31 %10 ; [in] Mask_31 %define %%W %11 ; [in/clobbered] In init mode, contains W for all 4 lanes @@ -695,8 +686,8 @@ mksection .text ; The macro clobbers XMM0-15 ; %macro REORDER_LFSR 2 -%define %%STATE %1 -%define %%NUM_ROUNDS %2 +%define %%STATE %1 ; [in] Pointer to LFSR state +%define %%NUM_ROUNDS %2 ; [immediate] Number of key generation rounds %if %%NUM_ROUNDS != 16 %assign %%i 0 @@ -934,9 +925,9 @@ mksection .text FUNC_SAVE ; Zero out R1-R2 - pxor xmm0, xmm0 - movdqa [pState + OFS_R1], xmm0 - movdqa [pState + OFS_R1 + 16], xmm0 + pxor %%XTMP1, %%XTMP1 + movdqa [pState + OFS_R1], %%XTMP1 + movdqa [pState + OFS_R2], %%XTMP1 %if %%KEY_SIZE == 128 ;; Load key and IVs @@ -1078,26 +1069,25 @@ mksection .text %endrep %if %%TAG_SIZE == 4 - movdqa [%%TAGS], %%KSTR1 + movdqu [%%TAGS], %%KSTR1 REORDER_LFSR pState, 1 %elif %%TAG_SIZE == 8 ; Transpose the keystream and store the 8 bytes per buffer consecutively, ; being the initial tag for each buffer - pshufd %%KSTR1, %%KSTR1, 0xD8 - pshufd %%KSTR2, %%KSTR2, 0xD8 - movdqa %%XTMP1, %%KSTR1 punpckldq %%XTMP1, %%KSTR2 - punpckldq %%KSTR1, %%KSTR2 - movdqa [%%TAGS], %%XTMP1 - movdqa [%%TAGS + 16], %%KSTR1 + 
punpckhdq %%KSTR1, %%KSTR2 + movdqu [%%TAGS], %%XTMP1 + movdqu [%%TAGS + 16], %%KSTR1 REORDER_LFSR pState, 2 %elif %%TAG_SIZE == 16 + ; Transpose the keystream and store the 16 bytes per buffer consecutively, + ; being the initial tag for each buffer TRANSPOSE4_U32 %%KSTR1, %%KSTR2, %%KSTR3, %%KSTR4, %%XTMP5, %%XTMP6 - movdqa [%%TAGS], %%KSTR1 - movdqa [%%TAGS + 16], %%KSTR2 - movdqa [%%TAGS + 16*2], %%KSTR3 - movdqa [%%TAGS + 16*3], %%KSTR4 + movdqu [%%TAGS], %%KSTR1 + movdqu [%%TAGS + 16], %%KSTR2 + movdqu [%%TAGS + 16*2], %%KSTR3 + movdqu [%%TAGS + 16*3], %%KSTR4 REORDER_LFSR pState, 4 %endif @@ -1635,6 +1625,9 @@ exit_cipher: ret +; +; Processes 16 bytes of data and updates the digest +; %macro DIGEST_16_BYTES 14 %define %%KS %1 ; [in] Pointer to keystream %define %%XDATA %2 ; [in] XMM register with input data @@ -2059,9 +2052,9 @@ remainder_key_sz_128: movq %%TMP, %%XTMP6 xor [%%T], %%TMP %else ;; %%TAG_SZ == 16 - movdqa %%XTMP1, [%%T] + movdqu %%XTMP1, [%%T] pxor %%XTMP1, %%XTMP6 - movdqa [%%T], %%XTMP1 + movdqu [%%T], %%XTMP1 %endif ;; Copy last 16 bytes of KS to the front diff --git a/lib/sse_t2/README b/lib/sse_t2/README new file mode 100644 index 0000000000000000000000000000000000000000..c296fc11cd6e54eed3221db2857820524922a6fe --- /dev/null +++ b/lib/sse_t2/README @@ -0,0 +1,3 @@ +SSE TYPE2: +- SSE TYPE1: SSE4.2, AESNI, PCLMULQDQ, CMOV, BSWAP +- SHANI diff --git a/lib/sse/mb_mgr_hmac_sha1_flush_ni_sse.asm b/lib/sse_t2/mb_mgr_hmac_sha1_flush_ni_sse.asm similarity index 95% rename from lib/sse/mb_mgr_hmac_sha1_flush_ni_sse.asm rename to lib/sse_t2/mb_mgr_hmac_sha1_flush_ni_sse.asm index 79e457f104d26e0f01148b1ad427929000b6480d..d4a2db0d5a32c9fa050f2505cb988e4cdf530f25 100644 --- a/lib/sse/mb_mgr_hmac_sha1_flush_ni_sse.asm +++ b/lib/sse_t2/mb_mgr_hmac_sha1_flush_ni_sse.asm @@ -50,7 +50,7 @@ ;%define DO_DBGPRINT %include "include/dbgprint.asm" -extern sha1_ni +extern sha1_ni_x1 mksection .rodata default rel @@ -66,9 +66,11 @@ mksection .text %ifdef LINUX %define arg1 rdi %define arg2 rsi +%define arg3 rdx %else %define arg1 rcx %define arg2 rdx +%define arg3 r8 %endif %define state arg1 @@ -93,8 +95,8 @@ mksection .text %define extra_blocks arg2 %define p arg2 -%define tmp4 r8 -%define p2 r8 +%define tmp4 r9 +%define p2 r9 ; This routine clobbers rbx, rbp struc STACK @@ -133,27 +135,19 @@ flush_job_hmac_ni_sse: DBGPRINTL64 "idx:", idx copy_lane_data: - ; copy valid lane (idx) to empty lanes - mov tmp, [state + _args_data_ptr + PTR_SZ*idx] movzx len2, word [state + _lens + idx*2] - - DBGPRINTL64 "ptr", tmp - - ; there are only two lanes so if one is empty it is easy to determine which one - xor idx, 1 - mov [state + _args_data_ptr + PTR_SZ*idx], tmp - xor idx, 1 + mov arg3, idx ; No need to find min length - only two lanes available cmp len2, 0 je len_is_0 - ; Set length on both lanes to 0 - mov dword [state + _lens], 0 + ; Set length on lane to 0 + mov word [state + _lens + idx*2], 0 ; "state" and "args" are the same address, arg1 ; len is arg2 - call sha1_ni + call sha1_ni_x1 ; state is intact len_is_0: diff --git a/lib/sse/mb_mgr_hmac_sha1_submit_ni_sse.asm b/lib/sse_t2/mb_mgr_hmac_sha1_submit_ni_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_sha1_submit_ni_sse.asm rename to lib/sse_t2/mb_mgr_hmac_sha1_submit_ni_sse.asm diff --git a/lib/sse/mb_mgr_hmac_sha224_flush_ni_sse.asm b/lib/sse_t2/mb_mgr_hmac_sha224_flush_ni_sse.asm similarity index 96% rename from lib/sse/mb_mgr_hmac_sha224_flush_ni_sse.asm rename to lib/sse_t2/mb_mgr_hmac_sha224_flush_ni_sse.asm index 
56d07d201757410e3e21e836b176b8b80721f07f..213f5361b9662934f50e754cbd2020748af4bb4b 100644 --- a/lib/sse/mb_mgr_hmac_sha224_flush_ni_sse.asm +++ b/lib/sse_t2/mb_mgr_hmac_sha224_flush_ni_sse.asm @@ -25,4 +25,4 @@ ;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;; %define SHA224 -%include "sse/mb_mgr_hmac_sha256_flush_ni_sse.asm" +%include "sse_t2/mb_mgr_hmac_sha256_flush_ni_sse.asm" diff --git a/lib/sse/mb_mgr_hmac_sha224_submit_ni_sse.asm b/lib/sse_t2/mb_mgr_hmac_sha224_submit_ni_sse.asm similarity index 96% rename from lib/sse/mb_mgr_hmac_sha224_submit_ni_sse.asm rename to lib/sse_t2/mb_mgr_hmac_sha224_submit_ni_sse.asm index b430f79ba5ce3b31e995a54749f8c344d50ed354..e95a0ccd3f60d6e0b2904554442ad3428053d516 100644 --- a/lib/sse/mb_mgr_hmac_sha224_submit_ni_sse.asm +++ b/lib/sse_t2/mb_mgr_hmac_sha224_submit_ni_sse.asm @@ -25,4 +25,4 @@ ;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ;; %define SHA224 -%include "sse/mb_mgr_hmac_sha256_submit_ni_sse.asm" +%include "sse_t2/mb_mgr_hmac_sha256_submit_ni_sse.asm" diff --git a/lib/sse/mb_mgr_hmac_sha256_flush_ni_sse.asm b/lib/sse_t2/mb_mgr_hmac_sha256_flush_ni_sse.asm similarity index 95% rename from lib/sse/mb_mgr_hmac_sha256_flush_ni_sse.asm rename to lib/sse_t2/mb_mgr_hmac_sha256_flush_ni_sse.asm index a137d9e2d7aefa9f276fa60559b794610954653f..07b284b838d7a026749c1db57a671aafcc15987f 100644 --- a/lib/sse/mb_mgr_hmac_sha256_flush_ni_sse.asm +++ b/lib/sse_t2/mb_mgr_hmac_sha256_flush_ni_sse.asm @@ -41,14 +41,16 @@ ;%define DO_DBGPRINT %include "include/dbgprint.asm" -extern sha256_ni +extern sha256_ni_x1 %ifdef LINUX %define arg1 rdi %define arg2 rsi +%define arg3 rdx %else %define arg1 rcx %define arg2 rdx +%define arg3 r8 %endif %define state arg1 @@ -73,7 +75,7 @@ extern sha256_ni %define extra_blocks arg2 %define p arg2 -%define tmp4 r8 +%define tmp4 r11 %define tmp5 r9 @@ -136,26 +138,21 @@ flush_job_hmac_sha_256_ni_sse: DBGPRINTL64 "idx:", idx copy_lane_data: - ; copy idx to empty lanes - mov tmp, [state + _args_data_ptr_sha256 + PTR_SZ*idx] xor len2, len2 mov WORD(len2), word [state + _lens_sha256 + idx*2] - ; there are only two lanes so if one is empty it is easy to determine which one - xor idx, 1 - mov [state + _args_data_ptr_sha256 + PTR_SZ*idx], tmp - xor idx, 1 + mov arg3, idx ; No need to find min length - only two lanes available cmp len2, 0 je len_is_0 - ; set length on both lanes to 0 - mov dword [state + _lens_sha256], 0 + ; set length lane to 0 + mov dword [state + _lens_sha256], 0 - ; "state" and "args" are the same address, arg1 + ; "state" and "args" are the same address, arg1 ; len is arg2 - call sha256_ni + call sha256_ni_x1 ; state and idx are intact len_is_0: diff --git a/lib/sse/mb_mgr_hmac_sha256_submit_ni_sse.asm b/lib/sse_t2/mb_mgr_hmac_sha256_submit_ni_sse.asm similarity index 100% rename from lib/sse/mb_mgr_hmac_sha256_submit_ni_sse.asm rename to lib/sse_t2/mb_mgr_hmac_sha256_submit_ni_sse.asm diff --git a/lib/sse_t2/mb_mgr_sse_t2.c b/lib/sse_t2/mb_mgr_sse_t2.c new file mode 100644 index 0000000000000000000000000000000000000000..8ec51a180fa827dea1966181a33ff15f2b47d2f1 --- /dev/null +++ b/lib/sse_t2/mb_mgr_sse_t2.c @@ -0,0 +1,488 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright 
notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#include +#include +#include + +#define SSE + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/kasumi_interface.h" +#include "include/zuc_internal.h" +#include "include/snow3g.h" +#include "include/gcm.h" +#include "include/chacha20_poly1305.h" +#include "include/snow3g_submit.h" + +#include "include/save_xmms.h" +#include "include/des.h" +#include "include/cpu_feature.h" +#include "include/noaesni.h" +#include "include/aesni_emu.h" +#include "include/error.h" + +#include "include/arch_sse_type1.h" +#include "include/arch_sse_type2.h" + +#include "include/ooo_mgr_reset.h" + +#define SAVE_XMMS save_xmms +#define RESTORE_XMMS restore_xmms + +/* JOB API */ +#define SUBMIT_JOB submit_job_sse_t2 +#define FLUSH_JOB flush_job_sse_t2 +#define QUEUE_SIZE queue_size_sse_t2 +#define SUBMIT_JOB_NOCHECK submit_job_nocheck_sse_t2 +#define GET_NEXT_JOB get_next_job_sse_t2 +#define GET_COMPLETED_JOB get_completed_job_sse_t2 +#define GET_NEXT_BURST get_next_burst_sse_t2 +#define SUBMIT_BURST submit_burst_sse_t2 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_sse_t2 +#define FLUSH_BURST flush_burst_sse_t2 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_sse_t2 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_sse_t2 +#define SUBMIT_HASH_BURST submit_hash_burst_sse_t2 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_sse_t2 + +/* Hash */ +#define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_SSE_T2 +#define FLUSH_JOB_HASH FLUSH_JOB_HASH_SSE_T2 + +/* Cipher encrypt / decrypt */ +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_SSE_T2 +#define FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_SSE_T2 +#define SUBMIT_JOB_CIPHER_DEC SUBMIT_JOB_CIPHER_DEC_SSE_T2 + +/* AES-GCM */ +#define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_sse +#define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_sse +#define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_sse +#define AES_GCM_ENC_IV_192 aes_gcm_enc_var_iv_192_sse +#define AES_GCM_DEC_IV_256 aes_gcm_dec_var_iv_256_sse +#define AES_GCM_ENC_IV_256 aes_gcm_enc_var_iv_256_sse + +#define SUBMIT_JOB_AES_GCM_DEC submit_job_aes_gcm_dec_sse +#define SUBMIT_JOB_AES_GCM_ENC submit_job_aes_gcm_enc_sse + +/* AES-CBC */ +#define SUBMIT_JOB_AES_CBC_128_ENC 
submit_job_aes128_enc_sse +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_sse +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_sse + +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_sse +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_sse +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_sse + +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_sse +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_sse +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_sse + +#define AES_CBC_DEC_128 aes_cbc_dec_128_sse +#define AES_CBC_DEC_192 aes_cbc_dec_192_sse +#define AES_CBC_DEC_256 aes_cbc_dec_256_sse + +/* AES-CBCS */ +#define SUBMIT_JOB_AES128_CBCS_1_9_ENC submit_job_aes128_cbcs_1_9_enc_sse +#define FLUSH_JOB_AES128_CBCS_1_9_ENC flush_job_aes128_cbcs_1_9_enc_sse +#define SUBMIT_JOB_AES128_CBCS_1_9_DEC submit_job_aes128_cbcs_1_9_dec_sse +#define AES_CBCS_1_9_DEC_128 aes_cbcs_1_9_dec_128_sse + +/* AES-ECB */ +#define SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_sse +#define SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_sse +#define SUBMIT_JOB_AES_ECB_192_ENC submit_job_aes_ecb_192_enc_sse +#define SUBMIT_JOB_AES_ECB_192_DEC submit_job_aes_ecb_192_dec_sse +#define SUBMIT_JOB_AES_ECB_256_ENC submit_job_aes_ecb_256_enc_sse +#define SUBMIT_JOB_AES_ECB_256_DEC submit_job_aes_ecb_256_dec_sse + +#define AES_ECB_ENC_128 aes_ecb_enc_128_by4_sse +#define AES_ECB_ENC_192 aes_ecb_enc_192_by4_sse +#define AES_ECB_ENC_256 aes_ecb_enc_256_by4_sse +#define AES_ECB_DEC_128 aes_ecb_dec_128_by4_sse +#define AES_ECB_DEC_192 aes_ecb_dec_192_by4_sse +#define AES_ECB_DEC_256 aes_ecb_dec_256_by4_sse + +/* AES-CTR */ +#define AES_CTR_128 aes_cntr_128_sse +#define AES_CTR_192 aes_cntr_192_sse +#define AES_CTR_256 aes_cntr_256_sse +#define AES_CTR_128_BIT aes_cntr_bit_128_sse +#define AES_CTR_192_BIT aes_cntr_bit_192_sse +#define AES_CTR_256_BIT aes_cntr_bit_256_sse + +/* AES-CCM */ +#define AES_CNTR_CCM_128 aes_cntr_ccm_128_sse +#define AES_CNTR_CCM_256 aes_cntr_ccm_256_sse + +#define FLUSH_JOB_AES128_CCM_AUTH flush_job_aes128_ccm_auth_sse +#define SUBMIT_JOB_AES128_CCM_AUTH submit_job_aes128_ccm_auth_sse + +#define FLUSH_JOB_AES256_CCM_AUTH flush_job_aes256_ccm_auth_sse +#define SUBMIT_JOB_AES256_CCM_AUTH submit_job_aes256_ccm_auth_sse + +/* AES-CMAC */ +#define FLUSH_JOB_AES128_CMAC_AUTH flush_job_aes128_cmac_auth_sse +#define SUBMIT_JOB_AES128_CMAC_AUTH submit_job_aes128_cmac_auth_sse + +#define FLUSH_JOB_AES256_CMAC_AUTH flush_job_aes256_cmac_auth_sse +#define SUBMIT_JOB_AES256_CMAC_AUTH submit_job_aes256_cmac_auth_sse + +/* AES-CFB */ +#define AES_CFB_128_ONE aes_cfb_128_one_sse +#define AES_CFB_256_ONE aes_cfb_256_one_sse + +/* AES-XCBC */ +#define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_sse +#define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_sse + +/* PON */ +#define SUBMIT_JOB_PON_ENC submit_job_pon_enc_sse +#define SUBMIT_JOB_PON_DEC submit_job_pon_dec_sse +#define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_sse +#define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_sse + +/* SHA1/224/256/384/512 */ +#define SUBMIT_JOB_SHA1 submit_job_sha1_ni_sse +#define FLUSH_JOB_SHA1 flush_job_sha1_ni_sse +#define SUBMIT_JOB_SHA224 submit_job_sha224_ni_sse +#define FLUSH_JOB_SHA224 flush_job_sha224_ni_sse +#define SUBMIT_JOB_SHA256 submit_job_sha256_ni_sse +#define FLUSH_JOB_SHA256 flush_job_sha256_ni_sse +#define SUBMIT_JOB_SHA384 submit_job_sha384_sse +#define FLUSH_JOB_SHA384 flush_job_sha384_sse +#define SUBMIT_JOB_SHA512 submit_job_sha512_sse 
+#define FLUSH_JOB_SHA512 flush_job_sha512_sse + +/* HMAC-SHA1/224/256/384/512/MD5 */ +#define SUBMIT_JOB_HMAC submit_job_hmac_ni_sse +#define FLUSH_JOB_HMAC flush_job_hmac_ni_sse +#define SUBMIT_JOB_HMAC_SHA_224 submit_job_hmac_sha_224_ni_sse +#define FLUSH_JOB_HMAC_SHA_224 flush_job_hmac_sha_224_ni_sse +#define SUBMIT_JOB_HMAC_SHA_256 submit_job_hmac_sha_256_ni_sse +#define FLUSH_JOB_HMAC_SHA_256 flush_job_hmac_sha_256_ni_sse +#define SUBMIT_JOB_HMAC_SHA_384 submit_job_hmac_sha_384_sse +#define FLUSH_JOB_HMAC_SHA_384 flush_job_hmac_sha_384_sse +#define SUBMIT_JOB_HMAC_SHA_512 submit_job_hmac_sha_512_sse +#define FLUSH_JOB_HMAC_SHA_512 flush_job_hmac_sha_512_sse +#define SUBMIT_JOB_HMAC_MD5 submit_job_hmac_md5_sse +#define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_sse + +/* DES & 3DES */ + +/* - default x86-64 implementation */ + +/* DES-DOCSIS */ + +/* - default x86-64 implementation */ + +/* CHACHA20 & POLY1305 */ +#define SUBMIT_JOB_CHACHA20_ENC_DEC submit_job_chacha20_enc_dec_sse +#define SUBMIT_JOB_CHACHA20_POLY1305 aead_chacha20_poly1305_sse +#define SUBMIT_JOB_CHACHA20_POLY1305_SGL aead_chacha20_poly1305_sgl_sse +#define POLY1305_MAC poly1305_mac_scalar + +/* ZUC EEA3 & EIA3 */ +#define SUBMIT_JOB_ZUC_EEA3 submit_job_zuc_eea3_no_gfni_sse +#define FLUSH_JOB_ZUC_EEA3 flush_job_zuc_eea3_no_gfni_sse +#define SUBMIT_JOB_ZUC_EIA3 submit_job_zuc_eia3_no_gfni_sse +#define FLUSH_JOB_ZUC_EIA3 flush_job_zuc_eia3_no_gfni_sse +#define SUBMIT_JOB_ZUC256_EEA3 submit_job_zuc256_eea3_no_gfni_sse +#define FLUSH_JOB_ZUC256_EEA3 flush_job_zuc256_eea3_no_gfni_sse +#define SUBMIT_JOB_ZUC256_EIA3 submit_job_zuc256_eia3_no_gfni_sse +#define FLUSH_JOB_ZUC256_EIA3 flush_job_zuc256_eia3_no_gfni_sse + +/* SNOW-V */ +#define SUBMIT_JOB_SNOW_V snow_v_sse +#define SUBMIT_JOB_SNOW_V_AEAD snow_v_aead_init_sse + +/* SNOW3G UE2 & UIA2 */ +static IMB_JOB * +submit_snow3g_uea2_job_sse(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + if ((job->msg_len_to_cipher_in_bits & 7) || + (job->cipher_start_offset_in_bits & 7)) + return def_submit_snow3g_uea2_job(state, job); + + return submit_job_snow3g_uea2_sse(snow3g_uea2_ooo, job); +} + +static IMB_JOB * +flush_snow3g_uea2_job_sse(IMB_MGR *state) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + return flush_job_snow3g_uea2_sse(snow3g_uea2_ooo); +} + +#define SUBMIT_JOB_SNOW3G_UEA2 submit_snow3g_uea2_job_sse +#define FLUSH_JOB_SNOW3G_UEA2 flush_snow3g_uea2_job_sse + +#define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_sse +#define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_sse + +/* AES-DOCSIS */ +#define ETHERNET_FCS ethernet_fcs_sse_local + +/* ====================================================================== */ + +static void reset_ooo_mgrs(IMB_MGR *state) +{ + /* Init AES out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_ooo, 4); + ooo_mgr_aes_reset(state->aes192_ooo, 4); + ooo_mgr_aes_reset(state->aes256_ooo, 4); + + /* DOCSIS SEC BPI uses same settings as AES CBC */ + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 4); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 4); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 4); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 4); + + /* Init ZUC out-of-order fields */ + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 4); + + /* Init HMAC/SHA1 out-of-order fields */ + 
ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, 2); + + /* Init HMAC/SHA224 out-of-order fields */ + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, 2); + + /* Init HMAC/SHA_256 out-of-order fields */ + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, 2); + + /* Init HMAC/SHA384 out-of-order fields */ + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + SSE_NUM_SHA512_LANES); + + /* Init HMAC/SHA512 out-of-order fields */ + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + SSE_NUM_SHA512_LANES); + + /* Init HMAC/MD5 out-of-order fields */ + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, SSE_NUM_MD5_LANES); + + /* Init AES/XCBC OOO fields */ + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 4); + + /* Init AES-CCM auth out-of-order fields */ + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 4); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 4); + + /* Init AES-CMAC auth out-of-order fields */ + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 4); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 4); + + /* Init AES-CBCS out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 4); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, 2); + + /* Init SHA224 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_224_ooo, 2); + + /* Init SHA256 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, 2); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, SSE_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, SSE_NUM_SHA512_LANES); + + /* Init SNOW3G-UEA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 4); + + /* Init SNOW3G-UIA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 4); +} + +IMB_DLL_LOCAL void +init_mb_mgr_sse_t2_internal(IMB_MGR *state, const int reset_mgrs) +{ + /* Check if CPU flags needed for SSE interface are present */ + if ((state->features & IMB_CPUFLAGS_SSE_T2) != IMB_CPUFLAGS_SSE_T2) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + + /* Set architecture for future checks */ + state->used_arch = (uint32_t) IMB_ARCH_SSE; + + if (reset_mgrs) { + reset_ooo_mgrs(state); + + /* Init "in order" components */ + state->next_job = 0; + state->earliest_job = -1; + } + + /* set handlers */ + state->get_next_job = GET_NEXT_JOB; + state->submit_job = SUBMIT_JOB; + state->submit_job_nocheck = SUBMIT_JOB_NOCHECK; + state->get_completed_job = GET_COMPLETED_JOB; + state->flush_job = FLUSH_JOB; + state->queue_size = QUEUE_SIZE; + state->get_next_burst = GET_NEXT_BURST; + state->submit_burst = SUBMIT_BURST; + state->submit_burst_nocheck= SUBMIT_BURST_NOCHECK; + state->flush_burst = FLUSH_BURST; + state->submit_cipher_burst = SUBMIT_CIPHER_BURST; + state->submit_cipher_burst_nocheck = SUBMIT_CIPHER_BURST_NOCHECK; + state->submit_hash_burst = SUBMIT_HASH_BURST; + state->submit_hash_burst_nocheck = SUBMIT_HASH_BURST_NOCHECK; + + state->keyexp_128 = aes_keyexp_128_sse; + state->keyexp_192 = aes_keyexp_192_sse; + state->keyexp_256 = aes_keyexp_256_sse; + + state->cmac_subkey_gen_128 = aes_cmac_subkey_gen_sse; + state->cmac_subkey_gen_256 = aes_cmac_256_subkey_gen_sse; + + state->xcbc_keyexp = aes_xcbc_expand_key_sse; + state->des_key_sched = des_key_schedule; + + state->sha1_one_block = sha1_one_block_sse; + state->sha1 = sha1_sse; + state->sha224_one_block = sha224_one_block_sse; + state->sha224 = sha224_sse; + state->sha256_one_block = sha256_one_block_sse; + state->sha256 = sha256_sse; + state->sha384_one_block = 
sha384_one_block_sse; + state->sha384 = sha384_sse; + state->sha512_one_block = sha512_one_block_sse; + state->sha512 = sha512_sse; + state->md5_one_block = md5_one_block_sse; + + state->aes128_cfb_one = aes_cfb_128_one_sse; + + state->eea3_1_buffer = zuc_eea3_1_buffer_sse; + state->eea3_4_buffer = zuc_eea3_4_buffer_sse; + state->eea3_n_buffer = zuc_eea3_n_buffer_sse; + state->eia3_n_buffer = zuc_eia3_n_buffer_sse; + state->eia3_1_buffer = zuc_eia3_1_buffer_sse; + + state->f8_1_buffer = kasumi_f8_1_buffer_sse; + state->f8_1_buffer_bit = kasumi_f8_1_buffer_bit_sse; + state->f8_2_buffer = kasumi_f8_2_buffer_sse; + state->f8_3_buffer = kasumi_f8_3_buffer_sse; + state->f8_4_buffer = kasumi_f8_4_buffer_sse; + state->f8_n_buffer = kasumi_f8_n_buffer_sse; + state->f9_1_buffer = kasumi_f9_1_buffer_sse; + state->f9_1_buffer_user = kasumi_f9_1_buffer_user_sse; + state->kasumi_init_f8_key_sched = kasumi_init_f8_key_sched_sse; + state->kasumi_init_f9_key_sched = kasumi_init_f9_key_sched_sse; + state->kasumi_key_sched_size = kasumi_key_sched_size_sse; + + state->snow3g_f8_1_buffer_bit = snow3g_f8_1_buffer_bit_sse; + state->snow3g_f8_1_buffer = snow3g_f8_1_buffer_sse; + state->snow3g_f8_2_buffer = snow3g_f8_2_buffer_sse; + state->snow3g_f8_4_buffer = snow3g_f8_4_buffer_sse; + state->snow3g_f8_8_buffer = snow3g_f8_8_buffer_sse; + state->snow3g_f8_n_buffer = snow3g_f8_n_buffer_sse; + state->snow3g_f8_8_buffer_multikey = snow3g_f8_8_buffer_multikey_sse; + state->snow3g_f8_n_buffer_multikey = snow3g_f8_n_buffer_multikey_sse; + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_sse; + state->snow3g_init_key_sched = snow3g_init_key_sched_sse; + state->snow3g_key_sched_size = snow3g_key_sched_size_sse; + + state->hec_32 = hec_32_sse; + state->hec_64 = hec_64_sse; + + state->crc32_ethernet_fcs = ethernet_fcs_sse; + state->crc16_x25 = crc16_x25_sse; + state->crc32_sctp = crc32_sctp_sse; + state->crc24_lte_a = crc24_lte_a_sse; + state->crc24_lte_b = crc24_lte_b_sse; + state->crc16_fp_data = crc16_fp_data_sse; + state->crc11_fp_header = crc11_fp_header_sse; + state->crc7_fp_header = crc7_fp_header_sse; + state->crc10_iuup_data = crc10_iuup_data_sse; + state->crc6_iuup_header = crc6_iuup_header_sse; + state->crc32_wimax_ofdma_data = crc32_wimax_ofdma_data_sse; + state->crc8_wimax_ofdma_hcs = crc8_wimax_ofdma_hcs_sse; + + state->chacha20_poly1305_init = init_chacha20_poly1305_sse; + state->chacha20_poly1305_enc_update = update_enc_chacha20_poly1305_sse; + state->chacha20_poly1305_dec_update = update_dec_chacha20_poly1305_sse; + state->chacha20_poly1305_finalize = finalize_chacha20_poly1305_sse; + + state->gcm128_enc = aes_gcm_enc_128_sse; + state->gcm192_enc = aes_gcm_enc_192_sse; + state->gcm256_enc = aes_gcm_enc_256_sse; + state->gcm128_dec = aes_gcm_dec_128_sse; + state->gcm192_dec = aes_gcm_dec_192_sse; + state->gcm256_dec = aes_gcm_dec_256_sse; + state->gcm128_init = aes_gcm_init_128_sse; + state->gcm192_init = aes_gcm_init_192_sse; + state->gcm256_init = aes_gcm_init_256_sse; + state->gcm128_init_var_iv = aes_gcm_init_var_iv_128_sse; + state->gcm192_init_var_iv = aes_gcm_init_var_iv_192_sse; + state->gcm256_init_var_iv = aes_gcm_init_var_iv_256_sse; + state->gcm128_enc_update = aes_gcm_enc_128_update_sse; + state->gcm192_enc_update = aes_gcm_enc_192_update_sse; + state->gcm256_enc_update = aes_gcm_enc_256_update_sse; + state->gcm128_dec_update = aes_gcm_dec_128_update_sse; + state->gcm192_dec_update = aes_gcm_dec_192_update_sse; + state->gcm256_dec_update = aes_gcm_dec_256_update_sse; + state->gcm128_enc_finalize 
= aes_gcm_enc_128_finalize_sse; + state->gcm192_enc_finalize = aes_gcm_enc_192_finalize_sse; + state->gcm256_enc_finalize = aes_gcm_enc_256_finalize_sse; + state->gcm128_dec_finalize = aes_gcm_dec_128_finalize_sse; + state->gcm192_dec_finalize = aes_gcm_dec_192_finalize_sse; + state->gcm256_dec_finalize = aes_gcm_dec_256_finalize_sse; + state->gcm128_precomp = aes_gcm_precomp_128_sse; + state->gcm192_precomp = aes_gcm_precomp_192_sse; + state->gcm256_precomp = aes_gcm_precomp_256_sse; + state->gcm128_pre = aes_gcm_pre_128_sse; + state->gcm192_pre = aes_gcm_pre_192_sse; + state->gcm256_pre = aes_gcm_pre_256_sse; + + state->ghash = ghash_sse; + state->ghash_pre = ghash_pre_sse; + + state->gmac128_init = imb_aes_gmac_init_128_sse; + state->gmac192_init = imb_aes_gmac_init_192_sse; + state->gmac256_init = imb_aes_gmac_init_256_sse; + state->gmac128_update = imb_aes_gmac_update_128_sse; + state->gmac192_update = imb_aes_gmac_update_192_sse; + state->gmac256_update = imb_aes_gmac_update_256_sse; + state->gmac128_finalize = imb_aes_gmac_finalize_128_sse; + state->gmac192_finalize = imb_aes_gmac_finalize_192_sse; + state->gmac256_finalize = imb_aes_gmac_finalize_256_sse; +} + +#include "mb_mgr_code.h" diff --git a/lib/sse_t2/sha1_ni_x1_sse.asm b/lib/sse_t2/sha1_ni_x1_sse.asm new file mode 100644 index 0000000000000000000000000000000000000000..8022746b91767bd79504f6848af9e3821fe59726 --- /dev/null +++ b/lib/sse_t2/sha1_ni_x1_sse.asm @@ -0,0 +1,337 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+;; + +;; Stack must be aligned to 32 bytes before call +;; +;; Registers: RAX RBX RCX RDX RBP RSI RDI R8 R9 R10 R11 R12 R13 R14 R15 +;; ----------------------------------------------------------- +;; Windows clobbers: RDX R10 R11 +;; Windows preserves: RAX RBX RCX RBP RSI RDI R8 R9 R12 R13 R14 R15 +;; ----------------------------------------------------------- +;; Linux clobbers: RDI R10 R11 +;; Linux preserves: RAX RBX RCX RDX RBP RSI R8 R9 R12 R13 R14 R15 +;; ----------------------------------------------------------- +;; +;; Linux/Windows clobbers: xmm0 - xmm15 + +%include "include/os.asm" +;%define DO_DBGPRINT +%include "include/dbgprint.asm" +%include "include/clear_regs.asm" +%include "include/mb_mgr_datastruct.asm" + +%ifdef LINUX +%define arg1 rdi +%define arg2 rsi +%define arg3 rdx +%define arg4 rcx +%else +%define arg1 rcx +%define arg2 rdx +%define arg3 r8 +%define arg4 r9 +%endif + +%define args arg1 +%define NUM_BLKS arg2 +%define lane arg3 +%define tmp arg4 +; reso = resdq => 16 bytes +struc frame +.ABCD_SAVE reso 1 +.E_SAVE reso 1 +.ABCD_SAVEb reso 1 +.E_SAVEb reso 1 +.XMM_SAVE reso 3 +.align resq 1 +endstruc + +%define INP r10 + +%define ABCD xmm0 +%define E0 xmm1 ; Need two E's b/c they ping pong +%define E1 xmm2 +%define MSG0 xmm3 +%define MSG1 xmm4 +%define MSG2 xmm5 +%define MSG3 xmm6 + +%define SHUF_MASK xmm14 +%define E_MASK xmm15 + +mksection .rodata +default rel +align 64 +PSHUFFLE_BYTE_FLIP_MASK: ;ddq 0x000102030405060708090a0b0c0d0e0f + dq 0x08090a0b0c0d0e0f, 0x0001020304050607 +UPPER_WORD_MASK: ;ddq 0xFFFFFFFF000000000000000000000000 + dq 0x0000000000000000, 0xFFFFFFFF00000000 + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void sha1_ni_x1(SHA1_ARGS *args, UINT32 size_in_blocks, uint64_t lane) +;; arg1 : pointer to args +;; arg2 : size (in blocks) ;; assumed to be >= 1 +;; arg3 : lane number + +mksection .text +MKGLOBAL(sha1_ni_x1,function,internal) +align 32 +sha1_ni_x1: + sub rsp, frame_size + + movdqa [rsp + frame.XMM_SAVE], xmm6 + movdqa [rsp + frame.XMM_SAVE + 16], xmm14 + movdqa [rsp + frame.XMM_SAVE + 16*2], xmm15 + + shl NUM_BLKS, 6 ; convert to bytes + jz done_hash + + ;; load input pointers + mov INP, [args + _data_ptr_sha1 + lane*PTR_SZ] + + add NUM_BLKS, INP ; pointer to end of data block -> loop exit condition + + ;; load initial digest + mov tmp, SHA1NI_DIGEST_ROW_SIZE + imul tmp, lane + movdqu ABCD, [args + tmp] + pxor E0, E0 + pinsrd E0, [args + tmp + 4*SHA1_DIGEST_WORD_SIZE], 3 + pshufd ABCD, ABCD, 0x1B + + movdqa SHUF_MASK, [rel PSHUFFLE_BYTE_FLIP_MASK] + movdqa E_MASK, [rel UPPER_WORD_MASK] + +loop0: + ;; Copy digests + movdqa [rsp + frame.ABCD_SAVE], ABCD + movdqa [rsp + frame.E_SAVE], E0 + + ;; Only needed if not using sha1nexte for rounds 0-3 + pand E0, E_MASK + + ;; Rounds 0-3 + movdqu MSG0, [INP + 0*16] + pshufb MSG0, SHUF_MASK + paddd E0, MSG0 + movdqa E1, ABCD + sha1rnds4 ABCD, E0, 0 + + ;; Rounds 4-7 + movdqu MSG1, [INP + 1*16] + pshufb MSG1, SHUF_MASK + sha1nexte E1, MSG1 + movdqa E0, ABCD + sha1rnds4 ABCD, E1, 0 + sha1msg1 MSG0, MSG1 + + ;; Rounds 8-11 + movdqu MSG2, [INP + 2*16] + pshufb MSG2, SHUF_MASK + sha1nexte E0, MSG2 + movdqa E1, ABCD + sha1rnds4 ABCD, E0, 0 + sha1msg1 MSG1, MSG2 + pxor MSG0, MSG2 + + ;; Rounds 12-15 + movdqu MSG3, [INP + 3*16] + pshufb MSG3, SHUF_MASK + sha1nexte E1, MSG3 + movdqa E0, ABCD + sha1msg2 MSG0, MSG3 + sha1rnds4 ABCD, E1, 0 + sha1msg1 MSG2, MSG3 + pxor MSG1, MSG3 + + ;; Rounds 16-19 + 
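+        ;; From round 16 onwards every four-round group below follows the same
+        ;; pattern: sha1nexte folds the intermediate E value into the next
+        ;; scheduled message dwords, sha1rnds4 performs four rounds (its
+        ;; immediate selects the round constant/function group 0-3), and
+        ;; sha1msg1/pxor/sha1msg2 continue the W[t] message schedule for the
+        ;; upcoming blocks.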
sha1nexte E0, MSG0 + movdqa E1, ABCD + sha1msg2 MSG1, MSG0 + sha1rnds4 ABCD, E0, 0 + sha1msg1 MSG3, MSG0 + pxor MSG2, MSG0 + + ;; Rounds 20-23 + sha1nexte E1, MSG1 + movdqa E0, ABCD + sha1msg2 MSG2, MSG1 + sha1rnds4 ABCD, E1, 1 + sha1msg1 MSG0, MSG1 + pxor MSG3, MSG1 + + ;; Rounds 24-27 + sha1nexte E0, MSG2 + movdqa E1, ABCD + sha1msg2 MSG3, MSG2 + sha1rnds4 ABCD, E0, 1 + sha1msg1 MSG1, MSG2 + pxor MSG0, MSG2 + + ;; Rounds 28-31 + sha1nexte E1, MSG3 + movdqa E0, ABCD + sha1msg2 MSG0, MSG3 + sha1rnds4 ABCD, E1, 1 + sha1msg1 MSG2, MSG3 + pxor MSG1, MSG3 + + ;; Rounds 32-35 + sha1nexte E0, MSG0 + movdqa E1, ABCD + sha1msg2 MSG1, MSG0 + sha1rnds4 ABCD, E0, 1 + sha1msg1 MSG3, MSG0 + pxor MSG2, MSG0 + + ;; Rounds 36-39 + sha1nexte E1, MSG1 + movdqa E0, ABCD + sha1msg2 MSG2, MSG1 + sha1rnds4 ABCD, E1, 1 + sha1msg1 MSG0, MSG1 + pxor MSG3, MSG1 + + ;; Rounds 40-43 + sha1nexte E0, MSG2 + movdqa E1, ABCD + sha1msg2 MSG3, MSG2 + sha1rnds4 ABCD, E0, 2 + sha1msg1 MSG1, MSG2 + pxor MSG0, MSG2 + + ;; Rounds 44-47 + sha1nexte E1, MSG3 + movdqa E0, ABCD + sha1msg2 MSG0, MSG3 + sha1rnds4 ABCD, E1, 2 + sha1msg1 MSG2, MSG3 + pxor MSG1, MSG3 + + ;; Rounds 48-51 + sha1nexte E0, MSG0 + movdqa E1, ABCD + sha1msg2 MSG1, MSG0 + sha1rnds4 ABCD, E0, 2 + sha1msg1 MSG3, MSG0 + pxor MSG2, MSG0 + + ;; Rounds 52-55 + sha1nexte E1, MSG1 + movdqa E0, ABCD + sha1msg2 MSG2, MSG1 + sha1rnds4 ABCD, E1, 2 + sha1msg1 MSG0, MSG1 + pxor MSG3, MSG1 + + ;; Rounds 56-59 + sha1nexte E0, MSG2 + movdqa E1, ABCD + sha1msg2 MSG3, MSG2 + sha1rnds4 ABCD, E0, 2 + sha1msg1 MSG1, MSG2 + pxor MSG0, MSG2 + + ;; Rounds 60-63 + sha1nexte E1, MSG3 + movdqa E0, ABCD + sha1msg2 MSG0, MSG3 + sha1rnds4 ABCD, E1, 3 + sha1msg1 MSG2, MSG3 + pxor MSG1, MSG3 + + ;; Rounds 64-67 + sha1nexte E0, MSG0 + movdqa E1, ABCD + sha1msg2 MSG1, MSG0 + sha1rnds4 ABCD, E0, 3 + sha1msg1 MSG3, MSG0 + pxor MSG2, MSG0 + + ;; Rounds 68-71 + sha1nexte E1, MSG1 + movdqa E0, ABCD + sha1msg2 MSG2, MSG1 + sha1rnds4 ABCD, E1, 3 + pxor MSG3, MSG1 + + ;; Rounds 72-75 + sha1nexte E0, MSG2 + movdqa E1, ABCD + sha1msg2 MSG3, MSG2 + sha1rnds4 ABCD, E0, 3 + + ;; Rounds 76-79 + sha1nexte E1, MSG3 + movdqa E0, ABCD + sha1rnds4 ABCD, E1, 3 + + ;; Need to rotate E left by 30 + movdqa E1, E0 + pslld E0, 30 + psrld E1, 2 + pxor E0, E1 + + paddd ABCD, [rsp + frame.ABCD_SAVE] + paddd E0, [rsp + frame.E_SAVE] + + add INP, 64 + cmp INP, NUM_BLKS + jne loop0 + + ;; write out digests + pshufd ABCD, ABCD, 0x1B + movdqu [args + tmp], ABCD + pextrd [args + tmp + 4*SHA1_DIGEST_WORD_SIZE], E0, 3 + + ;; update input pointers + mov [args + _data_ptr_sha1 + lane*PTR_SZ], INP + +done_hash: + + ;; Clear stack frame (4*16 bytes) +%ifdef SAFE_DATA + pxor MSG0, MSG0 + pxor MSG1, MSG1 + pxor MSG2, MSG2 + pxor MSG3, MSG3 + + movdqa [rsp + 0*16], MSG0 + movdqa [rsp + 1*16], MSG0 + movdqa [rsp + 2*16], MSG0 + movdqa [rsp + 3*16], MSG0 +%endif + + movdqa xmm6, [rsp + frame.XMM_SAVE] + movdqa xmm14, [rsp + frame.XMM_SAVE + 16] + movdqa xmm15, [rsp + frame.XMM_SAVE + 16*2] + add rsp, frame_size + + ret + +mksection stack-noexec diff --git a/lib/sse/sha1_ni_x2_sse.asm b/lib/sse_t2/sha1_ni_x2_sse.asm similarity index 80% rename from lib/sse/sha1_ni_x2_sse.asm rename to lib/sse_t2/sha1_ni_x2_sse.asm index f726ea7529b8baa39e1226abc44cd59682136961..27a61bbcb54dd01c2a4cb1770ea075cb36d6d378 100644 --- a/lib/sse/sha1_ni_x2_sse.asm +++ b/lib/sse_t2/sha1_ni_x2_sse.asm @@ -42,7 +42,6 @@ ;%define DO_DBGPRINT %include "include/dbgprint.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" %include 
"include/mb_mgr_datastruct.asm" %ifdef LINUX @@ -66,6 +65,7 @@ struc frame .E_SAVE reso 1 .ABCD_SAVEb reso 1 .E_SAVEb reso 1 +.XMM_SAVE reso 10 .align resq 1 endstruc @@ -106,11 +106,85 @@ UPPER_WORD_MASK: ;ddq 0xFFFFFFFF000000000000000000000000 ;; arg2 : size (in blocks) ;; assumed to be >= 1 mksection .text + +%define XMM_STORAGE 16*10 +%define GP_STORAGE 8*5 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 ; align rsp to 16 bytes +%ifndef LINUX + movdqa [rsp + 0*16], xmm6 + movdqa [rsp + 1*16], xmm7 + movdqa [rsp + 2*16], xmm8 + movdqa [rsp + 3*16], xmm9 + movdqa [rsp + 4*16], xmm10 + movdqa [rsp + 5*16], xmm11 + movdqa [rsp + 6*16], xmm12 + movdqa [rsp + 7*16], xmm13 + movdqa [rsp + 8*16], xmm14 + movdqa [rsp + 9*16], xmm15 + mov [rsp + GP_OFFSET], rdi + mov [rsp + GP_OFFSET + 8], rsi +%endif + mov [rsp + GP_OFFSET + 2*8], rbx + mov [rsp + GP_OFFSET + 3*8], rbp + mov [rsp + GP_OFFSET + 4*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 +%ifndef LINUX + movdqa xmm6, [rsp + 0*16] + movdqa xmm7, [rsp + 1*16] + movdqa xmm8, [rsp + 2*16] + movdqa xmm9, [rsp + 3*16] + movdqa xmm10, [rsp + 4*16] + movdqa xmm11, [rsp + 5*16] + movdqa xmm12, [rsp + 6*16] + movdqa xmm13, [rsp + 7*16] + movdqa xmm14, [rsp + 8*16] + movdqa xmm15, [rsp + 9*16] + mov rdi, [rsp + GP_OFFSET] + mov rsi, [rsp + GP_OFFSET + 8] + +%ifdef SAFE_DATA + pxor xmm5, xmm5 + movdqa [rsp + 0*16], xmm5 + movdqa [rsp + 1*16], xmm5 + movdqa [rsp + 2*16], xmm5 + movdqa [rsp + 3*16], xmm5 + movdqa [rsp + 4*16], xmm5 + movdqa [rsp + 5*16], xmm5 + movdqa [rsp + 6*16], xmm5 + movdqa [rsp + 7*16], xmm5 + movdqa [rsp + 8*16], xmm5 + movdqa [rsp + 9*16], xmm5 +%endif + mov rbx, [rsp + GP_OFFSET + 2*8] + mov rbp, [rsp + GP_OFFSET + 3*8] +%endif + mov rsp, [rsp + GP_OFFSET + 4*8] ;; rsp pointer +%endmacro + MKGLOBAL(sha1_ni,function,internal) align 32 sha1_ni: sub rsp, frame_size + movdqa [rsp + frame.XMM_SAVE], xmm6 + movdqa [rsp + frame.XMM_SAVE + 16], xmm7 + movdqa [rsp + frame.XMM_SAVE + 16*2], xmm8 + movdqa [rsp + frame.XMM_SAVE + 16*3], xmm9 + movdqa [rsp + frame.XMM_SAVE + 16*4], xmm10 + movdqa [rsp + frame.XMM_SAVE + 16*5], xmm11 + movdqa [rsp + frame.XMM_SAVE + 16*6], xmm12 + movdqa [rsp + frame.XMM_SAVE + 16*7], xmm13 + movdqa [rsp + frame.XMM_SAVE + 16*8], xmm14 + movdqa [rsp + frame.XMM_SAVE + 16*9], xmm15 DBGPRINTL "enter sha1-ni-x2" shl NUM_BLKS, 6 ; convert to bytes @@ -485,8 +559,26 @@ done_hash: clear_all_xmms_sse_asm %endif + movdqa xmm6, [rsp + frame.XMM_SAVE] + movdqa xmm7, [rsp + frame.XMM_SAVE + 16], + movdqa xmm8, [rsp + frame.XMM_SAVE + 16*2] + movdqa xmm9, [rsp + frame.XMM_SAVE + 16*3] + movdqa xmm10, [rsp + frame.XMM_SAVE + 16*4] + movdqa xmm11, [rsp + frame.XMM_SAVE + 16*5] + movdqa xmm12, [rsp + frame.XMM_SAVE + 16*6] + movdqa xmm13, [rsp + frame.XMM_SAVE + 16*7] + movdqa xmm14, [rsp + frame.XMM_SAVE + 16*8] + movdqa xmm15, [rsp + frame.XMM_SAVE + 16*9] add rsp, frame_size ret +; void call_sha1_ni_x2_sse_from_c(SHA1_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha1_ni_x2_sse_from_c,function,internal) +call_sha1_ni_x2_sse_from_c: + FUNC_SAVE + call sha1_ni + FUNC_RESTORE + ret + mksection stack-noexec diff --git a/lib/sse_t2/sha256_ni_x1_sse.asm b/lib/sse_t2/sha256_ni_x1_sse.asm new file mode 100644 index 0000000000000000000000000000000000000000..b6a026476071a29f6bb13167adac7e3d6291031c --- /dev/null +++ b/lib/sse_t2/sha256_ni_x1_sse.asm @@ -0,0 +1,424 @@ +; +;; Copyright (c) 2022, Intel 
Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +;; Stack must be aligned to 32 bytes before call +;; +;; Registers: RAX RBX RCX RDX RBP RSI RDI R8 R9 R10 R11 R12 R13 R14 R15 +;; ----------------------------------------------------------- +;; Windows clobbers: RCX RDX RSI RDI R11 +;; Windows preserves: RAX RBX RBP R8 R9 R10 R12 R13 R14 R15 +;; ----------------------------------------------------------- +;; Linux clobbers: RCX RDX RSI RDI R11 +;; Linux preserves: RAX RBX RBP R8 R9 R10 R12 R13 R14 R15 +;; ----------------------------------------------------------- +;; +;; Linux/Windows clobbers: xmm0 - xmm15 + +%include "include/os.asm" +%include "include/cet.inc" +%include "include/mb_mgr_datastruct.asm" +%include "include/clear_regs.asm" + +; resdq = res0 => 16 bytes +struc frame +.ABEF_SAVE reso 1 +.CDGH_SAVE reso 1 +.align resq 1 +endstruc + +%ifdef LINUX +%define arg1 rdi +%define arg2 rsi +%define arg3 rdx +%define arg4 rcx +%else +%define arg1 rcx +%define arg2 rdx +%define arg3 r8 +%define arg4 r9 +%endif + +%define args arg1 +%define NUM_BLKS arg2 +%define lane arg3 + +%define INP r10 + +;; MSG MUST be xmm0 (implicit argument) +%define MSG xmm0 +%define STATE0 xmm1 +%define STATE1 xmm2 +%define MSGTMP0 xmm3 +%define MSGTMP1 xmm4 +%define MSGTMP2 xmm5 +%define MSGTMP3 xmm6 +%define MSGTMP4 xmm7 +%define MSGTMP xmm14 +%define SHUF_MASK xmm15 + +mksection .rodata +default rel + +extern K256 + +align 64 +PSHUFFLE_BYTE_FLIP_MASK: + dq 0x0405060700010203, 0x0c0d0e0f08090a0b + +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; +;; void sha256_ni_x1(SHA256_ARGS *args, UINT32 size_in_blocks) +;; arg1 : pointer to args +;; arg2 : size (in blocks) ;; assumed to be >= 1 +mksection .text + +%define XMM_STORAGE 10*16 +%define GP_STORAGE 6*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define 
GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 ; align rsp to 16 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 +%ifndef LINUX + mov [rsp + 3*8], rsi + mov [rsp + 4*8], rdi + movdqa [rsp + 3*16], xmm6 + movdqa [rsp + 4*16], xmm7 + movdqa [rsp + 5*16], xmm8 + movdqa [rsp + 6*16], xmm9 + movdqa [rsp + 7*16], xmm10 + movdqa [rsp + 8*16], xmm11 + movdqa [rsp + 9*16], xmm12 + movdqa [rsp + 10*16], xmm13 + movdqa [rsp + 11*16], xmm14 + movdqa [rsp + 12*16], xmm15 +%endif ; LINUX + mov [rsp + 5*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] +%ifndef LINUX + mov rsi, [rsp + 3*8] + mov rdi, [rsp + 4*8] + movdqa xmm6, [rsp + 3*16] + movdqa xmm7, [rsp + 4*16] + movdqa xmm8, [rsp + 5*16] + movdqa xmm9, [rsp + 6*16] + movdqa xmm10, [rsp + 7*16] + movdqa xmm11, [rsp + 8*16] + movdqa xmm12, [rsp + 9*16] + movdqa xmm13, [rsp + 10*16] + movdqa xmm14, [rsp + 11*16] + movdqa xmm15, [rsp + 12*16] + +%ifdef SAFE_DATA + pxor xmm5, xmm5 + movdqa [rsp + 3*16], xmm5 + movdqa [rsp + 4*16], xmm5 + movdqa [rsp + 5*16], xmm5 + movdqa [rsp + 6*16], xmm5 + movdqa [rsp + 7*16], xmm5 + movdqa [rsp + 8*16], xmm5 + movdqa [rsp + 9*16], xmm5 + movdqa [rsp + 10*16], xmm5 + movdqa [rsp + 11*16], xmm5 + movdqa [rsp + 12*16], xmm5 + +%endif +%endif ; LINUX + mov rsp, [rsp + 5*8] ;; rsp pointer +%endmacro + +MKGLOBAL(sha256_ni_x1,function,internal) +align 32 +sha256_ni_x1: + sub rsp, frame_size + + shl NUM_BLKS, 6 ; convert to bytes + jz done_hash + + ;; load input pointers + mov INP, [args + _data_ptr_sha256 + lane*PTR_SZ] + + add NUM_BLKS, INP ; pointer to end of data + + ;; load initial digest + ;; Probably need to reorder these appropriately + ;; DCBA, HGFE -> ABEF, CDGH + shl lane, 5 + movdqu STATE0, [args + lane] + movdqu STATE1, [args + lane + 16] + + pshufd STATE0, STATE0, 0xB1 ; CDAB + pshufd STATE1, STATE1, 0x1B ; EFGH + movdqa MSGTMP4, STATE0 + palignr STATE0, STATE1, 8 ; ABEF + pblendw STATE1, MSGTMP4, 0xF0 ; CDGH + + movdqa SHUF_MASK, [rel PSHUFFLE_BYTE_FLIP_MASK] + +.loop0: + ;; Save digests + movdqa [rsp + frame.ABEF_SAVE], STATE0 + movdqa [rsp + frame.CDGH_SAVE], STATE1 + + ;; Rounds 0-3 + movdqu MSG, [INP + 0*16] + pshufb MSG, SHUF_MASK + movdqa MSGTMP0, MSG + paddd MSG, [rel K256 + 0*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + + ;; Rounds 4-7 + movdqu MSG, [INP + 1*16] + pshufb MSG, SHUF_MASK + movdqa MSGTMP1, MSG + paddd MSG, [rel K256 + 1*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP0, MSGTMP1 + + ;; Rounds 8-11 + movdqu MSG, [INP + 2*16] + pshufb MSG, SHUF_MASK + movdqa MSGTMP2, MSG + paddd MSG, [rel K256 + 2*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP1, MSGTMP2 + + ;; Rounds 12-15 + movdqu MSG, [INP + 3*16] + pshufb MSG, SHUF_MASK + movdqa MSGTMP3, MSG + paddd MSG, [rel K256 + 3*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP3 + palignr MSGTMP, MSGTMP2, 4 + paddd MSGTMP0, MSGTMP + sha256msg2 MSGTMP0, MSGTMP3 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP2, MSGTMP3 + + ;; Rounds 16-19 + movdqa MSG, 
MSGTMP0 + paddd MSG, [rel K256 + 4*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP0 + palignr MSGTMP, MSGTMP3, 4 + paddd MSGTMP1, MSGTMP + sha256msg2 MSGTMP1, MSGTMP0 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP3, MSGTMP0 + + ;; Rounds 20-23 + movdqa MSG, MSGTMP1 + paddd MSG, [rel K256 + 5*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP1 + palignr MSGTMP, MSGTMP0, 4 + paddd MSGTMP2, MSGTMP + sha256msg2 MSGTMP2, MSGTMP1 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP0, MSGTMP1 + + ;; Rounds 24-27 + movdqa MSG, MSGTMP2 + paddd MSG, [rel K256 + 6*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP2 + palignr MSGTMP, MSGTMP1, 4 + paddd MSGTMP3, MSGTMP + sha256msg2 MSGTMP3, MSGTMP2 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP1, MSGTMP2 + + ;; Rounds 28-31 + movdqa MSG, MSGTMP3 + paddd MSG, [rel K256 + 7*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP3 + palignr MSGTMP, MSGTMP2, 4 + paddd MSGTMP0, MSGTMP + sha256msg2 MSGTMP0, MSGTMP3 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP2, MSGTMP3 + + ;; Rounds 32-35 + movdqa MSG, MSGTMP0 + paddd MSG, [rel K256 + 8*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP0 + palignr MSGTMP, MSGTMP3, 4 + paddd MSGTMP1, MSGTMP + sha256msg2 MSGTMP1, MSGTMP0 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP3, MSGTMP0 + + ;; Rounds 36-39 + movdqa MSG, MSGTMP1 + paddd MSG, [rel K256 + 9*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP1 + palignr MSGTMP, MSGTMP0, 4 + paddd MSGTMP2, MSGTMP + sha256msg2 MSGTMP2, MSGTMP1 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP0, MSGTMP1 + + ;; Rounds 40-43 + movdqa MSG, MSGTMP2 + paddd MSG, [rel K256 + 10*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP2 + palignr MSGTMP, MSGTMP1, 4 + paddd MSGTMP3, MSGTMP + sha256msg2 MSGTMP3, MSGTMP2 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP1, MSGTMP2 + + ;; Rounds 44-47 + movdqa MSG, MSGTMP3 + paddd MSG, [rel K256 + 11*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP3 + palignr MSGTMP, MSGTMP2, 4 + paddd MSGTMP0, MSGTMP + sha256msg2 MSGTMP0, MSGTMP3 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP2, MSGTMP3 + + ;; Rounds 48-51 + movdqa MSG, MSGTMP0 + paddd MSG, [rel K256 + 12*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP0 + palignr MSGTMP, MSGTMP3, 4 + paddd MSGTMP1, MSGTMP + sha256msg2 MSGTMP1, MSGTMP0 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + sha256msg1 MSGTMP3, MSGTMP0 + + ;; Rounds 52-55 + movdqa MSG, MSGTMP1 + paddd MSG, [rel K256 + 13*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP1 + palignr MSGTMP, MSGTMP0, 4 + paddd MSGTMP2, MSGTMP + sha256msg2 MSGTMP2, MSGTMP1 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + + ;; Rounds 56-59 + movdqa MSG, MSGTMP2 + 
paddd MSG, [rel K256 + 14*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + movdqa MSGTMP, MSGTMP2 + palignr MSGTMP, MSGTMP1, 4 + paddd MSGTMP3, MSGTMP + sha256msg2 MSGTMP3, MSGTMP2 + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + + ;; Rounds 60-63 + movdqa MSG, MSGTMP3 + paddd MSG, [rel K256 + 15*16] + sha256rnds2 STATE1, STATE0, MSG ; MSG is implicit argument + pshufd MSG, MSG, 0x0E + sha256rnds2 STATE0, STATE1, MSG ; MSG is implicit argument + + paddd STATE0, [rsp + frame.ABEF_SAVE] + paddd STATE1, [rsp + frame.CDGH_SAVE] + + add INP, 64 + cmp INP, NUM_BLKS + jne .loop0 + + + ; Reorder for writeback + pshufd STATE0, STATE0, 0x1B ; FEBA + pshufd STATE1, STATE1, 0xB1 ; DCHG + movdqa MSGTMP4, STATE0 + pblendw STATE0, STATE1, 0xF0 ; DCBA + palignr STATE1, MSGTMP4, 8 ; HGFE + + ;; update digests + movdqu [args + lane + 0*16], STATE0 + movdqu [args + lane + 1*16], STATE1 + shr lane, 5 + + ;; update data pointers + mov [args + _data_ptr_sha256 + lane*PTR_SZ], INP + +done_hash: + + ;; Clear stack frame (4*16 bytes) +%ifdef SAFE_DATA + clear_all_xmms_sse_asm + movdqa [rsp + frame.ABEF_SAVE], xmm0 + movdqa [rsp + frame.CDGH_SAVE], xmm0 +%endif + + add rsp, frame_size + ret + +; void call_sha256_ni_x1_sse_from_c(SHA256_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha256_ni_x1_sse_from_c,function,internal) +call_sha256_ni_x1_sse_from_c: + FUNC_SAVE + call sha256_ni_x1 + FUNC_RESTORE + ret + +mksection stack-noexec diff --git a/lib/sse/sha256_ni_x2_sse.asm b/lib/sse_t2/sha256_ni_x2_sse.asm similarity index 90% rename from lib/sse/sha256_ni_x2_sse.asm rename to lib/sse_t2/sha256_ni_x2_sse.asm index e4d8bceda91cafc2f02edc737d8f4adcf963bd3b..c9aafac196397e80bdb4f717ddf0666223c3f4c6 100644 --- a/lib/sse/sha256_ni_x2_sse.asm +++ b/lib/sse_t2/sha256_ni_x2_sse.asm @@ -41,7 +41,6 @@ %include "include/os.asm" ;%define DO_DBGPRINT %include "include/dbgprint.asm" -%include "include/cet.inc" %include "include/mb_mgr_datastruct.asm" %include "include/clear_regs.asm" @@ -96,25 +95,10 @@ endstruc mksection .rodata default rel -align 64 -K256: - dd 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5 - dd 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5 - dd 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3 - dd 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174 - dd 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc - dd 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da - dd 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7 - dd 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967 - dd 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13 - dd 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85 - dd 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3 - dd 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070 - dd 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5 - dd 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3 - dd 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208 - dd 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 +extern K256 + +align 16 PSHUFFLE_BYTE_FLIP_MASK: dq 0x0405060700010203, 0x0c0d0e0f08090a0b @@ -126,6 +110,73 @@ PSHUFFLE_BYTE_FLIP_MASK: ;; arg1 : pointer to args ;; arg2 : size (in blocks) ;; assumed to be >= 1 mksection .text + +%define XMM_STORAGE 10*16 +%define GP_STORAGE 6*8 + +%define VARIABLE_OFFSET XMM_STORAGE + GP_STORAGE +%define GP_OFFSET XMM_STORAGE + +%macro FUNC_SAVE 0 + mov r11, rsp + sub rsp, VARIABLE_OFFSET + and rsp, ~15 ; align rsp to 16 bytes + + mov [rsp + 0*8], rbx + mov [rsp + 1*8], rbp + mov [rsp + 2*8], r12 +%ifndef LINUX + mov [rsp + 3*8], rsi + mov [rsp + 4*8], rdi + 
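+        ;; Windows x64 ABI treats xmm6-xmm15 as callee-saved, so spill them to
+        ;; the local frame (slots 3*16 .. 12*16, above the GP save area);
+        ;; FUNC_RESTORE reloads them before control returns to the C caller.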
movdqa [rsp + 3*16], xmm6 + movdqa [rsp + 4*16], xmm7 + movdqa [rsp + 5*16], xmm8 + movdqa [rsp + 6*16], xmm9 + movdqa [rsp + 7*16], xmm10 + movdqa [rsp + 8*16], xmm11 + movdqa [rsp + 9*16], xmm12 + movdqa [rsp + 10*16], xmm13 + movdqa [rsp + 11*16], xmm14 + movdqa [rsp + 12*16], xmm15 +%endif ; LINUX + mov [rsp + 5*8], r11 ;; rsp pointer +%endmacro + +%macro FUNC_RESTORE 0 + mov rbx, [rsp + 0*8] + mov rbp, [rsp + 1*8] + mov r12, [rsp + 2*8] +%ifndef LINUX + mov rsi, [rsp + 3*8] + mov rdi, [rsp + 4*8] + movdqa xmm6, [rsp + 3*16] + movdqa xmm7, [rsp + 4*16] + movdqa xmm8, [rsp + 5*16] + movdqa xmm9, [rsp + 6*16] + movdqa xmm10, [rsp + 7*16] + movdqa xmm11, [rsp + 8*16] + movdqa xmm12, [rsp + 9*16] + movdqa xmm13, [rsp + 10*16] + movdqa xmm14, [rsp + 11*16] + movdqa xmm15, [rsp + 12*16] + +%ifdef SAFE_DATA + pxor xmm5, xmm5 + movdqa [rsp + 3*16], xmm5 + movdqa [rsp + 4*16], xmm5 + movdqa [rsp + 5*16], xmm5 + movdqa [rsp + 6*16], xmm5 + movdqa [rsp + 7*16], xmm5 + movdqa [rsp + 8*16], xmm5 + movdqa [rsp + 9*16], xmm5 + movdqa [rsp + 10*16], xmm5 + movdqa [rsp + 11*16], xmm5 + movdqa [rsp + 12*16], xmm5 +%endif +%endif ; LINUX + mov rsp, [rsp + 5*8] ;; rsp pointer +%endmacro + MKGLOBAL(sha256_ni,function,internal) align 32 sha256_ni: @@ -609,4 +660,12 @@ done_hash: add rsp, frame_size ret +; void call_sha256_ni_x2_sse_from_c(SHA256_ARGS *args, UINT32 size_in_blocks); +MKGLOBAL(call_sha256_ni_x2_sse_from_c,function,internal) +call_sha256_ni_x2_sse_from_c: + FUNC_SAVE + call sha256_ni + FUNC_RESTORE + ret + mksection stack-noexec diff --git a/lib/sse_t2/sha_ni_mb_sse.c b/lib/sse_t2/sha_ni_mb_sse.c new file mode 100644 index 0000000000000000000000000000000000000000..5a6cbec8caa87f9ed35d6f7a930c4af69a39e946 --- /dev/null +++ b/lib/sse_t2/sha_ni_mb_sse.c @@ -0,0 +1,92 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include "include/sha_mb_mgr.h" +#include "include/arch_sse_type2.h" + +/* ========================================================================== */ +/* + * SHA1-NI MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha1_ni_sse(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 2, 1, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_ni_x2_sse_from_c, 1); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha1_ni_sse(MB_MGR_SHA_1_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_1(state, job, 2, 0, 1, + IMB_SHA1_BLOCK_SIZE, SHA1_PAD_SIZE, + call_sha1_ni_x2_sse_from_c, 1); +} + +/* ========================================================================== */ +/* + * SHA224-NI MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha224_ni_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 2, 1, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha256_ni_x2_sse_from_c, 1); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha224_ni_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 2, 0, 224, + IMB_SHA_256_BLOCK_SIZE, SHA224_PAD_SIZE, + call_sha256_ni_x2_sse_from_c, 1); +} + +/* ========================================================================== */ +/* + * SHA256-NI MB API + */ + +IMB_DLL_LOCAL +IMB_JOB *submit_job_sha256_ni_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 2, 1, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + call_sha256_ni_x2_sse_from_c, 1); +} + +IMB_DLL_LOCAL +IMB_JOB *flush_job_sha256_ni_sse(MB_MGR_SHA_256_OOO *state, IMB_JOB *job) +{ + return submit_flush_job_sha_256(state, job, 2, 0, 256, + IMB_SHA_256_BLOCK_SIZE, SHA256_PAD_SIZE, + call_sha256_ni_x2_sse_from_c, 1); +} \ No newline at end of file diff --git a/lib/sse_t3/README b/lib/sse_t3/README new file mode 100644 index 0000000000000000000000000000000000000000..8854263ef3ecbe4af7a19f1a433fb1459b5d1b2e --- /dev/null +++ b/lib/sse_t3/README @@ -0,0 +1,3 @@ +SSE TYPE3: +- SSE TYPE2: SSE4.2, AESNI, PCLMULQDQ, CMOV, BSWAP, SHANI +- GFNI diff --git a/lib/sse/aes128_cbc_dec_by8_sse.asm b/lib/sse_t3/aes128_cbc_dec_by8_sse.asm similarity index 99% rename from lib/sse/aes128_cbc_dec_by8_sse.asm rename to lib/sse_t3/aes128_cbc_dec_by8_sse.asm index 31c535dcd011da53d5f960e8840b95073fed92e6..862edf5a321ec46293b43219fdd4b67c371a4c81 100644 --- a/lib/sse/aes128_cbc_dec_by8_sse.asm +++ b/lib/sse_t3/aes128_cbc_dec_by8_sse.asm @@ -43,7 +43,6 @@ ; %include "include/os.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" %ifndef AES_CBC_DEC_128 %define AES_CBC_DEC_128 aes_cbc_dec_128_by8_sse @@ -225,7 +224,6 @@ mksection .text align 32 MKGLOBAL(AES_CBC_DEC_128,function,internal) AES_CBC_DEC_128: - endbranch64 %ifndef LINUX mov num_bytes, [rsp + 8*5] %endif @@ -307,7 +305,6 @@ mult_of_8_blks: movdqa xkey10, [p_keys + 10*16] main_loop2: - endbranch64 main_loop3: ; num_bytes is a multiple of 8 and >0 diff --git a/lib/sse/aes128_cbc_enc_x8_sse.asm b/lib/sse_t3/aes128_cbc_enc_x8_sse.asm similarity index 99% rename from lib/sse/aes128_cbc_enc_x8_sse.asm rename to lib/sse_t3/aes128_cbc_enc_x8_sse.asm index 06f7b4b050b5fa5cfd7b3e2c397eb0823b9836d0..2888cf236aded79fc75c0cbcbeca41d2bd4048f4 100644 --- a/lib/sse/aes128_cbc_enc_x8_sse.asm +++ b/lib/sse_t3/aes128_cbc_enc_x8_sse.asm @@ -32,7 +32,6 @@ %include "include/os.asm" %include "include/mb_mgr_datastruct.asm" %include 
"include/clear_regs.asm" -%include "include/cet.inc" %macro PXOR2 2 movdqu XTMP, %2 @@ -119,7 +118,6 @@ aes128_cbc_mac_x8_sse: MKGLOBAL(aes_cbc_enc_128_x8_sse,function,internal) aes_cbc_enc_128_x8_sse: %endif - endbranch64 sub rsp, STACK_size mov [GPR_SAVE_AREA + 8*0], rbp %ifdef CBC_MAC diff --git a/lib/sse/aes128_cbc_mac_x8_sse.asm b/lib/sse_t3/aes128_cbc_mac_x8_sse.asm similarity index 97% rename from lib/sse/aes128_cbc_mac_x8_sse.asm rename to lib/sse_t3/aes128_cbc_mac_x8_sse.asm index af18d03acd5d28066934f2137721ac7207930f28..119a60d50d821421e2391fcb003dff037335e22b 100644 --- a/lib/sse/aes128_cbc_mac_x8_sse.asm +++ b/lib/sse_t3/aes128_cbc_mac_x8_sse.asm @@ -28,4 +28,4 @@ ;;; Routine to compute CBC-MAC based on 128 bit CBC AES encryptionk code %define CBC_MAC -%include "sse/aes128_cbc_enc_x8_sse.asm" +%include "sse_t3/aes128_cbc_enc_x8_sse.asm" diff --git a/lib/sse_t3/aes128_ecb_by8_sse.asm b/lib/sse_t3/aes128_ecb_by8_sse.asm new file mode 100644 index 0000000000000000000000000000000000000000..9c6a5aedc70b7cc7022fd63860b040a75609b410 --- /dev/null +++ b/lib/sse_t3/aes128_ecb_by8_sse.asm @@ -0,0 +1,165 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+;; + +; routine to do AES ECB 128 encrypt/decrypt on 16n bytes doing AES by 8 + +%include "include/os.asm" +%include "include/clear_regs.asm" +%include "include/aes_common.asm" + +%ifdef LINUX +%define IN rdi +%define KEYS rsi +%define OUT rdx +%define LEN rcx +%else +%define IN rcx +%define KEYS rdx +%define OUT r8 +%define LEN r9 +%endif +%define IDX rax +%define TMP r11 +%define XDATA0 xmm0 +%define XDATA1 xmm1 +%define XDATA2 xmm2 +%define XDATA3 xmm3 +%define XDATA4 xmm4 +%define XDATA5 xmm5 +%define XDATA6 xmm6 +%define XDATA7 xmm7 +%define XKEY1 xmm8 + +%ifndef AES_ECB_NROUNDS +%define AES_ECB_NROUNDS 10 +%endif + +%if AES_ECB_NROUNDS == 10 +%define KEYSIZE 128 +%elif AES_ECB_NROUNDS == 12 +%define KEYSIZE 192 +%else +%define KEYSIZE 256 +%endif + +%define AES_ECB_ENC aes_ecb_enc_ %+ KEYSIZE %+ _by8_sse +%define AES_ECB_DEC aes_ecb_dec_ %+ KEYSIZE %+ _by8_sse + +%macro AES_ECB 1 +%define %%DIR %1 ; [in] Direction (ENC/DEC) +%ifidn %%DIR, ENC +%define AES XMM_AESENC_ROUND_BLOCKS_SSE_0_8 +%else ; DIR = DEC +%define AES XMM_AESDEC_ROUND_BLOCKS_SSE_0_8 +%endif + or LEN, LEN + jz %%done + xor IDX, IDX + mov TMP, LEN + and TMP, 127 ; number of initial bytes (0 to 7 AES blocks) + jz %%main_loop + ; branch to different code block based on remainder + cmp TMP, 4*16 + je %%initial_num_blocks_is_4 + jb %%initial_num_blocks_is_3_1 + cmp TMP, 6*16 + je %%initial_num_blocks_is_6 + jb %%initial_num_blocks_is_5 + ja %%initial_num_blocks_is_7 +%%initial_num_blocks_is_3_1: + ;; 3, 2 or 1 + cmp TMP, 2*16 + ja %%initial_num_blocks_is_3 + je %%initial_num_blocks_is_2 + ;; fall through for `jmp %%initial_num_blocks_is_1` +%assign num_blocks 1 +%rep 7 +%%initial_num_blocks_is_ %+ num_blocks : + ; load initial blocks + XMM_LOAD_BLOCKS_SSE_0_8 num_blocks, IN, 0, XDATA0,\ + XDATA1, XDATA2, XDATA3, XDATA4, XDATA5,\ + XDATA6, XDATA7 +%assign %%I 0 +; Perform AES encryption/decryption on initial blocks +%rep (AES_ECB_NROUNDS + 1) ; 10/12/14 + movdqu XKEY1, [KEYS + %%I*16] + AES XDATA0, XDATA1, XDATA2, XDATA3, XDATA4,\ + XDATA5, XDATA6, XDATA7, XKEY1, %%I, no_data,\ + no_data, no_data, no_data, no_data, no_data,\ + no_data, no_data, num_blocks, (AES_ECB_NROUNDS - 1) +%assign %%I (%%I + 1) +%endrep + ; store initial blocks + XMM_STORE_BLOCKS_SSE_0_8 num_blocks, OUT, 0, XDATA0, XDATA1,\ + XDATA2, XDATA3, XDATA4, XDATA5, XDATA6, XDATA7 + add IDX, num_blocks*16 + cmp IDX, LEN + je %%done +%assign num_blocks (num_blocks + 1) + jmp %%main_loop +%endrep +align 16 +%%main_loop: + ; load next 8 blocks + XMM_LOAD_BLOCKS_SSE_0_8 8, {IN + IDX}, 0, XDATA0,\ + XDATA1, XDATA2, XDATA3, XDATA4, XDATA5,\ + XDATA6, XDATA7 +%assign %%I 0 +; Perform AES encryption/decryption on 8 blocks +%rep (AES_ECB_NROUNDS + 1) ; 10/12/14 + movdqu XKEY1, [KEYS + %%I*16] + AES XDATA0, XDATA1, XDATA2, XDATA3, XDATA4,\ + XDATA5, XDATA6, XDATA7, XKEY1, %%I, no_data,\ + no_data, no_data, no_data, no_data, no_data,\ + no_data, no_data, 8, (AES_ECB_NROUNDS - 1) +%assign %%I (%%I + 1) +%endrep + ; store 8 blocks + XMM_STORE_BLOCKS_SSE_0_8 8, {OUT + IDX}, 0, XDATA0, XDATA1,\ + XDATA2, XDATA3, XDATA4, XDATA5, XDATA6, XDATA7 + add IDX, 8*16 + cmp IDX, LEN + jne %%main_loop +%%done: +%ifdef SAFE_DATA + clear_all_xmms_sse_asm +%endif +%endmacro + +mksection .text +align 16 +MKGLOBAL(AES_ECB_ENC,function,internal) +AES_ECB_ENC: + AES_ECB ENC + ret +align 16 +MKGLOBAL(AES_ECB_DEC,function,internal) +AES_ECB_DEC: + AES_ECB DEC + ret + +mksection stack-noexec diff --git a/lib/sse/aes192_cbc_dec_by8_sse.asm b/lib/sse_t3/aes192_cbc_dec_by8_sse.asm similarity index 
99% rename from lib/sse/aes192_cbc_dec_by8_sse.asm rename to lib/sse_t3/aes192_cbc_dec_by8_sse.asm index 2451c3a8a117c8c43e5a84c4836eab2bab3f32a2..c04f546b74ac5d4da40ad8ff8e9ebad2c2bee6f2 100644 --- a/lib/sse/aes192_cbc_dec_by8_sse.asm +++ b/lib/sse_t3/aes192_cbc_dec_by8_sse.asm @@ -43,7 +43,7 @@ ; %include "include/os.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" + %ifndef AES_CBC_DEC_192 %define AES_CBC_DEC_192 aes_cbc_dec_192_by8_sse %endif @@ -248,7 +248,6 @@ align 32 ;; AES_CBC_DEC_192(void *in, void *IV, void *keys, void *out, UINT64 num_bytes) MKGLOBAL(AES_CBC_DEC_192,function,internal) AES_CBC_DEC_192: - endbranch64 %ifndef LINUX mov num_bytes, [rsp + 8*5] %endif diff --git a/lib/sse/aes192_cbc_enc_x8_sse.asm b/lib/sse_t3/aes192_cbc_enc_x8_sse.asm similarity index 100% rename from lib/sse/aes192_cbc_enc_x8_sse.asm rename to lib/sse_t3/aes192_cbc_enc_x8_sse.asm diff --git a/lib/sse_t3/aes192_ecb_by8_sse.asm b/lib/sse_t3/aes192_ecb_by8_sse.asm new file mode 100644 index 0000000000000000000000000000000000000000..8bc905b4c001dd304adb8dce4e8bd2244595b4b5 --- /dev/null +++ b/lib/sse_t3/aes192_ecb_by8_sse.asm @@ -0,0 +1,33 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+;; + +; routine to do AES ECB 192 encrypt/decrypt on 16n bytes doing AES by 8 + +%define AES_ECB_NROUNDS 12 + +%include "include/os.asm" +%include "sse_t3/aes128_ecb_by8_sse.asm" diff --git a/lib/sse/aes256_cbc_dec_by8_sse.asm b/lib/sse_t3/aes256_cbc_dec_by8_sse.asm similarity index 99% rename from lib/sse/aes256_cbc_dec_by8_sse.asm rename to lib/sse_t3/aes256_cbc_dec_by8_sse.asm index 5dfe6ea70012eb1f50118cfd4592fca233bca3a4..bc0fb718e20c4ae06b7b8c2c25385d5f72b52683 100644 --- a/lib/sse/aes256_cbc_dec_by8_sse.asm +++ b/lib/sse_t3/aes256_cbc_dec_by8_sse.asm @@ -44,7 +44,7 @@ %include "include/os.asm" %include "include/clear_regs.asm" -%include "include/cet.inc" + %ifndef AES_CBC_DEC_256 %define AES_CBC_DEC_256 aes_cbc_dec_256_by8_sse %endif @@ -265,7 +265,6 @@ align 32 ;; AES_CBC_DEC_256(void *in, void *IV, void *keys, void *out, UINT64 num_bytes) MKGLOBAL(AES_CBC_DEC_256,function,internal) AES_CBC_DEC_256: - endbranch64 %ifndef LINUX mov num_bytes, [rsp + 8*5] %endif diff --git a/lib/sse/aes256_cbc_enc_x8_sse.asm b/lib/sse_t3/aes256_cbc_enc_x8_sse.asm similarity index 100% rename from lib/sse/aes256_cbc_enc_x8_sse.asm rename to lib/sse_t3/aes256_cbc_enc_x8_sse.asm diff --git a/lib/sse/aes256_cbc_mac_x8_sse.asm b/lib/sse_t3/aes256_cbc_mac_x8_sse.asm similarity index 97% rename from lib/sse/aes256_cbc_mac_x8_sse.asm rename to lib/sse_t3/aes256_cbc_mac_x8_sse.asm index c6f0b8d6bfa0dfc0d093a46ae2ebeb0d88f86a3e..75d861a0bacdf395ab6d93a329265d92ba967416 100644 --- a/lib/sse/aes256_cbc_mac_x8_sse.asm +++ b/lib/sse_t3/aes256_cbc_mac_x8_sse.asm @@ -28,4 +28,4 @@ ;;; Routine to compute CBC-MAC based on 256 bit CBC AES encryption code %define CBC_MAC -%include "sse/aes256_cbc_enc_x8_sse.asm" +%include "sse_t3/aes256_cbc_enc_x8_sse.asm" diff --git a/lib/sse_t3/aes256_ecb_by8_sse.asm b/lib/sse_t3/aes256_ecb_by8_sse.asm new file mode 100644 index 0000000000000000000000000000000000000000..d40d245d8560816ab06f2c80249fd2e916611183 --- /dev/null +++ b/lib/sse_t3/aes256_ecb_by8_sse.asm @@ -0,0 +1,33 @@ +;; +;; Copyright (c) 2022, Intel Corporation +;; +;; Redistribution and use in source and binary forms, with or without +;; modification, are permitted provided that the following conditions are met: +;; +;; * Redistributions of source code must retain the above copyright notice, +;; this list of conditions and the following disclaimer. +;; * Redistributions in binary form must reproduce the above copyright +;; notice, this list of conditions and the following disclaimer in the +;; documentation and/or other materials provided with the distribution. +;; * Neither the name of Intel Corporation nor the names of its contributors +;; may be used to endorse or promote products derived from this software +;; without specific prior written permission. +;; +;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +;; DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +;; + +; routine to do AES ECB 256 encrypt/decrypt on 16n bytes doing AES by 8 + +%define AES_ECB_NROUNDS 14 + +%include "include/os.asm" +%include "sse_t3/aes128_ecb_by8_sse.asm" diff --git a/lib/sse/mb_mgr_aes128_cbc_enc_flush_x8_sse.asm b/lib/sse_t3/mb_mgr_aes128_cbc_enc_flush_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes128_cbc_enc_flush_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes128_cbc_enc_flush_x8_sse.asm index b9cac85ac6d576e5f12d371f37cdf37ca1249f80..0fa6e860ee63875075b7e61a26d12b198dc41999 100644 --- a/lib/sse/mb_mgr_aes128_cbc_enc_flush_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes128_cbc_enc_flush_x8_sse.asm @@ -29,4 +29,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_128_x8_sse %define FLUSH_JOB_AES_ENC flush_job_aes128_enc_x8_sse -%include "sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes128_cbc_enc_submit_x8_sse.asm b/lib/sse_t3/mb_mgr_aes128_cbc_enc_submit_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes128_cbc_enc_submit_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes128_cbc_enc_submit_x8_sse.asm index 01882486e7889de73d584fe4613829ae492378e1..54244bded10f0c4230da1c10c0df82a1f6a02a5b 100644 --- a/lib/sse/mb_mgr_aes128_cbc_enc_submit_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes128_cbc_enc_submit_x8_sse.asm @@ -29,4 +29,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_128_x8_sse %define SUBMIT_JOB_AES_ENC submit_job_aes128_enc_x8_sse -%include "sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes128_ccm_auth_submit_flush_x8_sse.asm b/lib/sse_t3/mb_mgr_aes128_ccm_auth_submit_flush_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes128_ccm_auth_submit_flush_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes128_ccm_auth_submit_flush_x8_sse.asm index a228dc85daddf4e154475fc2c3fc726d4d4e5f49..4c74848dfdc96ad76492606e79aa7d5d69a996e7 100644 --- a/lib/sse/mb_mgr_aes128_ccm_auth_submit_flush_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes128_ccm_auth_submit_flush_x8_sse.asm @@ -30,4 +30,4 @@ %define SUBMIT_JOB_AES_CCM_AUTH submit_job_aes128_ccm_auth_x8_sse %define FLUSH_JOB_AES_CCM_AUTH flush_job_aes128_ccm_auth_x8_sse -%include "sse/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes128_cmac_submit_flush_x8_sse.asm b/lib/sse_t3/mb_mgr_aes128_cmac_submit_flush_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes128_cmac_submit_flush_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes128_cmac_submit_flush_x8_sse.asm index 62e74082a9a97196faa44ea152dc2be8b12745dc..e3eebcac953b08579455437bdf0eeb40bd054de0 100644 --- a/lib/sse/mb_mgr_aes128_cmac_submit_flush_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes128_cmac_submit_flush_x8_sse.asm @@ -30,4 +30,4 @@ %define SUBMIT_JOB_AES_CMAC_AUTH submit_job_aes128_cmac_auth_x8_sse %define FLUSH_JOB_AES_CMAC_AUTH flush_job_aes128_cmac_auth_x8_sse -%include 
"sse/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes192_cbc_enc_flush_x8_sse.asm b/lib/sse_t3/mb_mgr_aes192_cbc_enc_flush_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes192_cbc_enc_flush_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes192_cbc_enc_flush_x8_sse.asm index b1c3b4c192f952280cecad4e5fb108d488d08e59..657b6c6bbd789ae37281dc4433abc2187840c1d3 100644 --- a/lib/sse/mb_mgr_aes192_cbc_enc_flush_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes192_cbc_enc_flush_x8_sse.asm @@ -29,4 +29,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_192_x8_sse %define FLUSH_JOB_AES_ENC flush_job_aes192_enc_x8_sse -%include "sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes192_cbc_enc_submit_x8_sse.asm b/lib/sse_t3/mb_mgr_aes192_cbc_enc_submit_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes192_cbc_enc_submit_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes192_cbc_enc_submit_x8_sse.asm index 63d4294bc4312b52e440fe6b5133280dd96a76db..28ca090aaf7f9bf73313da2732468444f61fbaa9 100644 --- a/lib/sse/mb_mgr_aes192_cbc_enc_submit_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes192_cbc_enc_submit_x8_sse.asm @@ -29,4 +29,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_192_x8_sse %define SUBMIT_JOB_AES_ENC submit_job_aes192_enc_x8_sse -%include "sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes256_cbc_enc_flush_x8_sse.asm b/lib/sse_t3/mb_mgr_aes256_cbc_enc_flush_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes256_cbc_enc_flush_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes256_cbc_enc_flush_x8_sse.asm index 0232f61c7ea42ba571e7857cc0a17ae8f4c580d2..33d72ab742ad1773e6903537c152d01221def152 100644 --- a/lib/sse/mb_mgr_aes256_cbc_enc_flush_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes256_cbc_enc_flush_x8_sse.asm @@ -29,4 +29,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_256_x8_sse %define FLUSH_JOB_AES_ENC flush_job_aes256_enc_x8_sse -%include "sse/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_flush_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes256_cbc_enc_submit_x8_sse.asm b/lib/sse_t3/mb_mgr_aes256_cbc_enc_submit_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes256_cbc_enc_submit_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes256_cbc_enc_submit_x8_sse.asm index 184d25be193b0dac951d3a0c7dc3853e16825b6f..fd2d2a768d4b74cc68faf25b35dfe189b08a85bc 100644 --- a/lib/sse/mb_mgr_aes256_cbc_enc_submit_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes256_cbc_enc_submit_x8_sse.asm @@ -29,4 +29,4 @@ %define AES_CBC_ENC_X4 aes_cbc_enc_256_x8_sse %define SUBMIT_JOB_AES_ENC submit_job_aes256_enc_x8_sse -%include "sse/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cbc_enc_submit_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes256_ccm_auth_submit_flush_x8_sse.asm b/lib/sse_t3/mb_mgr_aes256_ccm_auth_submit_flush_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes256_ccm_auth_submit_flush_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes256_ccm_auth_submit_flush_x8_sse.asm index ea8af6ff1f34fb8eb0253d96e318e2aeb0ad5627..c540763789a9a1e1c705a40ab72f7d5f80ce2578 100644 --- a/lib/sse/mb_mgr_aes256_ccm_auth_submit_flush_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes256_ccm_auth_submit_flush_x8_sse.asm @@ -31,4 +31,4 @@ %define SUBMIT_JOB_AES_CCM_AUTH submit_job_aes256_ccm_auth_x8_sse %define FLUSH_JOB_AES_CCM_AUTH flush_job_aes256_ccm_auth_x8_sse -%include 
"sse/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_ccm_auth_submit_flush_x4_sse.asm" diff --git a/lib/sse/mb_mgr_aes256_cmac_submit_flush_x8_sse.asm b/lib/sse_t3/mb_mgr_aes256_cmac_submit_flush_x8_sse.asm similarity index 96% rename from lib/sse/mb_mgr_aes256_cmac_submit_flush_x8_sse.asm rename to lib/sse_t3/mb_mgr_aes256_cmac_submit_flush_x8_sse.asm index a37744eb33f4bf57d1f60ebc2d38bad5b8e0519c..62aeb6e861d763727804f597a12c6fc1cd5925fa 100644 --- a/lib/sse/mb_mgr_aes256_cmac_submit_flush_x8_sse.asm +++ b/lib/sse_t3/mb_mgr_aes256_cmac_submit_flush_x8_sse.asm @@ -30,4 +30,4 @@ %define SUBMIT_JOB_AES_CMAC_AUTH submit_job_aes256_cmac_auth_x8_sse %define FLUSH_JOB_AES_CMAC_AUTH flush_job_aes256_cmac_auth_x8_sse -%include "sse/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" +%include "sse_t1/mb_mgr_aes128_cmac_submit_flush_x4_sse.asm" diff --git a/lib/sse_t3/mb_mgr_sse_t3.c b/lib/sse_t3/mb_mgr_sse_t3.c new file mode 100644 index 0000000000000000000000000000000000000000..c0a244244cb212d2eaa5a2ddab083515c6792dc1 --- /dev/null +++ b/lib/sse_t3/mb_mgr_sse_t3.c @@ -0,0 +1,489 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include +#include +#include + +#define SSE + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/kasumi_interface.h" +#include "include/zuc_internal.h" +#include "include/snow3g.h" +#include "include/gcm.h" +#include "include/chacha20_poly1305.h" +#include "include/snow3g_submit.h" + +#include "include/save_xmms.h" +#include "include/des.h" +#include "include/cpu_feature.h" +#include "include/noaesni.h" +#include "include/aesni_emu.h" +#include "include/error.h" + +#include "include/arch_sse_type1.h" +#include "include/arch_sse_type2.h" +#include "include/arch_sse_type3.h" + +#include "include/ooo_mgr_reset.h" + +#define SAVE_XMMS save_xmms +#define RESTORE_XMMS restore_xmms + +/* JOB API */ +#define SUBMIT_JOB submit_job_sse_t3 +#define FLUSH_JOB flush_job_sse_t3 +#define QUEUE_SIZE queue_size_sse_t3 +#define SUBMIT_JOB_NOCHECK submit_job_nocheck_sse_t3 +#define GET_NEXT_JOB get_next_job_sse_t3 +#define GET_COMPLETED_JOB get_completed_job_sse_t3 +#define GET_NEXT_BURST get_next_burst_sse_t3 +#define SUBMIT_BURST submit_burst_sse_t3 +#define SUBMIT_BURST_NOCHECK submit_burst_nocheck_sse_t3 +#define FLUSH_BURST flush_burst_sse_t3 +#define SUBMIT_CIPHER_BURST submit_cipher_burst_sse_t3 +#define SUBMIT_CIPHER_BURST_NOCHECK submit_cipher_burst_nocheck_sse_t3 +#define SUBMIT_HASH_BURST submit_hash_burst_sse_t3 +#define SUBMIT_HASH_BURST_NOCHECK submit_hash_burst_nocheck_sse_t3 + +/* Hash */ +#define SUBMIT_JOB_HASH SUBMIT_JOB_HASH_SSE_T3 +#define FLUSH_JOB_HASH FLUSH_JOB_HASH_SSE_T3 + +/* Cipher encrypt / decrypt */ +#define SUBMIT_JOB_CIPHER_ENC SUBMIT_JOB_CIPHER_ENC_SSE_T3 +#define FLUSH_JOB_CIPHER_ENC FLUSH_JOB_CIPHER_ENC_SSE_T3 +#define SUBMIT_JOB_CIPHER_DEC SUBMIT_JOB_CIPHER_DEC_SSE_T3 + +/* AES-GCM */ +#define AES_GCM_DEC_IV_128 aes_gcm_dec_var_iv_128_sse +#define AES_GCM_ENC_IV_128 aes_gcm_enc_var_iv_128_sse +#define AES_GCM_DEC_IV_192 aes_gcm_dec_var_iv_192_sse +#define AES_GCM_ENC_IV_192 aes_gcm_enc_var_iv_192_sse +#define AES_GCM_DEC_IV_256 aes_gcm_dec_var_iv_256_sse +#define AES_GCM_ENC_IV_256 aes_gcm_enc_var_iv_256_sse + +#define SUBMIT_JOB_AES_GCM_DEC submit_job_aes_gcm_dec_sse +#define SUBMIT_JOB_AES_GCM_ENC submit_job_aes_gcm_enc_sse + +/* AES-CBC */ +#define SUBMIT_JOB_AES_CBC_128_ENC submit_job_aes128_enc_x8_sse +#define SUBMIT_JOB_AES_CBC_128_DEC submit_job_aes128_dec_sse +#define FLUSH_JOB_AES_CBC_128_ENC flush_job_aes128_enc_x8_sse + +#define SUBMIT_JOB_AES_CBC_192_ENC submit_job_aes192_enc_x8_sse +#define SUBMIT_JOB_AES_CBC_192_DEC submit_job_aes192_dec_sse +#define FLUSH_JOB_AES_CBC_192_ENC flush_job_aes192_enc_x8_sse + +#define SUBMIT_JOB_AES_CBC_256_ENC submit_job_aes256_enc_x8_sse +#define SUBMIT_JOB_AES_CBC_256_DEC submit_job_aes256_dec_sse +#define FLUSH_JOB_AES_CBC_256_ENC flush_job_aes256_enc_x8_sse + +#define AES_CBC_DEC_128 aes_cbc_dec_128_by8_sse +#define AES_CBC_DEC_192 aes_cbc_dec_192_by8_sse +#define AES_CBC_DEC_256 aes_cbc_dec_256_by8_sse + +/* AES-CBCS */ +#define SUBMIT_JOB_AES128_CBCS_1_9_ENC submit_job_aes128_cbcs_1_9_enc_sse +#define FLUSH_JOB_AES128_CBCS_1_9_ENC flush_job_aes128_cbcs_1_9_enc_sse +#define SUBMIT_JOB_AES128_CBCS_1_9_DEC submit_job_aes128_cbcs_1_9_dec_sse +#define AES_CBCS_1_9_DEC_128 aes_cbcs_1_9_dec_128_sse + +/* AES-ECB */ +#define SUBMIT_JOB_AES_ECB_128_ENC submit_job_aes_ecb_128_enc_sse +#define SUBMIT_JOB_AES_ECB_128_DEC submit_job_aes_ecb_128_dec_sse +#define SUBMIT_JOB_AES_ECB_192_ENC submit_job_aes_ecb_192_enc_sse 
+#define SUBMIT_JOB_AES_ECB_192_DEC submit_job_aes_ecb_192_dec_sse +#define SUBMIT_JOB_AES_ECB_256_ENC submit_job_aes_ecb_256_enc_sse +#define SUBMIT_JOB_AES_ECB_256_DEC submit_job_aes_ecb_256_dec_sse + +#define AES_ECB_ENC_128 aes_ecb_enc_128_by8_sse +#define AES_ECB_ENC_192 aes_ecb_enc_192_by8_sse +#define AES_ECB_ENC_256 aes_ecb_enc_256_by8_sse +#define AES_ECB_DEC_128 aes_ecb_dec_128_by8_sse +#define AES_ECB_DEC_192 aes_ecb_dec_192_by8_sse +#define AES_ECB_DEC_256 aes_ecb_dec_256_by8_sse + +/* AES-CTR */ +#define AES_CTR_128 aes_cntr_128_sse +#define AES_CTR_192 aes_cntr_192_sse +#define AES_CTR_256 aes_cntr_256_sse +#define AES_CTR_128_BIT aes_cntr_bit_128_sse +#define AES_CTR_192_BIT aes_cntr_bit_192_sse +#define AES_CTR_256_BIT aes_cntr_bit_256_sse + +/* AES-CCM */ +#define AES_CNTR_CCM_128 aes_cntr_ccm_128_sse +#define AES_CNTR_CCM_256 aes_cntr_ccm_256_sse + +#define FLUSH_JOB_AES128_CCM_AUTH flush_job_aes128_ccm_auth_x8_sse +#define SUBMIT_JOB_AES128_CCM_AUTH submit_job_aes128_ccm_auth_x8_sse + +#define FLUSH_JOB_AES256_CCM_AUTH flush_job_aes256_ccm_auth_x8_sse +#define SUBMIT_JOB_AES256_CCM_AUTH submit_job_aes256_ccm_auth_x8_sse + +/* AES-CMAC */ +#define FLUSH_JOB_AES128_CMAC_AUTH flush_job_aes128_cmac_auth_x8_sse +#define SUBMIT_JOB_AES128_CMAC_AUTH submit_job_aes128_cmac_auth_x8_sse + +#define FLUSH_JOB_AES256_CMAC_AUTH flush_job_aes256_cmac_auth_x8_sse +#define SUBMIT_JOB_AES256_CMAC_AUTH submit_job_aes256_cmac_auth_x8_sse + +/* AES-CFB */ +#define AES_CFB_128_ONE aes_cfb_128_one_sse +#define AES_CFB_256_ONE aes_cfb_256_one_sse + +/* AES-XCBC */ +#define SUBMIT_JOB_AES_XCBC submit_job_aes_xcbc_sse +#define FLUSH_JOB_AES_XCBC flush_job_aes_xcbc_sse + +/* PON */ +#define SUBMIT_JOB_PON_ENC submit_job_pon_enc_sse +#define SUBMIT_JOB_PON_DEC submit_job_pon_dec_sse +#define SUBMIT_JOB_PON_ENC_NO_CTR submit_job_pon_enc_no_ctr_sse +#define SUBMIT_JOB_PON_DEC_NO_CTR submit_job_pon_dec_no_ctr_sse + +/* SHA1/224/256/384/512 */ +#define SUBMIT_JOB_SHA1 submit_job_sha1_ni_sse +#define FLUSH_JOB_SHA1 flush_job_sha1_ni_sse +#define SUBMIT_JOB_SHA224 submit_job_sha224_ni_sse +#define FLUSH_JOB_SHA224 flush_job_sha224_ni_sse +#define SUBMIT_JOB_SHA256 submit_job_sha256_ni_sse +#define FLUSH_JOB_SHA256 flush_job_sha256_ni_sse +#define SUBMIT_JOB_SHA384 submit_job_sha384_sse +#define FLUSH_JOB_SHA384 flush_job_sha384_sse +#define SUBMIT_JOB_SHA512 submit_job_sha512_sse +#define FLUSH_JOB_SHA512 flush_job_sha512_sse + +/* HMAC-SHA1/224/256/384/512/MD5 */ +#define SUBMIT_JOB_HMAC submit_job_hmac_ni_sse +#define FLUSH_JOB_HMAC flush_job_hmac_ni_sse +#define SUBMIT_JOB_HMAC_SHA_224 submit_job_hmac_sha_224_ni_sse +#define FLUSH_JOB_HMAC_SHA_224 flush_job_hmac_sha_224_ni_sse +#define SUBMIT_JOB_HMAC_SHA_256 submit_job_hmac_sha_256_ni_sse +#define FLUSH_JOB_HMAC_SHA_256 flush_job_hmac_sha_256_ni_sse +#define SUBMIT_JOB_HMAC_SHA_384 submit_job_hmac_sha_384_sse +#define FLUSH_JOB_HMAC_SHA_384 flush_job_hmac_sha_384_sse +#define SUBMIT_JOB_HMAC_SHA_512 submit_job_hmac_sha_512_sse +#define FLUSH_JOB_HMAC_SHA_512 flush_job_hmac_sha_512_sse +#define SUBMIT_JOB_HMAC_MD5 submit_job_hmac_md5_sse +#define FLUSH_JOB_HMAC_MD5 flush_job_hmac_md5_sse + +/* DES & 3DES */ + +/* - default x86-64 implementation */ + +/* DES-DOCSIS */ + +/* - default x86-64 implementation */ + +/* CHACHA20 & POLY1305 */ +#define SUBMIT_JOB_CHACHA20_ENC_DEC submit_job_chacha20_enc_dec_sse +#define SUBMIT_JOB_CHACHA20_POLY1305 aead_chacha20_poly1305_sse +#define SUBMIT_JOB_CHACHA20_POLY1305_SGL aead_chacha20_poly1305_sgl_sse +#define 
POLY1305_MAC poly1305_mac_scalar + +/* ZUC EEA3 & EIA3 */ +#define SUBMIT_JOB_ZUC_EEA3 submit_job_zuc_eea3_gfni_sse +#define FLUSH_JOB_ZUC_EEA3 flush_job_zuc_eea3_gfni_sse +#define SUBMIT_JOB_ZUC_EIA3 submit_job_zuc_eia3_gfni_sse +#define FLUSH_JOB_ZUC_EIA3 flush_job_zuc_eia3_gfni_sse +#define SUBMIT_JOB_ZUC256_EEA3 submit_job_zuc256_eea3_gfni_sse +#define FLUSH_JOB_ZUC256_EEA3 flush_job_zuc256_eea3_gfni_sse +#define SUBMIT_JOB_ZUC256_EIA3 submit_job_zuc256_eia3_gfni_sse +#define FLUSH_JOB_ZUC256_EIA3 flush_job_zuc256_eia3_gfni_sse + +/* SNOW-V */ +#define SUBMIT_JOB_SNOW_V snow_v_sse +#define SUBMIT_JOB_SNOW_V_AEAD snow_v_aead_init_sse + +/* SNOW3G UE2 & UIA2 */ +static IMB_JOB * +submit_snow3g_uea2_job_sse(IMB_MGR *state, IMB_JOB *job) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + if ((job->msg_len_to_cipher_in_bits & 7) || + (job->cipher_start_offset_in_bits & 7)) + return def_submit_snow3g_uea2_job(state, job); + + return submit_job_snow3g_uea2_sse(snow3g_uea2_ooo, job); +} + +static IMB_JOB * +flush_snow3g_uea2_job_sse(IMB_MGR *state) +{ + MB_MGR_SNOW3G_OOO *snow3g_uea2_ooo = state->snow3g_uea2_ooo; + + return flush_job_snow3g_uea2_sse(snow3g_uea2_ooo); +} + +#define SUBMIT_JOB_SNOW3G_UEA2 submit_snow3g_uea2_job_sse +#define FLUSH_JOB_SNOW3G_UEA2 flush_snow3g_uea2_job_sse + +#define SUBMIT_JOB_SNOW3G_UIA2 submit_job_snow3g_uia2_sse +#define FLUSH_JOB_SNOW3G_UIA2 flush_job_snow3g_uia2_sse + +/* AES-DOCSIS */ +#define ETHERNET_FCS ethernet_fcs_sse_local + +/* ====================================================================== */ + +static void reset_ooo_mgrs(IMB_MGR *state) +{ + /* Init AES out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_ooo, 8); + ooo_mgr_aes_reset(state->aes192_ooo, 8); + ooo_mgr_aes_reset(state->aes256_ooo, 8); + + /* DOCSIS SEC BPI uses same settings as AES CBC */ + ooo_mgr_docsis_aes_reset(state->docsis128_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis128_crc32_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_sec_ooo, 8); + ooo_mgr_docsis_aes_reset(state->docsis256_crc32_sec_ooo, 8); + + /* Init ZUC out-of-order fields */ + ooo_mgr_zuc_reset(state->zuc_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc_eia3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eea3_ooo, 4); + ooo_mgr_zuc_reset(state->zuc256_eia3_ooo, 4); + + /* Init HMAC/SHA1 out-of-order fields */ + ooo_mgr_hmac_sha1_reset(state->hmac_sha_1_ooo, 2); + + /* Init HMAC/SHA224 out-of-order fields */ + ooo_mgr_hmac_sha224_reset(state->hmac_sha_224_ooo, 2); + + /* Init HMAC/SHA_256 out-of-order fields */ + ooo_mgr_hmac_sha256_reset(state->hmac_sha_256_ooo, 2); + + /* Init HMAC/SHA384 out-of-order fields */ + ooo_mgr_hmac_sha384_reset(state->hmac_sha_384_ooo, + SSE_NUM_SHA512_LANES); + + /* Init HMAC/SHA512 out-of-order fields */ + ooo_mgr_hmac_sha512_reset(state->hmac_sha_512_ooo, + SSE_NUM_SHA512_LANES); + + /* Init HMAC/MD5 out-of-order fields */ + ooo_mgr_hmac_md5_reset(state->hmac_md5_ooo, SSE_NUM_MD5_LANES); + + /* Init AES/XCBC OOO fields */ + ooo_mgr_aes_xcbc_reset(state->aes_xcbc_ooo, 4); + + /* Init AES-CCM auth out-of-order fields */ + ooo_mgr_ccm_reset(state->aes_ccm_ooo, 8); + ooo_mgr_ccm_reset(state->aes256_ccm_ooo, 8); + + /* Init AES-CMAC auth out-of-order fields */ + ooo_mgr_cmac_reset(state->aes_cmac_ooo, 8); + ooo_mgr_cmac_reset(state->aes256_cmac_ooo, 8); + + /* Init AES-CBCS out-of-order fields */ + ooo_mgr_aes_reset(state->aes128_cbcs_ooo, 4); + + /* Init SHA1 out-of-order fields */ + ooo_mgr_sha1_reset(state->sha_1_ooo, 2); + + /* Init SHA224 
out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_224_ooo, 2); + + /* Init SHA256 out-of-order fields */ + ooo_mgr_sha256_reset(state->sha_256_ooo, 2); + + /* Init SHA384 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_384_ooo, SSE_NUM_SHA512_LANES); + + /* Init SHA512 out-of-order fields */ + ooo_mgr_sha512_reset(state->sha_512_ooo, SSE_NUM_SHA512_LANES); + + /* Init SNOW3G-UEA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uea2_ooo, 4); + + /* Init SNOW3G-UIA out-of-order fields */ + ooo_mgr_snow3g_reset(state->snow3g_uia2_ooo, 4); +} + +IMB_DLL_LOCAL void +init_mb_mgr_sse_t3_internal(IMB_MGR *state, const int reset_mgrs) +{ + /* Check if CPU flags needed for SSE interface are present */ + if ((state->features & IMB_CPUFLAGS_SSE_T3) != IMB_CPUFLAGS_SSE_T3) { + imb_set_errno(state, IMB_ERR_MISSING_CPUFLAGS_INIT_MGR); + return; + } + + /* Set architecture for future checks */ + state->used_arch = (uint32_t) IMB_ARCH_SSE; + + if (reset_mgrs) { + reset_ooo_mgrs(state); + + /* Init "in order" components */ + state->next_job = 0; + state->earliest_job = -1; + } + + /* set handlers */ + state->get_next_job = GET_NEXT_JOB; + state->submit_job = SUBMIT_JOB; + state->submit_job_nocheck = SUBMIT_JOB_NOCHECK; + state->get_completed_job = GET_COMPLETED_JOB; + state->flush_job = FLUSH_JOB; + state->queue_size = QUEUE_SIZE; + state->get_next_burst = GET_NEXT_BURST; + state->submit_burst = SUBMIT_BURST; + state->submit_burst_nocheck= SUBMIT_BURST_NOCHECK; + state->flush_burst = FLUSH_BURST; + state->submit_cipher_burst = SUBMIT_CIPHER_BURST; + state->submit_cipher_burst_nocheck = SUBMIT_CIPHER_BURST_NOCHECK; + state->submit_hash_burst = SUBMIT_HASH_BURST; + state->submit_hash_burst_nocheck = SUBMIT_HASH_BURST_NOCHECK; + + state->keyexp_128 = aes_keyexp_128_sse; + state->keyexp_192 = aes_keyexp_192_sse; + state->keyexp_256 = aes_keyexp_256_sse; + + state->cmac_subkey_gen_128 = aes_cmac_subkey_gen_sse; + state->cmac_subkey_gen_256 = aes_cmac_256_subkey_gen_sse; + + state->xcbc_keyexp = aes_xcbc_expand_key_sse; + state->des_key_sched = des_key_schedule; + + state->sha1_one_block = sha1_one_block_sse; + state->sha1 = sha1_sse; + state->sha224_one_block = sha224_one_block_sse; + state->sha224 = sha224_sse; + state->sha256_one_block = sha256_one_block_sse; + state->sha256 = sha256_sse; + state->sha384_one_block = sha384_one_block_sse; + state->sha384 = sha384_sse; + state->sha512_one_block = sha512_one_block_sse; + state->sha512 = sha512_sse; + state->md5_one_block = md5_one_block_sse; + + state->aes128_cfb_one = aes_cfb_128_one_sse; + + state->eea3_1_buffer = zuc_eea3_1_buffer_sse; + state->eea3_4_buffer = zuc_eea3_4_buffer_gfni_sse; + state->eea3_n_buffer = zuc_eea3_n_buffer_gfni_sse; + state->eia3_n_buffer = zuc_eia3_n_buffer_gfni_sse; + state->eia3_1_buffer = zuc_eia3_1_buffer_sse; + + state->f8_1_buffer = kasumi_f8_1_buffer_sse; + state->f8_1_buffer_bit = kasumi_f8_1_buffer_bit_sse; + state->f8_2_buffer = kasumi_f8_2_buffer_sse; + state->f8_3_buffer = kasumi_f8_3_buffer_sse; + state->f8_4_buffer = kasumi_f8_4_buffer_sse; + state->f8_n_buffer = kasumi_f8_n_buffer_sse; + state->f9_1_buffer = kasumi_f9_1_buffer_sse; + state->f9_1_buffer_user = kasumi_f9_1_buffer_user_sse; + state->kasumi_init_f8_key_sched = kasumi_init_f8_key_sched_sse; + state->kasumi_init_f9_key_sched = kasumi_init_f9_key_sched_sse; + state->kasumi_key_sched_size = kasumi_key_sched_size_sse; + + state->snow3g_f8_1_buffer_bit = snow3g_f8_1_buffer_bit_sse; + state->snow3g_f8_1_buffer = snow3g_f8_1_buffer_sse; + 
state->snow3g_f8_2_buffer = snow3g_f8_2_buffer_sse; + state->snow3g_f8_4_buffer = snow3g_f8_4_buffer_sse; + state->snow3g_f8_8_buffer = snow3g_f8_8_buffer_sse; + state->snow3g_f8_n_buffer = snow3g_f8_n_buffer_sse; + state->snow3g_f8_8_buffer_multikey = snow3g_f8_8_buffer_multikey_sse; + state->snow3g_f8_n_buffer_multikey = snow3g_f8_n_buffer_multikey_sse; + state->snow3g_f9_1_buffer = snow3g_f9_1_buffer_sse; + state->snow3g_init_key_sched = snow3g_init_key_sched_sse; + state->snow3g_key_sched_size = snow3g_key_sched_size_sse; + + state->hec_32 = hec_32_sse; + state->hec_64 = hec_64_sse; + + state->crc32_ethernet_fcs = ethernet_fcs_sse; + state->crc16_x25 = crc16_x25_sse; + state->crc32_sctp = crc32_sctp_sse; + state->crc24_lte_a = crc24_lte_a_sse; + state->crc24_lte_b = crc24_lte_b_sse; + state->crc16_fp_data = crc16_fp_data_sse; + state->crc11_fp_header = crc11_fp_header_sse; + state->crc7_fp_header = crc7_fp_header_sse; + state->crc10_iuup_data = crc10_iuup_data_sse; + state->crc6_iuup_header = crc6_iuup_header_sse; + state->crc32_wimax_ofdma_data = crc32_wimax_ofdma_data_sse; + state->crc8_wimax_ofdma_hcs = crc8_wimax_ofdma_hcs_sse; + + state->chacha20_poly1305_init = init_chacha20_poly1305_sse; + state->chacha20_poly1305_enc_update = update_enc_chacha20_poly1305_sse; + state->chacha20_poly1305_dec_update = update_dec_chacha20_poly1305_sse; + state->chacha20_poly1305_finalize = finalize_chacha20_poly1305_sse; + + state->gcm128_enc = aes_gcm_enc_128_sse; + state->gcm192_enc = aes_gcm_enc_192_sse; + state->gcm256_enc = aes_gcm_enc_256_sse; + state->gcm128_dec = aes_gcm_dec_128_sse; + state->gcm192_dec = aes_gcm_dec_192_sse; + state->gcm256_dec = aes_gcm_dec_256_sse; + state->gcm128_init = aes_gcm_init_128_sse; + state->gcm192_init = aes_gcm_init_192_sse; + state->gcm256_init = aes_gcm_init_256_sse; + state->gcm128_init_var_iv = aes_gcm_init_var_iv_128_sse; + state->gcm192_init_var_iv = aes_gcm_init_var_iv_192_sse; + state->gcm256_init_var_iv = aes_gcm_init_var_iv_256_sse; + state->gcm128_enc_update = aes_gcm_enc_128_update_sse; + state->gcm192_enc_update = aes_gcm_enc_192_update_sse; + state->gcm256_enc_update = aes_gcm_enc_256_update_sse; + state->gcm128_dec_update = aes_gcm_dec_128_update_sse; + state->gcm192_dec_update = aes_gcm_dec_192_update_sse; + state->gcm256_dec_update = aes_gcm_dec_256_update_sse; + state->gcm128_enc_finalize = aes_gcm_enc_128_finalize_sse; + state->gcm192_enc_finalize = aes_gcm_enc_192_finalize_sse; + state->gcm256_enc_finalize = aes_gcm_enc_256_finalize_sse; + state->gcm128_dec_finalize = aes_gcm_dec_128_finalize_sse; + state->gcm192_dec_finalize = aes_gcm_dec_192_finalize_sse; + state->gcm256_dec_finalize = aes_gcm_dec_256_finalize_sse; + state->gcm128_precomp = aes_gcm_precomp_128_sse; + state->gcm192_precomp = aes_gcm_precomp_192_sse; + state->gcm256_precomp = aes_gcm_precomp_256_sse; + state->gcm128_pre = aes_gcm_pre_128_sse; + state->gcm192_pre = aes_gcm_pre_192_sse; + state->gcm256_pre = aes_gcm_pre_256_sse; + + state->ghash = ghash_sse; + state->ghash_pre = ghash_pre_sse; + + state->gmac128_init = imb_aes_gmac_init_128_sse; + state->gmac192_init = imb_aes_gmac_init_192_sse; + state->gmac256_init = imb_aes_gmac_init_256_sse; + state->gmac128_update = imb_aes_gmac_update_128_sse; + state->gmac192_update = imb_aes_gmac_update_192_sse; + state->gmac256_update = imb_aes_gmac_update_256_sse; + state->gmac128_finalize = imb_aes_gmac_finalize_128_sse; + state->gmac192_finalize = imb_aes_gmac_finalize_192_sse; + state->gmac256_finalize = 
imb_aes_gmac_finalize_256_sse; +} + +#include "mb_mgr_code.h" diff --git a/lib/sse/mb_mgr_zuc_submit_flush_gfni_sse.asm b/lib/sse_t3/mb_mgr_zuc_submit_flush_gfni_sse.asm similarity index 97% rename from lib/sse/mb_mgr_zuc_submit_flush_gfni_sse.asm rename to lib/sse_t3/mb_mgr_zuc_submit_flush_gfni_sse.asm index d2473f229b5973c0e0b796664c2adb90f9773619..77d26c1d90273b27edd34d046d083842706fac35 100644 --- a/lib/sse/mb_mgr_zuc_submit_flush_gfni_sse.asm +++ b/lib/sse_t3/mb_mgr_zuc_submit_flush_gfni_sse.asm @@ -37,4 +37,4 @@ %define ZUC128_INIT_4 asm_ZucInitialization_4_gfni_sse %define ZUC256_INIT_4 asm_Zuc256Initialization_4_gfni_sse %define ZUC_CIPHER_4 asm_ZucCipher_4_gfni_sse -%include "sse/mb_mgr_zuc_submit_flush_sse.asm" +%include "sse_t1/mb_mgr_zuc_submit_flush_sse.asm" diff --git a/lib/sse/zuc_x4_gfni_sse.asm b/lib/sse_t3/zuc_x4_gfni_sse.asm similarity index 98% rename from lib/sse/zuc_x4_gfni_sse.asm rename to lib/sse_t3/zuc_x4_gfni_sse.asm index 8d99a8a466bf1998d9aae83f2dd19c8b16b2cc73..6f269bfe1aba856057c23efa50655c1f936709fa 100644 --- a/lib/sse/zuc_x4_gfni_sse.asm +++ b/lib/sse_t3/zuc_x4_gfni_sse.asm @@ -34,4 +34,4 @@ %define ZUC_KEYGEN4B_4 asm_ZucGenKeystream4B_4_gfni_sse %define ZUC_EIA3ROUND16B asm_Eia3Round16B_gfni_sse %define ZUC_EIA3REMAINDER asm_Eia3Remainder_gfni_sse -%include "sse/zuc_x4_sse.asm" +%include "sse_t1/zuc_x4_sse.asm" diff --git a/lib/win_x64.mak b/lib/win_x64.mak index cf4beedc53f78227a2bd1d8a23f3d869978912ec..d0e5df95cd6b766348dfc3ff42a72c55759416c6 100644 --- a/lib/win_x64.mak +++ b/lib/win_x64.mak @@ -112,12 +112,6 @@ DAFLAGS = $(DAFLAGS) -DAESNI_EMU CC = cl -# check for CET support -!if ([$(CC) /? 2>&1 | findstr /C:"guard:cf" > nul] == 0) -DCFLAGS = $(DCFLAGS) /guard:cf -DLFLAGS = $(DLFLAGS) /CETCOMPAT /GUARD:CF /DYNAMICBASE -!endif - CFLAGS_ALL = $(EXTRA_CFLAGS) /DNO_COMPAT_IMB_API_053 /I. /Iinclude /Ino-aesni \ /nologo /Y- /W3 /WX- /Gm- /fp:precise /EHsc /Z7 @@ -131,8 +125,7 @@ LINK_TOOL = link LINKFLAGS = $(DLFLAGS) /nologo /machine:X64 AS = nasm -AFLAGS = $(DAFLAGS) -Werror -fwin64 -Xvc -DWIN_ABI -Iinclude/ \ - -I./ -Iavx/ -Iavx2/ -Iavx512/ -Isse/ +AFLAGS = $(DAFLAGS) -Werror -fwin64 -Xvc -DWIN_ABI -I. 
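A usage note for the new SSE type-3 manager defined above: `init_mb_mgr_sse_t3_internal()` refuses to run unless every flag in `IMB_CPUFLAGS_SSE_T3` is present and otherwise only wires up function pointers, so applications are expected to go through the regular allocation and auto-init entry points and let the library choose the code path. A minimal sketch, assuming the installed public header is `intel-ipsec-mb.h` (the internal sources include `ipsec-mb.h`); it is illustrative only and not part of this patch:

```
#include <stdio.h>
#include <intel-ipsec-mb.h> /* public header name assumed */

int main(void)
{
        IMB_ARCH arch = IMB_ARCH_NONE;
        IMB_MGR *mgr = alloc_mb_mgr(0); /* no feature overrides */

        if (mgr == NULL)
                return 1;

        /* Picks the best implementation for this CPU; the SSE T3 path
         * above is one of the candidates on GFNI/SHANI-capable parts. */
        init_mb_mgr_auto(mgr, &arch);

        if (imb_get_errno(mgr) != 0) {
                printf("init failed: %s\n",
                       imb_get_strerror(imb_get_errno(mgr)));
                free_mb_mgr(mgr);
                return 1;
        }

        printf("architecture selected: %d\n", (int) arch);
        free_mb_mgr(mgr);
        return 0;
}
```

On CPUs that do not meet the type-3 requirements, the same call sequence is expected to land on one of the other managers added in this patch (SSE T1/T2, AVX T1/T2, and so on).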
# dependency !ifndef DEPTOOL @@ -162,9 +155,16 @@ lib_objs1 = \ $(OBJ_DIR)\aes128_ecb_by4_sse.obj \ $(OBJ_DIR)\aes192_ecb_by4_sse.obj \ $(OBJ_DIR)\aes256_ecb_by4_sse.obj \ - $(OBJ_DIR)\aes128_ecb_by4_avx.obj \ - $(OBJ_DIR)\aes192_ecb_by4_avx.obj \ - $(OBJ_DIR)\aes256_ecb_by4_avx.obj \ + $(OBJ_DIR)\aes128_ecb_by8_sse.obj \ + $(OBJ_DIR)\aes192_ecb_by8_sse.obj \ + $(OBJ_DIR)\aes256_ecb_by8_sse.obj \ + $(OBJ_DIR)\aes128_ecb_by8_avx.obj \ + $(OBJ_DIR)\aes192_ecb_by8_avx.obj \ + $(OBJ_DIR)\aes256_ecb_by8_avx.obj \ + $(OBJ_DIR)\aes128_ecb_vaes_avx2.obj \ + $(OBJ_DIR)\aes192_ecb_vaes_avx2.obj \ + $(OBJ_DIR)\aes256_ecb_vaes_avx2.obj \ + $(OBJ_DIR)\aes_ecb_vaes_avx512.obj \ $(OBJ_DIR)\pon_by8_sse.obj \ $(OBJ_DIR)\aes128_cntr_by8_sse.obj \ $(OBJ_DIR)\pon_by8_avx.obj \ @@ -220,6 +220,7 @@ lib_objs1 = \ $(OBJ_DIR)\sha1_x4_avx.obj \ $(OBJ_DIR)\sha1_x4_sse.obj \ $(OBJ_DIR)\sha1_ni_x2_sse.obj \ + $(OBJ_DIR)\sha1_ni_x1_sse.obj \ $(OBJ_DIR)\sha1_one_block_avx.obj \ $(OBJ_DIR)\sha1_one_block_sse.obj \ $(OBJ_DIR)\sha1_x8_avx2.obj \ @@ -230,6 +231,7 @@ lib_objs1 = \ $(OBJ_DIR)\sha256_one_block_avx.obj \ $(OBJ_DIR)\sha256_one_block_sse.obj \ $(OBJ_DIR)\sha256_ni_x2_sse.obj \ + $(OBJ_DIR)\sha256_ni_x1_sse.obj \ $(OBJ_DIR)\sha256_x16_avx512.obj \ $(OBJ_DIR)\sha384_one_block_avx.obj \ $(OBJ_DIR)\sha384_one_block_sse.obj \ @@ -257,6 +259,7 @@ lib_objs1 = \ $(OBJ_DIR)\zuc_x16_vaes_avx512.obj \ $(OBJ_DIR)\zuc_iv.obj \ $(OBJ_DIR)\snow3g_sse.obj \ + $(OBJ_DIR)\snow3g_uea2_by4_sse.obj \ $(OBJ_DIR)\snow3g_uia2_by4_sse.obj \ $(OBJ_DIR)\snow3g_avx.obj \ $(OBJ_DIR)\snow3g_avx2.obj \ @@ -267,10 +270,17 @@ lib_objs1 = \ $(OBJ_DIR)\snow3g_uia2_by32_vaes_avx512.obj \ $(OBJ_DIR)\mb_mgr_snow3g_uea2_submit_flush_vaes_avx512.obj \ $(OBJ_DIR)\mb_mgr_snow3g_uia2_submit_flush_vaes_avx512.obj \ + $(OBJ_DIR)\mb_mgr_snow3g_uea2_submit_flush_x4_sse.obj \ + $(OBJ_DIR)\mb_mgr_snow3g_uia2_submit_flush_x4_sse.obj \ $(OBJ_DIR)\aes_xcbc_expand_key.obj \ $(OBJ_DIR)\md5_one_block.obj \ $(OBJ_DIR)\sha_sse.obj \ $(OBJ_DIR)\sha_avx.obj \ + $(OBJ_DIR)\sha_mb_sse.obj \ + $(OBJ_DIR)\sha_ni_mb_sse.obj \ + $(OBJ_DIR)\sha_mb_avx.obj \ + $(OBJ_DIR)\sha_mb_avx2.obj \ + $(OBJ_DIR)\sha_mb_avx512.obj \ $(OBJ_DIR)\des_key.obj \ $(OBJ_DIR)\des_basic.obj \ $(OBJ_DIR)\chacha20_sse.obj \ @@ -339,7 +349,9 @@ lib_objs1 = \ $(OBJ_DIR)\mb_mgr_aes128_cbcs_1_9_flush_avx.obj \ $(OBJ_DIR)\error.obj \ $(OBJ_DIR)\memcpy_sse.obj \ - $(OBJ_DIR)\memcpy_avx.obj + $(OBJ_DIR)\memcpy_avx.obj \ + $(OBJ_DIR)\ooo_mgr_reset.obj \ + $(OBJ_DIR)\self_test.obj lib_objs2 = \ $(OBJ_DIR)\mb_mgr_aes192_cbc_enc_flush_avx.obj \ @@ -435,10 +447,19 @@ lib_objs2 = \ $(OBJ_DIR)\mb_mgr_zuc_submit_flush_avx512.obj \ $(OBJ_DIR)\mb_mgr_zuc_submit_flush_gfni_avx512.obj \ $(OBJ_DIR)\mb_mgr_avx.obj \ + $(OBJ_DIR)\mb_mgr_avx_t1.obj \ + $(OBJ_DIR)\mb_mgr_avx_t2.obj \ $(OBJ_DIR)\mb_mgr_avx2.obj \ + $(OBJ_DIR)\mb_mgr_avx2_t1.obj \ + $(OBJ_DIR)\mb_mgr_avx2_t2.obj \ $(OBJ_DIR)\mb_mgr_avx512.obj \ + $(OBJ_DIR)\mb_mgr_avx512_t1.obj \ + $(OBJ_DIR)\mb_mgr_avx512_t2.obj \ $(OBJ_DIR)\mb_mgr_des_avx512.obj \ $(OBJ_DIR)\mb_mgr_sse.obj \ + $(OBJ_DIR)\mb_mgr_sse_t1.obj \ + $(OBJ_DIR)\mb_mgr_sse_t2.obj \ + $(OBJ_DIR)\mb_mgr_sse_t3.obj \ $(OBJ_DIR)\alloc.obj \ $(OBJ_DIR)\version.obj \ $(OBJ_DIR)\cpu_feature.obj \ @@ -591,32 +612,67 @@ $(DEPALL): $(all_objs) {x86_64\}.asm{$(OBJ_DIR)}.obj: $(AS) -MD $@.dep -o $@ $(AFLAGS) $< -{sse\}.c{$(OBJ_DIR)}.obj: +{sse_t1\}.c{$(OBJ_DIR)}.obj: + $(CC) /Fo$@ /c $(CFLAGS) $< + $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep + +{sse_t1\}.asm{$(OBJ_DIR)}.obj: + $(AS) -MD $@.dep -o $@ $(AFLAGS) $< + 
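Another application-facing knob introduced by this patch (see the cpu_feature.c and alloc.c hunks further down) is `IMB_FLAG_GFNI_OFF`, which makes `cpu_feature_adjust()` clear `IMB_FEATURE_GFNI` so that GFNI-dependent code paths are not selected. A hedged sketch of how a caller could use it; `alloc_mgr_without_gfni()` is a hypothetical helper, not library API:

```
#include <intel-ipsec-mb.h> /* public header name assumed */

/* Hypothetical helper: allocate a manager with GFNI detection disabled,
 * e.g. to exercise the non-GFNI code paths on a GFNI-capable CPU. */
static IMB_MGR *
alloc_mgr_without_gfni(void)
{
        IMB_ARCH arch;
        IMB_MGR *mgr = alloc_mb_mgr(IMB_FLAG_GFNI_OFF);

        if (mgr != NULL)
                init_mb_mgr_auto(mgr, &arch);

        return mgr;
}
```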
+{sse_t2\}.c{$(OBJ_DIR)}.obj: $(CC) /Fo$@ /c $(CFLAGS) $< $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep -{sse\}.asm{$(OBJ_DIR)}.obj: +{sse_t2\}.asm{$(OBJ_DIR)}.obj: + $(AS) -MD $@.dep -o $@ $(AFLAGS) $< + +{sse_t3\}.c{$(OBJ_DIR)}.obj: + $(CC) /Fo$@ /c $(CFLAGS) $< + $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep + +{sse_t3\}.asm{$(OBJ_DIR)}.obj: + $(AS) -MD $@.dep -o $@ $(AFLAGS) $< + +{avx_t1\}.c{$(OBJ_DIR)}.obj: + $(CC) /arch:AVX /Fo$@ /c $(CFLAGS) $< + $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep + +{avx_t1\}.asm{$(OBJ_DIR)}.obj: + $(AS) -MD $@.dep -o $@ $(AFLAGS) $< + +{avx_t2\}.c{$(OBJ_DIR)}.obj: + $(CC) /arch:AVX /Fo$@ /c $(CFLAGS) $< + $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep + +{avx_t2\}.asm{$(OBJ_DIR)}.obj: + $(AS) -MD $@.dep -o $@ $(AFLAGS) $< + +{avx2_t1\}.c{$(OBJ_DIR)}.obj: + $(CC) /arch:AVX /Fo$@ /c $(CFLAGS) $< + $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep + +{avx2_t1\}.asm{$(OBJ_DIR)}.obj: $(AS) -MD $@.dep -o $@ $(AFLAGS) $< -{avx\}.c{$(OBJ_DIR)}.obj: +{avx2_t2\}.c{$(OBJ_DIR)}.obj: $(CC) /arch:AVX /Fo$@ /c $(CFLAGS) $< $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep -{avx\}.asm{$(OBJ_DIR)}.obj: +{avx2_t2\}.asm{$(OBJ_DIR)}.obj: $(AS) -MD $@.dep -o $@ $(AFLAGS) $< -{avx2\}.c{$(OBJ_DIR)}.obj: +{avx512_t1\}.c{$(OBJ_DIR)}.obj: $(CC) /arch:AVX /Fo$@ /c $(CFLAGS) $< $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep -{avx2\}.asm{$(OBJ_DIR)}.obj: +{avx512_t1\}.asm{$(OBJ_DIR)}.obj: $(AS) -MD $@.dep -o $@ $(AFLAGS) $< -{avx512\}.c{$(OBJ_DIR)}.obj: +{avx512_t2\}.c{$(OBJ_DIR)}.obj: $(CC) /arch:AVX /Fo$@ /c $(CFLAGS) $< $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep -{avx512\}.asm{$(OBJ_DIR)}.obj: +{avx512_t2\}.asm{$(OBJ_DIR)}.obj: $(AS) -MD $@.dep -o $@ $(AFLAGS) $< {no-aesni\}.c{$(OBJ_DIR)}.obj: diff --git a/lib/x86_64/aes_xcbc_expand_key.c b/lib/x86_64/aes_xcbc_expand_key.c index 8fe2583eea06381e51625d48417278b11c557695..1313f07d568a19920732707524046fff24c35015 100644 --- a/lib/x86_64/aes_xcbc_expand_key.c +++ b/lib/x86_64/aes_xcbc_expand_key.c @@ -31,9 +31,11 @@ #ifdef AESNI_EMU #include "include/noaesni.h" #endif -#include "asm.h" #include "include/clear_regs_mem.h" #include "include/error.h" +#include "include/arch_noaesni.h" +#include "include/arch_sse_type1.h" +#include "include/arch_avx_type1.h" static uint32_t in[4*3] = { 0x01010101, 0x01010101, 0x01010101, 0x01010101, diff --git a/lib/x86_64/alloc.c b/lib/x86_64/alloc.c index bf904b390630611f871a874028f941e779aa4c15..683c7635cb51cc04b76744f86530a59443993216 100644 --- a/lib/x86_64/alloc.c +++ b/lib/x86_64/alloc.c @@ -84,6 +84,11 @@ const struct { OOO_INFO(aes256_cmac_ooo, MB_MGR_CMAC_OOO), OOO_INFO(snow3g_uea2_ooo, MB_MGR_SNOW3G_OOO), OOO_INFO(snow3g_uia2_ooo, MB_MGR_SNOW3G_OOO), + OOO_INFO(sha_1_ooo, MB_MGR_SHA_1_OOO), + OOO_INFO(sha_224_ooo, MB_MGR_SHA_256_OOO), + OOO_INFO(sha_256_ooo, MB_MGR_SHA_256_OOO), + OOO_INFO(sha_384_ooo, MB_MGR_SHA_512_OOO), + OOO_INFO(sha_512_ooo, MB_MGR_SHA_512_OOO) }; /** @@ -160,6 +165,7 @@ static void set_ooo_mgr_road_block(IMB_MGR *mgr) * IMB_FLAG_SHANI_OFF - disable use (and detection) of SHA extensions, * currently SHANI is only available for SSE * IMB_FLAG_AESNI_OFF - disable use (and detection) of AES extensions. + * IMB_FLAG_GFNI_OFF - disable use (and detection) of Galois Field extensions. * * @param reset_mgr if 0, IMB_MGR structure is not cleared, else it is. * @@ -272,6 +278,7 @@ free_mem(void *ptr) * IMB_FLAG_SHANI_OFF - disable use (and detection) of SHA extensions, * currently SHANI is only available for SSE * IMB_FLAG_AESNI_OFF - disable use (and detection) of AES extensions. 
+ * IMB_FLAG_GFNI_OFF - disable use (and detection) of Galois Field extensions. * * @return Pointer to allocated memory for MB_MGR structure * @retval NULL on allocation error diff --git a/lib/x86_64/chacha20_poly1305.c b/lib/x86_64/chacha20_poly1305.c index 9f5355589e139b44b8d6370743f2825b07e144ac..bad8c27859b2221b9aadb572243f3f1c9134cb90 100644 --- a/lib/x86_64/chacha20_poly1305.c +++ b/lib/x86_64/chacha20_poly1305.c @@ -162,41 +162,69 @@ void init_chacha20_poly1305(IMB_JOB *job, const IMB_ARCH arch, job->dst, job->msg_len_to_cipher_in_bytes, job->enc_keys, ctx, arch); } - job->status |= IMB_STATUS_COMPLETED; + job->status = IMB_STATUS_COMPLETED; } __forceinline -void update_chacha20_poly1305(IMB_JOB *job, const IMB_ARCH arch, - const unsigned ifma) +void update_chacha20_poly1305_direct(const void *key, + struct chacha20_poly1305_context_data *ctx, + void *dst, const void *src, + const uint64_t len, + const IMB_CIPHER_DIRECTION dir, + const IMB_ARCH arch, + const unsigned check_param, + const unsigned ifma) { - struct chacha20_poly1305_context_data *ctx = - job->u.CHACHA20_POLY1305.ctx; - uint64_t hash_len = job->msg_len_to_hash_in_bytes; +#ifdef SAFE_PARAM + if (check_param) { + /* reset error status */ + imb_set_errno(NULL, 0); + + if (key == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_KEY); + return; + } + if (ctx == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_CTX); + return; + } + if (src == NULL && len != 0) { + imb_set_errno(NULL, IMB_ERR_NULL_SRC); + return; + } + if (dst == NULL && len != 0) { + imb_set_errno(NULL, IMB_ERR_NULL_DST); + return; + } + } +#else + (void) check_param; +#endif uint64_t bytes_to_copy = 0; uint64_t remain_bytes_to_fill = (16 - ctx->remain_ct_bytes); uint64_t remain_ct_bytes; const uint8_t *remain_ct_ptr; + const uint8_t *src8 = (const uint8_t *) src; + uint8_t *dst8 = (uint8_t *) dst; + uint64_t length = len; /* Need to copy more bytes into scratchpad */ if ((ctx->remain_ct_bytes > 0) && (remain_bytes_to_fill > 0)) { - if (hash_len < remain_bytes_to_fill) - bytes_to_copy = hash_len; + if (len < remain_bytes_to_fill) + bytes_to_copy = length; else bytes_to_copy = remain_bytes_to_fill; } /* Increment total hash length */ - ctx->hash_len += job->msg_len_to_hash_in_bytes; + ctx->hash_len += length; - if (job->cipher_direction == IMB_DIR_ENCRYPT) { - chacha20_enc_dec_ks(job->src + - job->cipher_start_src_offset_in_bytes, - job->dst, job->msg_len_to_cipher_in_bytes, - job->enc_keys, ctx, arch); + if (dir == IMB_DIR_ENCRYPT) { + chacha20_enc_dec_ks(src, dst, length, key, ctx, arch); /* Copy more bytes on Poly scratchpad */ memcpy_asm(ctx->poly_scratch + ctx->remain_ct_bytes, - job->dst, bytes_to_copy, arch); + dst, bytes_to_copy, arch); ctx->remain_ct_bytes += bytes_to_copy; /* @@ -209,16 +237,16 @@ void update_chacha20_poly1305(IMB_JOB *job, const IMB_ARCH arch, ctx->remain_ct_bytes = 0; } - hash_len -= bytes_to_copy; - remain_ct_bytes = hash_len & HASH_REMAIN_CLAMP; - hash_len &= hash_len & HASH_LEN_CLAMP; + length -= bytes_to_copy; + remain_ct_bytes = length & HASH_REMAIN_CLAMP; + length &= HASH_LEN_CLAMP; /* compute hash after cipher on encrypt */ - poly1305_aead_update(job->dst + bytes_to_copy, - hash_len, ctx->hash, ctx->poly_key, arch, + poly1305_aead_update(dst8 + bytes_to_copy, + length, ctx->hash, ctx->poly_key, arch, ifma); - remain_ct_ptr = job->dst + bytes_to_copy + hash_len; + remain_ct_ptr = dst8 + bytes_to_copy + length; /* copy last bytes of ciphertext (less than 16 bytes) */ memcpy_asm(ctx->poly_scratch, remain_ct_ptr, remain_ct_bytes, arch); @@ 
-226,8 +254,7 @@ void update_chacha20_poly1305(IMB_JOB *job, const IMB_ARCH arch, } else { /* Copy more bytes on Poly scratchpad */ memcpy_asm(ctx->poly_scratch + ctx->remain_ct_bytes, - job->src + job->hash_start_src_offset_in_bytes, - bytes_to_copy, arch); + src, bytes_to_copy, arch); ctx->remain_ct_bytes += bytes_to_copy; /* @@ -240,28 +267,37 @@ void update_chacha20_poly1305(IMB_JOB *job, const IMB_ARCH arch, ctx->remain_ct_bytes = 0; } - hash_len -= bytes_to_copy; - remain_ct_bytes = hash_len & HASH_REMAIN_CLAMP; - hash_len &= hash_len & HASH_LEN_CLAMP; + length -= bytes_to_copy; + remain_ct_bytes = length & HASH_REMAIN_CLAMP; + length &= HASH_LEN_CLAMP; /* compute hash first on decrypt */ - poly1305_aead_update(job->src + - job->hash_start_src_offset_in_bytes + - bytes_to_copy, hash_len, ctx->hash, ctx->poly_key, - arch, ifma); + poly1305_aead_update(src8 + bytes_to_copy, length, ctx->hash, + ctx->poly_key, arch, ifma); - remain_ct_ptr = job->src + job->hash_start_src_offset_in_bytes - + bytes_to_copy + hash_len; + remain_ct_ptr = src8 + bytes_to_copy + length; /* copy last bytes of ciphertext (less than 16 bytes) */ memcpy_asm(ctx->poly_scratch, remain_ct_ptr, remain_ct_bytes, arch); ctx->remain_ct_bytes += remain_ct_bytes; - chacha20_enc_dec_ks(job->src + - job->cipher_start_src_offset_in_bytes, - job->dst, job->msg_len_to_cipher_in_bytes, - job->enc_keys, ctx, arch); + chacha20_enc_dec_ks(src, dst, len, key, ctx, arch); } - job->status |= IMB_STATUS_COMPLETED; +} + +__forceinline +void update_chacha20_poly1305(IMB_JOB *job, const IMB_ARCH arch, + const unsigned ifma) +{ + update_chacha20_poly1305_direct(job->enc_keys, + job->u.CHACHA20_POLY1305.ctx, + job->dst, + job->src + + job->cipher_start_src_offset_in_bytes, + job->msg_len_to_cipher_in_bytes, + job->cipher_direction, + arch, + 0, ifma); + job->status = IMB_STATUS_COMPLETED; } __forceinline @@ -370,13 +406,136 @@ void complete_chacha20_poly1305(IMB_JOB *job, const IMB_ARCH arch, clear_mem(ctx->last_ks, sizeof(ctx->last_ks)); clear_mem(ctx->poly_key, sizeof(ctx->poly_key)); #endif - job->status |= IMB_STATUS_COMPLETED; + job->status = IMB_STATUS_COMPLETED; +} + +__forceinline +void init_chacha20_poly1305_direct(const void *key, + struct chacha20_poly1305_context_data *ctx, + const void *iv, const void *aad, + const uint64_t aad_len, const IMB_ARCH arch, + const unsigned check_params, + const unsigned ifma) +{ +#ifdef SAFE_PARAM + if (check_params) { + /* reset error status */ + imb_set_errno(NULL, 0); + + if (key == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_KEY); + return; + } + if (ctx == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_CTX); + return; + } + if (iv == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_IV); + return; + } + if (aad == NULL && aad_len != 0) { + imb_set_errno(NULL, IMB_ERR_NULL_AAD); + return; + } + } +#else + (void) check_params; +#endif + ctx->hash[0] = 0; + ctx->hash[1] = 0; + ctx->hash[2] = 0; + ctx->aad_len = aad_len; + ctx->hash_len = 0; + ctx->last_block_count = 0; + ctx->remain_ks_bytes = 0; + ctx->remain_ct_bytes = 0; + + /* Store IV */ + memcpy_asm(ctx->IV, iv, 12, arch); + + /* Generate Poly key */ + if (arch == IMB_ARCH_SSE) + poly1305_key_gen_sse(key, iv, ctx->poly_key); + else + poly1305_key_gen_avx(key, iv, ctx->poly_key); + + /* Calculate hash over AAD */ + poly1305_aead_update(aad, aad_len, ctx->hash, ctx->poly_key, + arch, ifma); +} + +__forceinline +void +finalize_chacha20_poly1305_direct(struct chacha20_poly1305_context_data *ctx, + void *tag, const uint64_t tag_len, + const IMB_ARCH arch, 
+ const unsigned check_params, + const unsigned ifma) +{ +#ifdef SAFE_PARAM + if (check_params) { + /* reset error status */ + imb_set_errno(NULL, 0); + + if (ctx == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_CTX); + return; + } + if (tag == NULL) { + imb_set_errno(NULL, IMB_ERR_NULL_AUTH); + return; + } + if (tag_len == 0 || tag_len > 16) { + imb_set_errno(NULL, IMB_ERR_AUTH_TAG_LEN); + return; + } + } +#else + (void) check_params; +#endif + uint64_t last[2]; + uint8_t auth_tag[16]; + + if (ctx->remain_ct_bytes > 0) { + poly1305_aead_update(ctx->poly_scratch, + ctx->remain_ct_bytes, + ctx->hash, + ctx->poly_key, + arch, ifma); + ctx->remain_ct_bytes = 0; + } + + /* + * Construct extra block with AAD and message lengths for + * authentication + */ + last[0] = ctx->aad_len; + last[1] = ctx->hash_len; + poly1305_aead_update(last, sizeof(last), ctx->hash, ctx->poly_key, + arch, ifma); + + /* Finalize AEAD Poly1305 (final reduction and +S) */ + poly1305_aead_complete(ctx->hash, ctx->poly_key, auth_tag, arch, ifma); + + /* Copy N bytes of tag */ + memcpy_asm((uint8_t *) tag, auth_tag, tag_len, arch); + + /* Clear sensitive data from the context */ +#ifdef SAFE_DATA + clear_mem(ctx->last_ks, sizeof(ctx->last_ks)); + clear_mem(ctx->poly_key, sizeof(ctx->poly_key)); +#endif } __forceinline IMB_JOB *aead_chacha20_poly1305_sgl(IMB_JOB *job, const IMB_ARCH arch, const unsigned ifma) { + unsigned i; + struct chacha20_poly1305_context_data *ctx = + job->u.CHACHA20_POLY1305.ctx; + switch (job->sgl_state) { case IMB_SGL_INIT: init_chacha20_poly1305(job, arch, ifma); @@ -385,8 +544,35 @@ IMB_JOB *aead_chacha20_poly1305_sgl(IMB_JOB *job, const IMB_ARCH arch, update_chacha20_poly1305(job, arch, ifma); break; case IMB_SGL_COMPLETE: - default: complete_chacha20_poly1305(job, arch, ifma); + break; + case IMB_SGL_ALL: + default: + init_chacha20_poly1305_direct(job->enc_keys, + ctx, + job->iv, job->u.CHACHA20_POLY1305.aad, + job->u.CHACHA20_POLY1305.aad_len_in_bytes, + arch, 0, ifma); + for (i = 0; i < job->num_sgl_io_segs; i++) + update_chacha20_poly1305_direct(job->enc_keys, + ctx, + job->sgl_io_segs[i].out, + job->sgl_io_segs[i].in, + job->sgl_io_segs[i].len, + job->cipher_direction, + arch, + 0, ifma); + + finalize_chacha20_poly1305_direct(ctx, + job->auth_tag_output, + job->auth_tag_output_len_in_bytes, + arch, 0, ifma); + /* Clear sensitive data from the context */ +#ifdef SAFE_DATA + clear_mem(ctx->last_ks, sizeof(ctx->last_ks)); + clear_mem(ctx->poly_key, sizeof(ctx->poly_key)); +#endif + job->status = IMB_STATUS_COMPLETED; } return job; @@ -476,6 +662,7 @@ IMB_JOB *aead_chacha20_poly1305(IMB_JOB *job, const IMB_ARCH arch, for Poly key */ submit_job_chacha20_poly_dec_avx512(job, ks + 64, len_to_gen - 64); + break; } } @@ -490,7 +677,7 @@ IMB_JOB *aead_chacha20_poly1305(IMB_JOB *job, const IMB_ARCH arch, /* Finalize AEAD Poly1305 (final reduction and +S) */ poly1305_aead_complete(hash, ks, job->auth_tag_output, arch, ifma); - job->status |= IMB_STATUS_COMPLETED; + job->status = IMB_STATUS_COMPLETED; return job; } @@ -555,57 +742,6 @@ IMB_JOB *aead_chacha20_poly1305_sgl_avx512(IMB_MGR *mgr, IMB_JOB *job) return aead_chacha20_poly1305_sgl(job, IMB_ARCH_AVX512, 0); } -__forceinline -void init_chacha20_poly1305_direct(const void *key, - struct chacha20_poly1305_context_data *ctx, - const void *iv, const void *aad, - const uint64_t aad_len, const IMB_ARCH arch, - const unsigned ifma) -{ -#ifdef SAFE_PARAM - /* reset error status */ - imb_set_errno(NULL, 0); - - if (key == NULL) { - imb_set_errno(NULL, 
IMB_ERR_NULL_KEY); - return; - } - if (ctx == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_CTX); - return; - } - if (iv == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_IV); - return; - } - if (aad == NULL && aad_len != 0) { - imb_set_errno(NULL, IMB_ERR_NULL_AAD); - return; - } -#endif - ctx->hash[0] = 0; - ctx->hash[1] = 0; - ctx->hash[2] = 0; - ctx->aad_len = aad_len; - ctx->hash_len = 0; - ctx->last_block_count = 0; - ctx->remain_ks_bytes = 0; - ctx->remain_ct_bytes = 0; - - /* Store IV */ - memcpy_asm(ctx->IV, iv, 12, arch); - - /* Generate Poly key */ - if (arch == IMB_ARCH_SSE) - poly1305_key_gen_sse(key, iv, ctx->poly_key); - else - poly1305_key_gen_avx(key, iv, ctx->poly_key); - - /* Calculate hash over AAD */ - poly1305_aead_update(aad, aad_len, ctx->hash, ctx->poly_key, - arch, ifma); -} - IMB_DLL_LOCAL void init_chacha20_poly1305_sse(const void *key, struct chacha20_poly1305_context_data *ctx, @@ -613,7 +749,7 @@ void init_chacha20_poly1305_sse(const void *key, const uint64_t aad_len) { init_chacha20_poly1305_direct(key, ctx, iv, aad, - aad_len, IMB_ARCH_SSE, 0); + aad_len, IMB_ARCH_SSE, 1, 0); } IMB_DLL_LOCAL @@ -623,7 +759,7 @@ void init_chacha20_poly1305_avx(const void *key, const uint64_t aad_len) { init_chacha20_poly1305_direct(key, ctx, iv, aad, - aad_len, IMB_ARCH_AVX, 0); + aad_len, IMB_ARCH_AVX, 1, 0); } IMB_DLL_LOCAL @@ -633,7 +769,7 @@ void init_chacha20_poly1305_avx512(const void *key, const uint64_t aad_len) { init_chacha20_poly1305_direct(key, ctx, iv, aad, - aad_len, IMB_ARCH_AVX512, 0); + aad_len, IMB_ARCH_AVX512, 1, 0); } IMB_DLL_LOCAL @@ -643,121 +779,7 @@ void init_chacha20_poly1305_fma_avx512(const void *key, const uint64_t aad_len) { init_chacha20_poly1305_direct(key, ctx, iv, aad, - aad_len, IMB_ARCH_AVX512, 1); -} - -__forceinline -void update_chacha20_poly1305_direct(const void *key, - struct chacha20_poly1305_context_data *ctx, - void *dst, const void *src, - const uint64_t len, - const IMB_CIPHER_DIRECTION dir, - const IMB_ARCH arch, - const unsigned ifma) -{ -#ifdef SAFE_PARAM - /* reset error status */ - imb_set_errno(NULL, 0); - - if (key == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_KEY); - return; - } - if (ctx == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_CTX); - return; - } - if (src == NULL && len != 0) { - imb_set_errno(NULL, IMB_ERR_NULL_SRC); - return; - } - if (dst == NULL && len != 0) { - imb_set_errno(NULL, IMB_ERR_NULL_DST); - return; - } -#endif - uint64_t bytes_to_copy = 0; - uint64_t remain_bytes_to_fill = (16 - ctx->remain_ct_bytes); - uint64_t remain_ct_bytes; - const uint8_t *remain_ct_ptr; - const uint8_t *src8 = (const uint8_t *) src; - uint8_t *dst8 = (uint8_t *) dst; - uint64_t length = len; - - /* Need to copy more bytes into scratchpad */ - if ((ctx->remain_ct_bytes > 0) && (remain_bytes_to_fill > 0)) { - if (len < remain_bytes_to_fill) - bytes_to_copy = length; - else - bytes_to_copy = remain_bytes_to_fill; - } - - /* Increment total hash length */ - ctx->hash_len += length; - - if (dir == IMB_DIR_ENCRYPT) { - chacha20_enc_dec_ks(src, dst, length, key, ctx, arch); - - /* Copy more bytes on Poly scratchpad */ - memcpy_asm(ctx->poly_scratch + ctx->remain_ct_bytes, - dst, bytes_to_copy, arch); - ctx->remain_ct_bytes += bytes_to_copy; - - /* - * Compute hash on remaining bytes of previous segment and - * first bytes of this segment (if there are 16 bytes) - */ - if (ctx->remain_ct_bytes == 16) { - poly1305_aead_update(ctx->poly_scratch, 16, ctx->hash, - ctx->poly_key, arch, ifma); - ctx->remain_ct_bytes = 0; - } - - length -= 
bytes_to_copy; - remain_ct_bytes = length & HASH_REMAIN_CLAMP; - length &= length & HASH_LEN_CLAMP; - - /* compute hash after cipher on encrypt */ - poly1305_aead_update(dst8 + bytes_to_copy, - length, ctx->hash, ctx->poly_key, arch, - ifma); - - remain_ct_ptr = dst8 + bytes_to_copy + length; - /* copy last bytes of ciphertext (less than 16 bytes) */ - memcpy_asm(ctx->poly_scratch, remain_ct_ptr, remain_ct_bytes, - arch); - ctx->remain_ct_bytes += remain_ct_bytes; - } else { - /* Copy more bytes on Poly scratchpad */ - memcpy_asm(ctx->poly_scratch + ctx->remain_ct_bytes, - src, bytes_to_copy, arch); - ctx->remain_ct_bytes += bytes_to_copy; - - /* - * Compute hash on remaining bytes of previous segment and - * first bytes of this segment (if there are 16 bytes) - */ - if (ctx->remain_ct_bytes == 16) { - poly1305_aead_update(ctx->poly_scratch, 16, ctx->hash, - ctx->poly_key, arch, ifma); - ctx->remain_ct_bytes = 0; - } - - length -= bytes_to_copy; - remain_ct_bytes = length & HASH_REMAIN_CLAMP; - length &= length & HASH_LEN_CLAMP; - - /* compute hash first on decrypt */ - poly1305_aead_update(src8 + bytes_to_copy, length, ctx->hash, - ctx->poly_key, arch, ifma); - - remain_ct_ptr = src8 + bytes_to_copy + length; - /* copy last bytes of ciphertext (less than 16 bytes) */ - memcpy_asm(ctx->poly_scratch, remain_ct_ptr, remain_ct_bytes, - arch); - ctx->remain_ct_bytes += remain_ct_bytes; - chacha20_enc_dec_ks(src, dst, len, key, ctx, arch); - } + aad_len, IMB_ARCH_AVX512, 1, 1); } void update_enc_chacha20_poly1305_sse(const void *key, @@ -766,7 +788,7 @@ void update_enc_chacha20_poly1305_sse(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_ENCRYPT, IMB_ARCH_SSE, 0); + IMB_DIR_ENCRYPT, IMB_ARCH_SSE, 1, 0); } void update_enc_chacha20_poly1305_avx(const void *key, @@ -775,7 +797,7 @@ void update_enc_chacha20_poly1305_avx(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_ENCRYPT, IMB_ARCH_AVX, 0); + IMB_DIR_ENCRYPT, IMB_ARCH_AVX, 1, 0); } void update_enc_chacha20_poly1305_avx2(const void *key, @@ -784,7 +806,7 @@ void update_enc_chacha20_poly1305_avx2(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_ENCRYPT, IMB_ARCH_AVX2, 0); + IMB_DIR_ENCRYPT, IMB_ARCH_AVX2, 1, 0); } @@ -794,7 +816,7 @@ void update_enc_chacha20_poly1305_avx512(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_ENCRYPT, IMB_ARCH_AVX512, 0); + IMB_DIR_ENCRYPT, IMB_ARCH_AVX512, 1, 0); } void update_enc_chacha20_poly1305_fma_avx512(const void *key, @@ -803,7 +825,7 @@ void update_enc_chacha20_poly1305_fma_avx512(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_ENCRYPT, IMB_ARCH_AVX512, 1); + IMB_DIR_ENCRYPT, IMB_ARCH_AVX512, 1, 1); } void update_dec_chacha20_poly1305_sse(const void *key, @@ -812,7 +834,7 @@ void update_dec_chacha20_poly1305_sse(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_DECRYPT, IMB_ARCH_SSE, 0); + IMB_DIR_DECRYPT, IMB_ARCH_SSE, 1, 0); } void update_dec_chacha20_poly1305_avx(const void *key, @@ -821,7 +843,7 @@ void update_dec_chacha20_poly1305_avx(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_DECRYPT, IMB_ARCH_AVX, 0); + IMB_DIR_DECRYPT, IMB_ARCH_AVX, 1, 0); } void update_dec_chacha20_poly1305_avx2(const void *key, @@ 
-830,7 +852,7 @@ void update_dec_chacha20_poly1305_avx2(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_DECRYPT, IMB_ARCH_AVX2, 0); + IMB_DIR_DECRYPT, IMB_ARCH_AVX2, 1, 0); } void update_dec_chacha20_poly1305_avx512(const void *key, @@ -839,7 +861,7 @@ void update_dec_chacha20_poly1305_avx512(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_DECRYPT, IMB_ARCH_AVX512, 0); + IMB_DIR_DECRYPT, IMB_ARCH_AVX512, 1, 0); } void update_dec_chacha20_poly1305_fma_avx512(const void *key, @@ -848,76 +870,19 @@ void update_dec_chacha20_poly1305_fma_avx512(const void *key, const uint64_t len) { update_chacha20_poly1305_direct(key, ctx, dst, src, len, - IMB_DIR_DECRYPT, IMB_ARCH_AVX512, 1); -} - -__forceinline -void -finalize_chacha20_poly1305_direct(struct chacha20_poly1305_context_data *ctx, - void *tag, const uint64_t tag_len, - const IMB_ARCH arch, const unsigned ifma) -{ -#ifdef SAFE_PARAM - /* reset error status */ - imb_set_errno(NULL, 0); - - if (ctx == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_CTX); - return; - } - if (tag == NULL) { - imb_set_errno(NULL, IMB_ERR_NULL_AUTH); - return; - } - if (tag_len == 0 || tag_len > 16) { - imb_set_errno(NULL, IMB_ERR_AUTH_TAG_LEN); - return; - } -#endif - uint64_t last[2]; - uint8_t auth_tag[16]; - - if (ctx->remain_ct_bytes > 0) { - poly1305_aead_update(ctx->poly_scratch, - ctx->remain_ct_bytes, - ctx->hash, - ctx->poly_key, - arch, ifma); - ctx->remain_ct_bytes = 0; - } - - /* - * Construct extra block with AAD and message lengths for - * authentication - */ - last[0] = ctx->aad_len; - last[1] = ctx->hash_len; - poly1305_aead_update(last, sizeof(last), ctx->hash, ctx->poly_key, - arch, ifma); - - /* Finalize AEAD Poly1305 (final reduction and +S) */ - poly1305_aead_complete(ctx->hash, ctx->poly_key, auth_tag, arch, ifma); - - /* Copy N bytes of tag */ - memcpy_asm((uint8_t *) tag, auth_tag, tag_len, arch); - - /* Clear sensitive data from the context */ -#ifdef SAFE_DATA - clear_mem(ctx->last_ks, sizeof(ctx->last_ks)); - clear_mem(ctx->poly_key, sizeof(ctx->poly_key)); -#endif + IMB_DIR_DECRYPT, IMB_ARCH_AVX512, 1, 1); } void finalize_chacha20_poly1305_sse(struct chacha20_poly1305_context_data *ctx, void *tag, const uint64_t tag_len) { - finalize_chacha20_poly1305_direct(ctx, tag, tag_len, IMB_ARCH_SSE, 0); + finalize_chacha20_poly1305_direct(ctx, tag, tag_len, IMB_ARCH_SSE, 1, 0); } void finalize_chacha20_poly1305_avx(struct chacha20_poly1305_context_data *ctx, void *tag, const uint64_t tag_len) { - finalize_chacha20_poly1305_direct(ctx, tag, tag_len, IMB_ARCH_AVX, 0); + finalize_chacha20_poly1305_direct(ctx, tag, tag_len, IMB_ARCH_AVX, 1, 0); } void finalize_chacha20_poly1305_avx512( @@ -925,7 +890,7 @@ void finalize_chacha20_poly1305_avx512( void *tag, const uint64_t tag_len) { finalize_chacha20_poly1305_direct(ctx, tag, tag_len, - IMB_ARCH_AVX512, 0); + IMB_ARCH_AVX512, 1, 0); } void finalize_chacha20_poly1305_fma_avx512( @@ -933,5 +898,5 @@ void finalize_chacha20_poly1305_fma_avx512( void *tag, const uint64_t tag_len) { finalize_chacha20_poly1305_direct(ctx, tag, tag_len, - IMB_ARCH_AVX512, 1); + IMB_ARCH_AVX512, 1, 1); } diff --git a/lib/x86_64/cpu_feature.c b/lib/x86_64/cpu_feature.c index 185a9dd055d0e386baecc38e65a2db17d4d72ec1..5248853bc8eadecd280c4aaa8be200f6aafd5ce3 100644 --- a/lib/x86_64/cpu_feature.c +++ b/lib/x86_64/cpu_feature.c @@ -250,6 +250,9 @@ uint64_t cpu_feature_adjust(const uint64_t flags, uint64_t features) if (flags 
& IMB_FLAG_AESNI_OFF) features &= ~IMB_FEATURE_AESNI; + if (flags & IMB_FLAG_GFNI_OFF) + features &= ~IMB_FEATURE_GFNI; + return features; } diff --git a/lib/x86_64/error.c b/lib/x86_64/error.c index 7f9e83fb8aa84cc739bfa6aca5550f19eb067323..b9b9226c9d3ff8f62e45a92dabd2c84f18ca062f 100644 --- a/lib/x86_64/error.c +++ b/lib/x86_64/error.c @@ -83,7 +83,16 @@ IMB_DLL_LOCAL const int imb_errno_types[] = { IMB_ERR_JOB_NULL_HMAC_IPAD, IMB_ERR_JOB_NULL_XCBC_K1_EXP, IMB_ERR_JOB_NULL_XCBC_K2, - IMB_ERR_JOB_NULL_XCBC_K3 + IMB_ERR_JOB_NULL_XCBC_K3, + IMB_ERR_JOB_CIPH_DIR, + IMB_ERR_JOB_NULL_GHASH_INIT_TAG, + IMB_ERR_MISSING_CPUFLAGS_INIT_MGR, + IMB_ERR_NULL_JOB, + IMB_ERR_QUEUE_SPACE, + IMB_ERR_NULL_BURST, + IMB_ERR_BURST_SIZE, + IMB_ERR_BURST_OOO, + IMB_ERR_SELFTEST }; #ifdef DEBUG @@ -162,6 +171,8 @@ imb_get_strerror(int errnum) return "Null pointer to XCBC K2"; case IMB_ERR_JOB_NULL_XCBC_K3: return "Null pointer to XCBC K3"; + case IMB_ERR_JOB_NULL_GHASH_INIT_TAG: + return "Null pointer to GHASH initial tag value"; case IMB_ERR_NULL_SRC: return "Null source pointer (direct API)"; case IMB_ERR_NULL_DST: @@ -198,6 +209,23 @@ imb_get_strerror(int errnum) return "Null pointer to context (direct API)"; case IMB_ERR_NO_AESNI_EMU: return "No AESNI emulation support"; + case IMB_ERR_JOB_CIPH_DIR: + return "Invalid cipher direction"; + case IMB_ERR_MISSING_CPUFLAGS_INIT_MGR: + return "Failed to initialize IMB_MGR due to missing " + "required CPU flags"; + case IMB_ERR_NULL_JOB: + return "NULL job pointer"; + case IMB_ERR_QUEUE_SPACE: + return "Not enough space in job queue"; + case IMB_ERR_NULL_BURST: + return "NULL pointer to burst job array"; + case IMB_ERR_BURST_SIZE: + return "Invalid burst size"; + case IMB_ERR_BURST_OOO: + return "Burst jobs out of order"; + case IMB_ERR_SELFTEST: + return "Self-test failed"; default: return strerror(errnum); } diff --git a/lib/x86_64/mb_mgr_auto.c b/lib/x86_64/mb_mgr_auto.c index 5b56b96c0b2069c918cdbdd5b852a6829db60b7e..ccf4a18acc3fe467adf41aac2e7fe74112af7898 100644 --- a/lib/x86_64/mb_mgr_auto.c +++ b/lib/x86_64/mb_mgr_auto.c @@ -41,18 +41,6 @@ void init_mb_mgr_auto(IMB_MGR *state, IMB_ARCH *arch) { IMB_ARCH arch_detected = IMB_ARCH_NONE; -#ifdef AESNI_EMU - const uint64_t detect_no_aesni = - IMB_FEATURE_SSE4_2 | IMB_FEATURE_CMOV; -#endif - const uint64_t detect_sse = - IMB_FEATURE_SSE4_2 | IMB_FEATURE_CMOV | - IMB_FEATURE_AESNI | IMB_FEATURE_PCLMULQDQ; - const uint64_t detect_avx = - IMB_FEATURE_AVX | IMB_FEATURE_CMOV | IMB_FEATURE_AESNI; - const uint64_t detect_avx2 = IMB_FEATURE_AVX2 | detect_avx; - const uint64_t detect_avx512 = IMB_FEATURE_AVX512_SKX | detect_avx2; - /* reset error status */ imb_set_errno(state, 0); @@ -62,31 +50,31 @@ init_mb_mgr_auto(IMB_MGR *state, IMB_ARCH *arch) return; } #endif - if ((state->features & detect_avx512) == detect_avx512) { + if ((state->features & IMB_CPUFLAGS_AVX512) == IMB_CPUFLAGS_AVX512) { init_mb_mgr_avx512(state); arch_detected = IMB_ARCH_AVX512; goto init_mb_mgr_auto_ret; } - if ((state->features & detect_avx2) == detect_avx2) { + if ((state->features & IMB_CPUFLAGS_AVX2) == IMB_CPUFLAGS_AVX2) { init_mb_mgr_avx2(state); arch_detected = IMB_ARCH_AVX2; goto init_mb_mgr_auto_ret; } - if ((state->features & detect_avx) == detect_avx) { + if ((state->features & IMB_CPUFLAGS_AVX) == IMB_CPUFLAGS_AVX) { init_mb_mgr_avx(state); arch_detected = IMB_ARCH_AVX; goto init_mb_mgr_auto_ret; } - if ((state->features & detect_sse) == detect_sse) { + if ((state->features & IMB_CPUFLAGS_SSE) == IMB_CPUFLAGS_SSE) { 
init_mb_mgr_sse(state); arch_detected = IMB_ARCH_SSE; goto init_mb_mgr_auto_ret; } #ifdef AESNI_EMU - if ((state->features & detect_no_aesni) == detect_no_aesni) { + if ((state->features & IMB_CPUFLAGS_NO_AESNI) == IMB_CPUFLAGS_NO_AESNI) { init_mb_mgr_sse_no_aesni(state); arch_detected = IMB_ARCH_NOAESNI; goto init_mb_mgr_auto_ret; diff --git a/lib/x86_64/ooo_mgr_reset.c b/lib/x86_64/ooo_mgr_reset.c new file mode 100644 index 0000000000000000000000000000000000000000..5224ea920e71510446c931a79555fdd1a02cfc7f --- /dev/null +++ b/lib/x86_64/ooo_mgr_reset.c @@ -0,0 +1,417 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*******************************************************************************/ + +#include + +#include "ipsec-mb.h" +#include "include/ipsec_ooo_mgr.h" +#include "include/ooo_mgr_reset.h" +#include /* offsetof() */ + +IMB_DLL_LOCAL +void ooo_mgr_aes_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_AES_OOO *p_mgr = (MB_MGR_AES_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_AES_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + if (num_lanes == 4) + p_mgr->unused_lanes = 0xF3210; + else if (num_lanes == 8) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == 12) { + /* CBCS only */ + const size_t set_0xff_size = + sizeof(p_mgr->lens64) - (12 * sizeof(p_mgr->lens64[0])); + + p_mgr->unused_lanes = 0xBA9876543210; + memset(&p_mgr->lens64[12], 0xFF, set_0xff_size); + } else if (num_lanes == 16) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_docsis_aes_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_DOCSIS_AES_OOO *p_mgr = (MB_MGR_DOCSIS_AES_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_DOCSIS_AES_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + if (num_lanes == 4) + p_mgr->unused_lanes = 0xF3210; + else if (num_lanes == 8) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == 16) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_cmac_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_CMAC_OOO *p_mgr = (MB_MGR_CMAC_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_CMAC_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + if (num_lanes == 4) + p_mgr->unused_lanes = 0xF3210; + else if (num_lanes == 8) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == 16) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_ccm_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_CCM_OOO *p_mgr = (MB_MGR_CCM_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_CCM_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + if (num_lanes == 4) + p_mgr->unused_lanes = 0xF3210; + else if (num_lanes == 8) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == 16) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_aes_xcbc_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_AES_XCBC_OOO *p_mgr = (MB_MGR_AES_XCBC_OOO *) p_ooo_mgr; + unsigned i; + + memset(p_mgr, 0, offsetof(MB_MGR_AES_XCBC_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + + for (i = 0; i < num_lanes; i++) + p_mgr->ldata[i].final_block[16] = 0x80; + + if (num_lanes == 4) + p_mgr->unused_lanes = 0xFF03020100; + else if (num_lanes == 8) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == 16) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha1_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_HMAC_SHA_1_OOO *p_mgr = (MB_MGR_HMAC_SHA_1_OOO *) p_ooo_mgr; + unsigned i; + + memset(p_mgr, 0, offsetof(MB_MGR_HMAC_SHA_1_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + + for (i = 0; i < num_lanes; i++) { + p_mgr->ldata[i].extra_block[IMB_SHA1_BLOCK_SIZE] = 0x80; + + p_mgr->ldata[i].outer_block[IMB_SHA1_DIGEST_SIZE_IN_BYTES] = + 0x80; + p_mgr->ldata[i].outer_block[IMB_SHA1_BLOCK_SIZE - 2] = 0x02; + p_mgr->ldata[i].outer_block[IMB_SHA1_BLOCK_SIZE - 1] = 0xa0; + } + + IMB_ASSERT(AVX_NUM_SHA1_LANES == SSE_NUM_SHA1_LANES); + + if (num_lanes == 2) + p_mgr->unused_lanes = 0xFF0100; /* SHANI */ + 
else if (num_lanes == AVX_NUM_SHA1_LANES) + p_mgr->unused_lanes = 0xFF03020100; + else if (num_lanes == AVX2_NUM_SHA1_LANES) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == AVX512_NUM_SHA1_LANES) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha224_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_HMAC_SHA_256_OOO *p_mgr = (MB_MGR_HMAC_SHA_256_OOO *) p_ooo_mgr; + unsigned i; + + memset(p_mgr, 0, offsetof(MB_MGR_HMAC_SHA_256_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + + for (i = 0; i < num_lanes; i++) { + p_mgr->ldata[i].extra_block[IMB_SHA_256_BLOCK_SIZE] = 0x80; + + p_mgr->ldata[i].outer_block[IMB_SHA224_DIGEST_SIZE_IN_BYTES] = + 0x80; + p_mgr->ldata[i].outer_block[IMB_SHA_256_BLOCK_SIZE - 2] = 0x02; + p_mgr->ldata[i].outer_block[IMB_SHA_256_BLOCK_SIZE - 1] = 0xe0; + } + + IMB_ASSERT(AVX_NUM_SHA256_LANES == SSE_NUM_SHA256_LANES); + + if (num_lanes == 2) + p_mgr->unused_lanes = 0xFF0100; /* SHANI */ + else if (num_lanes == AVX_NUM_SHA256_LANES) + p_mgr->unused_lanes = 0xFF03020100; + else if (num_lanes == AVX2_NUM_SHA256_LANES) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == AVX512_NUM_SHA256_LANES) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha256_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_HMAC_SHA_256_OOO *p_mgr = (MB_MGR_HMAC_SHA_256_OOO *) p_ooo_mgr; + unsigned i; + + memset(p_mgr, 0, offsetof(MB_MGR_HMAC_SHA_256_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + + for (i = 0; i < num_lanes; i++) { + p_mgr->ldata[i].extra_block[IMB_SHA_256_BLOCK_SIZE] = 0x80; + + p_mgr->ldata[i].outer_block[IMB_SHA256_DIGEST_SIZE_IN_BYTES] = + 0x80; + p_mgr->ldata[i].outer_block[IMB_SHA_256_BLOCK_SIZE - 2] = 0x03; + p_mgr->ldata[i].outer_block[IMB_SHA_256_BLOCK_SIZE - 1] = 0x00; + } + + IMB_ASSERT(AVX_NUM_SHA256_LANES == SSE_NUM_SHA256_LANES); + + if (num_lanes == 2) + p_mgr->unused_lanes = 0xFF0100; /* SHANI */ + else if (num_lanes == AVX_NUM_SHA256_LANES) + p_mgr->unused_lanes = 0xFF03020100; + else if (num_lanes == AVX2_NUM_SHA256_LANES) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == AVX512_NUM_SHA256_LANES) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha384_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_HMAC_SHA_512_OOO *p_mgr = (MB_MGR_HMAC_SHA_512_OOO *) p_ooo_mgr; + unsigned i; + + memset(p_mgr, 0, offsetof(MB_MGR_HMAC_SHA_512_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + + for (i = 0; i < num_lanes; i++) { + p_mgr->ldata[i].extra_block[IMB_SHA_384_BLOCK_SIZE] = 0x80; + + p_mgr->ldata[i].outer_block[IMB_SHA384_DIGEST_SIZE_IN_BYTES] = + 0x80; + /* + * hmac outer block length always of fixed size, it is OKey + * length, a whole message block length, 1024 bits, with padding + * plus the length of the inner digest, which is 384 bits + * 1408 bits == 0x0580. The input message block needs to be + * converted to big endian within the sha implementation + * before use. 
+ */ + p_mgr->ldata[i].outer_block[IMB_SHA_384_BLOCK_SIZE - 2] = 0x05; + p_mgr->ldata[i].outer_block[IMB_SHA_384_BLOCK_SIZE - 1] = 0x80; + } + + IMB_ASSERT(AVX_NUM_SHA512_LANES == SSE_NUM_SHA512_LANES); + + if (num_lanes == AVX_NUM_SHA512_LANES) + p_mgr->unused_lanes = 0xFF0100; + else if (num_lanes == AVX2_NUM_SHA512_LANES) + p_mgr->unused_lanes = 0xFF03020100; + else if (num_lanes == AVX512_NUM_SHA512_LANES) + p_mgr->unused_lanes = 0xF76543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_hmac_sha512_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_HMAC_SHA_512_OOO *p_mgr = (MB_MGR_HMAC_SHA_512_OOO *) p_ooo_mgr; + unsigned i; + + memset(p_mgr, 0, offsetof(MB_MGR_HMAC_SHA_512_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + + for (i = 0; i < num_lanes; i++) { + p_mgr->ldata[i].extra_block[IMB_SHA_512_BLOCK_SIZE] = 0x80; + + p_mgr->ldata[i].outer_block[IMB_SHA512_DIGEST_SIZE_IN_BYTES] = + 0x80; + /* + * hmac outer block length always of fixed size, it is OKey + * length, a whole message block length, 1024 bits, with padding + * plus the length of the inner digest, which is 512 bits + * 1536 bits == 0x600. The input message block needs to be + * converted to big endian within the sha implementation + * before use. + */ + p_mgr->ldata[i].outer_block[IMB_SHA_512_BLOCK_SIZE - 2] = 0x06; + p_mgr->ldata[i].outer_block[IMB_SHA_512_BLOCK_SIZE - 1] = 0x00; + } + + IMB_ASSERT(AVX_NUM_SHA512_LANES == SSE_NUM_SHA512_LANES); + + if (num_lanes == AVX_NUM_SHA512_LANES) + p_mgr->unused_lanes = 0xFF0100; + else if (num_lanes == AVX2_NUM_SHA512_LANES) + p_mgr->unused_lanes = 0xFF03020100; + else if (num_lanes == AVX512_NUM_SHA512_LANES) + p_mgr->unused_lanes = 0xF76543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_hmac_md5_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_HMAC_MD5_OOO *p_mgr = (MB_MGR_HMAC_MD5_OOO *) p_ooo_mgr; + unsigned i; + + memset(p_mgr, 0, offsetof(MB_MGR_HMAC_MD5_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + + for (i = 0; i < num_lanes; i++) { + p_mgr->ldata[i].extra_block[64] = 0x80; + + p_mgr->ldata[i].outer_block[4 * 4] = 0x80; + p_mgr->ldata[i].outer_block[64 - 7] = 0x02; + p_mgr->ldata[i].outer_block[64 - 8] = 0x80; + } + + IMB_ASSERT(AVX_NUM_MD5_LANES == SSE_NUM_MD5_LANES); + + if (num_lanes == AVX_NUM_MD5_LANES) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == AVX2_NUM_MD5_LANES) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_zuc_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_ZUC_OOO *p_mgr = (MB_MGR_ZUC_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_ZUC_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + + if (num_lanes == 4) { + p_mgr->unused_lanes = 0xFF03020100; + p_mgr->unused_lane_bitmask = 0x0f; + } else if (num_lanes == 8) { + p_mgr->unused_lanes = 0xF76543210; + p_mgr->unused_lane_bitmask = 0xff; + } else if (num_lanes == 16) { + p_mgr->unused_lanes = 0xFEDCBA9876543210; + p_mgr->unused_lane_bitmask = 0xffff; + } +} + +IMB_DLL_LOCAL +void ooo_mgr_sha1_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_SHA_1_OOO *p_mgr = (MB_MGR_SHA_1_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_SHA_1_OOO,road_block)); + + if (num_lanes == 2) + p_mgr->unused_lanes = 0xF10; /* SHANI */ + else if (num_lanes == AVX_NUM_SHA1_LANES) + p_mgr->unused_lanes = 0xF3210; + else if (num_lanes == AVX2_NUM_SHA1_LANES) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == AVX512_NUM_SHA1_LANES) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + 
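[Editor's note] The reset routines above all seed unused_lanes with constants built from descending hex digits (0xF3210, 0xF76543210, 0xFEDCBA9876543210, and byte-wide variants such as 0xFF03020100). These read most naturally as a stack of free lane indices packed one index per nibble (or per byte), with 0xF/0xFF left on top as a sentinel once every lane has been handed out. The sketch below is illustrative only and is not part of the patch; the helper names pop_free_lane and push_free_lane are hypothetical, but the pop/push arithmetic matches the way a packed free-lane word of this shape is normally consumed.

```c
#include <stdint.h>

/*
 * Illustrative sketch (not a library API): treat a nibble-packed
 * "unused_lanes" word such as 0xF3210 as a stack of free lane indices.
 * Popping 0xF3210 yields lanes 0, 1, 2, 3 in turn and leaves 0xF,
 * the sentinel that signals "no free lane left".
 */
static unsigned pop_free_lane(uint64_t *unused_lanes)
{
        const unsigned lane = (unsigned) (*unused_lanes & 0xF);

        *unused_lanes >>= 4;    /* drop the index that was just taken */
        return lane;
}

static void push_free_lane(uint64_t *unused_lanes, const unsigned lane)
{
        *unused_lanes = (*unused_lanes << 4) | (lane & 0xF);
}
```

With that reading, constants like 0xFF03020100 are the same idea with 8-bit slots, and re-initializing lens[] to 0xff keeps empty lanes from ever being picked as the minimum-length lane when the manager searches for the next buffer to flush.
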
+IMB_DLL_LOCAL +void ooo_mgr_sha256_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_SHA_256_OOO *p_mgr = (MB_MGR_SHA_256_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_SHA_256_OOO,road_block)); + + if (num_lanes == 2) + p_mgr->unused_lanes = 0xF10; /* SHANI */ + if (num_lanes == AVX_NUM_SHA256_LANES) + p_mgr->unused_lanes = 0xF3210; + else if (num_lanes == AVX2_NUM_SHA256_LANES) + p_mgr->unused_lanes = 0xF76543210; + else if (num_lanes == AVX512_NUM_SHA256_LANES) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_sha512_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_SHA_512_OOO *p_mgr = (MB_MGR_SHA_512_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_SHA_512_OOO,road_block)); + + if (num_lanes == AVX_NUM_SHA512_LANES) + p_mgr->unused_lanes = 0xF10; + else if (num_lanes == AVX2_NUM_SHA512_LANES) + p_mgr->unused_lanes = 0xF3210; + else if (num_lanes == AVX512_NUM_SHA512_LANES) + p_mgr->unused_lanes = 0xF76543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_des_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_DES_OOO *p_mgr = (MB_MGR_DES_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_DES_OOO,road_block)); + + if (num_lanes == 16) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} + +IMB_DLL_LOCAL +void ooo_mgr_snow3g_reset(void *p_ooo_mgr, const unsigned num_lanes) +{ + MB_MGR_SNOW3G_OOO *p_mgr = (MB_MGR_SNOW3G_OOO *) p_ooo_mgr; + + memset(p_mgr, 0, offsetof(MB_MGR_SNOW3G_OOO,road_block)); + memset(p_mgr->lens, 0xff, sizeof(p_mgr->lens)); + + if (num_lanes == 4) { + /* + * lens[0:3] indicate outstanding bytes after + * rounding up length to dwords + * - initialize to 0 + * lens[4] common min length for all lanes in dwords + * - initialize to 0 + * lens[8:11] keep lengths rounded up to dwords + * - initialize to UINT32_MAX not to interfere + * when searching for minimum length + * lens[5:7] unused + * lens[12:15] unused + */ + p_mgr->lens[8] = 0xffffffff; + p_mgr->lens[9] = 0xffffffff; + p_mgr->lens[10] = 0xffffffff; + p_mgr->lens[11] = 0xffffffff; + p_mgr->unused_lanes = 0x3210; + } else if (num_lanes == 16) + p_mgr->unused_lanes = 0xFEDCBA9876543210; +} diff --git a/lib/x86_64/self_test.c b/lib/x86_64/self_test.c new file mode 100644 index 0000000000000000000000000000000000000000..267c650c9eab0f5614ff79ef1a6f2ca99364247c --- /dev/null +++ b/lib/x86_64/self_test.c @@ -0,0 +1,1598 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + + +#include +#include + +#include "ipsec-mb.h" +#include "arch_x86_64.h" + +static int process_job(IMB_MGR *p_mgr) +{ + IMB_JOB *job = IMB_SUBMIT_JOB(p_mgr); + + if (!job) { + const int err = imb_get_errno(p_mgr); + + /* check for error */ + if (err != 0) + return 0; + + /* flush to get the job processed */ + job = IMB_FLUSH_JOB(p_mgr); + + /* if flush returns nothing then it's an error */ + if (!job) + return 0; + } + + /* if returned job is not complete then it's an error */ + if (job->status != IMB_STATUS_COMPLETED) + return 0; + + return 1; +} + +/* + * ============================================================================= + * CIPHER SELF-TEST + * ============================================================================= + */ + +struct self_test_cipher_vector { + IMB_CIPHER_MODE cipher_mode; + const uint8_t *cipher_key; + size_t cipher_key_size; /* key size in bytes */ + const uint8_t *cipher_iv; /* initialization vector */ + size_t cipher_iv_size; + const uint8_t *plain_text; + size_t plain_text_size; + const uint8_t *cipher_text; +}; + +/* + * AES-CBC Test vectors from + * http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf + */ + +static const uint8_t aes_cbc_128_key[] = { + 0x2b, 0x7e, 0x15, 0x16, 0x28, 0xae, 0xd2, 0xa6, + 0xab, 0xf7, 0x15, 0x88, 0x09, 0xcf, 0x4f, 0x3c +}; +static const uint8_t aes_cbc_128_iv[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f +}; +static const uint8_t aes_cbc_128_plain_text[] = { + 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, + 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a, + 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c, + 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51, + 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11, + 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef, + 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17, + 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 +}; + +static const uint8_t aes_cbc_128_cipher_text[] = { + 0x76, 0x49, 0xab, 0xac, 0x81, 0x19, 0xb2, 0x46, + 0xce, 0xe9, 0x8e, 0x9b, 0x12, 0xe9, 0x19, 0x7d, + 0x50, 0x86, 0xcb, 0x9b, 0x50, 0x72, 0x19, 0xee, + 0x95, 0xdb, 0x11, 0x3a, 0x91, 0x76, 0x78, 0xb2, + 0x73, 0xbe, 0xd6, 0xb8, 0xe3, 0xc1, 0x74, 0x3b, + 0x71, 0x16, 0xe6, 0x9e, 0x22, 0x22, 0x95, 0x16, + 0x3f, 0xf1, 0xca, 0xa1, 0x68, 0x1f, 0xac, 0x09, + 0x12, 0x0e, 0xca, 0x30, 0x75, 0x86, 0xe1, 0xa7 +}; + +static const uint8_t aes_cbc_192_key[] = { + 0x8e, 0x73, 0xb0, 0xf7, 0xda, 0x0e, 0x64, 0x52, + 0xc8, 0x10, 0xf3, 0x2b, 0x80, 0x90, 0x79, 0xe5, + 0x62, 0xf8, 0xea, 0xd2, 0x52, 0x2c, 0x6b, 0x7b +}; +static const uint8_t aes_cbc_192_iv[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f +}; +static const uint8_t aes_cbc_192_plain_text[] = { + 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, + 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a, + 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c, + 0x9e, 0xb7, 0x6f, 
0xac, 0x45, 0xaf, 0x8e, 0x51, + 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11, + 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef, + 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17, + 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 +}; +static const uint8_t aes_cbc_192_cipher_text[] = { + 0x4f, 0x02, 0x1d, 0xb2, 0x43, 0xbc, 0x63, 0x3d, + 0x71, 0x78, 0x18, 0x3a, 0x9f, 0xa0, 0x71, 0xe8, + 0xb4, 0xd9, 0xad, 0xa9, 0xad, 0x7d, 0xed, 0xf4, + 0xe5, 0xe7, 0x38, 0x76, 0x3f, 0x69, 0x14, 0x5a, + 0x57, 0x1b, 0x24, 0x20, 0x12, 0xfb, 0x7a, 0xe0, + 0x7f, 0xa9, 0xba, 0xac, 0x3d, 0xf1, 0x02, 0xe0, + 0x08, 0xb0, 0xe2, 0x79, 0x88, 0x59, 0x88, 0x81, + 0xd9, 0x20, 0xa9, 0xe6, 0x4f, 0x56, 0x15, 0xcd +}; + +static const uint8_t aes_cbc_256_key[] = { + 0x60, 0x3d, 0xeb, 0x10, 0x15, 0xca, 0x71, 0xbe, + 0x2b, 0x73, 0xae, 0xf0, 0x85, 0x7d, 0x77, 0x81, + 0x1f, 0x35, 0x2c, 0x07, 0x3b, 0x61, 0x08, 0xd7, + 0x2d, 0x98, 0x10, 0xa3, 0x09, 0x14, 0xdf, 0xf4 +}; +static const uint8_t aes_cbc_256_iv[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f +}; +static const uint8_t aes_cbc_256_plain_text[] = { + 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, + 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a, + 0xae, 0x2d, 0x8a, 0x57, 0x1e, 0x03, 0xac, 0x9c, + 0x9e, 0xb7, 0x6f, 0xac, 0x45, 0xaf, 0x8e, 0x51, + 0x30, 0xc8, 0x1c, 0x46, 0xa3, 0x5c, 0xe4, 0x11, + 0xe5, 0xfb, 0xc1, 0x19, 0x1a, 0x0a, 0x52, 0xef, + 0xf6, 0x9f, 0x24, 0x45, 0xdf, 0x4f, 0x9b, 0x17, + 0xad, 0x2b, 0x41, 0x7b, 0xe6, 0x6c, 0x37, 0x10 +}; +static const uint8_t aes_cbc_256_cipher_text[] = { + 0xf5, 0x8c, 0x4c, 0x04, 0xd6, 0xe5, 0xf1, 0xba, + 0x77, 0x9e, 0xab, 0xfb, 0x5f, 0x7b, 0xfb, 0xd6, + 0x9c, 0xfc, 0x4e, 0x96, 0x7e, 0xdb, 0x80, 0x8d, + 0x67, 0x9f, 0x77, 0x7b, 0xc6, 0x70, 0x2c, 0x7d, + 0x39, 0xf2, 0x33, 0x69, 0xa9, 0xd9, 0xba, 0xcf, + 0xa5, 0x30, 0xe2, 0x63, 0x04, 0x23, 0x14, 0x61, + 0xb2, 0xeb, 0x05, 0xe2, 0xc3, 0x9b, 0xe9, 0xfc, + 0xda, 0x6c, 0x19, 0x07, 0x8c, 0x6a, 0x9d, 0x1b +}; + +/* + * Test Vector from + * https://tools.ietf.org/html/rfc3686 + */ + +static const uint8_t aes_ctr_128_key[] = { + 0xAE, 0x68, 0x52, 0xF8, 0x12, 0x10, 0x67, 0xCC, + 0x4B, 0xF7, 0xA5, 0x76, 0x55, 0x77, 0xF3, 0x9E, +}; +static const uint8_t aes_ctr_128_iv[] = { + 0x00, 0x00, 0x00, 0x30, /* nonce */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, +}; +static const uint8_t aes_ctr_128_plain_text[] = { + 0x53, 0x69, 0x6E, 0x67, 0x6C, 0x65, 0x20, 0x62, + 0x6C, 0x6F, 0x63, 0x6B, 0x20, 0x6D, 0x73, 0x67, +}; +static const uint8_t aes_ctr_128_cipher_text[] = { + 0xE4, 0x09, 0x5D, 0x4F, 0xB7, 0xA7, 0xB3, 0x79, + 0x2D, 0x61, 0x75, 0xA3, 0x26, 0x13, 0x11, 0xB8, +}; + +static const uint8_t aes_ctr_192_key[] = { + 0x16, 0xAF, 0x5B, 0x14, 0x5F, 0xC9, 0xF5, 0x79, + 0xC1, 0x75, 0xF9, 0x3E, 0x3B, 0xFB, 0x0E, 0xED, + 0x86, 0x3D, 0x06, 0xCC, 0xFD, 0xB7, 0x85, 0x15, +}; +static const uint8_t aes_ctr_192_iv[] = { + 0x00, 0x00, 0x00, 0x48, /* nonce */ + 0x36, 0x73, 0x3C, 0x14, 0x7D, 0x6D, 0x93, 0xCB, +}; +static const uint8_t aes_ctr_192_plain_text[] = { + 0x53, 0x69, 0x6E, 0x67, 0x6C, 0x65, 0x20, 0x62, + 0x6C, 0x6F, 0x63, 0x6B, 0x20, 0x6D, 0x73, 0x67, +}; +static const uint8_t aes_ctr_192_cipher_text[] = { + 0x4B, 0x55, 0x38, 0x4F, 0xE2, 0x59, 0xC9, 0xC8, + 0x4E, 0x79, 0x35, 0xA0, 0x03, 0xCB, 0xE9, 0x28, +}; + +static const uint8_t aes_ctr_256_key[] = { + 0x77, 0x6B, 0xEF, 0xF2, 0x85, 0x1D, 0xB0, 0x6F, + 0x4C, 0x8A, 0x05, 0x42, 0xC8, 0x69, 0x6F, 0x6C, + 0x6A, 0x81, 0xAF, 0x1E, 0xEC, 0x96, 0xB4, 0xD3, + 0x7F, 0xC1, 0xD6, 0x89, 0xE6, 0xC1, 0xC1, 0x04, 
+}; +static const uint8_t aes_ctr_256_iv[] = { + 0x00, 0x00, 0x00, 0x60, /* nonce */ + 0xDB, 0x56, 0x72, 0xC9, 0x7A, 0xA8, 0xF0, 0xB2, +}; +static const uint8_t aes_ctr_256_plain_text[] = { + 0x53, 0x69, 0x6E, 0x67, 0x6C, 0x65, 0x20, 0x62, + 0x6C, 0x6F, 0x63, 0x6B, 0x20, 0x6D, 0x73, 0x67, +}; +static const uint8_t aes_ctr_256_cipher_text[] = { + 0x14, 0x5A, 0xD0, 0x1D, 0xBF, 0x82, 0x4E, 0xC7, + 0x56, 0x08, 0x63, 0xDC, 0x71, 0xE3, 0xE0, 0xC0, +}; + + +#define ADD_CIPHER_VECTOR(_cmode,_key,_iv,_plain,_cipher) \ + {_cmode, _key, sizeof(_key), _iv, sizeof(_iv), \ + _plain, sizeof(_plain), _cipher} + +struct self_test_cipher_vector cipher_vectors[] = { + ADD_CIPHER_VECTOR(IMB_CIPHER_CBC, aes_cbc_128_key, aes_cbc_128_iv, + aes_cbc_128_plain_text, aes_cbc_128_cipher_text), + ADD_CIPHER_VECTOR(IMB_CIPHER_CBC, aes_cbc_192_key, aes_cbc_192_iv, + aes_cbc_192_plain_text, aes_cbc_192_cipher_text), + ADD_CIPHER_VECTOR(IMB_CIPHER_CBC, aes_cbc_256_key, aes_cbc_256_iv, + aes_cbc_256_plain_text, aes_cbc_256_cipher_text), + ADD_CIPHER_VECTOR(IMB_CIPHER_CNTR, aes_ctr_128_key, aes_ctr_128_iv, + aes_ctr_128_plain_text, aes_ctr_128_cipher_text), + ADD_CIPHER_VECTOR(IMB_CIPHER_CNTR, aes_ctr_192_key, aes_ctr_192_iv, + aes_ctr_192_plain_text, aes_ctr_192_cipher_text), + ADD_CIPHER_VECTOR(IMB_CIPHER_CNTR, aes_ctr_256_key, aes_ctr_256_iv, + aes_ctr_256_plain_text, aes_ctr_256_cipher_text), +}; + +static int self_test_ciphers(IMB_MGR *p_mgr) +{ + uint8_t scratch[256]; + DECLARE_ALIGNED(uint32_t expkey_enc[4*15], 16); + DECLARE_ALIGNED(uint32_t expkey_dec[4*15], 16); + unsigned i; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for (i = 0; i < IMB_DIM(cipher_vectors); i++) { + struct self_test_cipher_vector *v = &cipher_vectors[i]; + + IMB_ASSERT(v->plain_text_size <= sizeof(scratch)); + + /* message too long */ + if (v->plain_text_size > sizeof(scratch)) + return 0; + + switch (v->cipher_key_size) { + case IMB_KEY_128_BYTES: + IMB_AES_KEYEXP_128(p_mgr, v->cipher_key, + expkey_enc, expkey_dec); + break; + case IMB_KEY_192_BYTES: + IMB_AES_KEYEXP_192(p_mgr, v->cipher_key, + expkey_enc, expkey_dec); + break; + case IMB_KEY_256_BYTES: + IMB_AES_KEYEXP_256(p_mgr, v->cipher_key, + expkey_enc, expkey_dec); + break; + default: + /* invalid key size */ + return 0; + } + + /* test encrypt direction */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + job->hash_alg = IMB_AUTH_NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_CIPHER_HASH; + job->src = v->plain_text; + job->dst = scratch; + job->cipher_mode = v->cipher_mode; + job->enc_keys = expkey_enc; + if (v->cipher_mode != IMB_CIPHER_CNTR) + job->dec_keys = expkey_dec; + job->key_len_in_bytes = v->cipher_key_size; + job->iv = v->cipher_iv; + job->iv_len_in_bytes = v->cipher_iv_size; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = v->plain_text_size; + + memset(scratch, 0, sizeof(scratch)); + + /* submit job and get it processed */ + if (!process_job(p_mgr)) + return 0; + + /* check for cipher text mismatch */ + if (memcmp(scratch, v->cipher_text, v->plain_text_size)) + return 0; + + /* test decrypt direction */ + job = IMB_GET_NEXT_JOB(p_mgr); + + job->hash_alg = IMB_AUTH_NULL; + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = v->cipher_text; + job->dst = scratch; + job->cipher_mode = v->cipher_mode; + job->dec_keys = expkey_dec; + if (v->cipher_mode == IMB_CIPHER_CNTR) + job->enc_keys = expkey_enc; + job->key_len_in_bytes = v->cipher_key_size; + job->iv = 
v->cipher_iv; + job->iv_len_in_bytes = v->cipher_iv_size; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = v->plain_text_size; + + memset(scratch, 0, sizeof(scratch)); + + /* submit job and get it processed */ + if (!process_job(p_mgr)) + return 0; + + /* check for plain text mismatch */ + if (memcmp(scratch, v->plain_text, v->plain_text_size)) + return 0; + + } /* for(cipher_vectors) */ + + return 1; +} + +/* + * ============================================================================= + * HASH SELF-TEST + * ============================================================================= + */ + +struct self_test_hash_vector { + IMB_HASH_ALG hash_mode; + const uint8_t *hash_key; /* cmac, hmac, gmac */ + size_t hash_key_size; /* key size in bytes */ + const uint8_t *message; + size_t message_size; + const uint8_t *tag; + size_t tag_size; + const uint8_t *hash_iv; /* gmac */ + size_t hash_iv_size; +}; + +/* + * Test vectors come from this NIST document: + * + * https://csrc.nist.gov/csrc/media/projects/ + * cryptographic-standards-and-guidelines/documents/examples/sha_all.pdf + */ + +const uint8_t sha_message[] = { + 0x61, 0x62, 0x63, 0x64, 0x62, 0x63, 0x64, 0x65, + 0x63, 0x64, 0x65, 0x66, 0x64, 0x65, 0x66, 0x67, + 0x65, 0x66, 0x67, 0x68, 0x66, 0x67, 0x68, 0x69, + 0x67, 0x68, 0x69, 0x6a, 0x68, 0x69, 0x6a, 0x6b, + 0x69, 0x6a, 0x6b, 0x6c, 0x6a, 0x6b, 0x6c, 0x6d, + 0x6b, 0x6c, 0x6d, 0x6e, 0x6c, 0x6d, 0x6e, 0x6f, + 0x6d, 0x6e, 0x6f, 0x70, 0x6e, 0x6f, 0x70, 0x71 +}; + +const uint8_t sha1_digest[] = { + 0x84, 0x98, 0x3e, 0x44, + 0x1c, 0x3b, 0xd2, 0x6e, + 0xba, 0xae, 0x4a, 0xa1, + 0xf9, 0x51, 0x29, 0xe5, + 0xe5, 0x46, 0x70, 0xf1 +}; + +const uint8_t sha224_digest[] = { + 0x75, 0x38, 0x8b, 0x16, + 0x51, 0x27, 0x76, 0xcc, + 0x5d, 0xba, 0x5d, 0xa1, + 0xfd, 0x89, 0x01, 0x50, + 0xb0, 0xc6, 0x45, 0x5c, + 0xb4, 0xf5, 0x8b, 0x19, + 0x52, 0x52, 0x25, 0x25 +}; + +const uint8_t sha256_digest[] = { + 0x24, 0x8d, 0x6a, 0x61, + 0xd2, 0x06, 0x38, 0xb8, + 0xe5, 0xc0, 0x26, 0x93, + 0x0c, 0x3e, 0x60, 0x39, + 0xa3, 0x3c, 0xe4, 0x59, + 0x64, 0xff, 0x21, 0x67, + 0xf6, 0xec, 0xed, 0xd4, + 0x19, 0xdb, 0x06, 0xc1 +}; + +const uint8_t sha384_digest[] = { + 0x33, 0x91, 0xfd, 0xdd, 0xfc, 0x8d, 0xc7, 0x39, + 0x37, 0x07, 0xa6, 0x5b, 0x1b, 0x47, 0x09, 0x39, + 0x7c, 0xf8, 0xb1, 0xd1, 0x62, 0xaf, 0x05, 0xab, + 0xfe, 0x8f, 0x45, 0x0d, 0xe5, 0xf3, 0x6b, 0xc6, + 0xb0, 0x45, 0x5a, 0x85, 0x20, 0xbc, 0x4e, 0x6f, + 0x5f, 0xe9, 0x5b, 0x1f, 0xe3, 0xc8, 0x45, 0x2b +}; + +const uint8_t sha512_digest[] = { + 0x20, 0x4a, 0x8f, 0xc6, 0xdd, 0xa8, 0x2f, 0x0a, + 0x0c, 0xed, 0x7b, 0xeb, 0x8e, 0x08, 0xa4, 0x16, + 0x57, 0xc1, 0x6e, 0xf4, 0x68, 0xb2, 0x28, 0xa8, + 0x27, 0x9b, 0xe3, 0x31, 0xa7, 0x03, 0xc3, 0x35, + 0x96, 0xfd, 0x15, 0xc1, 0x3b, 0x1b, 0x07, 0xf9, + 0xaa, 0x1d, 0x3b, 0xea, 0x57, 0x78, 0x9c, 0xa0, + 0x31, 0xad, 0x85, 0xc7, 0xa7, 0x1d, 0xd7, 0x03, + 0x54, 0xec, 0x63, 0x12, 0x38, 0xca, 0x34, 0x45 +}; + +#define ADD_SHA_VECTOR(_hmode,_msg,_digest) \ + {_hmode, NULL, 0, _msg, sizeof(_msg), \ + _digest, sizeof(_digest), NULL, 0} + +/* + * Test vector from https://csrc.nist.gov/csrc/media/publications/fips/198/ + * archive/2002-03-06/documents/fips-198a.pdf + */ + +static const uint8_t hmac_sha1_key[] = { + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f, + 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, + 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, + 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, + 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f, + 0xa0 +}; + 
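[Editor's note] The hash self-test is table driven: each ADD_*_VECTOR macro expands to one struct self_test_hash_vector initializer that is later gathered into the hash_vectors[] table, with sizeof() capturing the array lengths at compile time. As a concrete illustration (the variable name entry below is ours, not part of the patch), ADD_SHA_VECTOR(IMB_AUTH_SHA_1, sha_message, sha1_digest) expands to:

```c
/* Expansion of ADD_SHA_VECTOR(IMB_AUTH_SHA_1, sha_message, sha1_digest);
 * plain SHA-1 needs no key and no IV, so those fields stay NULL/0.
 */
struct self_test_hash_vector entry = {
        IMB_AUTH_SHA_1,                     /* hash_mode */
        NULL, 0,                            /* hash_key, hash_key_size */
        sha_message, sizeof(sha_message),   /* message, message_size */
        sha1_digest, sizeof(sha1_digest),   /* tag: full 20-byte digest */
        NULL, 0                             /* hash_iv, hash_iv_size */
};
```

The HMAC and GMAC variants differ only in which fields they populate: ADD_HMAC_SHA_VECTOR additionally supplies the key, and ADD_GMAC_VECTOR supplies both the key and the IV.
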
+static const uint8_t hmac_sha1_message[] = { + 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x23, + 0x34 +}; + +static const uint8_t hmac_sha1_digest[] = { + 0x9e, 0xa8, 0x86, 0xef, 0xe2, 0x68, 0xdb, 0xec, + 0xce, 0x42, 0x0c, 0x75 +}; + +/* + * Test vector from https://csrc.nist.gov/csrc/media/projects/ + * cryptographic-standards-and-guidelines/documents/examples/hmac_sha224.pdf + */ +static const uint8_t hmac_sha224_key[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, + 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, + 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, + 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f +}; +static const uint8_t hmac_sha224_message[] = { + 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x6b, 0x65, 0x79, 0x6c, 0x65, + 0x6e, 0x3d, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x6c, + 0x65, 0x6e +}; +static const uint8_t hmac_sha224_digest[] = { + 0xc7, 0x40, 0x5e, 0x3a, 0xe0, 0x58, 0xe8, 0xcd, + 0x30, 0xb0, 0x8b, 0x41, 0x40, 0x24, 0x85, 0x81, + 0xed, 0x17, 0x4c, 0xb3, 0x4e, 0x12, 0x24, 0xbc, + 0xc1, 0xef, 0xc8, 0x1b +}; + +/* + * Test vector from https://csrc.nist.gov/csrc/media/projects/ + * cryptographic-standards-and-guidelines/documents/examples/hmac_sha256.pdf + */ +static const uint8_t hmac_sha256_key[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, + 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, + 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, + 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f +}; +static const uint8_t hmac_sha256_message[] = { + 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x6b, 0x65, 0x79, 0x6c, 0x65, + 0x6e, 0x3d, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x6c, + 0x65, 0x6e +}; +static const uint8_t hmac_sha256_digest[] = { + 0x8b, 0xb9, 0xa1, 0xdb, 0x98, 0x06, 0xf2, 0x0d, + 0xf7, 0xf7, 0x7b, 0x82, 0x13, 0x8c, 0x79, 0x14, + 0xd1, 0x74, 0xd5, 0x9e, 0x13, 0xdc, 0x4d, 0x01, + 0x69, 0xc9, 0x05, 0x7b, 0x13, 0x3e, 0x1d, 0x62, +}; + +/* + * Test vector from https://csrc.nist.gov/csrc/media/projects/ + * cryptographic-standards-and-guidelines/documents/examples/hmac_sha384.pdf + */ +static const uint8_t hmac_sha384_key[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, + 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, + 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, + 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, + 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, + 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, + 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, + 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, + 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f +}; +static const uint8_t hmac_sha384_message[] = { + 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x6d, + 0x65, 0x73, 
0x73, 0x61, 0x67, 0x65, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x6b, 0x65, 0x79, 0x6c, 0x65, + 0x6e, 0x3d, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x6c, + 0x65, 0x6e +}; +static const uint8_t hmac_sha384_digest[] = { + 0x63, 0xc5, 0xda, 0xa5, 0xe6, 0x51, 0x84, 0x7c, + 0xa8, 0x97, 0xc9, 0x58, 0x14, 0xab, 0x83, 0x0b, + 0xed, 0xed, 0xc7, 0xd2, 0x5e, 0x83, 0xee, 0xf9 +}; + +/* + * Test vector from https://csrc.nist.gov/csrc/media/projects/ + * cryptographic-standards-and-guidelines/documents/examples/hmac_sha512.pdf + */ +static const uint8_t hmac_sha512_key[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, + 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, + 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, + 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, + 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, + 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, + 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, + 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, + 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, + 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, + 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, + 0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f +}; +static const uint8_t hmac_sha512_message[] = { + 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x20, 0x6d, + 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x66, + 0x6f, 0x72, 0x20, 0x6b, 0x65, 0x79, 0x6c, 0x65, + 0x6e, 0x3d, 0x62, 0x6c, 0x6f, 0x63, 0x6b, 0x6c, + 0x65, 0x6e +}; +static const uint8_t hmac_sha512_digest[] = { + 0xfc, 0x25, 0xe2, 0x40, 0x65, 0x8c, 0xa7, 0x85, + 0xb7, 0xa8, 0x11, 0xa8, 0xd3, 0xf7, 0xb4, 0xca, + 0x48, 0xcf, 0xa2, 0x6a, 0x8a, 0x36, 0x6b, 0xf2, + 0xcd, 0x1f, 0x83, 0x6b, 0x05, 0xfc, 0xb0, 0x24 +}; + +#define ADD_HMAC_SHA_VECTOR(_hmode,_key,_msg,_digest) \ + {_hmode, _key, sizeof(_key), _msg, sizeof(_msg), \ + _digest, sizeof(_digest), NULL, 0} + +/* + * 3GPP 33.401 C.2.1 Test Case 2 + */ +static const uint8_t aes_cmac_128_key[] = { + 0xd3, 0xc5, 0xd5, 0x92, 0x32, 0x7f, 0xb1, 0x1c, + 0x40, 0x35, 0xc6, 0x68, 0x0a, 0xf8, 0xc6, 0xd1 +}; + +static const uint8_t aes_cmac_128_tag[] = { + 0xb9, 0x37, 0x87, 0xe6 +}; + +static const uint8_t aes_cmac_128_message[] = { + 0x39, 0x8a, 0x59, 0xb4, 0xd4, 0x00, 0x00, 0x00, + 0x48, 0x45, 0x83, 0xd5, 0xaf, 0xe0, 0x82, 0xae +}; + +static const uint8_t aes_cmac_256_key[] = { + 0x60, 0x3D, 0xEB, 0x10, 0x15, 0xCA, 0x71, 0xBE, + 0x2B, 0x73, 0xAE, 0xF0, 0x85, 0x7D, 0x77, 0x81, + 0x1F, 0x35, 0x2C, 0x07, 0x3B, 0x61, 0x08, 0xD7, + 0x2D, 0x98, 0x10, 0xA3, 0x09, 0x14, 0xDF, 0xF4 +}; +static const uint8_t aes_cmac_256_message[] = { + 0x6b, 0xc1, 0xbe, 0xe2, 0x2e, 0x40, 0x9f, 0x96, + 0xe9, 0x3d, 0x7e, 0x11, 0x73, 0x93, 0x17, 0x2a, + 0xae, 0x2d, 0x8a, 0x57 +}; +static const uint8_t aes_cmac_256_tag[] = { + 0x15, 0x67, 0x27, 0xDC, 0x08, 0x78, 0x94, 0x4A, + 0x02, 0x3C, 0x1F, 0xE0, 0x3B, 0xAD, 0x6D, 0x93 +}; + +#define ADD_CMAC_VECTOR(_hmode,_key,_msg,_digest) \ + {_hmode, _key, sizeof(_key), _msg, sizeof(_msg), \ + _digest, sizeof(_digest), NULL, 0} + +/* + * GMAC vectors + */ +static const uint8_t aes_gmac_128_key[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F +}; +static const uint8_t aes_gmac_128_iv[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, + 0x08, 0x09, 0x0A, 0x0B +}; +static const uint8_t aes_gmac_128_message[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 
0x07, + 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, + 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, + 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, + 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, +}; +static const uint8_t aes_gmac_128_tag[] = { + 0xC5, 0x3A, 0xF9, 0xE8 +}; + +static const uint8_t aes_gmac_192_key[] = { + 0xaa, 0x74, 0x0a, 0xbf, 0xad, 0xcd, 0xa7, 0x79, + 0x22, 0x0d, 0x3b, 0x40, 0x6c, 0x5d, 0x7e, 0xc0, + 0x9a, 0x77, 0xfe, 0x9d, 0x94, 0x10, 0x45, 0x39, +}; +static const uint8_t aes_gmac_192_iv[] = { + 0xab, 0x22, 0x65, 0xb4, 0xc1, 0x68, 0x95, 0x55, + 0x61, 0xf0, 0x43, 0x15 +}; +static const uint8_t aes_gmac_192_message[] = { + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, +}; +static const uint8_t aes_gmac_192_tag[] = { + 0xCF, 0x82, 0x80, 0x64, 0x02, 0x46, 0xF4, 0xFB, + 0x33, 0xAE, 0x1D, 0x90, 0xEA, 0x48, 0x83, 0xDB +}; + +static const uint8_t aes_gmac_256_key[] = { + 0xb5, 0x48, 0xe4, 0x93, 0x4f, 0x5c, 0x64, 0xd3, + 0xc0, 0xf0, 0xb7, 0x8f, 0x7b, 0x4d, 0x88, 0x24, + 0xaa, 0xc4, 0x6b, 0x3c, 0x8d, 0x2c, 0xc3, 0x5e, + 0xe4, 0xbf, 0xb2, 0x54, 0xe4, 0xfc, 0xba, 0xf7, +}; +static const uint8_t aes_gmac_256_iv[] = { + 0x2e, 0xed, 0xe1, 0xdc, 0x64, 0x47, 0xc7, 0xaf, + 0xc4, 0x41, 0x53, 0x58, +}; +static const uint8_t aes_gmac_256_message[] = { + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, + 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10, + 0x01 +}; +static const uint8_t aes_gmac_256_tag[] = { + 0x77, 0x46, 0x0D, 0x6F, 0xB1, 0x87, 0xDB, 0xA9, + 0x46, 0xAD, 0xCD, 0xFB, 0xB7, 0xF9, 0x13, 0xA1 +}; + +#define ADD_GMAC_VECTOR(_hmode,_key,_iv,_msg,_tag) \ + {_hmode, _key, sizeof(_key), _msg, sizeof(_msg), \ + _tag, sizeof(_tag), \ + _iv, sizeof(_iv)} + +struct self_test_hash_vector hash_vectors[] = { + ADD_SHA_VECTOR(IMB_AUTH_SHA_1, sha_message, sha1_digest), + ADD_SHA_VECTOR(IMB_AUTH_SHA_224, sha_message, sha224_digest), + ADD_SHA_VECTOR(IMB_AUTH_SHA_256, sha_message, sha256_digest), + ADD_SHA_VECTOR(IMB_AUTH_SHA_384, sha_message, sha384_digest), + ADD_SHA_VECTOR(IMB_AUTH_SHA_512, sha_message, sha512_digest), + ADD_HMAC_SHA_VECTOR(IMB_AUTH_HMAC_SHA_1, hmac_sha1_key, + hmac_sha1_message, hmac_sha1_digest), + ADD_HMAC_SHA_VECTOR(IMB_AUTH_HMAC_SHA_224, hmac_sha224_key, + hmac_sha224_message, hmac_sha224_digest), + ADD_HMAC_SHA_VECTOR(IMB_AUTH_HMAC_SHA_256, hmac_sha256_key, + hmac_sha256_message, hmac_sha256_digest), + ADD_HMAC_SHA_VECTOR(IMB_AUTH_HMAC_SHA_384, hmac_sha384_key, + hmac_sha384_message, hmac_sha384_digest), + ADD_HMAC_SHA_VECTOR(IMB_AUTH_HMAC_SHA_512, hmac_sha512_key, + hmac_sha512_message, hmac_sha512_digest), + ADD_CMAC_VECTOR(IMB_AUTH_AES_CMAC, aes_cmac_128_key, + 
aes_cmac_128_message, aes_cmac_128_tag), + ADD_CMAC_VECTOR(IMB_AUTH_AES_CMAC_256, aes_cmac_256_key, + aes_cmac_256_message, aes_cmac_256_tag), + ADD_GMAC_VECTOR(IMB_AUTH_AES_GMAC_128, aes_gmac_128_key, + aes_gmac_128_iv, aes_gmac_128_message, + aes_gmac_128_tag), + ADD_GMAC_VECTOR(IMB_AUTH_AES_GMAC_192, aes_gmac_192_key, + aes_gmac_192_iv, aes_gmac_192_message, + aes_gmac_192_tag), + ADD_GMAC_VECTOR(IMB_AUTH_AES_GMAC_256, aes_gmac_256_key, + aes_gmac_256_iv, aes_gmac_256_message, + aes_gmac_256_tag), +}; + +static int self_test_hash(IMB_MGR *p_mgr) +{ + /* hmac */ + DECLARE_ALIGNED(uint8_t hmac_ipad[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t hmac_opad[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + /* cmac */ + DECLARE_ALIGNED(uint32_t expkey[4*15], 16); + DECLARE_ALIGNED(uint32_t dust[4*15], 16); + uint32_t skey1[4], skey2[4]; + /* gmac */ + struct gcm_key_data gmac_key; + /* all */ + uint8_t scratch[IMB_SHA_512_BLOCK_SIZE]; + unsigned i; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for (i = 0; i < IMB_DIM(hash_vectors); i++) { + struct self_test_hash_vector *v = &hash_vectors[i]; + + IMB_ASSERT(v->tag_size <= sizeof(scratch)); + + /* tag too long */ + if (v->tag_size > sizeof(scratch)) + return 0; + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + job->hash_alg = v->hash_mode; + job->cipher_mode = IMB_CIPHER_NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = v->message; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = v->message_size; + job->auth_tag_output = scratch; + job->auth_tag_output_len_in_bytes = v->tag_size; + + if (v->hash_mode == IMB_AUTH_HMAC_SHA_1) { + /* compute IPAD and OPAD */ + unsigned j; + + IMB_ASSERT(sizeof(scratch) >= IMB_SHA1_BLOCK_SIZE); + + memset(scratch, 0x36, IMB_SHA1_BLOCK_SIZE); + for (j = 0; j < v->hash_key_size; j++) + scratch[j] ^= v->hash_key[j]; + IMB_SHA1_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + + memset(scratch, 0x5c, IMB_SHA1_BLOCK_SIZE); + for (j = 0; j < v->hash_key_size; j++) + scratch[j] ^= v->hash_key[j]; + IMB_SHA1_ONE_BLOCK(p_mgr, scratch, hmac_opad); + + job->u.HMAC._hashed_auth_key_xor_ipad = hmac_ipad; + job->u.HMAC._hashed_auth_key_xor_opad = hmac_opad; + } + + if (v->hash_mode == IMB_AUTH_HMAC_SHA_224 || + v->hash_mode == IMB_AUTH_HMAC_SHA_256) { + /* compute IPAD and OPAD */ + unsigned j; + + IMB_ASSERT(sizeof(scratch) >= IMB_SHA_256_BLOCK_SIZE); + + memset(scratch, 0x36, IMB_SHA_256_BLOCK_SIZE); + for (j = 0; j < v->hash_key_size; j++) + scratch[j] ^= v->hash_key[j]; + if (v->hash_mode == IMB_AUTH_HMAC_SHA_224) + IMB_SHA224_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + else + IMB_SHA256_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + + memset(scratch, 0x5c, IMB_SHA_256_BLOCK_SIZE); + for (j = 0; j < v->hash_key_size; j++) + scratch[j] ^= v->hash_key[j]; + if (v->hash_mode == IMB_AUTH_HMAC_SHA_224) + IMB_SHA224_ONE_BLOCK(p_mgr, scratch, hmac_opad); + else + IMB_SHA256_ONE_BLOCK(p_mgr, scratch, hmac_opad); + + job->u.HMAC._hashed_auth_key_xor_ipad = hmac_ipad; + job->u.HMAC._hashed_auth_key_xor_opad = hmac_opad; + } + + if (v->hash_mode == IMB_AUTH_HMAC_SHA_384 || + v->hash_mode == IMB_AUTH_HMAC_SHA_512) { + /* compute IPAD and OPAD */ + unsigned j; + + IMB_ASSERT(sizeof(scratch) >= IMB_SHA_512_BLOCK_SIZE); + + memset(scratch, 0x36, IMB_SHA_512_BLOCK_SIZE); + for (j = 0; j < v->hash_key_size; j++) + scratch[j] ^= v->hash_key[j]; + if (v->hash_mode == IMB_AUTH_HMAC_SHA_384) + IMB_SHA384_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + else + 
IMB_SHA512_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + + memset(scratch, 0x5c, IMB_SHA_512_BLOCK_SIZE); + for (j = 0; j < v->hash_key_size; j++) + scratch[j] ^= v->hash_key[j]; + if (v->hash_mode == IMB_AUTH_HMAC_SHA_384) + IMB_SHA384_ONE_BLOCK(p_mgr, scratch, hmac_opad); + else + IMB_SHA512_ONE_BLOCK(p_mgr, scratch, hmac_opad); + + job->u.HMAC._hashed_auth_key_xor_ipad = hmac_ipad; + job->u.HMAC._hashed_auth_key_xor_opad = hmac_opad; + } + + if (v->hash_mode == IMB_AUTH_AES_CMAC) { + IMB_AES_KEYEXP_128(p_mgr, v->hash_key, expkey, dust); + IMB_AES_CMAC_SUBKEY_GEN_128(p_mgr, expkey, skey1, skey2); + job->u.CMAC._key_expanded = expkey; + job->u.CMAC._skey1 = skey1; + job->u.CMAC._skey2 = skey2; + } + + if (v->hash_mode == IMB_AUTH_AES_CMAC_256) { + IMB_AES_KEYEXP_256(p_mgr, v->hash_key, expkey, dust); + IMB_AES_CMAC_SUBKEY_GEN_256(p_mgr, expkey, skey1, skey2); + job->u.CMAC._key_expanded = expkey; + job->u.CMAC._skey1 = skey1; + job->u.CMAC._skey2 = skey2; + } + + if (v->hash_mode == IMB_AUTH_AES_GMAC_128) { + IMB_AES128_GCM_PRE(p_mgr, v->hash_key, &gmac_key); + job->u.GMAC._key = &gmac_key; + job->u.GMAC._iv = v->hash_iv; + job->u.GMAC.iv_len_in_bytes = v->hash_iv_size; + } + + if (v->hash_mode == IMB_AUTH_AES_GMAC_192) { + IMB_AES192_GCM_PRE(p_mgr, v->hash_key, &gmac_key); + job->u.GMAC._key = &gmac_key; + job->u.GMAC._iv = v->hash_iv; + job->u.GMAC.iv_len_in_bytes = v->hash_iv_size; + } + + if (v->hash_mode == IMB_AUTH_AES_GMAC_256) { + IMB_AES256_GCM_PRE(p_mgr, v->hash_key, &gmac_key); + job->u.GMAC._key = &gmac_key; + job->u.GMAC._iv = v->hash_iv; + job->u.GMAC.iv_len_in_bytes = v->hash_iv_size; + } + + /* clear space where computed TAG is put into */ + memset(scratch, 0, sizeof(scratch)); + + /* submit job and get it processed */ + if (!process_job(p_mgr)) + return 0; + + /* check for TAG mismatch */ + if (memcmp(scratch, v->tag, v->tag_size)) + return 0; + + /* exercise direct API test if available */ + memset(scratch, 0, sizeof(scratch)); + + if (v->hash_mode == IMB_AUTH_SHA_1) { + memset(scratch, 0, sizeof(scratch)); + IMB_SHA1(p_mgr, v->message, v->message_size, scratch); + if (memcmp(scratch, v->tag, v->tag_size)) + return 0; + } + if (v->hash_mode == IMB_AUTH_SHA_224) { + memset(scratch, 0, sizeof(scratch)); + IMB_SHA224(p_mgr, v->message, v->message_size, scratch); + if (memcmp(scratch, v->tag, v->tag_size)) + return 0; + } + if (v->hash_mode == IMB_AUTH_SHA_256) { + memset(scratch, 0, sizeof(scratch)); + IMB_SHA256(p_mgr, v->message, v->message_size, scratch); + if (memcmp(scratch, v->tag, v->tag_size)) + return 0; + } + if (v->hash_mode == IMB_AUTH_SHA_384) { + memset(scratch, 0, sizeof(scratch)); + IMB_SHA384(p_mgr, v->message, v->message_size, scratch); + if (memcmp(scratch, v->tag, v->tag_size)) + return 0; + } + if (v->hash_mode == IMB_AUTH_SHA_512) { + memset(scratch, 0, sizeof(scratch)); + IMB_SHA512(p_mgr, v->message, v->message_size, scratch); + if (memcmp(scratch, v->tag, v->tag_size)) + return 0; + } + if (v->hash_mode == IMB_AUTH_AES_GMAC_128) { + struct gcm_context_data ctx; + + memset(scratch, 0, sizeof(scratch)); + IMB_AES128_GCM_PRE(p_mgr, v->hash_key, &gmac_key); + IMB_AES128_GMAC_INIT(p_mgr, &gmac_key, &ctx, v->hash_iv, + v->hash_iv_size); + IMB_AES128_GMAC_UPDATE(p_mgr, &gmac_key, &ctx, + v->message, v->message_size); + IMB_AES128_GMAC_FINALIZE(p_mgr, &gmac_key, &ctx, + scratch, v->tag_size); + if (memcmp(scratch, v->tag, v->tag_size)) + return 0; + } + if (v->hash_mode == IMB_AUTH_AES_GMAC_192) { + struct gcm_context_data ctx; + + memset(scratch, 0, 
sizeof(scratch)); + IMB_AES192_GCM_PRE(p_mgr, v->hash_key, &gmac_key); + IMB_AES192_GMAC_INIT(p_mgr, &gmac_key, &ctx, v->hash_iv, + v->hash_iv_size); + IMB_AES192_GMAC_UPDATE(p_mgr, &gmac_key, &ctx, + v->message, v->message_size); + IMB_AES192_GMAC_FINALIZE(p_mgr, &gmac_key, &ctx, + scratch, v->tag_size); + if (memcmp(scratch, v->tag, v->tag_size)) + return 0; + } + if (v->hash_mode == IMB_AUTH_AES_GMAC_256) { + struct gcm_context_data ctx; + + memset(scratch, 0, sizeof(scratch)); + IMB_AES256_GCM_PRE(p_mgr, v->hash_key, &gmac_key); + IMB_AES256_GMAC_INIT(p_mgr, &gmac_key, &ctx, v->hash_iv, + v->hash_iv_size); + IMB_AES256_GMAC_UPDATE(p_mgr, &gmac_key, &ctx, + v->message, v->message_size); + IMB_AES256_GMAC_FINALIZE(p_mgr, &gmac_key, &ctx, + scratch, v->tag_size); + if (memcmp(scratch, v->tag, v->tag_size)) + return 0; + } + + } /* for(hash_vectors) */ + + return 1; +} + +/* + * ============================================================================= + * AEAD SELF-TEST + * ============================================================================= + */ + +struct self_test_gcm_vector { + IMB_HASH_ALG hash_mode; + IMB_CIPHER_MODE cipher_mode; + const uint8_t *cipher_key; + size_t cipher_key_size; + const uint8_t *cipher_iv; + size_t cipher_iv_size; + const uint8_t *aad; + size_t aad_size; + const uint8_t *plain_text; + size_t plain_text_size; + const uint8_t *cipher_text; + const uint8_t *tag; + size_t tag_size; +}; + +/* + * http://csrc.nist.gov/groups/STM/cavp/gcmtestvectors.zip + * gcmEncryptExtIV128.rsp + */ +static const uint8_t aes_gcm_128_key[] = { + 0xc9, 0x39, 0xcc, 0x13, 0x39, 0x7c, 0x1d, 0x37, + 0xde, 0x6a, 0xe0, 0xe1, 0xcb, 0x7c, 0x42, 0x3c +}; +static const uint8_t aes_gcm_128_iv[] = { + 0xb3, 0xd8, 0xcc, 0x01, 0x7c, 0xbb, 0x89, 0xb3, + 0x9e, 0x0f, 0x67, 0xe2 +}; +static const uint8_t aes_gcm_128_plain_text[] = { + 0xc3, 0xb3, 0xc4, 0x1f, 0x11, 0x3a, 0x31, 0xb7, + 0x3d, 0x9a, 0x5c, 0xd4, 0x32, 0x10, 0x30, 0x69 +}; +static const uint8_t aes_gcm_128_aad[] = { + 0x24, 0x82, 0x56, 0x02, 0xbd, 0x12, 0xa9, 0x84, + 0xe0, 0x09, 0x2d, 0x3e, 0x44, 0x8e, 0xda, 0x5f +}; +static const uint8_t aes_gcm_128_cipher_text[] = { + 0x93, 0xfe, 0x7d, 0x9e, 0x9b, 0xfd, 0x10, 0x34, + 0x8a, 0x56, 0x06, 0xe5, 0xca, 0xfa, 0x73, 0x54 +}; +static const uint8_t aes_gcm_128_tag[] = { + 0x00, 0x32, 0xa1, 0xdc, 0x85, 0xf1, 0xc9, 0x78, + 0x69, 0x25, 0xa2, 0xe7, 0x1d, 0x82, 0x72, 0xdd +}; + +/* + * https://tools.ietf.org/html/draft-mcgrew-gcm-test-01 case #7 + */ +static const uint8_t aes_gcm_192_key[] = { + 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08, + 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, +}; +static const uint8_t aes_gcm_192_plain_text[] = { + 0x45, 0x00, 0x00, 0x28, 0xa4, 0xad, 0x40, 0x00, + 0x40, 0x06, 0x78, 0x80, 0x0a, 0x01, 0x03, 0x8f, + 0x0a, 0x01, 0x06, 0x12, 0x80, 0x23, 0x06, 0xb8, + 0xcb, 0x71, 0x26, 0x02, 0xdd, 0x6b, 0xb0, 0x3e, + 0x50, 0x10, 0x16, 0xd0, 0x75, 0x68, 0x00, 0x01, +}; +static const uint8_t aes_gcm_192_aad[] = { + 0x00, 0x00, 0xa5, 0xf8, 0x00, 0x00, 0x00, 0x0a, +}; +static const uint8_t aes_gcm_192_iv[] = { + 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88, +}; +static const uint8_t aes_gcm_192_cipher_text[] = { + 0xa5, 0xb1, 0xf8, 0x06, 0x60, 0x29, 0xae, 0xa4, + 0x0e, 0x59, 0x8b, 0x81, 0x22, 0xde, 0x02, 0x42, + 0x09, 0x38, 0xb3, 0xab, 0x33, 0xf8, 0x28, 0xe6, + 0x87, 0xb8, 0x85, 0x8b, 0x5b, 0xfb, 0xdb, 0xd0, + 0x31, 0x5b, 0x27, 0x45, 0x21, 0x44, 0xcc, 0x77, +}; +static const uint8_t 
aes_gcm_192_tag[] = { + 0x95, 0x45, 0x7b, 0x96, 0x52, 0x03, 0x7f, 0x53, + 0x18, 0x02, 0x7b, 0x5b, 0x4c, 0xd7, 0xa6, 0x36, +}; + +/* + * http://csrc.nist.gov/groups/ST/toolkit/BCM/ + * documents/proposedmodes/gcm/gcm-revised-spec.pdf + */ +static const uint8_t aes_gcm_256_key[] = { + 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08, + 0xfe, 0xff, 0xe9, 0x92, 0x86, 0x65, 0x73, 0x1c, + 0x6d, 0x6a, 0x8f, 0x94, 0x67, 0x30, 0x83, 0x08 +}; +static const uint8_t aes_gcm_256_plain_text[] = { + 0xd9, 0x31, 0x32, 0x25, 0xf8, 0x84, 0x06, 0xe5, + 0xa5, 0x59, 0x09, 0xc5, 0xaf, 0xf5, 0x26, 0x9a, + 0x86, 0xa7, 0xa9, 0x53, 0x15, 0x34, 0xf7, 0xda, + 0x2e, 0x4c, 0x30, 0x3d, 0x8a, 0x31, 0x8a, 0x72, + 0x1c, 0x3c, 0x0c, 0x95, 0x95, 0x68, 0x09, 0x53, + 0x2f, 0xcf, 0x0e, 0x24, 0x49, 0xa6, 0xb5, 0x25, + 0xb1, 0x6a, 0xed, 0xf5, 0xaa, 0x0d, 0xe6, 0x57, + 0xba, 0x63, 0x7b, 0x39 +}; +static const uint8_t aes_gcm_256_aad[] = { + 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xfe, 0xed, 0xfa, 0xce, 0xde, 0xad, 0xbe, 0xef, + 0xab, 0xad, 0xda, 0xd2 +}; +static const uint8_t aes_gcm_256_iv[] = { + 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, 0xdb, 0xad, + 0xde, 0xca, 0xf8, 0x88 +}; +static const uint8_t aes_gcm_256_cipher_text[] = { + 0x52, 0x2d, 0xc1, 0xf0, 0x99, 0x56, 0x7d, 0x07, + 0xf4, 0x7f, 0x37, 0xa3, 0x2a, 0x84, 0x42, 0x7d, + 0x64, 0x3a, 0x8c, 0xdc, 0xbf, 0xe5, 0xc0, 0xc9, + 0x75, 0x98, 0xa2, 0xbd, 0x25, 0x55, 0xd1, 0xaa, + 0x8c, 0xb0, 0x8e, 0x48, 0x59, 0x0d, 0xbb, 0x3d, + 0xa7, 0xb0, 0x8b, 0x10, 0x56, 0x82, 0x88, 0x38, + 0xc5, 0xf6, 0x1e, 0x63, 0x93, 0xba, 0x7a, 0x0a, + 0xbc, 0xc9, 0xf6, 0x62 +}; +static const uint8_t aes_gcm_256_tag[] = { + 0x76, 0xfc, 0x6e, 0xce, 0x0f, 0x4e, 0x17, 0x68, + 0xcd, 0xdf, 0x88, 0x53, 0xbb, 0x2d, 0x55, 0x1b +}; + +#define ADD_GCM_VECTOR(_key,_iv,_aad,_plain,_cipher,_tag) \ + {IMB_AUTH_AES_GMAC, IMB_CIPHER_GCM, _key, sizeof(_key), \ + _iv, sizeof(_iv), _aad, sizeof(_aad), \ + _plain, sizeof(_plain), _cipher, \ + _tag, sizeof(_tag)} + +struct self_test_gcm_vector aead_gcm_vectors[] = { + ADD_GCM_VECTOR(aes_gcm_128_key, aes_gcm_128_iv, aes_gcm_128_aad, + aes_gcm_128_plain_text, aes_gcm_128_cipher_text, + aes_gcm_128_tag), + ADD_GCM_VECTOR(aes_gcm_192_key, aes_gcm_192_iv, aes_gcm_192_aad, + aes_gcm_192_plain_text, aes_gcm_192_cipher_text, + aes_gcm_192_tag), + ADD_GCM_VECTOR(aes_gcm_256_key, aes_gcm_256_iv, aes_gcm_256_aad, + aes_gcm_256_plain_text, aes_gcm_256_cipher_text, + aes_gcm_256_tag) +}; + + +static int self_test_aead_gcm(IMB_MGR *p_mgr) +{ + struct gcm_key_data gcm_key; + struct gcm_context_data ctx; + uint8_t text[128], tag[16]; + unsigned i; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for (i = 0; i < IMB_DIM(aead_gcm_vectors); i++) { + struct self_test_gcm_vector *v = &aead_gcm_vectors[i]; + + IMB_ASSERT(v->tag_size <= sizeof(tag)); + IMB_ASSERT(v->plain_text_size <= sizeof(text)); + + /* tag too long */ + if (v->tag_size > sizeof(tag)) + return 0; + + /* message too long */ + if (v->plain_text_size > sizeof(text)) + return 0; + + switch (v->cipher_key_size) { + case IMB_KEY_128_BYTES: + IMB_AES128_GCM_PRE(p_mgr, v->cipher_key, + &gcm_key); + break; + case IMB_KEY_192_BYTES: + IMB_AES192_GCM_PRE(p_mgr, v->cipher_key, + &gcm_key); + break; + case IMB_KEY_256_BYTES: + IMB_AES256_GCM_PRE(p_mgr, v->cipher_key, + &gcm_key); + break; + default: + return 0; + } + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + /* encrypt test */ + job->cipher_mode = v->cipher_mode; + job->cipher_direction = IMB_DIR_ENCRYPT; + 
job->chain_order = IMB_ORDER_CIPHER_HASH; + job->key_len_in_bytes = v->cipher_key_size; + job->src = v->plain_text; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->plain_text_size; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->iv = v->cipher_iv; + job->iv_len_in_bytes = v->cipher_iv_size; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tag_size; + job->hash_alg = v->hash_mode; + job->enc_keys = &gcm_key; + job->dec_keys = &gcm_key; + job->u.GCM.aad = v->aad; + job->u.GCM.aad_len_in_bytes = v->aad_size; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and get it processed */ + if (!process_job(p_mgr)) + return 0; + + /* check for TAG mismatch */ + if (memcmp(tag, v->tag, v->tag_size)) + return 0; + + /* check for text mismatch */ + if (memcmp(text, v->cipher_text, v->plain_text_size)) + return 0; + + /* decrypt test */ + job = IMB_GET_NEXT_JOB(p_mgr); + + job->cipher_mode = v->cipher_mode; + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->key_len_in_bytes = v->cipher_key_size; + job->src = v->cipher_text; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->plain_text_size; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->iv = v->cipher_iv; + job->iv_len_in_bytes = v->cipher_iv_size; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tag_size; + job->hash_alg = v->hash_mode; + job->enc_keys = &gcm_key; + job->dec_keys = &gcm_key; + job->u.GCM.aad = v->aad; + job->u.GCM.aad_len_in_bytes = v->aad_size; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and get it processed */ + if (!process_job(p_mgr)) + return 0; + + /* check for TAG mismatch */ + if (memcmp(tag, v->tag, v->tag_size)) + return 0; + + /* check for text mismatch */ + if (memcmp(text, v->plain_text, v->plain_text_size)) + return 0; + + /* test direct API */ + + /* encrypt direction */ + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + switch (v->cipher_key_size) { + case IMB_KEY_128_BYTES: + IMB_AES128_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + v->cipher_iv, + v->cipher_iv_size, + v->aad, v->aad_size); + IMB_AES128_GCM_ENC_UPDATE(p_mgr, &gcm_key, &ctx, text, + v->plain_text, + v->plain_text_size); + IMB_AES128_GCM_ENC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tag_size); + break; + case IMB_KEY_192_BYTES: + IMB_AES192_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + v->cipher_iv, + v->cipher_iv_size, + v->aad, v->aad_size); + IMB_AES192_GCM_ENC_UPDATE(p_mgr, &gcm_key, &ctx, text, + v->plain_text, + v->plain_text_size); + IMB_AES192_GCM_ENC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tag_size); + break; + case IMB_KEY_256_BYTES: + IMB_AES256_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + v->cipher_iv, + v->cipher_iv_size, + v->aad, v->aad_size); + IMB_AES256_GCM_ENC_UPDATE(p_mgr, &gcm_key, &ctx, text, + v->plain_text, + v->plain_text_size); + IMB_AES256_GCM_ENC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tag_size); + break; + default: + return 0; + } + /* check for TAG mismatch */ + if (memcmp(tag, v->tag, v->tag_size)) + return 0; + + /* check for text mismatch */ + if (memcmp(text, v->cipher_text, v->plain_text_size)) + return 0; + + /* decrypt direction */ + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + switch (v->cipher_key_size) { + case IMB_KEY_128_BYTES: + IMB_AES128_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + v->cipher_iv, + v->cipher_iv_size, + v->aad, v->aad_size); + IMB_AES128_GCM_DEC_UPDATE(p_mgr, &gcm_key, 
&ctx, text, + v->cipher_text, + v->plain_text_size); + IMB_AES128_GCM_DEC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tag_size); + break; + case IMB_KEY_192_BYTES: + IMB_AES192_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + v->cipher_iv, + v->cipher_iv_size, + v->aad, v->aad_size); + IMB_AES192_GCM_DEC_UPDATE(p_mgr, &gcm_key, &ctx, text, + v->cipher_text, + v->plain_text_size); + IMB_AES192_GCM_DEC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tag_size); + break; + case IMB_KEY_256_BYTES: + IMB_AES256_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + v->cipher_iv, + v->cipher_iv_size, + v->aad, v->aad_size); + IMB_AES256_GCM_DEC_UPDATE(p_mgr, &gcm_key, &ctx, text, + v->cipher_text, + v->plain_text_size); + IMB_AES256_GCM_DEC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tag_size); + break; + default: + return 0; + } + /* check for TAG mismatch */ + if (memcmp(tag, v->tag, v->tag_size)) + return 0; + + /* check for text mismatch */ + if (memcmp(text, v->plain_text, v->plain_text_size)) + return 0; + + } /* for(gcm_vectors) */ + + return 1; +} + +struct self_test_aead_ccm_vector { + IMB_HASH_ALG hash_mode; + IMB_CIPHER_MODE cipher_mode; + const uint8_t *cipher_key; + size_t cipher_key_size; + const uint8_t *cipher_nonce; + size_t cipher_nonce_size; + const uint8_t *aad; + size_t aad_size; + const uint8_t *plain_text; + size_t plain_text_size; + const uint8_t *cipher_text; + const uint8_t *tag; + size_t tag_size; +}; + +/* + * Test vectors from https://tools.ietf.org/html/rfc3610 + */ +static const uint8_t aes_ccm_128_key[] = { + 0xC0, 0xC1, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, + 0xC8, 0xC9, 0xCA, 0xCB, 0xCC, 0xCD, 0xCE, 0xCF +}; +static const uint8_t aes_ccm_128_nonce[] = { + 0x00, 0x00, 0x00, 0x03, 0x02, 0x01, 0x00, 0xA0, + 0xA1, 0xA2, 0xA3, 0xA4, 0xA5 +}; +static const uint8_t aes_ccm_128_plain_text[] = { + 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E +}; +static const uint8_t aes_ccm_128_aad[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07 +}; +static const uint8_t aes_ccm_128_cipher_text[] = { + 0x58, 0x8C, 0x97, 0x9A, 0x61, 0xC6, 0x63, 0xD2, + 0xF0, 0x66, 0xD0, 0xC2, 0xC0, 0xF9, 0x89, 0x80, + 0x6D, 0x5F, 0x6B, 0x61, 0xDA, 0xC3, 0x84, +}; +static const uint8_t aes_ccm_128_tag[] = { + 0x17, 0xE8, 0xD1, 0x2C, 0xFD, 0xF9, 0x26, 0xE0 +}; + +static const uint8_t aes_ccm_256_key[] = { + 0xC0, 0xC1, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, + 0xC8, 0xC9, 0xCA, 0xCB, 0xCC, 0xCD, 0xCE, 0xCF, + 0xC0, 0xC1, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, + 0xC8, 0xC9, 0xCA, 0xCB, 0xCC, 0xCD, 0xCE, 0xCF +}; +static const uint8_t aes_ccm_256_nonce[] = { + 0x00, 0x00, 0x00, 0x03, 0x02, 0x01, 0x00, 0xA0, + 0xA1, 0xA2, 0xA3, 0xA4, 0xA5 +}; +static const uint8_t aes_ccm_256_plain_text[] = { + 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, + 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, + 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E +}; +static const uint8_t aes_ccm_256_aad[] = { + 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, +}; +static const uint8_t aes_ccm_256_cipher_text[] = { + 0x21, 0x61, 0x63, 0xDE, 0xCF, 0x74, 0xE0, 0x0C, + 0xAB, 0x04, 0x56, 0xFF, 0x45, 0xCD, 0xA7, 0x17, + 0x1F, 0xA5, 0x96, 0xD7, 0x0F, 0x76, 0x91 +}; +static const uint8_t aes_ccm_256_tag[] = { + 0xCA, 0x8A, 0xFA, 0xA2, 0x3F, 0x22, 0x3E, 0x64 +}; + +#define ADD_CCM_VECTOR(_key,_nonce,_aad,_plain,_cipher,_tag) \ + {IMB_AUTH_AES_CCM, IMB_CIPHER_CCM, _key, sizeof(_key), \ + _nonce, sizeof(_nonce), _aad, sizeof(_aad), \ + _plain, sizeof(_plain), _cipher, \ + _tag, 
sizeof(_tag)} + +struct self_test_aead_ccm_vector aead_ccm_vectors[] = { + ADD_CCM_VECTOR(aes_ccm_128_key, aes_ccm_128_nonce, aes_ccm_128_aad, + aes_ccm_128_plain_text, aes_ccm_128_cipher_text, + aes_ccm_128_tag), + ADD_CCM_VECTOR(aes_ccm_256_key, aes_ccm_256_nonce, aes_ccm_256_aad, + aes_ccm_256_plain_text, aes_ccm_256_cipher_text, + aes_ccm_256_tag) +}; + +static int self_test_aead_ccm(IMB_MGR *p_mgr) +{ + DECLARE_ALIGNED(uint32_t expkey[4*15], 16); + DECLARE_ALIGNED(uint32_t dust[4*15], 16); + uint8_t text[128], tag[16]; + unsigned i; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for (i = 0; i < IMB_DIM(aead_ccm_vectors); i++) { + struct self_test_aead_ccm_vector *v = &aead_ccm_vectors[i]; + + IMB_ASSERT(v->tag_size <= sizeof(tag)); + IMB_ASSERT(v->plain_text_size <= sizeof(text)); + + /* tag too long */ + if (v->tag_size > sizeof(tag)) + return 0; + + /* message too long */ + if (v->plain_text_size > sizeof(text)) + return 0; + + switch (v->cipher_key_size) { + case IMB_KEY_128_BYTES: + IMB_AES_KEYEXP_128(p_mgr, v->cipher_key, expkey, + dust); + break; + case IMB_KEY_256_BYTES: + IMB_AES_KEYEXP_256(p_mgr, v->cipher_key, expkey, + dust); + break; + default: + return 0; + } + + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + /* encrypt test */ + job->cipher_mode = v->cipher_mode; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->key_len_in_bytes = v->cipher_key_size; + job->src = v->plain_text; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->plain_text_size; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->msg_len_to_hash_in_bytes = v->plain_text_size; + job->hash_start_src_offset_in_bytes = UINT64_C(0); + job->iv = v->cipher_nonce; + job->iv_len_in_bytes = v->cipher_nonce_size; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tag_size; + job->hash_alg = v->hash_mode; + job->enc_keys = expkey; + job->dec_keys = expkey; + job->u.CCM.aad_len_in_bytes = v->aad_size; + job->u.CCM.aad = v->aad; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and get it processed */ + if (!process_job(p_mgr)) + return 0; + + /* check for TAG mismatch */ + if (memcmp(tag, v->tag, v->tag_size)) + return 0; + + /* check for text mismatch */ + if (memcmp(text, v->cipher_text, v->plain_text_size)) + return 0; + + /* decrypt test */ + job = IMB_GET_NEXT_JOB(p_mgr); + + job->cipher_mode = v->cipher_mode; + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_CIPHER_HASH; + job->key_len_in_bytes = v->cipher_key_size; + job->src = v->cipher_text; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->plain_text_size; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->msg_len_to_hash_in_bytes = v->plain_text_size; + job->hash_start_src_offset_in_bytes = UINT64_C(0); + job->iv = v->cipher_nonce; + job->iv_len_in_bytes = v->cipher_nonce_size; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tag_size; + job->hash_alg = v->hash_mode; + job->enc_keys = expkey; + job->dec_keys = expkey; + job->u.CCM.aad_len_in_bytes = v->aad_size; + job->u.CCM.aad = v->aad; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and get it processed */ + if (!process_job(p_mgr)) + return 0; + + /* check for TAG mismatch */ + if (memcmp(tag, v->tag, v->tag_size)) + return 0; + + /* check for text mismatch */ + if (memcmp(text, v->plain_text, v->plain_text_size)) + return 0; + } /* for(ccm_vectors) */ + + return 1; +} + +static int 
self_test_aead(IMB_MGR *p_mgr) +{ + if (!self_test_aead_gcm(p_mgr)) + return 0; + if (!self_test_aead_ccm(p_mgr)) + return 0; + return 1; +} + +/* + * ============================================================================= + * SELF-TEST INTERNAL API + * ============================================================================= + */ + +IMB_DLL_LOCAL int self_test(IMB_MGR *p_mgr) +{ + int ret = 1; + + p_mgr->features |= IMB_FEATURE_SELF_TEST; + p_mgr->features &= ~IMB_FEATURE_SELF_TEST_PASS; + + if (!self_test_ciphers(p_mgr)) + ret = 0; + + if (!self_test_hash(p_mgr)) + ret = 0; + + if (!self_test_aead(p_mgr)) + ret = 0; + + if (ret) + p_mgr->features |= IMB_FEATURE_SELF_TEST_PASS; + +#ifdef NO_SELF_TEST_DEV + p_mgr->features &= ~(IMB_FEATURE_SELF_TEST | + IMB_FEATURE_SELF_TEST_PASS); + ret = 1; +#endif + + return ret; +} diff --git a/perf/Makefile b/perf/Makefile index 3dc13dc20df9a9f24750523902fce91695e1c53e..c6e286ae89d3f5173f44d0b3da50d51d901c2521 100644 --- a/perf/Makefile +++ b/perf/Makefile @@ -39,7 +39,12 @@ CFLAGS = -D_GNU_SOURCE -DNO_COMPAT_IMB_API_053 $(INCLUDES) \ -Wformat -Wformat-security \ -Wunreachable-code -Wmissing-noreturn -Wsign-compare -Wno-endif-labels \ -Wstrict-prototypes -Wmissing-prototypes -Wold-style-definition \ - -pthread -fno-strict-overflow -fno-delete-null-pointer-checks -fwrapv + -pthread -fno-delete-null-pointer-checks -fwrapv + +# -fno-strict-overflow is not supported by clang +ifneq ($(CC),clang) +CFLAGS += -fno-strict-overflow +endif ifeq ($(MINGW),0) CFLAGS += -DLINUX @@ -58,16 +63,20 @@ CET_LDFLAGS=-r -z ibt -z shstk endif # MINGW endif # x86_64 +ifeq ($(ARCH),x86_64) ifeq ($(CC_HAS_CET),1) CFLAGS += -fcf-protection=full endif +endif ifeq ($(MINGW),0) LDFLAGS = -fPIE -z noexecstack -z relro -z now -pthread endif +ifeq ($(ARCH),x86_64) ifeq ($(CC_HAS_CET),1) LDFLAGS += -fcf-protection=full -Wl,-z,ibt -Wl,-z,shstk -Wl,-z,cet-report=error endif +endif LDLIBS = -lIPSec_MB ifeq ("$(shell test -e $(INSTPATH) && echo -n yes)","yes") @@ -138,4 +147,5 @@ style: $(CHECKPATCH) --no-tree --no-signoff --emacs --no-color \ --ignore CODE_INDENT,INITIALISED_STATIC,LEADING_SPACE,SPLIT_STRING,\ UNSPECIFIED_INT,ARRAY_SIZE,BLOCK_COMMENT_STYLE,GLOBAL_INITIALISERS,\ -COMPLEX_MACRO,SPACING,STORAGE_CLASS,USE_FUNC,NEW_TYPEDEFS $(SOURCES_STYLE) +COMPLEX_MACRO,SPACING,STORAGE_CLASS,USE_FUNC,NEW_TYPEDEFS,VOLATILE,\ +CONSTANT_COMPARISON $(SOURCES_STYLE) diff --git a/perf/ipsec_diff_tool.py b/perf/ipsec_diff_tool.py index f69823e87b0f530c8d7003dc232597c3375d8ac4..d21b049f2424ebbdeafe55e929e78d2d5f1bc0d3 100755 --- a/perf/ipsec_diff_tool.py +++ b/perf/ipsec_diff_tool.py @@ -35,7 +35,7 @@ import sys # Number of parameters (ARCH, CIPHER_MODE, DIR, HASH_ALG, KEY_SIZE) PAR_NUM = 5 -COL_WIDTH = 14 +COL_WIDTH = 19 CYCLE_COST = False PACKET_SIZE = 0 SLOPE = False @@ -153,16 +153,16 @@ class VarList(list): #commandline flags and prints the appropriate values if CYCLE_COST: headings = ["NO", "ARCH", "CIPHER", "DIR", "HASH", - "KEYSZ", "CYCLE COST A", "CYCLE COST B"] + "KEYSZ", "CYCLE_COST_A", "CYCLE_COST_B"] print("Buffer size: {} bytes".format(PACKET_SIZE)) elif THROUGHPUT: headings = ["NO", "ARCH", "CIPHER", "DIR", "HASH", - "KEYSZ", "THROUGHPUT A", "THROUGHPUT B"] + "KEYSZ", "THROUGHPUT_A", "THROUGHPUT_B"] print("Buffer size: {} bytes".format(PACKET_SIZE)) print("Clock speed: {} MHz\nThroughput unit: Mbps".format(CLOCK_SPEED)) else: headings = ["NO", "ARCH", "CIPHER", "DIR", "HASH", - "KEYSZ", "SLOPE A", "INTERCEPT A", "SLOPE B", "INTERCEPT B"] + "KEYSZ", "SLOPE_A", 
"INTERCEPT_A", "SLOPE_B", "INTERCEPT_B"] print("".join(j.ljust(COL_WIDTH) for j in headings)) @@ -206,16 +206,16 @@ class VarList(list): """ if CYCLE_COST: headings = ["NO", "ARCH", "CIPHER", "DIR", "HASH", - "KEYSZ", "CYCLE COST A"] + "KEYSZ", "CYCLE_COST_A"] print("Buffer size: {} bytes".format(PACKET_SIZE)) elif THROUGHPUT: headings = ["NO", "ARCH", "CIPHER", "DIR", "HASH", - "KEYSZ", "THROUGHPUT A"] + "KEYSZ", "THROUGHPUT_A"] print("Buffer size: {} bytes".format(PACKET_SIZE)) print("Clock speed: {} MHz\nThroughput unit: Mbps".format(CLOCK_SPEED)) else: headings = ["NO", "ARCH", "CIPHER", "DIR", "HASH", - "KEYSZ", "SLOPE A", "INTERCEPT A"] + "KEYSZ", "SLOPE_A", "INTERCEPT_A"] print("".join(j.ljust(COL_WIDTH) for j in headings)) for i, obj in enumerate(self): number = i+1 diff --git a/perf/ipsec_perf.c b/perf/ipsec_perf.c index 394a87707793a8c5759185dbab94a15b9e7f21d7..e0b1a6111e5a99577f04ab1c27d78d0e25a8a981 100644 --- a/perf/ipsec_perf.c +++ b/perf/ipsec_perf.c @@ -32,6 +32,10 @@ #include #include #include +#ifdef LINUX +#include +#include +#endif #ifdef _WIN32 #include /* memalign() or _aligned_malloc()/aligned_free() */ @@ -102,6 +106,7 @@ typedef cpuset_t cpu_set_t; #define BITS(x) (sizeof(x) * 8) #define DIM(x) (sizeof(x)/sizeof(x[0])) +#define DIV_ROUND_UP(x, y) ((x + y - 1) / y) #define MAX_NUM_THREADS 16 /* Maximum number of threads that can be created */ @@ -109,6 +114,9 @@ typedef cpuset_t cpu_set_t; #define IA32_MSR_PERF_GLOBAL_CTR 0x38F #define IA32_MSR_CPU_UNHALTED_THREAD 0x30A +#define DEFAULT_BURST_SIZE 32 +#define MAX_BURST_SIZE 256 + enum arch_type_e { ARCH_SSE = 0, ARCH_AVX, @@ -152,14 +160,19 @@ enum test_cipher_mode_e { /* This enum will be mostly translated to IMB_HASH_ALG * (make sure to update h_alg_names list in print_times function) */ enum test_hash_alg_e { - TEST_SHA1 = 1, + TEST_SHA1_HMAC = 1, + TEST_SHA_224_HMAC, + TEST_SHA_256_HMAC, + TEST_SHA_384_HMAC, + TEST_SHA_512_HMAC, + TEST_XCBC, + TEST_MD5, + TEST_HASH_CMAC, /* added here to be included in AES tests */ + TEST_SHA1, TEST_SHA_224, TEST_SHA_256, TEST_SHA_384, TEST_SHA_512, - TEST_XCBC, - TEST_MD5, - TEST_HASH_CMAC, /* added here to be included in AES tests */ TEST_HASH_CMAC_BITLEN, TEST_HASH_CMAC_256, TEST_NULL_HASH, @@ -190,6 +203,7 @@ enum test_hash_alg_e { TEST_CRC8_WIMAX_OFDMA_HCS, TEST_CRC7_FP_HEADER, TEST_CRC6_IUUP_HEADER, + TEST_AUTH_GHASH, TEST_NUM_HASH_TESTS }; @@ -478,31 +492,31 @@ const struct str_value_mapping hash_algo_str_map[] = { { .name = "sha1-hmac", .values.job_params = { - .hash_alg = TEST_SHA1 + .hash_alg = TEST_SHA1_HMAC } }, { .name = "sha224-hmac", .values.job_params = { - .hash_alg = TEST_SHA_224 + .hash_alg = TEST_SHA_224_HMAC } }, { .name = "sha256-hmac", .values.job_params = { - .hash_alg = TEST_SHA_256 + .hash_alg = TEST_SHA_256_HMAC } }, { .name = "sha384-hmac", .values.job_params = { - .hash_alg = TEST_SHA_384 + .hash_alg = TEST_SHA_384_HMAC } }, { .name = "sha512-hmac", .values.job_params = { - .hash_alg = TEST_SHA_512 + .hash_alg = TEST_SHA_512_HMAC } }, { @@ -523,6 +537,42 @@ const struct str_value_mapping hash_algo_str_map[] = { .hash_alg = TEST_HASH_CMAC } }, + { + .name = "sha1", + .values.job_params = { + .hash_alg = TEST_SHA1 + } + }, + { + .name = "sha224", + .values.job_params = { + .hash_alg = TEST_SHA_224 + } + }, + { + .name = "sha256", + .values.job_params = { + .hash_alg = TEST_SHA_256 + } + }, + { + .name = "sha384", + .values.job_params = { + .hash_alg = TEST_SHA_384 + } + }, + { + .name = "sha512", + .values.job_params = { + .hash_alg = TEST_SHA_512 + } 
+ }, + { + .name = "null", + .values.job_params = { + .hash_alg = TEST_NULL_HASH + } + }, { .name = "aes-cmac-bitlen", .values.job_params = { @@ -637,6 +687,12 @@ const struct str_value_mapping hash_algo_str_map[] = { .hash_alg = TEST_CRC6_IUUP_HEADER, } }, + { + .name = "ghash", + .values.job_params = { + .hash_alg = TEST_AUTH_GHASH, + } + }, #endif { .name = "snow3g-uia2", @@ -800,11 +856,11 @@ enum cache_type_e { enum cache_type_e cache_type = WARM; const uint32_t auth_tag_length_bytes[] = { - 12, /* SHA1 */ - 14, /* SHA_224 */ - 16, /* SHA_256 */ - 24, /* SHA_384 */ - 32, /* SHA_512 */ + 12, /* SHA1_HMAC */ + 14, /* SHA_224_HMAC */ + 16, /* SHA_256_HMAC */ + 24, /* SHA_384_HMAC */ + 32, /* SHA_512_HMAC */ 12, /* AES_XCBC */ 12, /* MD5 */ 0, /* NULL_HASH */ @@ -845,6 +901,7 @@ const uint32_t auth_tag_length_bytes[] = { 4, /* IMB_AUTH_CRC8_WIMAX_OFDMA_HCS */ 4, /* IMB_AUTH_CRC7_FP_HEADER */ 4, /* IMB_AUTH_CRC6_IUUP_HEADER */ + 16, /* IMB_AUTH_GHASH */ }; uint32_t index_limit; uint32_t key_idxs[NUM_OFFSETS]; @@ -871,6 +928,7 @@ uint32_t *hash_size_list = NULL; uint64_t *xgem_hdr_list = NULL; uint16_t imix_list_count = 0; uint32_t average_job_size = 0; +uint32_t max_job_size = 0; /* Size of IMIX list (needs to be multiple of 2) */ #define JOB_SIZE_IMIX_LIST 1024 @@ -890,7 +948,7 @@ struct custom_job_params custom_job_params = { }; uint8_t archs[NUM_ARCHS] = {1, 1, 1, 1, 1}; /* uses all function sets */ -int use_gcm_job_api = 0; +int use_job_api = 0; int use_gcm_sgl_api = 0; int use_unhalted_cycles = 0; /* read unhalted cycles instead of tsc */ uint64_t rd_cycles_cost = 0; /* cost of reading unhalted cycles */ @@ -911,6 +969,43 @@ static uint32_t pb_mod = 0; static int silent_progress_bar = 0; static int plot_output_option = 0; +/* API types */ +typedef enum { + TEST_API_JOB = 0, + TEST_API_BURST, + TEST_API_CIPHER_BURST, + TEST_API_HASH_BURST, + TEST_API_NUMOF +} TEST_API; + +const char *str_api_list[TEST_API_NUMOF] = {"single job", "burst", + "cipher-only burst", + "hash-only burst"}; + +static TEST_API test_api = TEST_API_JOB; /* test job API by default */ +static uint32_t burst_size = 0; /* num jobs to pass to burst API */ +static uint32_t segment_size = 0; /* segment size to test SGL (0 = no SGL) */ + +static volatile int timebox_on = 1; /* flag to stop the test loop */ +static int use_timebox = 1; /* time-box feature on/off flag */ + +#ifdef LINUX +static void timebox_callback(int sig) +{ + (void) sig; + timebox_on = 0; +} +#endif + +#ifdef _WIN32 +static void CALLBACK timebox_callback(PVOID lpParam, BOOLEAN TimerFired) +{ + (void) lpParam; + (void) TimerFired; + timebox_on = 0; +} +#endif + /* Return rdtsc to core cycle scale factor */ static double get_tsc_to_core_scale(const int turbo) { @@ -1389,7 +1484,10 @@ translate_cipher_mode(const enum test_cipher_mode_e test_mode) c_mode = IMB_CIPHER_DOCSIS_DES; break; case TEST_GCM: - c_mode = IMB_CIPHER_GCM; + if (segment_size != 0) + c_mode = IMB_CIPHER_GCM_SGL; + else + c_mode = IMB_CIPHER_GCM; break; case TEST_CCM: c_mode = IMB_CIPHER_CCM; @@ -1420,7 +1518,11 @@ translate_cipher_mode(const enum test_cipher_mode_e test_mode) c_mode = IMB_CIPHER_CHACHA20; break; case TEST_AEAD_CHACHA20: - c_mode = IMB_CIPHER_CHACHA20_POLY1305; + if (segment_size != 0) + c_mode = IMB_CIPHER_CHACHA20_POLY1305_SGL; + else + c_mode = IMB_CIPHER_CHACHA20_POLY1305; + break; case TEST_SNOW_V: c_mode = IMB_CIPHER_SNOW_V; @@ -1504,6 +1606,60 @@ set_job_fields(IMB_JOB *job, uint8_t *p_buffer, imb_uint128_t *p_keys, } } +static inline void 
+set_sgl_job_fields(IMB_JOB *job, uint8_t *p_buffer, imb_uint128_t *p_keys, + const uint32_t size_idx, const uint32_t buf_index, + struct IMB_SGL_IOV *sgl, struct gcm_context_data *gcm_ctx, + struct chacha20_poly1305_context_data *cp_ctx) +{ + uint8_t *src = get_src_buffer(buf_index, p_buffer); + uint8_t *dst = get_dst_buffer(buf_index, p_buffer); + uint8_t *aad = src; + uint32_t buf_size; + + job->src = src; + job->dst = dst; + + /* If IMIX testing is being done, set the buffer size to cipher and hash + * going through the list of sizes precalculated */ + if (imix_list_count != 0) { + uint32_t list_idx = size_idx & (JOB_SIZE_IMIX_LIST - 1); + + job->msg_len_to_cipher_in_bytes = cipher_size_list[list_idx]; + } + buf_size = (uint32_t) job->msg_len_to_cipher_in_bytes; + if (job->cipher_mode == IMB_CIPHER_GCM_SGL) { + job->u.GCM.aad = aad; + job->u.GCM.ctx = gcm_ctx; + } else { + job->u.CHACHA20_POLY1305.aad = aad; + job->u.CHACHA20_POLY1305.ctx = cp_ctx; + } + job->enc_keys = job->dec_keys = + (const uint32_t *) get_key_pointer(buf_index, + p_keys); + job->sgl_state = IMB_SGL_ALL; + + const uint32_t num_segs = buf_size / segment_size; + const uint32_t final_seg_sz = buf_size % segment_size; + unsigned i; + + job->num_sgl_io_segs = num_segs; + + for (i = 0; i < num_segs; i++) { + sgl[i].in = &src[i * segment_size]; + sgl[i].out = &dst[i * segment_size]; + sgl[i].len = segment_size; + } + if (final_seg_sz != 0) { + sgl[i].in = &src[num_segs * segment_size]; + sgl[i].out = &dst[num_segs * segment_size]; + sgl[i].len = final_seg_sz; + (job->num_sgl_io_segs)++; + } + job->sgl_io_segs = sgl; +}; + static void set_size_lists(uint32_t *cipher_size_list, uint32_t *hash_size_list, uint64_t *xgem_hdr_list, struct params_s *params) @@ -1614,6 +1770,11 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, uint64_t time = 0; uint8_t gcm_key[32]; uint8_t next_iv[IMB_AES_BLOCK_SIZE]; + IMB_JOB jobs[MAX_BURST_SIZE]; + struct gcm_context_data gcm_ctx[MAX_BURST_SIZE]; + struct chacha20_poly1305_context_data cp_ctx[MAX_BURST_SIZE]; + struct IMB_SGL_IOV *sgl[MAX_BURST_SIZE] = {NULL}; + uint32_t max_num_segs = 1; memset(&job_template, 0, sizeof(IMB_JOB)); @@ -1621,6 +1782,19 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, and set the XGEM header in case PON is used. 
*/ set_size_lists(cipher_size_list, hash_size_list, xgem_hdr_list, params); + if (segment_size != 0) + max_num_segs = DIV_ROUND_UP(job_sizes[RANGE_MAX], + segment_size); + + for (i = 0; i < MAX_BURST_SIZE; i++) { + sgl[i] = malloc(sizeof(struct IMB_SGL_IOV) * + max_num_segs); + if (sgl[i] == NULL) { + fprintf(stderr, "malloc() failed\n"); + goto exit; + } + } + /* * If single size is used, set the cipher and hash lengths in the * job template, so they don't have to be set in every job @@ -1637,6 +1811,21 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, job_template.auth_tag_output = (uint8_t *) digest; switch (params->hash_alg) { + case TEST_SHA1: + job_template.hash_alg = IMB_AUTH_SHA_1; + break; + case TEST_SHA_224: + job_template.hash_alg = IMB_AUTH_SHA_224; + break; + case TEST_SHA_256: + job_template.hash_alg = IMB_AUTH_SHA_256; + break; + case TEST_SHA_384: + job_template.hash_alg = IMB_AUTH_SHA_384; + break; + case TEST_SHA_512: + job_template.hash_alg = IMB_AUTH_SHA_512; + break; case TEST_XCBC: job_template.u.XCBC._k1_expanded = k1_expanded; job_template.u.XCBC._k2 = k2; @@ -1647,7 +1836,10 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, job_template.hash_alg = IMB_AUTH_AES_CCM; break; case TEST_HASH_GCM: - job_template.hash_alg = IMB_AUTH_AES_GMAC; + if (segment_size != 0) + job_template.hash_alg = IMB_AUTH_GCM_SGL; + else + job_template.hash_alg = IMB_AUTH_AES_GMAC; break; case TEST_DOCSIS_CRC32: job_template.hash_alg = IMB_AUTH_DOCSIS_CRC32; @@ -1678,7 +1870,10 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, job_template.hash_alg = IMB_AUTH_POLY1305; break; case TEST_AEAD_POLY1305: - job_template.hash_alg = IMB_AUTH_CHACHA20_POLY1305; + if (segment_size != 0) + job_template.hash_alg = IMB_AUTH_CHACHA20_POLY1305_SGL; + else + job_template.hash_alg = IMB_AUTH_CHACHA20_POLY1305; break; case TEST_PON_CRC_BIP: job_template.hash_alg = IMB_AUTH_PON_CRC_BIP; @@ -1724,6 +1919,12 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, job_template.u.GMAC._iv = (uint8_t *) &auth_iv; job_template.u.GMAC.iv_len_in_bytes = 12; break; + case TEST_AUTH_GHASH: + job_template.hash_alg = IMB_AUTH_GHASH; + IMB_GHASH_PRE(mb_mgr, gcm_key, &gdata_key); + job_template.u.GHASH._key = &gdata_key; + job_template.u.GHASH._init_tag = (uint8_t *) &auth_iv; + break; case TEST_AUTH_SNOW_V_AEAD: job_template.hash_alg = IMB_AUTH_SNOW_V_AEAD; break; @@ -1800,7 +2001,8 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, /* Translating enum to the API's one */ job_template.cipher_mode = translate_cipher_mode(params->cipher_mode); job_template.key_len_in_bytes = params->aes_key_size; - if (job_template.cipher_mode == IMB_CIPHER_GCM) { + if (job_template.cipher_mode == IMB_CIPHER_GCM || + job_template.cipher_mode == IMB_CIPHER_GCM_SGL) { switch (params->aes_key_size) { case IMB_KEY_128_BYTES: IMB_AES128_GCM_PRE(mb_mgr, gcm_key, &gdata_key); @@ -1859,7 +2061,8 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, job_template.iv_len_in_bytes = 0; else if (job_template.cipher_mode == IMB_CIPHER_CHACHA20) job_template.iv_len_in_bytes = 12; - else if (job_template.cipher_mode == IMB_CIPHER_CHACHA20_POLY1305) { + else if (job_template.cipher_mode == IMB_CIPHER_CHACHA20_POLY1305 || + job_template.cipher_mode == IMB_CIPHER_CHACHA20_POLY1305_SGL) { job_template.hash_start_src_offset_in_bytes = 0; job_template.cipher_start_src_offset_in_bytes = 0; job_template.enc_keys = k1_expanded; @@ -1877,6 +2080,49 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, params->aad_size; } +#define TIMEOUT_MS 100 /*< max time 
for one packet size to be tested for */ + + uint32_t jobs_done = 0; /*< to track how many jobs done over time */ +#ifdef _WIN32 + HANDLE hTimebox = NULL; + HANDLE hTimeboxQueue = NULL; +#endif + + if (use_timebox) { +#ifdef LINUX + struct itimerval it_next; + + /* set up one shot timer */ + it_next.it_interval.tv_sec = 0; + it_next.it_interval.tv_usec = 0; + it_next.it_value.tv_sec = TIMEOUT_MS / 1000; + it_next.it_value.tv_usec = (TIMEOUT_MS % 1000) * 1000; + if (setitimer(ITIMER_REAL, &it_next, NULL)) { + perror("setitimer(one-shot)"); + goto exit; + } +#else /* _WIN32 */ + /* create the timer queue */ + hTimeboxQueue = CreateTimerQueue(); + if (NULL == hTimeboxQueue) { + fprintf(stderr, "CreateTimerQueue() error %u\n", + (unsigned) GetLastError()); + goto exit; + } + + /* set a timer to call the timebox */ + if (!CreateTimerQueueTimer(&hTimebox, hTimeboxQueue, + (WAITORTIMERCALLBACK) + timebox_callback, + NULL, TIMEOUT_MS, 0, 0)) { + fprintf(stderr, "CreateTimerQueueTimer() error %u\n", + (unsigned) GetLastError()); + goto exit; + } +#endif + timebox_on = 1; + } + #ifndef _WIN32 if (use_unhalted_cycles) time = read_cycles(params->core); @@ -1884,43 +2130,235 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, #endif time = perf_rdtscp(); - for (i = 0; i < num_iter; i++) { - job = IMB_GET_NEXT_JOB(mb_mgr); - *job = job_template; + /* test burst api */ + if (test_api == TEST_API_BURST) { + uint32_t num_jobs = num_iter; + IMB_JOB *jobs[IMB_MAX_BURST_SIZE] = {NULL}; - set_job_fields(job, p_buffer, p_keys, i, index); + while (num_jobs && timebox_on) { + uint32_t n = (num_jobs / burst_size) ? + burst_size : num_jobs; - index = get_next_index(index); + while (IMB_GET_NEXT_BURST(mb_mgr, n, jobs) < n) + IMB_FLUSH_BURST(mb_mgr, n, jobs); + + /* set all job params */ + for (i = 0; i < n; i++) { + IMB_JOB *job = jobs[i]; + *job = job_template; + + if (segment_size != 0) + set_sgl_job_fields(job, p_buffer, + p_keys, i, + index, sgl[i], + &gcm_ctx[i], + &cp_ctx[i]); + else + set_job_fields(job, p_buffer, p_keys, + i, index); + + index = get_next_index(index); + + } + /* submit burst */ #ifdef DEBUG - job = IMB_SUBMIT_JOB(mb_mgr); + jobs_done += IMB_SUBMIT_BURST(mb_mgr, n, jobs); + if (jobs_done == 0) { + const int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", + err, imb_get_strerror(err)); + } + } #else - job = IMB_SUBMIT_JOB_NOCHECK(mb_mgr); + jobs_done += + IMB_SUBMIT_BURST_NOCHECK(mb_mgr, n, jobs); #endif - while (job) { + num_jobs -= n; + } + jobs_done += + IMB_FLUSH_BURST(mb_mgr, IMB_MAX_BURST_SIZE, jobs); + + /* test cipher-only burst api */ + } else if (test_api == TEST_API_CIPHER_BURST) { + IMB_JOB *jt = &job_template; + uint32_t num_jobs = num_iter; + uint32_t list_idx; + + while (num_jobs && timebox_on) { + uint32_t n_jobs = + (num_jobs / burst_size) ? 
burst_size : num_jobs; + + /* set all job params */ + for (i = 0; i < n_jobs; i++) { + job = &jobs[i]; + + /* If IMIX testing is being done, set the buffer + * size to cipher going through the + * list of sizes precalculated */ + if (imix_list_count != 0) { + list_idx = i & (JOB_SIZE_IMIX_LIST - 1); + job->msg_len_to_cipher_in_bytes = + cipher_size_list[list_idx]; + } else + job->msg_len_to_cipher_in_bytes = + jt->msg_len_to_cipher_in_bytes; + + job->src = get_src_buffer(index, p_buffer); + job->dst = get_dst_buffer(index, p_buffer); + job->enc_keys = job->dec_keys = + (const uint32_t *) + get_key_pointer(index, p_keys); + job->cipher_start_src_offset_in_bytes = + jt->cipher_start_src_offset_in_bytes; + job->iv = jt->iv; + job->iv_len_in_bytes = jt->iv_len_in_bytes; + + index = get_next_index(index); + } + /* submit cipher-only burst */ #ifdef DEBUG - if (job->status != IMB_STATUS_COMPLETED) { - fprintf(stderr, "failed job, status:%d\n", - job->status); - return 1; + const uint32_t completed_jobs = + IMB_SUBMIT_CIPHER_BURST(mb_mgr, jobs, n_jobs, + jt->cipher_mode, + jt->cipher_direction, + jt->key_len_in_bytes); + + if (completed_jobs != n_jobs) { + const int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_cipher_burst error " + "%d : '%s'\n", err, + imb_get_strerror(err)); + } } +#else + IMB_SUBMIT_CIPHER_BURST_NOCHECK(mb_mgr, jobs, n_jobs, + jt->cipher_mode, + jt->cipher_direction, + jt->key_len_in_bytes); #endif - job = IMB_GET_COMPLETED_JOB(mb_mgr); + num_jobs -= n_jobs; } - } - - while ((job = IMB_FLUSH_JOB(mb_mgr))) { + jobs_done = num_iter - num_jobs; + + /* test hash-only burst api */ + } else if (test_api == TEST_API_HASH_BURST) { + IMB_JOB *jt = &job_template; + uint32_t num_jobs = num_iter; + uint32_t list_idx; + + while (num_jobs && timebox_on) { + uint32_t n_jobs = + (num_jobs / burst_size) ? 
burst_size : num_jobs; + + /* set all job params */ + for (i = 0; i < n_jobs; i++) { + job = &jobs[i]; + + /* If IMIX testing is being done, set the buffer + * size to cipher going through the + * list of sizes precalculated */ + if (imix_list_count != 0) { + list_idx = i & (JOB_SIZE_IMIX_LIST - 1); + job->msg_len_to_hash_in_bytes = + hash_size_list[list_idx]; + } else + job->msg_len_to_hash_in_bytes = + jt->msg_len_to_hash_in_bytes; + + job->src = get_src_buffer(index, p_buffer); + job->hash_start_src_offset_in_bytes = + jt->hash_start_src_offset_in_bytes; + job->auth_tag_output_len_in_bytes = + jt->auth_tag_output_len_in_bytes; + job->u.HMAC._hashed_auth_key_xor_ipad = + jt->u.HMAC._hashed_auth_key_xor_ipad; + job->u.HMAC._hashed_auth_key_xor_opad = + jt->u.HMAC._hashed_auth_key_xor_opad; + job->auth_tag_output = jt->auth_tag_output; + + index = get_next_index(index); + } + /* submit hash-only burst */ #ifdef DEBUG - if (job->status != IMB_STATUS_COMPLETED) { - const int errc = imb_get_errno(mb_mgr); + const uint32_t completed_jobs = + IMB_SUBMIT_HASH_BURST(mb_mgr, jobs, n_jobs, + jt->hash_alg); + + if (completed_jobs != n_jobs) { + const int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_hash_burst error " + "%d : '%s'\n", err, + imb_get_strerror(err)); + } + } +#else + IMB_SUBMIT_HASH_BURST_NOCHECK(mb_mgr, jobs, n_jobs, + jt->hash_alg); +#endif + num_jobs -= n_jobs; + } + jobs_done = num_iter - num_jobs; + + } else { /* test job api */ + for (i = 0; (i < num_iter) && timebox_on; i++) { + job = IMB_GET_NEXT_JOB(mb_mgr); + *job = job_template; + + if (segment_size != 0) + set_sgl_job_fields(job, p_buffer, p_keys, + i, index, + sgl[0], &gcm_ctx[0], + &cp_ctx[0]); + else + set_job_fields(job, p_buffer, p_keys, i, index); - fprintf(stderr, - "failed job, status:%d, error code:%d, %s\n", - job->status, errc, imb_get_strerror(errc)); - return 1; + index = get_next_index(index); +#ifdef DEBUG + job = IMB_SUBMIT_JOB(mb_mgr); +#else + job = IMB_SUBMIT_JOB_NOCHECK(mb_mgr); +#endif + while (job) { +#ifdef DEBUG + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, + "failed job, status:%d\n", + job->status); + goto exit; + } +#endif + job = IMB_GET_COMPLETED_JOB(mb_mgr); + } } + jobs_done = i; + + while ((job = IMB_FLUSH_JOB(mb_mgr))) { +#ifdef DEBUG + if (job->status != IMB_STATUS_COMPLETED) { + const int errc = imb_get_errno(mb_mgr); + + fprintf(stderr, + "failed job, status:%d, " + "error code:%d, %s\n", job->status, + errc, imb_get_strerror(errc)); + goto exit; + } #else - (void)job; + (void)job; #endif + } + + } /* if test_api */ + + for (i = 0; i < MAX_BURST_SIZE; i++) { + free(sgl[i]); + sgl[i] = NULL; } #ifndef _WIN32 @@ -1930,10 +2368,41 @@ do_test(IMB_MGR *mb_mgr, struct params_s *params, #endif time = perf_rdtscp() - time; + if (use_timebox) { +#ifdef LINUX + /* disarm the timer */ + struct itimerval it_disarm; + + memset(&it_disarm, 0, sizeof(it_disarm)); + + if (setitimer(ITIMER_REAL, &it_disarm, NULL)) { + perror("setitimer(disarm)"); + goto exit; + } +#else /* _WIN32 */ + /* delete all timeboxes in the timer queue */ + if (!DeleteTimerQueue(hTimeboxQueue)) + fprintf(stderr, "DeleteTimerQueue() error %u\n", + (unsigned) GetLastError()); +#endif + + /* calculate return value */ + if (jobs_done == 0) + return 0; + + return time / jobs_done; + } + if (!num_iter) return time; return time / num_iter; + +exit: + for (i = 0; i < MAX_BURST_SIZE; i++) + free(sgl[i]); + + exit(EXIT_FAILURE); } static void @@ -1950,17 +2419,48 @@ run_gcm_sgl(aes_gcm_init_t init, 
aes_gcm_enc_dec_update_t update, uint8_t auth_tag[12]; DECLARE_ALIGNED(uint8_t iv[16], 16); - for (i = 0; i < num_iter; i++) { - uint8_t *pb = get_dst_buffer(index, p_buffer); + /* SGL */ + if (segment_size != 0) { + for (i = 0; i < num_iter; i++) { + uint8_t *pb = get_dst_buffer(index, p_buffer); + + if (imix_list_count != 0) + buf_size = get_next_size(i); + + const uint32_t num_segs = buf_size / segment_size; + const uint32_t final_seg_sz = buf_size % segment_size; + uint32_t j; + + init(gdata_key, gdata_ctx, iv, aad, aad_size); + for (j = 0; j < num_segs; j++) + update(gdata_key, gdata_ctx, + &pb[j*segment_size], + &pb[j*segment_size], + segment_size); + if (final_seg_sz != 0) + update(gdata_key, gdata_ctx, + &pb[j*segment_size], + &pb[j*segment_size], + final_seg_sz); + finalize(gdata_key, gdata_ctx, auth_tag, + sizeof(auth_tag)); + + index = get_next_index(index); + } + } else { + for (i = 0; i < num_iter; i++) { + uint8_t *pb = get_dst_buffer(index, p_buffer); - if (imix_list_count != 0) - buf_size = get_next_size(i); + if (imix_list_count != 0) + buf_size = get_next_size(i); - init(gdata_key, gdata_ctx, iv, aad, aad_size); - update(gdata_key, gdata_ctx, pb, pb, buf_size); - finalize(gdata_key, gdata_ctx, auth_tag, sizeof(auth_tag)); + init(gdata_key, gdata_ctx, iv, aad, aad_size); + update(gdata_key, gdata_ctx, pb, pb, buf_size); + finalize(gdata_key, gdata_ctx, auth_tag, + sizeof(auth_tag)); - index = get_next_index(index); + index = get_next_index(index); + } } } @@ -2003,6 +2503,10 @@ do_test_gcm(struct params_s *params, uint8_t *aad = NULL; uint64_t time = 0; + /* Force SGL API if segment size is not 0 */ + if (segment_size != 0) + use_gcm_sgl_api = 1; + key = (uint8_t *) malloc(sizeof(uint8_t) * params->aes_key_size); if (!key) { fprintf(stderr, "Could not malloc key\n"); @@ -2165,6 +2669,189 @@ do_test_gcm(struct params_s *params, return time / num_iter; } +/* Performs test using CHACHA20-POLY1305 direct API */ +static uint64_t +do_test_chacha_poly(struct params_s *params, + const uint32_t num_iter, IMB_MGR *mb_mgr, + uint8_t *p_buffer, imb_uint128_t *p_keys) +{ + uint8_t key[32]; + uint8_t auth_tag[16]; + DECLARE_ALIGNED(uint8_t iv[16], 16); + uint8_t *aad = NULL; + uint64_t time = 0; +#ifdef __x86_64__ + uint32_t aux; +#endif + struct chacha20_poly1305_context_data chacha_ctx; + static uint32_t index = 0; + uint32_t num_segs; + uint32_t final_seg_sz; + unsigned i, j; + + aad = (uint8_t *) malloc(sizeof(uint8_t) * params->aad_size); + if (!aad) { + fprintf(stderr, "Could not malloc AAD\n"); + free_mem(&p_buffer, &p_keys); + exit(EXIT_FAILURE); + } + + if (segment_size != 0) { + num_segs = params->size_aes / segment_size; + final_seg_sz = params->size_aes % segment_size; + } else { + num_segs = 0; + final_seg_sz = params->size_aes; + } + +#ifndef _WIN32 + if (use_unhalted_cycles) + time = read_cycles(params->core); + else +#endif + +#ifdef __aarch64__ + time = rdtscp(); +#else + time = __rdtscp(&aux); +#endif + for (i = 0; i < num_iter; i++) { + uint8_t *pb = get_dst_buffer(index, p_buffer); + + if (imix_list_count != 0) { + uint32_t buf_size = get_next_size(i); + + if (segment_size != 0) { + num_segs = buf_size / segment_size; + final_seg_sz = buf_size % segment_size; + } else { + num_segs = 0; + final_seg_sz = buf_size; + } + } + + IMB_CHACHA20_POLY1305_INIT(mb_mgr, key, &chacha_ctx, iv, + aad, params->aad_size); + + if (params->cipher_dir == IMB_DIR_ENCRYPT) { + for (j = 0; j < num_segs; j++) + IMB_CHACHA20_POLY1305_ENC_UPDATE(mb_mgr, key, + &chacha_ctx, + 
&pb[j*segment_size], + &pb[j*segment_size], + segment_size); + if (final_seg_sz != 0) + IMB_CHACHA20_POLY1305_ENC_UPDATE(mb_mgr, key, + &chacha_ctx, + &pb[j*segment_size], + &pb[j*segment_size], + final_seg_sz); + IMB_CHACHA20_POLY1305_ENC_FINALIZE(mb_mgr, + &chacha_ctx, + auth_tag, + sizeof(auth_tag)); + } else { /* IMB_DIR_DECRYPT */ + for (j = 0; j < num_segs; j++) + IMB_CHACHA20_POLY1305_ENC_UPDATE(mb_mgr, key, + &chacha_ctx, + &pb[j*segment_size], + &pb[j*segment_size], + segment_size); + if (final_seg_sz != 0) + IMB_CHACHA20_POLY1305_DEC_UPDATE(mb_mgr, key, + &chacha_ctx, + &pb[j*segment_size], + &pb[j*segment_size], + final_seg_sz); + IMB_CHACHA20_POLY1305_DEC_FINALIZE(mb_mgr, + &chacha_ctx, + auth_tag, + sizeof(auth_tag)); + } + index = get_next_index(index); + } +#ifndef _WIN32 + if (use_unhalted_cycles) + time = (read_cycles(params->core) - + rd_cycles_cost) - time; + else +#endif +#ifdef __aarch64__ + time = rdtscp() - time; +#else + time = __rdtscp(&aux) - time; +#endif + free(aad); + + if (!num_iter) + return time; + + return time / num_iter; +} + +/* Performs test using GCM */ +static uint64_t +do_test_ghash(struct params_s *params, + const uint32_t num_iter, IMB_MGR *mb_mgr, + uint8_t *p_buffer, imb_uint128_t *p_keys) +{ + static DECLARE_ALIGNED(struct gcm_key_data gdata_key, 512); + uint64_t time = 0; +#ifdef __x86_64__ + uint32_t aux; +#endif + uint32_t i, index = 0; + uint8_t auth_tag[16]; + + IMB_GHASH_PRE(mb_mgr, p_keys, &gdata_key); + +#ifndef _WIN32 + if (use_unhalted_cycles) + time = read_cycles(params->core); + else +#endif +#ifdef __aarch64__ + time = rdtscp(); +#else + time = __rdtscp(&aux); +#endif + if (imix_list_count != 0) { + for (i = 0; i < num_iter; i++) { + uint8_t *pb = get_dst_buffer(index, p_buffer); + const uint32_t buf_size = get_next_size(i); + + IMB_GHASH(mb_mgr, &gdata_key, pb, buf_size, + auth_tag, sizeof(auth_tag)); + index = get_next_index(index); + } + } else { + for (i = 0; i < num_iter; i++) { + uint8_t *pb = get_dst_buffer(index, p_buffer); + const uint32_t buf_size = params->size_aes; + + IMB_GHASH(mb_mgr, &gdata_key, pb, buf_size, + auth_tag, sizeof(auth_tag)); + index = get_next_index(index); + } + } + +#ifndef _WIN32 + if (use_unhalted_cycles) + time = (read_cycles(params->core) - + rd_cycles_cost) - time; + else +#endif +#ifdef __aarch64__ + time = rdtscp() - time; +#else + time = __rdtscp(&aux) - time; +#endif + if (!num_iter) + return time; + + return time / num_iter; +} + /* Computes mean of set of times after dropping bottom and top quarters */ static uint64_t mean_median(uint64_t *array, uint32_t size, @@ -2254,13 +2941,31 @@ process_variant(IMB_MGR *mgr, const enum arch_type_e arch, num_iter = iter_scale; params->size_aes = size_aes; - if (params->cipher_mode == TEST_GCM && (!use_gcm_job_api)) { + if (params->cipher_mode == TEST_GCM && (!use_job_api)) { if (job_iter == 0) *times = do_test_gcm(params, 2 * num_iter, mgr, p_buffer, p_keys); else *times = do_test_gcm(params, job_iter, mgr, p_buffer, p_keys); + } else if (params->cipher_mode == TEST_AEAD_CHACHA20 && + (!use_job_api)) { + if (job_iter == 0) + *times = do_test_chacha_poly(params, + 2 * num_iter, mgr, + p_buffer, p_keys); + else + *times = do_test_chacha_poly(params, + job_iter, mgr, + p_buffer, p_keys); + } else if (params->hash_alg == TEST_AUTH_GHASH && + (!use_job_api)) { + if (job_iter == 0) + *times = do_test_ghash(params, 2 * num_iter, + mgr, p_buffer, p_keys); + else + *times = do_test_ghash(params, job_iter, mgr, + p_buffer, p_keys); } else { if (job_iter == 0) 
*times = do_test(mgr, params, num_iter, @@ -2303,13 +3008,19 @@ print_times(struct variant_s *variant_list, struct params_s *params, "ENCRYPT", "DECRYPT" }; const char *h_alg_names[TEST_NUM_HASH_TESTS - 1] = { - "SHA1", "SHA_224", "SHA_256", "SHA_384", "SHA_512", - "XCBC", "MD5", "CMAC", "CMAC_BITLEN", "CMAC_256", + "SHA1_HMAC", "SHA_224_HMAC", "SHA_256_HMAC", + "SHA_384_HMAC", "SHA_512_HMAC", "XCBC", + "MD5", "CMAC", "SHA1", "SHA_224", "SHA_256", + "SHA_384", "SHA_512", "CMAC_BITLEN", "CMAC_256", "NULL_HASH", "CRC32", "GCM", "CUSTOM", "CCM", "BIP-CRC32", "ZUC_EIA3_BITLEN", "SNOW3G_UIA2_BITLEN", "KASUMI_UIA1", "GMAC-128", "GMAC-192", "GMAC-256", "POLY1305", "POLY1305_AEAD", "ZUC256_EIA3", - "SNOW_V_AEAD" + "SNOW_V_AEAD", "CRC32_ETH_FCS", "CRC32_SCTP", + "CRC32_WIMAX_DATA", "CRC24_LTE_A", "CR24_LTE_B", + "CR16_X25", "CRC16_FP_DATA", "CRC11_FP_HEADER", + "CRC10_IUUP_DATA", "CRC8_WIMAX_HCS", "CRC7_FP_HEADER", + "CRC6_IUUP_HEADER", "GHASH" }; struct params_s par; @@ -2339,7 +3050,7 @@ print_times(struct variant_s *variant_list, struct params_s *params, for (col = 0; col < total_variants; col++) { par = variant_list[col].params; - const uint8_t h_alg = par.hash_alg - TEST_SHA1; + const uint8_t h_alg = par.hash_alg - TEST_SHA1_HMAC; printf("\t%s", h_alg_names[h_alg]); } @@ -2529,6 +3240,12 @@ run_tests(void *arg) goto exit_failure; } + if (imb_get_errno(p_mgr) != 0) { + printf("Error initializing MB_MGR! %s\n", + imb_get_strerror(imb_get_errno(p_mgr))); + goto exit_failure; + } + process_variant(p_mgr, arch, ¶ms, variant_ptr, run, buf, keys); @@ -2595,10 +3312,12 @@ static void usage(void) "-o val: Use for the SHA size increment, default is 24\n" "--shani-on: use SHA extensions, default: auto-detect\n" "--shani-off: don't use SHA extensions\n" - "--gcm-job-api: use JOB API for GCM perf tests" - " (raw GCM API is default)\n" + "--gfni-on: use Galois Field extensions, default: auto-detect\n" + "--gfni-off: don't use Galois Field extensions\n" + "--force-job-api: use JOB API" + " (direct API used for GCM/GHASH/CHACHA20_POLY1305 API by default)\n" "--gcm-sgl-api: use direct SGL API for GCM perf tests" - " (raw GCM API is default)\n" + " (direct GCM API is default)\n" "--threads num: for the number of threads to run" " Max: %d\n" "--cores mask: CPU's to run threads\n" @@ -2614,6 +3333,7 @@ static void usage(void) " - range: test multiple sizes with following format" " min:step:max (e.g. 
16:16:256)\n" " (-o still applies for MAC)\n" + "--segment-size: size of segment to test SGL (default: 0)\n" "--imix: set numbers that establish occurrence proportions" " between packet sizes.\n" " It requires a list of sizes through --job-size.\n" @@ -2629,7 +3349,13 @@ static void usage(void) " (Use when turbo enabled)\n" "--no-tsc-detect: don't check TSC to core scaling\n" "--tag-size: modify tag size\n" - "--plot: Adjust text output for direct use with plot output\n", + "--plot: Adjust text output for direct use with plot output\n" + "--no-time-box: disables 100ms watchdog timer on " + "an algorithm@packet-size performance test\n" + "--burst-api: use burst API for perf tests\n" + "--cipher-burst-api: use cipher-only burst API for perf tests\n" + "--hash-burst-api: use hash-only burst API for perf tests\n" + "--burst-size: number of jobs to submit per burst\n", MAX_NUM_THREADS + 1); } @@ -3068,8 +3794,12 @@ int main(int argc, char *argv[]) flags &= (~IMB_FLAG_SHANI_OFF); } else if (strcmp(argv[i], "--shani-off") == 0) { flags |= IMB_FLAG_SHANI_OFF; - } else if (strcmp(argv[i], "--gcm-job-api") == 0) { - use_gcm_job_api = 1; + } else if (strcmp(argv[i], "--gfni-on") == 0) { + flags &= (~IMB_FLAG_GFNI_OFF); + } else if (strcmp(argv[i], "--gfni-off") == 0) { + flags |= IMB_FLAG_GFNI_OFF; + } else if (strcmp(argv[i], "--force-job-api") == 0) { + use_job_api = 1; } else if (strcmp(argv[i], "--gcm-sgl-api") == 0) { use_gcm_sgl_api = 1; } else if (strcmp(argv[i], "--quick") == 0) { @@ -3212,11 +3942,68 @@ int main(int argc, char *argv[]) } else if (strcmp(argv[i], "--tag-size") == 0) { i = get_next_num_arg((const char * const *)argv, i, argc, &tag_size, sizeof(tag_size)); + } else if (strcmp(argv[i], "--burst-api") == 0) { + test_api = TEST_API_BURST; + } else if (strcmp(argv[i], "--cipher-burst-api") == 0) { + test_api = TEST_API_CIPHER_BURST; + } else if (strcmp(argv[i], "--hash-burst-api") == 0) { + test_api = TEST_API_HASH_BURST; + } else if (strcmp(argv[i], "--burst-size") == 0) { + i = get_next_num_arg((const char * const *)argv, i, + argc, &burst_size, + sizeof(burst_size)); + if (burst_size > (MAX_BURST_SIZE)) { + fprintf(stderr, "Burst size cannot be " + "more than %d\n", MAX_BURST_SIZE); + return EXIT_FAILURE; + } + } else if (strcmp(argv[i], "--segment-size") == 0) { + i = get_next_num_arg((const char * const *)argv, i, + argc, &segment_size, + sizeof(segment_size)); + if (segment_size > (JOB_SIZE_TOP)) { + fprintf(stderr, "Segment size cannot be " + "more than %d\n", JOB_SIZE_TOP); + return EXIT_FAILURE; + } + } else if (strcmp(argv[i], "--no-time-box") == 0) { + use_timebox = 0; } else { usage(); return EXIT_FAILURE; } + if (burst_size != 0 && test_api == TEST_API_JOB) { + fprintf(stderr, "--burst-size can only be used with " + "--burst-api, --cipher-burst-api or " + "--hash-burst-api options\n"); + return EXIT_FAILURE; + } + + if (test_api != TEST_API_JOB && burst_size == 0) + burst_size = DEFAULT_BURST_SIZE; + + /* currently only AES-CBC & CTR supported by cipher-only burst API */ + if (test_api == TEST_API_CIPHER_BURST && + (custom_job_params.cipher_mode != TEST_CBC && + custom_job_params.cipher_mode != TEST_CNTR)) { + fprintf(stderr, "Unsupported cipher-only burst " + "API algorithm selected\n"); + return EXIT_FAILURE; + } + + /* currently only HMAC-SHAx algs supported by hash-only burst API */ + if (test_api == TEST_API_HASH_BURST && + ((custom_job_params.hash_alg != TEST_SHA1_HMAC) && + (custom_job_params.hash_alg != TEST_SHA_224_HMAC) && + (custom_job_params.hash_alg != 
TEST_SHA_256_HMAC) && + (custom_job_params.hash_alg != TEST_SHA_384_HMAC) && + (custom_job_params.hash_alg != TEST_SHA_512_HMAC))) { + fprintf(stderr, + "Unsupported hash-only burst API algorithm selected\n"); + return EXIT_FAILURE; + } + if (aead_algo_set == 0 && cipher_algo_set == 0 && hash_algo_set == 0) { fprintf(stderr, "No cipher, hash or " @@ -3224,6 +4011,7 @@ int main(int argc, char *argv[]) usage(); return EXIT_FAILURE; } + if (aead_algo_set && (cipher_algo_set || hash_algo_set)) { fprintf(stderr, "AEAD algorithm cannot be used " "combined with another cipher/hash " @@ -3326,6 +4114,15 @@ int main(int argc, char *argv[]) return EXIT_FAILURE; } + /* Check timebox option vs number of threads bigger than 1 */ + if (use_timebox && num_t > 1) { + fprintf(stderr, + "Time-box feature, enabled by default, doesn't work " + "safely with number of threads bigger than one! Please " + "use '--no-time-box' option to disable\n"); + return EXIT_FAILURE; + } + /* if cycles selected then init MSR module */ if (use_unhalted_cycles) { if (core_mask == 0) { @@ -3367,6 +4164,16 @@ int main(int argc, char *argv[]) "Library version: %s\n", sha_size_incr, IMB_VERSION_STR, imb_get_version_str()); + if (!use_job_api) + fprintf(stderr, "API type: direct\n"); + else { + fprintf(stderr, "API type: %s", str_api_list[test_api]); + if (test_api != TEST_API_JOB) + fprintf(stderr, " (burst size = %u)\n", burst_size); + else + fprintf(stderr, "\n"); + } + if (custom_job_params.cipher_mode == TEST_GCM) fprintf(stderr, "GCM AAD = %"PRIu64"\n", gcm_aad_size); @@ -3402,6 +4209,16 @@ int main(int argc, char *argv[]) srand(ITER_SCALE_LONG + ITER_SCALE_SHORT + ITER_SCALE_SMOKE); +#ifdef LINUX + if (use_timebox) { + /* set up timebox callback function */ + if (signal(SIGALRM, timebox_callback) == SIG_ERR) { + perror("signal(SIGALRM)"); + return EXIT_FAILURE; + } + } +#endif + if (num_t > 1) { uint32_t n; diff --git a/perf/ipsec_perf_tool.py b/perf/ipsec_perf_tool.py index 52a18349372590b1bc4ff11d25d9dd99f9b12a7c..34d4f60d10552798fad298e71083f4e25a1910f7 100755 --- a/perf/ipsec_perf_tool.py +++ b/perf/ipsec_perf_tool.py @@ -60,9 +60,9 @@ class Variant: """Class to setup and run test case variant""" def __init__(self, idx=None, arch=None, direction='encrypt', cipher_alg=None, hash_alg=None, aead_alg=None, sizes=None, offset=None, - cold_cache=False, shani_off=False, gcm_job_api=False, + cold_cache=False, shani_off=False, force_job_api=False, unhalted_cycles=False, quick_test=False, smoke_test=False, - imix=None, aad_size=None, job_iter=None): + imix=None, aad_size=None, job_iter=None, no_time_box=False): """Build perf app command line""" global PERF_APP @@ -80,13 +80,14 @@ class Variant: self.core = None self.cold_cache = cold_cache self.shani_off = shani_off - self.gcm_job_api = gcm_job_api + self.force_job_api = force_job_api self.unhalted_cycles = unhalted_cycles self.quick_test = quick_test self.smoke_test = smoke_test self.imix = imix self.aad_size = aad_size self.job_iter = job_iter + self.no_time_box = no_time_box if self.arch is not None: self.cmd += ' --arch {}'.format(self.arch) @@ -127,8 +128,8 @@ class Variant: if self.shani_off is True: self.cmd += ' --shani-off' - if self.gcm_job_api is True: - self.cmd += ' --gcm-job-api' + if self.force_job_api is True: + self.cmd += ' --force-job-api' if self.unhalted_cycles is True: self.cmd += ' --unhalted-cycles' @@ -139,6 +140,9 @@ class Variant: if self.smoke_test is True: self.cmd += ' --smoke' + if self.no_time_box is True: + self.cmd += ' --no-time-box' + if 
self.imix is not None: self.cmd += ' --imix {}'.format(self.imix) @@ -389,8 +393,8 @@ def parse_args(): parser.add_argument("--arch-best", action='store_true', help="detect available architectures and run only on the best one") parser.add_argument("--shani-off", action='store_true', help="don't use SHA extensions") - parser.add_argument("--gcm-job-api", action='store_true', - help="use JOB API for GCM perf tests (raw GCM API is default)") + parser.add_argument("--force-job-api", action='store_true', + help="use JOB API for algorithms supported through direct API (i.e. AES-GCM, chacha20-poly1305)") parser.add_argument("--unhalted-cycles", action='store_true', help=textwrap.dedent('''\ measure using unhalted cycles (requires root). @@ -415,7 +419,8 @@ def parse_args(): help="size of AAD for AEAD algorithms") parser.add_argument("--job-iter", default=None, type=int, help="number of tests iterations for each job size") - + parser.add_argument("--no-time-box", default=False, action='store_true', + help="disables time box feature for single packet size test duration (100ms)") args = parser.parse_args() @@ -452,9 +457,9 @@ def parse_args(): return args.arch, cores, directions, args.offset, \ alg_types, args.job_size, args.cold_cache, args.arch_best, \ - args.shani_off, args.gcm_job_api, args.unhalted_cycles, \ + args.shani_off, args.force_job_api, args.unhalted_cycles, \ args.quick, args.smoke, args.imix, \ - args.aad_size, args.job_iter + args.aad_size, args.job_iter, args.no_time_box def run_test(core=None): @@ -523,8 +528,8 @@ def main(): # parse command line args archs, cores, directions, offset, alg_types, sizes, cold_cache, arch_best, \ - shani_off, gcm_job_api, unhalted_cycles, quick_test, smoke_test, \ - imix, aad_size, job_iter = parse_args() + shani_off, force_job_api, unhalted_cycles, quick_test, smoke_test, \ + imix, aad_size, job_iter, no_time_box = parse_args() # validate requested archs are supported if arch_best is True: @@ -555,7 +560,7 @@ def main(): print(' Cores: {}'.format(cores), file=sys.stderr) print(' Cache: {}'.format("cold" if cold_cache else "warm"), file=sys.stderr) print(' SHANI: {}'.format("off" if shani_off else "on"), file=sys.stderr) - print(' GCM API: {}'.format("job" if gcm_job_api else "direct"), file=sys.stderr) + print(' API: {}'.format("job" if force_job_api else "direct"), file=sys.stderr) print(' Measuring using {}'.format("unhalted cycles" if unhalted_cycles \ else "rdtsc"), file=sys.stderr) if quick_test is True or smoke_test is True: @@ -573,9 +578,9 @@ def main(): TODO_Q.put(Variant(idx=TOTAL_VARIANTS, arch=arch, direction=direction, offset=offset, sizes=sizes, cipher_alg=cipher_alg, cold_cache=cold_cache, shani_off=shani_off, - gcm_job_api=gcm_job_api, unhalted_cycles=unhalted_cycles, + force_job_api=force_job_api, unhalted_cycles=unhalted_cycles, quick_test=quick_test, smoke_test=smoke_test, imix=imix, - aad_size=aad_size, job_iter=job_iter)) + aad_size=aad_size, job_iter=job_iter, no_time_box=no_time_box)) TOTAL_VARIANTS += 1 if 'hash-only' in alg_types: @@ -584,9 +589,9 @@ def main(): TODO_Q.put(Variant(idx=TOTAL_VARIANTS, arch=arch, direction=None, offset=offset, sizes=sizes, hash_alg=hash_alg, cold_cache=cold_cache, shani_off=shani_off, - gcm_job_api=gcm_job_api, unhalted_cycles=unhalted_cycles, + force_job_api=force_job_api, unhalted_cycles=unhalted_cycles, quick_test=quick_test, smoke_test=smoke_test, imix=imix, - aad_size=aad_size, job_iter=job_iter)) + aad_size=aad_size, job_iter=job_iter, no_time_box=no_time_box)) TOTAL_VARIANTS += 1 if 
'aead-only' in alg_types: @@ -595,9 +600,9 @@ def main(): TODO_Q.put(Variant(idx=TOTAL_VARIANTS, arch=arch, direction=direction, offset=offset, sizes=sizes, aead_alg=aead_alg, cold_cache=cold_cache, shani_off=shani_off, - gcm_job_api=gcm_job_api, unhalted_cycles=unhalted_cycles, + force_job_api=force_job_api, unhalted_cycles=unhalted_cycles, quick_test=quick_test, smoke_test=smoke_test, imix=imix, - aad_size=aad_size, job_iter=job_iter)) + aad_size=aad_size, job_iter=job_iter, no_time_box=no_time_box)) TOTAL_VARIANTS += 1 if 'cipher-hash-all' in alg_types: @@ -608,10 +613,10 @@ def main(): TODO_Q.put(Variant(idx=TOTAL_VARIANTS, arch=arch, direction=direction, offset=offset, sizes=sizes, cipher_alg=cipher_alg, hash_alg=hash_alg, cold_cache=cold_cache, - shani_off=shani_off, gcm_job_api=gcm_job_api, + shani_off=shani_off, force_job_api=force_job_api, unhalted_cycles=unhalted_cycles, quick_test=quick_test, smoke_test=smoke_test, imix=imix, aad_size=aad_size, - job_iter=job_iter)) + job_iter=job_iter, no_time_box=no_time_box)) TOTAL_VARIANTS += 1 # take starting timestamp diff --git a/perf/win_x64.mak b/perf/win_x64.mak index 751b80a5bf88552e54d527eb011a5226de7b1706..06165a383b10a48d9738e905fe966c7863f73019 100644 --- a/perf/win_x64.mak +++ b/perf/win_x64.mak @@ -62,13 +62,7 @@ DLFLAGS = CC = cl -# check for CET support -!if ([$(CC) /? 2>&1 | findstr /C:"guard:cf" > nul] == 0) -DCFLAGS = $(DCFLAGS) /guard:cf -DLFLAGS = $(DLFLAGS) /CETCOMPAT /GUARD:CF /DYNAMICBASE -!endif - -# _CRT_SECURE_NO_WARNINGS disables warning C4996 about unsecure strtok() being used +# _CRT_SECURE_NO_WARNINGS disables warning C4996 about insecure strtok() being used CFLAGS = /nologo /DNO_COMPAT_IMB_API_053 /D_CRT_SECURE_NO_WARNINGS $(DCFLAGS) /Y- /W3 /WX- /Gm- /fp:precise /EHsc $(EXTRA_CFLAGS) $(INCDIR) LNK = link diff --git a/test/Makefile b/test/Makefile index 3d804c0cd43508328a0f0b92213d7bec7ba9bba9..279e617593dca20d17b7930cc34e361af1236f94 100644 --- a/test/Makefile +++ b/test/Makefile @@ -46,7 +46,12 @@ CFLAGS = -MMD -D_GNU_SOURCE -DNO_COMPAT_IMB_API_053 \ -Wformat -Wformat-security \ -Wunreachable-code -Wmissing-noreturn -Wsign-compare -Wno-endif-labels \ -Wstrict-prototypes -Wmissing-prototypes -Wold-style-definition \ - -fno-strict-overflow -fno-delete-null-pointer-checks -fwrapv + -fno-delete-null-pointer-checks -fwrapv + +# -fno-strict-overflow is not supported by clang +ifneq ($(CC),clang) +CFLAGS += -fno-strict-overflow +endif # if "-z ibt" is supported then assume "-z shstk, -z cet-report=error" are also supported # "-fcf-protection" needs to be checked separately @@ -58,9 +63,11 @@ CET_LDFLAGS=-r -z ibt -z shstk endif # MINGW endif # x86_64 +ifeq ($(ARCH),x86_64) ifeq ($(CC_HAS_CET),1) CFLAGS += -fcf-protection=full endif +endif YASM_FLAGS := -f x64 -f elf64 -X gnu -g dwarf2 -DLINUX -D__linux__ ifeq ($(MINGW),0) @@ -76,9 +83,11 @@ else LDFLAGS = -fPIE endif +ifeq ($(ARCH),x86_64) ifeq ($(CC_HAS_CET),1) LDFLAGS += -fcf-protection=full -Wl,-z,ibt -Wl,-z,shstk -Wl,-z,cet-report=error endif +endif LDLIBS = -lIPSec_MB ifeq ("$(shell test -r $(INSTPATH) && echo -n yes)","yes") @@ -189,11 +198,15 @@ else endif $(FUZZ_APP): $(FUZZ_SOURCES) +ifeq ($(MINGW),0) ifneq (, $(shell which clang)) clang $(FUZZ_CFLAGS) $(FUZZ_SOURCES) $(LDLIBS) -o $@ else @echo "Clang is not installed. $(FUZZ_APP) is not built" endif +else + @echo "Fuzzing not supported on MinGW build. 
$(FUZZ_APP) is not built" +endif .PHONY: clean diff --git a/test/acvp_app_main.c b/test/acvp_app_main.c index dbd4329267b2e06b6175d480ffc4d25dbe97f8f8..8f8ed0d0b140f09cbebab83c7574821667184e70 100644 --- a/test/acvp_app_main.c +++ b/test/acvp_app_main.c @@ -46,149 +46,1239 @@ IMB_MGR *mb_mgr = NULL; int verbose = 0; int direct_api = 0; /* job API by default */ +static int aes_cbc_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_SYM_CIPHER_TC *tc; + IMB_JOB *job = NULL; + DECLARE_ALIGNED(uint32_t enc_keys[15*4], 16); + DECLARE_ALIGNED(uint32_t dec_keys[15*4], 16); + static uint8_t next_iv[16]; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.symmetric; + + if (tc->direction != ACVP_SYM_CIPH_DIR_ENCRYPT && + tc->direction != ACVP_SYM_CIPH_DIR_DECRYPT) { + fprintf(stderr, "Unsupported direction\n"); + return EXIT_FAILURE; + } + + switch (tc->key_len) { + case 128: + IMB_AES_KEYEXP_128(mb_mgr, tc->key, enc_keys, dec_keys); + break; + case 192: + IMB_AES_KEYEXP_192(mb_mgr, tc->key, enc_keys, dec_keys); + break; + case 256: + IMB_AES_KEYEXP_256(mb_mgr, tc->key, enc_keys, dec_keys); + break; + default: + fprintf(stderr, "Unsupported AES key length\n"); + return EXIT_FAILURE; + } + + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len >> 3; + job->cipher_mode = IMB_CIPHER_CBC; + job->hash_alg = IMB_AUTH_NULL; + /* + * If Monte-carlo test, use the IV from the ciphertext of + * the previous iteration + */ + if (tc->test_type == ACVP_SYM_TEST_TYPE_MCT && + tc->mct_index != 0) + job->iv = next_iv; + else + job->iv = tc->iv; + + job->iv_len_in_bytes = tc->iv_len; + job->cipher_start_src_offset_in_bytes = 0; + job->enc_keys = enc_keys; + job->dec_keys = dec_keys; + + if (tc->direction == ACVP_SYM_CIPH_DIR_ENCRYPT) { + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_CIPHER_HASH; + job->src = tc->pt; + job->dst = tc->ct; + job->msg_len_to_cipher_in_bytes = tc->pt_len; + tc->ct_len = tc->pt_len; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } else /* DECRYPT */ { + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = tc->ct; + job->dst = tc->pt; + job->msg_len_to_cipher_in_bytes = tc->ct_len; + tc->pt_len = tc->ct_len; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } + /* + * If Monte-carlo test, copy the ciphertext for + * the IV of the next iteration + */ + if (tc->test_type == ACVP_SYM_TEST_TYPE_MCT) + memcpy(next_iv, tc->ct, 16); + + return EXIT_SUCCESS; +} + static int aes_gcm_handler(ACVP_TEST_CASE *test_case) { - ACVP_SYM_CIPHER_TC *tc; + ACVP_SYM_CIPHER_TC *tc; + IMB_JOB *job = NULL; + aes_gcm_init_var_iv_t gcm_init_var_iv = mb_mgr->gcm128_init_var_iv; + aes_gcm_enc_dec_update_t gcm_update_enc = mb_mgr->gcm128_enc_update; + aes_gcm_enc_dec_finalize_t gcm_finalize_enc = + mb_mgr->gcm128_enc_finalize; + aes_gcm_enc_dec_update_t gcm_update_dec = mb_mgr->gcm128_dec_update; + aes_gcm_enc_dec_finalize_t gcm_finalize_dec = + mb_mgr->gcm128_dec_finalize; + struct gcm_key_data key; + struct gcm_context_data ctx; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.symmetric; + + if (tc->direction != ACVP_SYM_CIPH_DIR_ENCRYPT && + tc->direction != ACVP_SYM_CIPH_DIR_DECRYPT) { + return 
EXIT_FAILURE; + } + + switch (tc->key_len) { + case 128: + IMB_AES128_GCM_PRE(mb_mgr, tc->key, &key); + break; + case 192: + IMB_AES192_GCM_PRE(mb_mgr, tc->key, &key); + break; + case 256: + IMB_AES256_GCM_PRE(mb_mgr, tc->key, &key); + break; + default: + fprintf(stderr, "Unsupported AES key length\n"); + return EXIT_FAILURE; + } + + if (direct_api == 1) { + switch (tc->key_len) { + case 128: + /* Function pointers already set for 128-bit key */ + break; + case 192: + gcm_init_var_iv = mb_mgr->gcm192_init_var_iv; + gcm_update_enc = mb_mgr->gcm192_enc_update; + gcm_finalize_enc = mb_mgr->gcm192_enc_finalize; + gcm_update_dec = mb_mgr->gcm192_dec_update; + gcm_finalize_dec = mb_mgr->gcm192_dec_finalize; + break; + case 256: + gcm_init_var_iv = mb_mgr->gcm256_init_var_iv; + gcm_update_enc = mb_mgr->gcm256_enc_update; + gcm_finalize_enc = mb_mgr->gcm256_enc_finalize; + gcm_update_dec = mb_mgr->gcm256_dec_update; + gcm_finalize_dec = mb_mgr->gcm256_dec_finalize; + break; + default: + fprintf(stderr, "Unsupported AES key length\n"); + return EXIT_FAILURE; + } + } else { + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len >> 3; + job->cipher_mode = IMB_CIPHER_GCM; + job->hash_alg = IMB_AUTH_AES_GMAC; + job->u.GCM.aad = tc->aad; + job->u.GCM.aad_len_in_bytes = tc->aad_len; + job->enc_keys = &key; + job->dec_keys = &key; + job->iv = tc->iv; + job->iv_len_in_bytes = tc->iv_len; + job->cipher_start_src_offset_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->auth_tag_output_len_in_bytes = tc->tag_len; + } + + if (tc->direction == ACVP_SYM_CIPH_DIR_ENCRYPT) { + if (direct_api == 1) { + gcm_init_var_iv(&key, &ctx, tc->iv, tc->iv_len, + tc->aad, tc->aad_len); + gcm_update_enc(&key, &ctx, tc->ct, + tc->pt, tc->pt_len); + gcm_finalize_enc(&key, &ctx, tc->tag, + tc->tag_len); + } else { + job->src = tc->pt; + job->dst = tc->ct; + job->msg_len_to_cipher_in_bytes = tc->pt_len; + job->msg_len_to_hash_in_bytes = tc->pt_len; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_CIPHER_HASH; + job->auth_tag_output = tc->tag; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } + } else /* DECRYPT */ { + uint8_t res_tag[MAX_TAG_LENGTH] = {0}; + + if (direct_api == 1) { + gcm_init_var_iv(&key, &ctx, tc->iv, tc->iv_len, + tc->aad, tc->aad_len); + gcm_update_dec(&key, &ctx, tc->pt, + tc->ct, tc->ct_len); + gcm_finalize_dec(&key, &ctx, + res_tag, tc->tag_len); + } else { + job->src = tc->ct; + job->dst = tc->pt; + job->msg_len_to_cipher_in_bytes = tc->ct_len; + job->msg_len_to_hash_in_bytes = tc->ct_len; + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->auth_tag_output = res_tag; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } + if (memcmp(res_tag, tc->tag, tc->tag_len) != 0) { + if (verbose) { + hexdump(stdout, "result tag: ", + res_tag, tc->tag_len); + hexdump(stdout, "reference tag: ", + tc->tag, tc->tag_len); + fprintf(stderr, "Invalid tag\n"); + } + return EXIT_FAILURE; + } + } + return EXIT_SUCCESS; +} + +static int aes_gmac_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_SYM_CIPHER_TC *tc; + IMB_JOB *job = NULL; + aes_gmac_init_t gmac_init_var = mb_mgr->gmac128_init; + aes_gmac_update_t gmac_update = mb_mgr->gmac128_update; + 
aes_gmac_finalize_t gmac_finalize = mb_mgr->gmac128_finalize; + struct gcm_key_data key; + struct gcm_context_data ctx; + IMB_HASH_ALG hash_mode; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.symmetric; + + if (tc->direction != ACVP_SYM_CIPH_DIR_ENCRYPT && + tc->direction != ACVP_SYM_CIPH_DIR_DECRYPT) { + return EXIT_FAILURE; + } + + switch (tc->key_len) { + case 128: + IMB_AES128_GCM_PRE(mb_mgr, tc->key, &key); + hash_mode = IMB_AUTH_AES_GMAC_128; + break; + case 192: + IMB_AES192_GCM_PRE(mb_mgr, tc->key, &key); + hash_mode = IMB_AUTH_AES_GMAC_192; + break; + case 256: + IMB_AES256_GCM_PRE(mb_mgr, tc->key, &key); + hash_mode = IMB_AUTH_AES_GMAC_256; + break; + default: + fprintf(stderr, "Unsupported AES key length\n"); + return EXIT_FAILURE; + } + + if (direct_api == 1) { + switch (tc->key_len) { + case 128: + /* Function pointers already set for 128-bit key */ + break; + case 192: + gmac_init_var = mb_mgr->gmac192_init; + gmac_update = mb_mgr->gmac192_update; + gmac_finalize = mb_mgr->gmac192_finalize; + break; + case 256: + gmac_init_var = mb_mgr->gmac256_init; + gmac_update = mb_mgr->gmac256_update; + gmac_finalize = mb_mgr->gmac256_finalize; + break; + default: + fprintf(stderr, "Unsupported AES key length\n"); + return EXIT_FAILURE; + } + } else { + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len >> 3; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = hash_mode; + job->u.GMAC._iv = tc->iv; + job->u.GMAC.iv_len_in_bytes = tc->iv_len; + job->u.GMAC._key = &key; + job->cipher_start_src_offset_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->auth_tag_output_len_in_bytes = tc->tag_len; + } + + if (tc->direction == ACVP_SYM_CIPH_DIR_ENCRYPT) { + if (direct_api == 1) { + gmac_init_var(&key, &ctx, tc->iv, tc->iv_len); + gmac_update(&key, &ctx, tc->aad, tc->aad_len); + gmac_finalize(&key, &ctx, tc->tag, + tc->tag_len); + } else { + job->src = tc->aad; + job->msg_len_to_hash_in_bytes = tc->aad_len; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_CIPHER_HASH; + job->auth_tag_output = tc->tag; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } + } else /* DECRYPT */ { + uint8_t res_tag[MAX_TAG_LENGTH] = {0}; + + if (direct_api == 1) { + gmac_init_var(&key, &ctx, tc->iv, tc->iv_len); + gmac_update(&key, &ctx, tc->aad, tc->aad_len); + gmac_finalize(&key, &ctx, res_tag, tc->tag_len); + } else { + job->src = tc->aad; + job->msg_len_to_hash_in_bytes = tc->aad_len; + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->auth_tag_output = res_tag; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } + if (memcmp(res_tag, tc->tag, tc->tag_len) != 0) { + if (verbose) { + hexdump(stdout, "result tag: ", + res_tag, tc->tag_len); + hexdump(stdout, "reference tag: ", + tc->tag, tc->tag_len); + fprintf(stderr, "Invalid tag\n"); + } + return EXIT_FAILURE; + } + } + return EXIT_SUCCESS; +} + +static int aes_ctr_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_SYM_CIPHER_TC *tc; + IMB_JOB *job = NULL; + DECLARE_ALIGNED(uint32_t enc_keys[15*4], 16); + DECLARE_ALIGNED(uint32_t dec_keys[15*4], 16); + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.symmetric; + + if (tc->direction != 
ACVP_SYM_CIPH_DIR_ENCRYPT && + tc->direction != ACVP_SYM_CIPH_DIR_DECRYPT) { + fprintf(stderr, "Unsupported direction\n"); + return EXIT_FAILURE; + } + + switch (tc->key_len) { + case 128: + IMB_AES_KEYEXP_128(mb_mgr, tc->key, enc_keys, dec_keys); + break; + case 192: + IMB_AES_KEYEXP_192(mb_mgr, tc->key, enc_keys, dec_keys); + break; + case 256: + IMB_AES_KEYEXP_256(mb_mgr, tc->key, enc_keys, dec_keys); + break; + default: + fprintf(stderr, "Unsupported AES key length\n"); + return EXIT_FAILURE; + } + + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len >> 3; + job->cipher_mode = IMB_CIPHER_CNTR; + job->hash_alg = IMB_AUTH_NULL; + + job->iv = tc->iv; + job->iv_len_in_bytes = tc->iv_len; + job->cipher_start_src_offset_in_bytes = 0; + job->enc_keys = enc_keys; + job->dec_keys = dec_keys; + + if (tc->direction == ACVP_SYM_CIPH_DIR_ENCRYPT) { + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_CIPHER_HASH; + job->src = tc->pt; + job->dst = tc->ct; + job->msg_len_to_cipher_in_bytes = tc->pt_len; + tc->ct_len = tc->pt_len; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } else /* DECRYPT */ { + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = tc->ct; + job->dst = tc->pt; + job->msg_len_to_cipher_in_bytes = tc->ct_len; + tc->pt_len = tc->ct_len; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } + return EXIT_SUCCESS; +} + +static int aes_ccm_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_SYM_CIPHER_TC *tc; + IMB_JOB *job = NULL; + DECLARE_ALIGNED(uint32_t enc_keys[15*4], 16); + DECLARE_ALIGNED(uint32_t dec_keys[15*4], 16); + uint8_t res_tag[MAX_TAG_LENGTH] = {0}; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.symmetric; + + if (tc->direction != ACVP_SYM_CIPH_DIR_ENCRYPT && + tc->direction != ACVP_SYM_CIPH_DIR_DECRYPT) { + fprintf(stderr, "Unsupported direction\n"); + return EXIT_FAILURE; + } + + switch (tc->key_len) { + case 128: + IMB_AES_KEYEXP_128(mb_mgr, tc->key, enc_keys, dec_keys); + break; + case 192: + IMB_AES_KEYEXP_192(mb_mgr, tc->key, enc_keys, dec_keys); + break; + case 256: + IMB_AES_KEYEXP_256(mb_mgr, tc->key, enc_keys, dec_keys); + break; + default: + fprintf(stderr, "Unsupported AES key length\n"); + return EXIT_FAILURE; + } + + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len >> 3; + job->cipher_mode = IMB_CIPHER_CCM; + job->hash_alg = IMB_AUTH_AES_CCM; + + job->iv = tc->iv; + job->iv_len_in_bytes = tc->iv_len; + job->cipher_start_src_offset_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->enc_keys = enc_keys; + job->dec_keys = dec_keys; + job->auth_tag_output_len_in_bytes = tc->tag_len; + job->u.CCM.aad = tc->aad; + job->u.CCM.aad_len_in_bytes = tc->aad_len; + + if (tc->direction == ACVP_SYM_CIPH_DIR_ENCRYPT) { + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = tc->pt; + job->dst = tc->ct; + job->msg_len_to_cipher_in_bytes = tc->pt_len; + job->msg_len_to_hash_in_bytes = tc->pt_len; + /* Auth tag must be placed at the end of the ciphertext. 
*/ + job->auth_tag_output = tc->ct + tc->pt_len; + tc->ct_len = tc->pt_len + tc->tag_len; + } else /* DECRYPT */ { + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_CIPHER_HASH; + job->src = tc->ct; + job->dst = tc->pt; + job->msg_len_to_hash_in_bytes = tc->ct_len; + job->msg_len_to_cipher_in_bytes = tc->ct_len; + job->auth_tag_output = res_tag; + tc->pt_len = tc->ct_len; + } + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + + if (tc->direction == ACVP_SYM_CIPH_DIR_DECRYPT) { + /* Tag is placed at the end of the ciphertext. */ + const uint8_t *ref_tag = tc->ct + tc->ct_len; + + if (memcmp(res_tag, ref_tag, tc->tag_len) != 0) { + if (verbose) { + hexdump(stdout, "result tag: ", + res_tag, tc->tag_len); + hexdump(stdout, "reference tag: ", + ref_tag, tc->tag_len); + fprintf(stderr, "Invalid tag\n"); + } + return EXIT_FAILURE; + } + } + return EXIT_SUCCESS; +} + +static int aes_cmac_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_CMAC_TC *tc; + IMB_JOB *job = NULL; + DECLARE_ALIGNED(uint32_t expkey[4*15], 16); + DECLARE_ALIGNED(uint32_t dust[4*15], 16); + uint32_t skey1[4], skey2[4]; + uint8_t res_tag[MAX_TAG_LENGTH] = {0}; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.cmac; + + switch (tc->key_len) { + case 16: + IMB_AES_KEYEXP_128(mb_mgr, tc->key, expkey, dust); + IMB_AES_CMAC_SUBKEY_GEN_128(mb_mgr, expkey, skey1, skey2); + break; + case 32: + IMB_AES_KEYEXP_256(mb_mgr, tc->key, expkey, dust); + IMB_AES_CMAC_SUBKEY_GEN_256(mb_mgr, expkey, skey1, skey2); + break; + default: + fprintf(stderr, "Unsupported AES key length\n"); + return EXIT_FAILURE; + } + + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len; + job->cipher_mode = IMB_CIPHER_NULL; + + if (tc->key_len == 32) + job->hash_alg = IMB_AUTH_AES_CMAC_256; + else + job->hash_alg = IMB_AUTH_AES_CMAC; + + job->cipher_start_src_offset_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->u.CMAC._key_expanded = expkey; + job->u.CMAC._skey1 = skey1; + job->u.CMAC._skey2 = skey2; + job->src = tc->msg; + job->msg_len_to_hash_in_bytes = tc->msg_len; + job->auth_tag_output_len_in_bytes = tc->mac_len; + + if (tc->verify == 1) + job->auth_tag_output = res_tag; + else /* verify == 0 */ + job->auth_tag_output = tc->mac; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + + if (tc->verify == 1) { + if (memcmp(res_tag, tc->mac, tc->mac_len) != 0) { + if (verbose) { + hexdump(stdout, "result tag: ", + res_tag, (tc->mac_len)); + hexdump(stdout, "reference tag: ", + tc->mac, tc->mac_len); + fprintf(stderr, "Invalid tag\n"); + } + tc->ver_disposition = ACVP_TEST_DISPOSITION_FAIL; + } else + tc->ver_disposition = ACVP_TEST_DISPOSITION_PASS; + } + return EXIT_SUCCESS; +} + +static int hmac_sha1_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HMAC_TC *tc; + IMB_JOB *job = NULL; + uint32_t i = 0; + DECLARE_ALIGNED(uint8_t ipad_hash[IMB_SHA1_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t opad_hash[IMB_SHA1_DIGEST_SIZE_IN_BYTES], 16); + uint8_t key[IMB_SHA1_BLOCK_SIZE]; + uint8_t buf[IMB_SHA1_BLOCK_SIZE]; + uint32_t key_len = 0; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.hmac; + + /* prepare the key */ + memset(key, 0, sizeof(key)); + if (tc->key_len <= IMB_SHA1_BLOCK_SIZE) 
{ + memcpy(key, tc->key, tc->key_len); + key_len = (uint32_t) tc->key_len; + } else { + IMB_SHA1(mb_mgr, tc->key, tc->key_len, key); + key_len = IMB_SHA1_DIGEST_SIZE_IN_BYTES; + } + + /* compute ipad hash */ + memset(buf, 0x36, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA1_ONE_BLOCK(mb_mgr, buf, ipad_hash); + + /* compute opad hash */ + memset(buf, 0x5c, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA1_ONE_BLOCK(mb_mgr, buf, opad_hash); + + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_HMAC_SHA_1; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->u.HMAC._hashed_auth_key_xor_ipad = ipad_hash; + job->u.HMAC._hashed_auth_key_xor_opad = opad_hash; + job->src = tc->msg; + job->msg_len_to_hash_in_bytes = tc->msg_len; + /* + * The library only supports 12 or 20-byte tags and therefore, + * we are outputting 20 bytes always + */ + job->auth_tag_output_len_in_bytes = IMB_SHA1_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->mac; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + return EXIT_SUCCESS; +} + +static int hmac_sha256_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HMAC_TC *tc; + IMB_JOB *job = NULL; + uint32_t i = 0; + DECLARE_ALIGNED(uint8_t ipad_hash[IMB_SHA256_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t opad_hash[IMB_SHA256_DIGEST_SIZE_IN_BYTES], 16); + uint8_t key[IMB_SHA_256_BLOCK_SIZE]; + uint8_t buf[IMB_SHA_256_BLOCK_SIZE]; + uint32_t key_len = 0; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.hmac; + + /* prepare the key */ + memset(key, 0, sizeof(key)); + if (tc->key_len <= IMB_SHA_256_BLOCK_SIZE) { + memcpy(key, tc->key, tc->key_len); + key_len = (uint32_t) tc->key_len; + } else { + IMB_SHA256(mb_mgr, tc->key, tc->key_len, key); + key_len = IMB_SHA256_DIGEST_SIZE_IN_BYTES; + } + + /* compute ipad hash */ + memset(buf, 0x36, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA256_ONE_BLOCK(mb_mgr, buf, ipad_hash); + + /* compute opad hash */ + memset(buf, 0x5c, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA256_ONE_BLOCK(mb_mgr, buf, opad_hash); + + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_HMAC_SHA_256; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->u.HMAC._hashed_auth_key_xor_ipad = ipad_hash; + job->u.HMAC._hashed_auth_key_xor_opad = opad_hash; + job->src = tc->msg; + job->msg_len_to_hash_in_bytes = tc->msg_len; + /* + * The library only supports 16 or 32-byte tags and therefore, + * we are outputting 32 bytes always + */ + job->auth_tag_output_len_in_bytes = IMB_SHA256_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->mac; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + return EXIT_SUCCESS; +} + +static int hmac_sha224_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HMAC_TC *tc; IMB_JOB *job = NULL; - aes_gcm_init_var_iv_t init_var_iv = mb_mgr->gcm128_init_var_iv; - aes_gcm_enc_dec_update_t update_enc = 
mb_mgr->gcm128_enc_update; - aes_gcm_enc_dec_finalize_t finalize_enc = mb_mgr->gcm128_enc_finalize; - aes_gcm_enc_dec_update_t update_dec = mb_mgr->gcm128_dec_update; - aes_gcm_enc_dec_finalize_t finalize_dec = mb_mgr->gcm128_dec_finalize; - int ret = 0; + uint32_t i = 0; + DECLARE_ALIGNED(uint8_t ipad_hash[IMB_SHA224_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t opad_hash[IMB_SHA224_DIGEST_SIZE_IN_BYTES], 16); + uint8_t key[IMB_SHA_256_BLOCK_SIZE]; + uint8_t buf[IMB_SHA_256_BLOCK_SIZE]; + uint32_t key_len = 0; - struct gcm_key_data key; - struct gcm_context_data ctx; + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.hmac; + + /* prepare the key */ + memset(key, 0, sizeof(key)); + if (tc->key_len <= IMB_SHA_256_BLOCK_SIZE) { + memcpy(key, tc->key, tc->key_len); + key_len = (uint32_t) tc->key_len; + } else { + IMB_SHA224(mb_mgr, tc->key, tc->key_len, key); + key_len = IMB_SHA224_DIGEST_SIZE_IN_BYTES; + } + + /* compute ipad hash */ + memset(buf, 0x36, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA224_ONE_BLOCK(mb_mgr, buf, ipad_hash); + + /* compute opad hash */ + memset(buf, 0x5c, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA224_ONE_BLOCK(mb_mgr, buf, opad_hash); + + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_HMAC_SHA_224; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->u.HMAC._hashed_auth_key_xor_ipad = ipad_hash; + job->u.HMAC._hashed_auth_key_xor_opad = opad_hash; + job->src = tc->msg; + job->msg_len_to_hash_in_bytes = tc->msg_len; + /* + * The library only supports 14 or 28-byte tags and therefore, + * we are outputting 28 bytes always + */ + job->auth_tag_output_len_in_bytes = IMB_SHA224_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->mac; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + return EXIT_SUCCESS; +} + +static int hmac_sha384_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HMAC_TC *tc; + IMB_JOB *job = NULL; + uint32_t i = 0; + DECLARE_ALIGNED(uint8_t ipad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t opad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + uint8_t key[IMB_SHA_384_BLOCK_SIZE]; + uint8_t buf[IMB_SHA_384_BLOCK_SIZE]; + uint32_t key_len = 0; if (test_case == NULL) return EXIT_FAILURE; - tc = test_case->tc.symmetric; + tc = test_case->tc.hmac; - if (tc->direction != ACVP_SYM_CIPH_DIR_ENCRYPT && - tc->direction != ACVP_SYM_CIPH_DIR_DECRYPT) { - fprintf(stderr, "Unsupported direction\n"); + /* prepare the key */ + memset(key, 0, sizeof(key)); + if (tc->key_len <= IMB_SHA_384_BLOCK_SIZE) { + memcpy(key, tc->key, tc->key_len); + key_len = (uint32_t) tc->key_len; + } else { + IMB_SHA384(mb_mgr, tc->key, tc->key_len, key); + key_len = IMB_SHA384_DIGEST_SIZE_IN_BYTES; + } + + /* compute ipad hash */ + memset(buf, 0x36, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA384_ONE_BLOCK(mb_mgr, buf, ipad_hash); + + /* compute opad hash */ + memset(buf, 0x5c, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA384_ONE_BLOCK(mb_mgr, buf, opad_hash); + + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_HMAC_SHA_384; + 
job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->u.HMAC._hashed_auth_key_xor_ipad = ipad_hash; + job->u.HMAC._hashed_auth_key_xor_opad = opad_hash; + job->src = tc->msg; + job->msg_len_to_hash_in_bytes = tc->msg_len; + /* + * The library only supports 24 or 48-byte tags and therefore, + * we are outputting 48 bytes always + */ + job->auth_tag_output_len_in_bytes = IMB_SHA384_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->mac; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + return EXIT_SUCCESS; +} + +static int hmac_sha512_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HMAC_TC *tc; + IMB_JOB *job = NULL; + uint32_t i = 0; + DECLARE_ALIGNED(uint8_t ipad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t opad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + uint8_t key[IMB_SHA_512_BLOCK_SIZE]; + uint8_t buf[IMB_SHA_512_BLOCK_SIZE]; + uint32_t key_len = 0; + + if (test_case == NULL) return EXIT_FAILURE; + + tc = test_case->tc.hmac; + + /* prepare the key */ + memset(key, 0, sizeof(key)); + if (tc->key_len <= IMB_SHA_512_BLOCK_SIZE) { + memcpy(key, tc->key, tc->key_len); + key_len = (uint32_t) tc->key_len; + } else { + IMB_SHA512(mb_mgr, tc->key, tc->key_len, key); + key_len = IMB_SHA512_DIGEST_SIZE_IN_BYTES; } - switch (tc->key_len) { - case 128: - IMB_AES128_GCM_PRE(mb_mgr, tc->key, &key); - break; - case 192: - IMB_AES192_GCM_PRE(mb_mgr, tc->key, &key); - break; - case 256: - IMB_AES256_GCM_PRE(mb_mgr, tc->key, &key); - break; - default: - fprintf(stderr, "Unsupported AES key length\n"); - ret = 1; - goto end; + /* compute ipad hash */ + memset(buf, 0x36, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA512_ONE_BLOCK(mb_mgr, buf, ipad_hash); + + /* compute opad hash */ + memset(buf, 0x5c, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA512_ONE_BLOCK(mb_mgr, buf, opad_hash); + + job = IMB_GET_NEXT_JOB(mb_mgr); + job->key_len_in_bytes = tc->key_len; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_HMAC_SHA_512; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->u.HMAC._hashed_auth_key_xor_ipad = ipad_hash; + job->u.HMAC._hashed_auth_key_xor_opad = opad_hash; + job->src = tc->msg; + job->msg_len_to_hash_in_bytes = tc->msg_len; + /* + * The library only supports 32 or 64-byte tags and therefore, + * we are outputting 64 bytes always + */ + job->auth_tag_output_len_in_bytes = IMB_SHA512_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->mac; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + return EXIT_SUCCESS; +} + +static int sha1_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HASH_TC *tc; + IMB_JOB *job = NULL; + unsigned len; + uint8_t *m; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.hash; + + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) { + m = malloc(tc->msg_len * 3); + len = tc->msg_len * 3; + + if (m == NULL) { + printf("Can't allocate buffer memory\n"); + return EXIT_FAILURE; + } + memcpy(m, tc->m1, tc->msg_len); + memcpy(m + tc->msg_len, tc->m2, tc->msg_len); + memcpy(m + tc->msg_len * 2, tc->m3, tc->msg_len); + } else { + m = 
tc->msg; + len = tc->msg_len; } if (direct_api == 1) { - switch (tc->key_len) { - case 128: - /* Function pointers already set for 128-bit key */ - break; - case 192: - init_var_iv = mb_mgr->gcm192_init_var_iv; - update_enc = mb_mgr->gcm192_enc_update; - finalize_enc = mb_mgr->gcm192_enc_finalize; - update_dec = mb_mgr->gcm192_dec_update; - finalize_dec = mb_mgr->gcm192_dec_finalize; - break; - case 256: - init_var_iv = mb_mgr->gcm256_init_var_iv; - update_enc = mb_mgr->gcm256_enc_update; - finalize_enc = mb_mgr->gcm256_enc_finalize; - update_dec = mb_mgr->gcm256_dec_update; - finalize_dec = mb_mgr->gcm256_dec_finalize; - break; - default: - fprintf(stderr, "Unsupported AES key length\n"); - ret = 1; - goto end; + IMB_SHA1(mb_mgr, m, len, tc->md); + } else { + job = IMB_GET_NEXT_JOB(mb_mgr); + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_SHA_1; + job->cipher_start_src_offset_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->src = m; + job->msg_len_to_hash_in_bytes = len; + job->auth_tag_output_len_in_bytes = + IMB_SHA1_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->md; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) + free(m); + tc->md_len = IMB_SHA1_DIGEST_SIZE_IN_BYTES; + return EXIT_SUCCESS; +} + +static int sha2_224_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HASH_TC *tc; + IMB_JOB *job = NULL; + unsigned len; + uint8_t *m; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.hash; + + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) { + m = malloc(tc->msg_len * 3); + len = tc->msg_len * 3; + + if (m == NULL) { + printf("Can't allocate buffer memory\n"); + return EXIT_FAILURE; } + memcpy(m, tc->m1, tc->msg_len); + memcpy(m + tc->msg_len, tc->m2, tc->msg_len); + memcpy(m + tc->msg_len * 2, tc->m3, tc->msg_len); + } else { + m = tc->msg; + len = tc->msg_len; + } + + if (direct_api == 1) { + IMB_SHA224(mb_mgr, m, len, tc->md); } else { job = IMB_GET_NEXT_JOB(mb_mgr); - job->key_len_in_bytes = tc->key_len >> 3; - job->cipher_mode = IMB_CIPHER_GCM; - job->hash_alg = IMB_AUTH_AES_GMAC; - job->iv = tc->iv; - job->iv_len_in_bytes = tc->iv_len; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_SHA_224; job->cipher_start_src_offset_in_bytes = 0; job->hash_start_src_offset_in_bytes = 0; - job->enc_keys = &key; - job->dec_keys = &key; - job->u.GCM.aad = tc->aad; - job->u.GCM.aad_len_in_bytes = tc->aad_len; - job->auth_tag_output_len_in_bytes = tc->tag_len; + job->src = m; + job->msg_len_to_hash_in_bytes = len; + job->auth_tag_output_len_in_bytes = + IMB_SHA224_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->md; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } } + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) + free(m); + tc->md_len = IMB_SHA224_DIGEST_SIZE_IN_BYTES; + return EXIT_SUCCESS; +} - if (tc->direction == ACVP_SYM_CIPH_DIR_ENCRYPT) { - if (direct_api == 1) { - init_var_iv(&key, &ctx, tc->iv, tc->iv_len, - tc->aad, tc->aad_len); - update_enc(&key, &ctx, tc->ct, tc->pt, tc->pt_len); - finalize_enc(&key, &ctx, 
tc->tag, tc->tag_len); - } else { - job->cipher_direction = IMB_DIR_ENCRYPT; - job->chain_order = IMB_ORDER_CIPHER_HASH; - job->src = tc->pt; - job->dst = tc->ct; - job->msg_len_to_cipher_in_bytes = tc->pt_len; - job->msg_len_to_hash_in_bytes = tc->pt_len; - job->auth_tag_output = tc->tag; +static int sha2_256_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HASH_TC *tc; + IMB_JOB *job = NULL; + unsigned len; + uint8_t *m; - job = IMB_SUBMIT_JOB(mb_mgr); - if (job == NULL) - job = IMB_FLUSH_JOB(mb_mgr); - if (job->status != IMB_STATUS_COMPLETED) { - ret = 1; - fprintf(stderr, "Invalid job\n"); - } + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.hash; + + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) { + m = malloc(tc->msg_len * 3); + len = tc->msg_len * 3; + + if (m == NULL) { + printf("Can't allocate buffer memory\n"); + return EXIT_FAILURE; } - } else /* DECRYPT */ { - uint8_t res_tag[MAX_TAG_LENGTH]; + memcpy(m, tc->m1, tc->msg_len); + memcpy(m + tc->msg_len, tc->m2, tc->msg_len); + memcpy(m + tc->msg_len * 2, tc->m3, tc->msg_len); + } else { + m = tc->msg; + len = tc->msg_len; + } - if (direct_api == 1) { - init_var_iv(&key, &ctx, tc->iv, tc->iv_len, - tc->aad, tc->aad_len); - update_dec(&key, &ctx, tc->pt, tc->ct, tc->ct_len); - finalize_dec(&key, &ctx, res_tag, tc->tag_len); - } else { - job->cipher_direction = IMB_DIR_DECRYPT; - job->chain_order = IMB_ORDER_HASH_CIPHER; - job->src = tc->ct; - job->dst = tc->pt; - job->msg_len_to_cipher_in_bytes = tc->ct_len; - job->msg_len_to_hash_in_bytes = tc->ct_len; - job->auth_tag_output = res_tag; + if (direct_api == 1) { + IMB_SHA256(mb_mgr, m, len, tc->md); + } else { + job = IMB_GET_NEXT_JOB(mb_mgr); + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_SHA_256; + job->cipher_start_src_offset_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->src = m; + job->msg_len_to_hash_in_bytes = len; + job->auth_tag_output_len_in_bytes = + IMB_SHA256_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->md; - job = IMB_SUBMIT_JOB(mb_mgr); - if (job == NULL) - job = IMB_FLUSH_JOB(mb_mgr); - if (job->status != IMB_STATUS_COMPLETED) { - ret = 1; - fprintf(stderr, "Invalid job\n"); - goto end; - } + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; } - if (memcmp(res_tag, tc->tag, tc->tag_len) != 0) { - ret = 1; - if (verbose) { - hexdump(stdout, "result tag: ", - res_tag, tc->tag_len); - hexdump(stdout, "reference tag: ", - tc->tag, tc->tag_len); - fprintf(stderr, "Invalid tag\n"); - } + } + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) + free(m); + tc->md_len = IMB_SHA256_DIGEST_SIZE_IN_BYTES; + return EXIT_SUCCESS; +} + +static int sha2_384_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HASH_TC *tc; + IMB_JOB *job = NULL; + unsigned len; + uint8_t *m; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.hash; + + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) { + m = malloc(tc->msg_len * 3); + len = tc->msg_len * 3; + + if (m == NULL) { + printf("Can't allocate buffer memory\n"); + return EXIT_FAILURE; } + memcpy(m, tc->m1, tc->msg_len); + memcpy(m + tc->msg_len, tc->m2, tc->msg_len); + memcpy(m + tc->msg_len * 2, tc->m3, tc->msg_len); + } else { + m = tc->msg; + len = tc->msg_len; } -end: - return ret; + + if (direct_api == 1) { + IMB_SHA384(mb_mgr, m, len, tc->md); + } else { + 
job = IMB_GET_NEXT_JOB(mb_mgr); + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_SHA_384; + job->cipher_start_src_offset_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->src = m; + job->msg_len_to_hash_in_bytes = len; + job->auth_tag_output_len_in_bytes = + IMB_SHA384_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->md; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) + free(m); + tc->md_len = IMB_SHA384_DIGEST_SIZE_IN_BYTES; + return EXIT_SUCCESS; +} + +static int sha2_512_handler(ACVP_TEST_CASE *test_case) +{ + ACVP_HASH_TC *tc; + IMB_JOB *job = NULL; + unsigned len; + uint8_t *m; + + if (test_case == NULL) + return EXIT_FAILURE; + + tc = test_case->tc.hash; + + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) { + m = malloc(tc->msg_len * 3); + len = tc->msg_len * 3; + + if (m == NULL) { + printf("Can't allocate buffer memory\n"); + return EXIT_FAILURE; + } + memcpy(m, tc->m1, tc->msg_len); + memcpy(m + tc->msg_len, tc->m2, tc->msg_len); + memcpy(m + tc->msg_len * 2, tc->m3, tc->msg_len); + } else { + m = tc->msg; + len = tc->msg_len; + } + + if (direct_api == 1) { + IMB_SHA512(mb_mgr, m, len, tc->md); + } else { + job = IMB_GET_NEXT_JOB(mb_mgr); + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_SHA_512; + job->cipher_start_src_offset_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->src = m; + job->msg_len_to_hash_in_bytes = len; + job->auth_tag_output_len_in_bytes = + IMB_SHA512_DIGEST_SIZE_IN_BYTES; + job->auth_tag_output = tc->md; + + job = IMB_SUBMIT_JOB(mb_mgr); + if (job == NULL) + job = IMB_FLUSH_JOB(mb_mgr); + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "Invalid job\n"); + return EXIT_FAILURE; + } + } + if (tc->test_type == ACVP_HASH_TEST_TYPE_MCT) + free(m); + tc->md_len = IMB_SHA512_DIGEST_SIZE_IN_BYTES; + return EXIT_SUCCESS; } static void usage(const char *app_name) @@ -205,8 +1295,7 @@ static void usage(const char *app_name) app_name, app_name); } -int -main(int argc, char **argv) +int main(int argc, char **argv) { ACVP_RESULT acvp_ret = ACVP_SUCCESS; ACVP_CTX *ctx = NULL; @@ -287,7 +1376,7 @@ main(int argc, char **argv) goto exit; } - /* Create test session and enable GCM tests */ + /* Create test session and enable supported algorithms */ acvp_ret = acvp_create_test_session(&ctx, logger, ACVP_LOG_LVL_INFO); if (acvp_ret != ACVP_SUCCESS) goto exit; @@ -296,6 +1385,65 @@ main(int argc, char **argv) &aes_gcm_handler) != ACVP_SUCCESS) goto exit; + if (acvp_cap_sym_cipher_enable(ctx, ACVP_AES_CBC, + &aes_cbc_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_sym_cipher_enable(ctx, ACVP_AES_CTR, + &aes_ctr_handler) != ACVP_SUCCESS) + goto exit; + if (acvp_cap_sym_cipher_enable(ctx, ACVP_AES_GMAC, + &aes_gmac_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_sym_cipher_enable(ctx, ACVP_AES_CCM, + &aes_ccm_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_cmac_enable(ctx, ACVP_CMAC_AES, + &aes_cmac_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hmac_enable(ctx, ACVP_HMAC_SHA1, + &hmac_sha1_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hmac_enable(ctx, ACVP_HMAC_SHA2_256, + &hmac_sha256_handler) != 
ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hmac_enable(ctx, ACVP_HMAC_SHA2_224, + &hmac_sha224_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hmac_enable(ctx, ACVP_HMAC_SHA2_384, + &hmac_sha384_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hmac_enable(ctx, ACVP_HMAC_SHA2_512, + &hmac_sha512_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hash_enable(ctx, ACVP_HASH_SHA1, + &sha1_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hash_enable(ctx, ACVP_HASH_SHA224, + &sha2_224_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hash_enable(ctx, ACVP_HASH_SHA256, + &sha2_256_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hash_enable(ctx, ACVP_HASH_SHA384, + &sha2_384_handler) != ACVP_SUCCESS) + goto exit; + + if (acvp_cap_hash_enable(ctx, ACVP_HASH_SHA512, + &sha2_512_handler) != ACVP_SUCCESS) + goto exit; + /* Allocate and initialize MB_MGR */ if (test_arch == IMB_ARCH_NOAESNI) mb_mgr = alloc_mb_mgr(IMB_FLAG_AESNI_OFF); @@ -333,16 +1481,35 @@ main(int argc, char **argv) goto exit; } + if ((mb_mgr != NULL) && (mb_mgr->features & IMB_FEATURE_SELF_TEST)) { + if (mb_mgr->features & IMB_FEATURE_SELF_TEST_PASS) + printf("SELF-TEST: PASS\n"); + else + printf("SELF-TEST: FAIL\n"); + } else { + printf("SELF-TEST: N/A (requires >= v1.3)\n"); + } + + if (imb_get_errno(mb_mgr) != 0) { + fprintf(stderr, "Error initializing MB_MGR structure! %s\n", + imb_get_strerror(imb_get_errno(mb_mgr))); + goto exit; + } + /* Parse request file, run crypto tests and write out response file */ acvp_run_vectors_from_file(ctx, req_filename, resp_filename); + ret = EXIT_SUCCESS; + +exit: /* Free MB_MGR and test session */ - free_mb_mgr(mb_mgr); + if (mb_mgr != NULL) + free_mb_mgr(mb_mgr); + if (ctx != NULL) acvp_free_test_session(ctx); - ret = EXIT_SUCCESS; -exit: - free(req_filename); + if (req_filename != NULL) + free(req_filename); return ret; } diff --git a/test/aes_test.c b/test/aes_test.c index abce11f834cc6728fabf3d537a47968c9ca8f36b..ca2ed4b2a575073ee91ba1754b5ec71ca50b193c 100644 --- a/test/aes_test.c +++ b/test/aes_test.c @@ -36,6 +36,8 @@ #include "gcm_ctr_vectors_test.h" #include "utils.h" +#define MAX_BURST_JOBS 64 + int aes_test(struct IMB_MGR *mb_mgr); struct aes_vector { @@ -1972,6 +1974,230 @@ end_alloc: return ret; } +static int +test_aes_many_burst(struct IMB_MGR *mb_mgr, + void *enc_keys, + void *dec_keys, + const void *iv, + const uint8_t *in_text, + const uint8_t *out_text, + const unsigned text_len, + const int dir, + const int order, + const IMB_CIPHER_MODE cipher, + const int in_place, + const int key_len, + const int num_jobs) +{ + struct IMB_JOB *job, *jobs[MAX_BURST_JOBS] = {NULL}; + uint8_t padding[16]; + uint8_t **targets = malloc(num_jobs * sizeof(void *)); + int i, completed_jobs, jobs_rx = 0, ret = -1; + + if (targets == NULL) + goto end_alloc; + + memset(targets, 0, num_jobs * sizeof(void *)); + memset(padding, -1, sizeof(padding)); + + for (i = 0; i < num_jobs; i++) { + targets[i] = malloc(text_len + (sizeof(padding) * 2)); + if (targets[i] == NULL) + goto end_alloc; + memset(targets[i], -1, text_len + (sizeof(padding) * 2)); + if (in_place) { + /* copy input text to the allocated buffer */ + memcpy(targets[i] + sizeof(padding), in_text, text_len); + } + } + + while (IMB_GET_NEXT_BURST(mb_mgr, num_jobs, jobs) < (uint32_t)num_jobs) + IMB_FLUSH_BURST(mb_mgr, num_jobs, jobs); + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + + job->cipher_direction = dir; + job->chain_order = order; + job->key_len_in_bytes = key_len; + job->cipher_mode = 
cipher; + job->hash_alg = IMB_AUTH_NULL; + + if (!in_place) { + job->dst = targets[i] + sizeof(padding); + job->src = in_text; + } else { + job->dst = targets[i] + sizeof(padding); + job->src = targets[i] + sizeof(padding); + } + + job->enc_keys = enc_keys; + job->dec_keys = dec_keys; + job->iv = iv; + job->iv_len_in_bytes = 16; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = text_len; + job->user_data = targets[i]; + job->user_data2 = (void *)((uint64_t)i); + } + + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, num_jobs, jobs); + if (completed_jobs == 0) { + int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + goto end; + } + } + +check_burst_jobs: + for (i = 0; i < completed_jobs; i++) { + job = jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %d status not complete!\n", i+1); + goto end; + } + + if (!aes_job_ok(job, out_text, job->user_data, padding, + sizeof(padding), text_len)) + goto end; + jobs_rx++; + } + + if (jobs_rx != num_jobs) { + completed_jobs = IMB_FLUSH_BURST(mb_mgr, + num_jobs - completed_jobs, + jobs); + if (completed_jobs == 0) { + printf("Expected %d jobs, received %d\n", + num_jobs, jobs_rx); + goto end; + } + goto check_burst_jobs; + } + ret = 0; + + end: + + end_alloc: + if (targets != NULL) { + for (i = 0; i < num_jobs; i++) + free(targets[i]); + free(targets); + } + + return ret; +} + +static int +test_aes_many_cipher_burst(struct IMB_MGR *mb_mgr, + void *enc_keys, + void *dec_keys, + const void *iv, + const uint8_t *in_text, + const uint8_t *out_text, + const unsigned text_len, + const int dir, + const IMB_CIPHER_MODE cipher, + const int in_place, + const int key_len, + const int num_jobs) +{ + struct IMB_JOB *job, jobs[MAX_BURST_JOBS]; + uint8_t padding[16]; + uint8_t **targets = malloc(num_jobs * sizeof(void *)); + int i, completed_jobs, jobs_rx = 0, ret = -1; + + if (targets == NULL) + goto end_alloc; + + memset(targets, 0, num_jobs * sizeof(void *)); + memset(padding, -1, sizeof(padding)); + + for (i = 0; i < num_jobs; i++) { + targets[i] = malloc(text_len + (sizeof(padding) * 2)); + if (targets[i] == NULL) + goto end_alloc; + memset(targets[i], -1, text_len + (sizeof(padding) * 2)); + if (in_place) { + /* copy input text to the allocated buffer */ + memcpy(targets[i] + sizeof(padding), in_text, text_len); + } + } + + for (i = 0; i < num_jobs; i++) { + job = &jobs[i]; + + /* only set fields for generic burst API */ + if (!in_place) { + job->dst = targets[i] + sizeof(padding); + job->src = in_text; + } else { + job->dst = targets[i] + sizeof(padding); + job->src = targets[i] + sizeof(padding); + } + + job->enc_keys = enc_keys; + job->dec_keys = dec_keys; + job->iv = iv; + job->iv_len_in_bytes = 16; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = text_len; + job->user_data = targets[i]; + job->user_data2 = (void *)((uint64_t)i); + } + + completed_jobs = IMB_SUBMIT_CIPHER_BURST(mb_mgr, jobs, num_jobs, + cipher, dir, key_len); + if (completed_jobs != num_jobs) { + int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + goto end; + } else { + printf("submit_burst error: not enough " + "jobs returned!\n"); + goto end; + } + } + + for (i = 0; i < num_jobs; i++) { + job = &jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %d status not complete!\n", i+1); + goto end; + } + + if (!aes_job_ok(job, out_text, job->user_data, 
padding, + sizeof(padding), text_len)) + goto end; + jobs_rx++; + } + + if (jobs_rx != num_jobs) { + printf("Expected %d jobs, received %d\n", num_jobs, jobs_rx); + goto end; + } + ret = 0; + + end: + + end_alloc: + if (targets != NULL) { + for (i = 0; i < num_jobs; i++) + free(targets[i]); + free(targets); + } + + return ret; +} + static void test_aes_vectors(struct IMB_MGR *mb_mgr, struct test_suite_context *ctx128, @@ -2027,6 +2253,19 @@ test_aes_vectors(struct IMB_MGR *mb_mgr, test_suite_update(ctx, 1, 0); } + if (test_aes_many_burst(mb_mgr, enc_keys, dec_keys, + vec_tab[vect].IV, + vec_tab[vect].P, vec_tab[vect].C, + (unsigned) vec_tab[vect].Plen, + IMB_DIR_ENCRYPT, IMB_ORDER_CIPHER_HASH, + cipher, 0, + vec_tab[vect].Klen, num_jobs)) { + printf("error #%d encrypt burst\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + if (test_aes_many(mb_mgr, enc_keys, dec_keys, vec_tab[vect].IV, vec_tab[vect].C, vec_tab[vect].P, @@ -2040,6 +2279,19 @@ test_aes_vectors(struct IMB_MGR *mb_mgr, test_suite_update(ctx, 1, 0); } + if (test_aes_many_burst(mb_mgr, enc_keys, dec_keys, + vec_tab[vect].IV, + vec_tab[vect].C, vec_tab[vect].P, + (unsigned) vec_tab[vect].Plen, + IMB_DIR_DECRYPT, IMB_ORDER_HASH_CIPHER, + cipher, 0, + vec_tab[vect].Klen, num_jobs)) { + printf("error #%d decrypt burst\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + if (test_aes_many(mb_mgr, enc_keys, dec_keys, vec_tab[vect].IV, vec_tab[vect].P, vec_tab[vect].C, @@ -2053,6 +2305,19 @@ test_aes_vectors(struct IMB_MGR *mb_mgr, test_suite_update(ctx, 1, 0); } + if (test_aes_many_burst(mb_mgr, enc_keys, dec_keys, + vec_tab[vect].IV, + vec_tab[vect].P, vec_tab[vect].C, + (unsigned) vec_tab[vect].Plen, + IMB_DIR_ENCRYPT, IMB_ORDER_CIPHER_HASH, + cipher, 1, + vec_tab[vect].Klen, num_jobs)) { + printf("error #%d encrypt burst in-place\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + if (test_aes_many(mb_mgr, enc_keys, dec_keys, vec_tab[vect].IV, vec_tab[vect].C, vec_tab[vect].P, @@ -2065,6 +2330,76 @@ test_aes_vectors(struct IMB_MGR *mb_mgr, } else { test_suite_update(ctx, 1, 0); } + + if (test_aes_many_burst(mb_mgr, enc_keys, dec_keys, + vec_tab[vect].IV, + vec_tab[vect].C, vec_tab[vect].P, + (unsigned) vec_tab[vect].Plen, + IMB_DIR_DECRYPT, IMB_ORDER_HASH_CIPHER, + cipher, 1, + vec_tab[vect].Klen, num_jobs)) { + printf("error #%d decrypt burst in-place\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + /** + * Test cipher only burst API + * Currently only AES-CBC supported + */ + if (cipher != IMB_CIPHER_CBC) + continue; + + if (test_aes_many_cipher_burst(mb_mgr, enc_keys, dec_keys, + vec_tab[vect].IV, + vec_tab[vect].P, vec_tab[vect].C, + (unsigned) vec_tab[vect].Plen, + IMB_DIR_ENCRYPT, cipher, 0, + vec_tab[vect].Klen, num_jobs)) { + printf("error #%d encrypt cipher burst\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + if (test_aes_many_cipher_burst(mb_mgr, enc_keys, dec_keys, + vec_tab[vect].IV, + vec_tab[vect].C, vec_tab[vect].P, + (unsigned) vec_tab[vect].Plen, + IMB_DIR_DECRYPT, cipher, 0, + vec_tab[vect].Klen, num_jobs)) { + printf("error #%d decrypt cipher burst\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + if (test_aes_many_cipher_burst(mb_mgr, enc_keys, dec_keys, + vec_tab[vect].IV, + vec_tab[vect].P, vec_tab[vect].C, + (unsigned) 
vec_tab[vect].Plen, + IMB_DIR_ENCRYPT, cipher, 1, + vec_tab[vect].Klen, num_jobs)) { + printf("error #%d encrypt cipher burst " + "in-place\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + if (test_aes_many_cipher_burst(mb_mgr, enc_keys, dec_keys, + vec_tab[vect].IV, + vec_tab[vect].C, vec_tab[vect].P, + (unsigned) vec_tab[vect].Plen, + IMB_DIR_DECRYPT, cipher, 1, + vec_tab[vect].Klen, num_jobs)) { + printf("error #%d decrypt cipher " + "burst in-place\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } } printf("\n"); } @@ -2394,7 +2729,7 @@ int aes_test(struct IMB_MGR *mb_mgr) { const int num_jobs_tab[] = { - 1, 3, 4, 5, 7, 8, 9, 15, 16, 17 + 1, 3, 4, 5, 7, 8, 9, 15, 16, 17, MAX_BURST_JOBS }; unsigned i; int errors = 0; diff --git a/test/api_test.c b/test/api_test.c index f0abc03ad4c5d3d3c848c862ab9c72e4ed00b25a..414a00395ece2217dd4ec9ac1a3a0e7edf45bb39 100644 --- a/test/api_test.c +++ b/test/api_test.c @@ -38,11 +38,14 @@ #define __func__ __FUNCTION__ #endif +#define MAX_BURST_JOBS 32 + int api_test(struct IMB_MGR *mb_mgr); enum { TEST_UNEXPECTED_JOB = 1, TEST_INVALID_JOB, + TEST_INVALID_BURST, TEST_AUTH_SRC_NULL = 100, TEST_AUTH_AUTH_TAG_OUTPUT_NULL, TEST_AUTH_TAG_OUTPUT_LEN_ZERO, @@ -54,6 +57,11 @@ enum { TEST_AUTH_NULL_XCBC_K1_EXP, TEST_AUTH_NULL_XCBC_K2, TEST_AUTH_NULL_XCBC_K3, + TEST_AUTH_NULL_GHASH_KEY, + TEST_AUTH_NULL_GHASH_INIT_TAG, + TEST_AUTH_NULL_GMAC_KEY, + TEST_AUTH_NULL_GMAC_IV, + TEST_AUTH_GMAC_IV_LEN, TEST_CIPH_SRC_NULL = 200, TEST_CIPH_DST_NULL, TEST_CIPH_IV_NULL, @@ -63,6 +71,7 @@ enum { TEST_CIPH_MSG_LEN_GT_MAX, TEST_CIPH_NEXT_IV_NULL, TEST_CIPH_IV_LEN, + TEST_CIPH_DIR, TEST_INVALID_PON_PLI = 300, }; @@ -227,6 +236,7 @@ fill_in_job(struct IMB_JOB *job, 4, /* IMB_AUTH_CRC8_WIMAX_OFDMA_HCS */ 4, /* IMB_AUTH_CRC7_FP_HEADER */ 4, /* IMB_AUTH_CRC6_IUUP_HEADER */ + 16, /* IMB_AUTH_GHASH */ }; static DECLARE_ALIGNED(uint8_t dust_bin[2048], 64); static void *ks_ptrs[3]; @@ -483,6 +493,11 @@ fill_in_job(struct IMB_JOB *job, job->u.GMAC.iv_len_in_bytes = 12; job->auth_tag_output_len_in_bytes = 16; break; + case IMB_AUTH_GHASH: + job->u.GHASH._key = (struct gcm_key_data *) dust_bin; + job->u.GHASH._init_tag = dust_bin; + job->auth_tag_output_len_in_bytes = 16; + break; case IMB_AUTH_POLY1305: job->u.POLY1305._key = dust_bin; job->auth_tag_output_len_in_bytes = 16; @@ -620,6 +635,276 @@ is_submit_invalid(struct IMB_MGR *mb_mgr, const struct IMB_JOB *job, return 1; } +/* + * @brief Submits \a job using the burst API and verifies it failed with + * invalid arguments status and error value + */ +static int +is_submit_burst_invalid(struct IMB_MGR *mb_mgr, const struct IMB_JOB *job, + const int test_num, int expected_errnum) +{ + // To do: it's not implemented on Arm platform +#ifdef __x86_64__ + IMB_JOB * jobs[MAX_BURST_JOBS] = {NULL}; + uint32_t i, completed_jobs, n_jobs = MAX_BURST_JOBS; + int err; + + while (IMB_GET_NEXT_BURST(mb_mgr, n_jobs, jobs) < n_jobs) + IMB_FLUSH_BURST(mb_mgr, n_jobs, jobs); + + /* duplicate job to test */ + for (i = 0; i < n_jobs; i++) + *jobs[i] = *job; + + /* submit the job for processing */ + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, n_jobs, jobs); + if (completed_jobs != 0) { + printf("%s : test %d, hash_alg %d, chain_order %d, " + "cipher_dir %d, cipher_mode %d : " + "unexpected number of completed jobs: %u\n", + __func__, test_num, (int) job->hash_alg, + (int) job->chain_order, (int) job->cipher_direction, + (int) job->cipher_mode, completed_jobs); + } + + err 
= imb_get_errno(mb_mgr); + if (err != expected_errnum) { + printf("%s : test %d, hash_alg %d, chain_order %d, " + "cipher_dir %d, cipher_mode %d : " + "unexpected error: %s\n", + __func__, test_num, (int) job->hash_alg, + (int) job->chain_order, (int) job->cipher_direction, + (int) job->cipher_mode, imb_get_strerror(err)); + return 0; + } + + if (jobs[0]->status != IMB_STATUS_INVALID_ARGS) { + printf("%s : test %d, hash_alg %d, chain_order %d, " + "cipher_dir %d, cipher_mode %d : " + "unexpected job->status %d != IMB_STATUS_INVALID_ARGS\n", + __func__, test_num, (int) job->hash_alg, + (int) job->chain_order, + (int) job->cipher_direction, + (int) job->cipher_mode, (int) job->status); + return 0; + } +#endif + return 1; +} + +/* + * @brief Performs BURST API behavior tests + */ +static int +test_burst_api(struct IMB_MGR *mb_mgr) +{ + struct IMB_JOB *job = NULL, *jobs[MAX_BURST_JOBS] = {NULL}; + uint32_t i, completed_jobs, n_jobs = MAX_BURST_JOBS; + struct IMB_JOB **null_jobs = NULL; + int err; + + printf("SUBMIT_BURST() API behavior test:\n"); + + /* ======== test 1 : NULL pointer to jobs array */ + + if (mb_mgr->features & IMB_FEATURE_SAFE_PARAM) { + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, n_jobs, null_jobs); + if (completed_jobs != 0) { + printf("%s: test %d, unexpected number of completed " + "jobs\n", __func__, TEST_INVALID_BURST); + return 1; + } + printf("."); + + err = imb_get_errno(mb_mgr); + if (err != IMB_ERR_NULL_BURST) { + printf("%s: test %d, unexpected error: %s\n", + __func__, TEST_INVALID_BURST, + imb_get_strerror(err)); + return 1; + } + printf("."); + + /* ======== test 2 : NULL jobs array */ + + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, n_jobs, jobs); + if (completed_jobs != 0) { + printf("%s: test %d, unexpected number of completed " + "jobs\n", __func__, TEST_INVALID_BURST); + return 1; + } + printf("."); + + err = imb_get_errno(mb_mgr); + if (err != IMB_ERR_NULL_JOB) { + printf("%s: test %d, unexpected error: %s\n", + __func__, TEST_INVALID_BURST, + imb_get_strerror(err)); + return 1; + } + printf("."); + + /* ========== test 3: invalid burst size */ + + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, + IMB_MAX_BURST_SIZE + 1, jobs); + if (completed_jobs != 0) { + printf("%s: test %d, unexpected number of completed " + "jobs\n", __func__, TEST_INVALID_BURST); + return 1; + } + printf("."); + + err = imb_get_errno(mb_mgr); + if (err != IMB_ERR_BURST_SIZE) { + printf("%s: test %d, unexpected error: %s\n", + __func__, TEST_INVALID_BURST, + imb_get_strerror(err)); + return 1; + } + printf("."); + } + + /* ======== test 4 : invalid job order */ + + while (IMB_GET_NEXT_BURST(mb_mgr, n_jobs, jobs) < n_jobs) + IMB_FLUSH_BURST(mb_mgr, n_jobs, jobs); + + /* fill in valid jobs */ + for (i = 0; i < n_jobs; i++) { + job = jobs[i]; + fill_in_job(job, IMB_CIPHER_CBC, IMB_DIR_ENCRYPT, IMB_AUTH_NULL, + IMB_ORDER_CIPHER_HASH, NULL, NULL); + } + + /* set invalid job order */ + jobs[n_jobs / 2] = jobs[n_jobs - 1]; + + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, n_jobs, jobs); + if (completed_jobs != 0) { + printf("%s: test %d, unexpected number of completed " + "jobs\n", __func__, TEST_INVALID_BURST); + return 1; + } + printf("."); + + err = imb_get_errno(mb_mgr); + if (err != IMB_ERR_BURST_OOO) { + printf("%s: test %d, unexpected error: %s\n", + __func__, TEST_INVALID_BURST, + imb_get_strerror(err)); + return 1; + } + printf("."); + + /* ======== test 5 : invalid job */ + + while (IMB_GET_NEXT_BURST(mb_mgr, n_jobs, jobs) < n_jobs) + IMB_FLUSH_BURST(mb_mgr, n_jobs, jobs); + + /* fill in valid 
jobs */ + for (i = 0; i < n_jobs; i++) { + job = jobs[i]; + fill_in_job(job, IMB_CIPHER_CBC, IMB_DIR_ENCRYPT, IMB_AUTH_NULL, + IMB_ORDER_CIPHER_HASH, NULL, NULL); + } + + /* set a single invalid field */ + jobs[n_jobs - 1]->enc_keys = NULL; + + /* no jobs should complete if any job is invalid */ + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, n_jobs, jobs); + if (completed_jobs != 0) { + printf("%s: test %d, unexpected number of completed jobs\n", + __func__, TEST_INVALID_BURST); + return 1; + } + printf("."); + + err = imb_get_errno(mb_mgr); + if (err != IMB_ERR_JOB_NULL_KEY) { + printf("%s: test %d, unexpected error: %s\n", + __func__, TEST_INVALID_BURST, imb_get_strerror(err)); + return 1; + } + printf("."); + + /* check invalid job returned in jobs[0] */ + if (jobs[0] != jobs[n_jobs - 1]) { + printf("%s: test %d, unexpected error: %s\n", + __func__, TEST_INVALID_BURST, + "invalid job not returned in burst_job[0]"); + return 1; + } + + printf("\n"); + + if ((mb_mgr->features & IMB_FEATURE_SAFE_PARAM) == 0) + return 0; + + printf("GET_NEXT_BURST() API behavior test:\n"); + + /* ======== test 6 : NULL pointer to burst job array */ + + completed_jobs = IMB_GET_NEXT_BURST(mb_mgr, n_jobs, null_jobs); + if (completed_jobs != 0) { + printf("%s: test %d, unexpected number of completed jobs\n", + __func__, TEST_INVALID_BURST); + return 1; + } + printf("."); + + err = imb_get_errno(mb_mgr); + if (err != IMB_ERR_NULL_BURST) { + printf("%s: test %d, unexpected error: %s\n", + __func__, TEST_INVALID_BURST, + imb_get_strerror(err)); + return 1; + } + printf("."); + + /* ======== test 7 : Invalid burst size */ + + completed_jobs = IMB_GET_NEXT_BURST(mb_mgr, + IMB_MAX_BURST_SIZE + 1, jobs); + if (completed_jobs != 0) { + printf("%s: test %d, unexpected number of completed jobs\n", + __func__, TEST_INVALID_BURST); + return 1; + } + printf("."); + + err = imb_get_errno(mb_mgr); + if (err != IMB_ERR_BURST_SIZE) { + printf("%s: test %d, unexpected error: %s\n", + __func__, TEST_INVALID_BURST, + imb_get_strerror(err)); + return 1; + } + printf(".\n"); + + printf("FLUSH_BURST() API behavior test:\n"); + + completed_jobs = IMB_FLUSH_BURST(mb_mgr, n_jobs, null_jobs); + if (completed_jobs != 0) { + printf("%s: test %d, unexpected number of completed jobs\n", + __func__, TEST_INVALID_BURST); + return 1; + } + printf("."); + + err = imb_get_errno(mb_mgr); + if (err != IMB_ERR_NULL_BURST) { + printf("%s: test %d, unexpected error: %s\n", + __func__, TEST_INVALID_BURST, + imb_get_strerror(err)); + return 1; + } + printf(".\n"); + + return 0; +} + /* * @brief Checks for AEAD algorithms */ @@ -701,6 +986,12 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_SRC_NULL, IMB_ERR_JOB_NULL_SRC)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_AUTH_SRC_NULL, + IMB_ERR_JOB_NULL_SRC)) + return 1; printf("."); } @@ -739,6 +1030,12 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_AUTH_TAG_OUTPUT_NULL, IMB_ERR_JOB_NULL_AUTH)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_AUTH_AUTH_TAG_OUTPUT_NULL, + IMB_ERR_JOB_NULL_AUTH)) + return 1; printf("."); } @@ -777,6 +1074,11 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_TAG_OUTPUT_LEN_ZERO, IMB_ERR_JOB_AUTH_TAG_LEN)) return 1; + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_AUTH_TAG_OUTPUT_LEN_ZERO, + IMB_ERR_JOB_AUTH_TAG_LEN)) + return 1; printf("."); } @@ -809,7 +1111,8 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) hash == IMB_AUTH_CRC8_WIMAX_OFDMA_HCS || hash == 
IMB_AUTH_CRC7_FP_HEADER || hash == IMB_AUTH_CRC6_IUUP_HEADER || - hash == IMB_AUTH_POLY1305) + hash == IMB_AUTH_POLY1305 || + hash == IMB_AUTH_GHASH) continue; #ifdef __aarch64__ if (hash != IMB_AUTH_SNOW3G_UIA2_BITLEN && @@ -860,6 +1163,12 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_MSG_LEN_GT_MAX, IMB_ERR_JOB_AUTH_LEN)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_AUTH_MSG_LEN_GT_MAX, + IMB_ERR_JOB_AUTH_LEN)) + return 1; printf("."); } @@ -915,6 +1224,12 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_MSG_LEN_ZERO, IMB_ERR_JOB_AUTH_LEN)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_AUTH_MSG_LEN_ZERO, + IMB_ERR_JOB_AUTH_LEN)) + return 1; printf("."); } @@ -954,6 +1269,11 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_IV_LEN, IMB_ERR_JOB_IV_LEN)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_IV_LEN, + IMB_ERR_JOB_IV_LEN)) + return 1; printf("."); } @@ -997,6 +1317,11 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_NULL_HMAC_IPAD, err_ipad)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_NULL_HMAC_IPAD, + err_ipad)) + return 1; printf("."); fill_in_job(job, cipher, dir, @@ -1010,6 +1335,11 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_NULL_HMAC_OPAD, err_opad)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_NULL_HMAC_OPAD, + err_opad)) + return 1; printf("."); } @@ -1033,6 +1363,11 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_NULL_XCBC_K1_EXP, IMB_ERR_JOB_NULL_XCBC_K1_EXP)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_NULL_XCBC_K1_EXP, + IMB_ERR_JOB_NULL_XCBC_K1_EXP)) + return 1; printf("."); fill_in_job(job, cipher, dir, @@ -1043,6 +1378,11 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_NULL_XCBC_K2, IMB_ERR_JOB_NULL_XCBC_K2)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_NULL_XCBC_K2, + IMB_ERR_JOB_NULL_XCBC_K2)) + return 1; printf("."); fill_in_job(job, cipher, dir, @@ -1053,8 +1393,110 @@ test_job_invalid_mac_args(struct IMB_MGR *mb_mgr) TEST_AUTH_NULL_XCBC_K3, IMB_ERR_JOB_NULL_XCBC_K3)) return 1; + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_NULL_XCBC_K3, + IMB_ERR_JOB_NULL_XCBC_K3)) + return 1; printf("."); } + /* + * Invalid GHASH parameters + */ + for (order = IMB_ORDER_CIPHER_HASH; order <= IMB_ORDER_HASH_CIPHER; + order++) + for (dir = IMB_DIR_ENCRYPT; dir <= IMB_DIR_DECRYPT; dir++) { + IMB_JOB *job = &template_job; + + hash = IMB_AUTH_GHASH; + + fill_in_job(job, cipher, dir, + hash, order, &chacha_ctx, + &gcm_ctx); + job->u.GHASH._key = NULL; + if (!is_submit_invalid(mb_mgr, job, + TEST_AUTH_NULL_GHASH_KEY, + IMB_ERR_JOB_NULL_AUTH_KEY)) + return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_NULL_GHASH_KEY, + IMB_ERR_JOB_NULL_AUTH_KEY)) + return 1; + printf("."); + + fill_in_job(job, cipher, dir, + hash, order, &chacha_ctx, + &gcm_ctx); + job->u.GHASH._init_tag = NULL; + if (!is_submit_invalid(mb_mgr, job, + TEST_AUTH_NULL_GHASH_INIT_TAG, + IMB_ERR_JOB_NULL_GHASH_INIT_TAG)) + return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_NULL_GHASH_INIT_TAG, + IMB_ERR_JOB_NULL_GHASH_INIT_TAG)) + return 1; + printf("."); + } + + /* + * Invalid GMAC parameters + */ + for (order = IMB_ORDER_CIPHER_HASH; order <= IMB_ORDER_HASH_CIPHER; + order++) + for (dir = IMB_DIR_ENCRYPT; dir <= IMB_DIR_DECRYPT; dir++) { + for (hash = IMB_AUTH_AES_GMAC_128; + hash <= 
IMB_AUTH_AES_GMAC_256; hash++) { + IMB_JOB *job = &template_job; + + fill_in_job(job, cipher, dir, + hash, order, &chacha_ctx, + &gcm_ctx); + job->u.GMAC._key = NULL; + + if (!is_submit_invalid(mb_mgr, job, + TEST_AUTH_NULL_GMAC_KEY, + IMB_ERR_JOB_NULL_AUTH_KEY)) + return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_NULL_GMAC_KEY, + IMB_ERR_JOB_NULL_AUTH_KEY)) + return 1; + printf("."); + + fill_in_job(job, cipher, dir, + hash, order, &chacha_ctx, + &gcm_ctx); + job->u.GMAC._iv = NULL; + if (!is_submit_invalid(mb_mgr, job, + TEST_AUTH_NULL_GMAC_IV, + IMB_ERR_JOB_NULL_IV)) + return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_NULL_GMAC_IV, + IMB_ERR_JOB_NULL_IV)) + return 1; + printf("."); + + fill_in_job(job, cipher, dir, + hash, order, &chacha_ctx, + &gcm_ctx); + job->u.GMAC.iv_len_in_bytes = 0; + if (!is_submit_invalid(mb_mgr, job, + TEST_AUTH_GMAC_IV_LEN, + IMB_ERR_JOB_IV_LEN)) + return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_AUTH_GMAC_IV_LEN, + IMB_ERR_JOB_IV_LEN)) + return 1; + printf("."); + } + } #endif /* clean up */ while (IMB_FLUSH_JOB(mb_mgr) != NULL) @@ -1115,6 +1557,12 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_SRC_NULL, IMB_ERR_JOB_NULL_SRC)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_CIPH_SRC_NULL, + IMB_ERR_JOB_NULL_SRC)) + return 1; printf("."); } @@ -1149,6 +1597,12 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_DST_NULL, IMB_ERR_JOB_NULL_DST)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_CIPH_DST_NULL, + IMB_ERR_JOB_NULL_DST)) + return 1; printf("."); } @@ -1187,8 +1641,57 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_IV_NULL, IMB_ERR_JOB_NULL_IV)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_CIPH_IV_NULL, + IMB_ERR_JOB_NULL_IV)) + return 1; printf("."); } + /* + * CIPHER_DIR = Invalid dir + */ + for (dir = 0; dir <= 10; dir++) { + /* skip valid directions */ + if (dir == IMB_DIR_ENCRYPT || dir == IMB_DIR_DECRYPT) + continue; + + for (cipher = IMB_CIPHER_CBC; + cipher < IMB_CIPHER_NUM; cipher++) { + + if (cipher == IMB_CIPHER_NULL || + cipher == IMB_CIPHER_CUSTOM) + continue; +#ifdef __aarch64__ + if (cipher != IMB_CIPHER_SNOW3G_UEA2_BITLEN && + cipher != IMB_CIPHER_ZUC_EEA3) + continue; +#endif + /* + * Skip cipher algorithms belonging to AEAD + * algorithms, as the test is for cipher + * only algorithms */ + if (check_aead(hash, cipher)) + continue; + + order = IMB_ORDER_CIPHER_HASH; + + fill_in_job(&template_job, cipher, dir, + hash, order, &chacha_ctx, &gcm_ctx); + + if (!is_submit_invalid(mb_mgr, &template_job, + TEST_CIPH_DIR, + IMB_ERR_JOB_CIPH_DIR)) + return 1; + + if (!is_submit_burst_invalid(mb_mgr, &template_job, + TEST_CIPH_DIR, + IMB_ERR_JOB_CIPH_DIR)) + return 1; + printf("."); + } + } /* ======== (encrypt test) * AES_ENC_KEY_EXPANDED = NULL @@ -1225,6 +1728,12 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_ENC_KEY_NULL, IMB_ERR_JOB_NULL_KEY)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_CIPH_ENC_KEY_NULL, + IMB_ERR_JOB_NULL_KEY)) + return 1; break; } printf("."); @@ -1266,6 +1775,12 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_DEC_KEY_NULL, IMB_ERR_JOB_NULL_KEY)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_CIPH_DEC_KEY_NULL, + IMB_ERR_JOB_NULL_KEY)) + return 1; break; case IMB_CIPHER_CNTR: case IMB_CIPHER_CNTR_BITLEN: @@ -1280,6 +1795,12 @@ 
test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_DEC_KEY_NULL, IMB_ERR_JOB_NULL_KEY)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_CIPH_DEC_KEY_NULL, + IMB_ERR_JOB_NULL_KEY)) + return 1; break; case IMB_CIPHER_DOCSIS_SEC_BPI: template_job.enc_keys = NULL; @@ -1287,6 +1808,12 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_DEC_KEY_NULL, IMB_ERR_JOB_NULL_KEY)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_CIPH_DEC_KEY_NULL, + IMB_ERR_JOB_NULL_KEY)) + return 1; template_job.enc_keys = template_job.dec_keys; template_job.dec_keys = NULL; @@ -1294,6 +1821,12 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_DEC_KEY_NULL, IMB_ERR_JOB_NULL_KEY)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + &template_job, + TEST_CIPH_DEC_KEY_NULL, + IMB_ERR_JOB_NULL_KEY)) + return 1; break; case IMB_CIPHER_NULL: case IMB_CIPHER_CUSTOM: @@ -1351,6 +1884,12 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_MSG_LEN_ZERO, IMB_ERR_JOB_CIPH_LEN)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, + job, + TEST_CIPH_MSG_LEN_ZERO, + IMB_ERR_JOB_CIPH_LEN)) + return 1; } printf("."); } @@ -1435,6 +1974,11 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) IMB_ERR_JOB_CIPH_LEN)) return 1; + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_CIPH_MSG_LEN_GT_MAX, + IMB_ERR_JOB_CIPH_LEN)) + return 1; + printf("."); } @@ -1588,6 +2132,11 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_IV_LEN, IMB_ERR_JOB_IV_LEN)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_CIPH_IV_LEN, + IMB_ERR_JOB_IV_LEN)) + return 1; printf("."); } } @@ -1622,6 +2171,11 @@ test_job_invalid_cipher_args(struct IMB_MGR *mb_mgr) TEST_CIPH_NEXT_IV_NULL, IMB_ERR_JOB_NULL_NEXT_IV)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_CIPH_NEXT_IV_NULL, + IMB_ERR_JOB_NULL_NEXT_IV)) + return 1; printf("."); } #endif @@ -1679,6 +2233,11 @@ test_job_invalid_misc_args(struct IMB_MGR *mb_mgr) TEST_INVALID_PON_PLI, IMB_ERR_JOB_PON_PLI)) return 1; + + if (!is_submit_burst_invalid(mb_mgr, &template_job, + TEST_INVALID_PON_PLI, + IMB_ERR_JOB_PON_PLI)) + return 1; printf("."); } @@ -1727,6 +2286,11 @@ test_job_invalid_misc_args(struct IMB_MGR *mb_mgr) IMB_ERR_JOB_CIPH_LEN)) return 1; + if (!is_submit_burst_invalid(mb_mgr, job, + TEST_CIPH_MSG_LEN_GT_MAX, + IMB_ERR_JOB_CIPH_LEN)) + return 1; + printf("."); } @@ -1783,7 +2347,9 @@ submit_reset_check_job(struct IMB_MGR *mb_mgr, if (next_job->status != IMB_STATUS_COMPLETED) { printf("Returned job's status is not completed\n"); printf("cipher = %u\n", cipher); - printf("imb errno = %u\n", mb_mgr->imb_errno); + printf("imb errno = %u (%s)\n", + mb_mgr->imb_errno, + imb_get_strerror(mb_mgr->imb_errno)); exit(0); } @@ -1944,6 +2510,12 @@ api_test(struct IMB_MGR *mb_mgr) errors += test_job_api(mb_mgr); run++; + // To do... 
it's not implemented for arm platform +#ifdef __x86_64__ + errors += test_burst_api(mb_mgr); + run++; +#endif + errors += test_job_invalid_mac_args(mb_mgr); run++; diff --git a/test/chacha20_poly1305_test.c b/test/chacha20_poly1305_test.c index 8c68d1836c885c50dc516a3b33274ff959dd9970..60cf960b31cfbe4a26a0c95d1b7ac2ac1a18ed04 100644 --- a/test/chacha20_poly1305_test.c +++ b/test/chacha20_poly1305_test.c @@ -437,6 +437,114 @@ test_aead(struct IMB_MGR *mb_mgr, goto end; } + /* + * ******************************************* + * BURST API TEST + * ******************************************* + */ + + /* create job array */ + IMB_JOB * jobs[32] = {NULL}; + + jobs_rx = 0; + + /* reset buffers */ + for (i = 0; i < num_jobs; i++) { + memset(auths[i], -1, 16 + (sizeof(padding) * 2)); + memset(targets[i], -1, vec->msg_len + (sizeof(padding) * 2)); + + if (in_place) { + if (dir == IMB_DIR_ENCRYPT) + memcpy(targets[i] + sizeof(padding), + vec->plain, vec->msg_len); + else + memcpy(targets[i] + sizeof(padding), + vec->cipher, vec->msg_len); + } + } + + while (IMB_GET_NEXT_BURST(mb_mgr, num_jobs, jobs) < (uint32_t)num_jobs) + IMB_FLUSH_BURST(mb_mgr, num_jobs, jobs); + + /** + * Set all job params before submitting burst + */ + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + job->cipher_direction = dir; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->cipher_mode = IMB_CIPHER_CHACHA20_POLY1305; + job->hash_alg = IMB_AUTH_CHACHA20_POLY1305; + job->enc_keys = vec->key; + job->dec_keys = vec->key; + job->key_len_in_bytes = 32; + + job->u.CHACHA20_POLY1305.aad = vec->aad; + job->u.CHACHA20_POLY1305.aad_len_in_bytes = vec->aad_len; + + if (in_place) + job->src = targets[i] + sizeof(padding); + else + if (dir == IMB_DIR_ENCRYPT) + job->src = vec->plain; + else + job->src = vec->cipher; + job->dst = targets[i] + sizeof(padding); + + job->iv = vec->iv; + job->iv_len_in_bytes = 12; + job->msg_len_to_cipher_in_bytes = vec->msg_len; + job->cipher_start_src_offset_in_bytes = 0; + + job->msg_len_to_hash_in_bytes = vec->msg_len; + job->hash_start_src_offset_in_bytes = 0; + job->auth_tag_output = auths[i] + sizeof(padding); + job->auth_tag_output_len_in_bytes = 16; + + job->user_data = auths[i]; + } + + uint32_t completed_jobs = IMB_SUBMIT_BURST(mb_mgr, num_jobs, jobs); + + if (completed_jobs != (uint32_t)num_jobs) { + int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + goto end; + } else { + printf("submit_burst error: not enough " + "jobs returned!\n"); + goto end; + } + } + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %d status not complete!\n", i+1); + goto end; + } + + if (!aead_job_ok(mb_mgr, vec, job, job->user_data, + padding, sizeof(padding))) + goto end; + jobs_rx++; + } + + if (jobs_rx != num_jobs) { + printf("Expected %d jobs after burst, " + "received %d\n", num_jobs, jobs_rx); + goto end; + } + + /* + * ******************************************* + * END BURST API TEST + * ******************************************* + */ ret = 0; @@ -528,6 +636,207 @@ test_aead_vectors(struct IMB_MGR *mb_mgr, } +static void +test_single_job_sgl(struct IMB_MGR *mb_mgr, + struct test_suite_context *ctx, + const uint32_t buffer_sz, + const uint32_t seg_sz, + const IMB_CIPHER_DIRECTION cipher_dir) +{ + struct IMB_JOB *job; + uint8_t *in_buffer = NULL; + uint8_t **segments = NULL; + uint8_t linear_digest[DIGEST_SZ]; + uint8_t sgl_digest[DIGEST_SZ]; + uint8_t key[KEY_SZ]; + 
unsigned i; + uint8_t aad[AAD_SZ]; + uint8_t iv[IV_SZ]; + struct chacha20_poly1305_context_data chacha_ctx; + uint32_t last_seg_sz = buffer_sz % seg_sz; + struct IMB_SGL_IOV *sgl_segs = NULL; + const uint32_t num_segments = DIV_ROUND_UP(buffer_sz, seg_sz); + + sgl_segs = malloc(sizeof(struct IMB_SGL_IOV) * num_segments); + if (sgl_segs == NULL) { + fprintf(stderr, "Could not allocate memory for SGL segments\n"); + test_suite_update(ctx, 0, 1); + goto exit; + } + + if (last_seg_sz == 0) + last_seg_sz = seg_sz; + + in_buffer = malloc(buffer_sz); + if (in_buffer == NULL) { + fprintf(stderr, "Could not allocate memory for input buffer\n"); + test_suite_update(ctx, 0, 1); + goto exit; + } + + /* + * Initialize tags with different values, to make sure the comparison + * is false if they are not updated by the library + */ + memset(sgl_digest, 0, DIGEST_SZ); + memset(linear_digest, 0xFF, DIGEST_SZ); + + generate_random_buf(in_buffer, buffer_sz); + generate_random_buf(key, KEY_SZ); + generate_random_buf(iv, IV_SZ); + generate_random_buf(aad, AAD_SZ); + + segments = malloc(num_segments * sizeof(*segments)); + if (segments == NULL) { + fprintf(stderr, + "Could not allocate memory for segments array\n"); + test_suite_update(ctx, 0, 1); + goto exit; + } + memset(segments, 0, num_segments * sizeof(*segments)); + + for (i = 0; i < (num_segments - 1); i++) { + segments[i] = malloc(seg_sz); + if (segments[i] == NULL) { + fprintf(stderr, + "Could not allocate memory for segment %u\n", + i); + test_suite_update(ctx, 0, 1); + goto exit; + } + memcpy(segments[i], in_buffer + seg_sz * i, seg_sz); + sgl_segs[i].in = segments[i]; + sgl_segs[i].out = segments[i]; + sgl_segs[i].len = seg_sz; + } + segments[i] = malloc(last_seg_sz); + if (segments[i] == NULL) { + fprintf(stderr, "Could not allocate memory for segment %u\n", + i); + test_suite_update(ctx, 0, 1); + goto exit; + } + memcpy(segments[i], in_buffer + seg_sz * i, last_seg_sz); + sgl_segs[i].in = segments[i]; + sgl_segs[i].out = segments[i]; + sgl_segs[i].len = last_seg_sz; + + /* Process linear (single segment) buffer */ + job = IMB_GET_NEXT_JOB(mb_mgr); + job->cipher_direction = cipher_dir; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->cipher_mode = IMB_CIPHER_CHACHA20_POLY1305; + job->hash_alg = IMB_AUTH_CHACHA20_POLY1305; + job->enc_keys = key; + job->dec_keys = key; + job->src = in_buffer; + job->dst = in_buffer; + job->key_len_in_bytes = KEY_SZ; + + job->u.CHACHA20_POLY1305.aad = aad; + job->u.CHACHA20_POLY1305.aad_len_in_bytes = AAD_SZ; + + job->iv = iv; + job->iv_len_in_bytes = IV_SZ; + job->msg_len_to_cipher_in_bytes = buffer_sz; + job->cipher_start_src_offset_in_bytes = 0; + + job->msg_len_to_hash_in_bytes = buffer_sz; + job->hash_start_src_offset_in_bytes = 0; + job->auth_tag_output = linear_digest; + job->auth_tag_output_len_in_bytes = DIGEST_SZ; + + job = IMB_SUBMIT_JOB(mb_mgr); + + if (job->status == IMB_STATUS_COMPLETED) + test_suite_update(ctx, 1, 0); + else { + fprintf(stderr, "job status returned as not successful" + " for the linear buffer\n"); + test_suite_update(ctx, 0, 1); + goto exit; + } + + /* Process multi-segment buffer */ + job = IMB_GET_NEXT_JOB(mb_mgr); + job->cipher_direction = cipher_dir; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->cipher_mode = IMB_CIPHER_CHACHA20_POLY1305_SGL; + job->hash_alg = IMB_AUTH_CHACHA20_POLY1305_SGL; + job->enc_keys = key; + job->dec_keys = key; + job->key_len_in_bytes = KEY_SZ; + + job->u.CHACHA20_POLY1305.aad = aad; + job->u.CHACHA20_POLY1305.aad_len_in_bytes = AAD_SZ; + 
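+	/*
+	 * SGL-specific job fields (sketch of what the assignments below do):
+	 * chacha_ctx carries the cipher/auth state for the segmented run,
+	 * sgl_io_segs/num_sgl_io_segs describe the scatter-gather list built
+	 * above, and sgl_state = IMB_SGL_ALL processes all segments within
+	 * this single job.
+	 */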
job->u.CHACHA20_POLY1305.ctx = &chacha_ctx; + + job->iv = iv; + job->iv_len_in_bytes = IV_SZ; + job->cipher_start_src_offset_in_bytes = 0; + + job->hash_start_src_offset_in_bytes = 0; + job->auth_tag_output = sgl_digest; + job->auth_tag_output_len_in_bytes = DIGEST_SZ; + + job->num_sgl_io_segs = num_segments; + job->sgl_state = IMB_SGL_ALL; + job->sgl_io_segs = sgl_segs; + job = IMB_SUBMIT_JOB(mb_mgr); + + if (job->status == IMB_STATUS_COMPLETED) { + for (i = 0; i < (num_segments - 1); i++) { + if (memcmp(in_buffer + i*seg_sz, segments[i], + seg_sz) != 0) { + printf("ciphertext mismatched " + "in segment number %u " + "(segment size = %u)\n", + i, seg_sz); + hexdump(stderr, "Linear output", + in_buffer + i*seg_sz, seg_sz); + hexdump(stderr, "SGL output", segments[i], + seg_sz); + test_suite_update(ctx, 0, 1); + goto exit; + } + } + /* Check last segment */ + if (memcmp(in_buffer + i*seg_sz, segments[i], + last_seg_sz) != 0) { + printf("ciphertext mismatched " + "in segment number %u (segment size = %u)\n", + i, seg_sz); + hexdump(stderr, "Linear output", + in_buffer + i*seg_sz, last_seg_sz); + hexdump(stderr, "SGL output", segments[i], last_seg_sz); + test_suite_update(ctx, 0, 1); + } + if (memcmp(sgl_digest, linear_digest, 16) != 0) { + printf("hash mismatched (segment size = %u)\n", + seg_sz); + hexdump(stderr, "Linear digest", + linear_digest, DIGEST_SZ); + hexdump(stderr, "SGL digest", sgl_digest, DIGEST_SZ); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + } else { + fprintf(stderr, "job status returned as not successful" + " for the segmented buffer\n"); + test_suite_update(ctx, 0, 1); + } + +exit: + free(sgl_segs); + free(in_buffer); + if (segments != NULL) { + for (i = 0; i < num_segments; i++) + free(segments[i]); + free(segments); + } +} + static void test_sgl(struct IMB_MGR *mb_mgr, struct test_suite_context *ctx, @@ -574,16 +883,16 @@ test_sgl(struct IMB_MGR *mb_mgr, generate_random_buf(iv, IV_SZ); generate_random_buf(aad, AAD_SZ); - segments = malloc(num_segments * 8); + segments = malloc(num_segments * sizeof(*segments)); if (segments == NULL) { fprintf(stderr, "Could not allocate memory for segments array\n"); test_suite_update(ctx, 0, 1); goto exit; } - memset(segments, 0, num_segments * 8); + memset(segments, 0, num_segments * sizeof(*segments)); - segment_sizes = malloc(num_segments * 4); + segment_sizes = malloc(num_segments * sizeof(*segment_sizes)); if (segment_sizes == NULL) { fprintf(stderr, "Could not allocate memory for array of sizes\n"); @@ -863,6 +1172,11 @@ chacha20_poly1305_test(struct IMB_MGR *mb_mgr) test_sgl(mb_mgr, &ctx, BUF_SZ, seg_sz, IMB_DIR_DECRYPT, 1, 0); test_sgl(mb_mgr, &ctx, BUF_SZ, seg_sz, IMB_DIR_ENCRYPT, 1, 1); test_sgl(mb_mgr, &ctx, BUF_SZ, seg_sz, IMB_DIR_DECRYPT, 1, 1); + /* Single job SGL API */ + test_single_job_sgl(mb_mgr, &ctx, BUF_SZ, seg_sz, + IMB_DIR_ENCRYPT); + test_single_job_sgl(mb_mgr, &ctx, BUF_SZ, seg_sz, + IMB_DIR_DECRYPT); /* Direct API */ test_sgl(mb_mgr, &ctx, BUF_SZ, seg_sz, IMB_DIR_ENCRYPT, 0, 1); test_sgl(mb_mgr, &ctx, BUF_SZ, seg_sz, IMB_DIR_DECRYPT, 0, 1); diff --git a/test/ctr_test.c b/test/ctr_test.c index f28b1d87d5bb6d923beb4c88c24d75564a211631..8ad8debec69359b1bc615b011f3256da4e8779ed 100644 --- a/test/ctr_test.c +++ b/test/ctr_test.c @@ -34,6 +34,8 @@ #include "gcm_ctr_vectors_test.h" #include "utils.h" +#define MAX_CTR_JOBS 32 + #define BYTE_ROUND_UP(x) ((x + 7) / 8) /* * Test Vector from @@ -1350,8 +1352,8 @@ test_ctr(struct IMB_MGR *mb_mgr, const uint8_t *in_text, const uint8_t 
*out_text, unsigned text_len, - int dir, - int order, + const IMB_CIPHER_DIRECTION dir, + const IMB_CHAIN_ORDER order, const IMB_CIPHER_MODE alg) { uint32_t text_byte_len; @@ -1445,6 +1447,259 @@ test_ctr(struct IMB_MGR *mb_mgr, return ret; } +static int +test_ctr_burst(struct IMB_MGR *mb_mgr, + const void *expkey, + unsigned key_len, + const void *iv, + unsigned iv_len, + const uint8_t *in_text, + const uint8_t *out_text, + unsigned text_len, + const IMB_CIPHER_DIRECTION dir, + const IMB_CHAIN_ORDER order, + const IMB_CIPHER_MODE alg, + const uint32_t num_jobs) +{ + uint32_t text_byte_len, i, completed_jobs, jobs_rx = 0; + struct IMB_JOB *job, *jobs[MAX_CTR_JOBS]; + uint8_t padding[16]; + uint8_t **targets = malloc(num_jobs * sizeof(void *)); + int ret = -1; + + if (targets == NULL) + goto end_alloc; + + /* Get number of bytes (in case algo is CNTR_BITLEN) */ + if (alg == IMB_CIPHER_CNTR) + text_byte_len = text_len; + else + text_byte_len = BYTE_ROUND_UP(text_len); + + memset(targets, 0, num_jobs * sizeof(void *)); + memset(padding, -1, sizeof(padding)); + + for (i = 0; i < num_jobs; i++) { + targets[i] = malloc(text_byte_len + (sizeof(padding) * 2)); + if (targets[i] == NULL) + goto end_alloc; + memset(targets[i], -1, text_byte_len + (sizeof(padding) * 2)); + } + + while (IMB_GET_NEXT_BURST(mb_mgr, num_jobs, jobs) < num_jobs) + IMB_FLUSH_BURST(mb_mgr, num_jobs, jobs); + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + job->cipher_direction = dir; + job->chain_order = order; + job->dst = targets[i] + sizeof(padding); + job->src = in_text; + job->cipher_mode = alg; + job->enc_keys = expkey; + job->dec_keys = expkey; + job->key_len_in_bytes = key_len; + job->iv = iv; + job->iv_len_in_bytes = iv_len; + job->cipher_start_src_offset_in_bytes = 0; + if (alg == IMB_CIPHER_CNTR) + job->msg_len_to_cipher_in_bytes = text_byte_len; + else + job->msg_len_to_cipher_in_bits = text_len; + job->hash_alg = IMB_AUTH_NULL; + job->user_data = targets[i]; + job->user_data2 = (void *)((uint64_t)i); + } + + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, num_jobs, jobs); + if (completed_jobs != num_jobs) { + int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + goto end; + } else { + printf("submit_burst error: not enough " + "jobs returned!\n"); + goto end; + } + } + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %u status not complete!\n", i+1); + goto end; + } + if (memcmp(out_text, targets[i] + sizeof(padding), + text_byte_len)) { + printf("mismatched\n"); + hexdump(stderr, "Target", targets[i] + sizeof(padding), + text_byte_len); + hexdump(stderr, "Expected", out_text, text_byte_len); + goto end; + } + if (memcmp(padding, targets[i], sizeof(padding))) { + printf("overwrite head\n"); + hexdump(stderr, "Target", targets[i], text_byte_len + + (sizeof(padding) * 2)); + goto end; + } + if (memcmp(padding, targets[i] + sizeof(padding) + + text_byte_len, sizeof(padding))) { + printf("overwrite tail\n"); + hexdump(stderr, "Target", targets[i], text_byte_len + + (sizeof(padding) * 2)); + goto end; + } + jobs_rx++; + } + + if (jobs_rx != num_jobs) { + printf("Expected %u jobs, received %u\n", num_jobs, jobs_rx); + goto end; + } + ret = 0; + end: + + end_alloc: + if (targets != NULL) { + for (i = 0; i < num_jobs; i++) + free(targets[i]); + free(targets); + } + + return ret; +} + +static int +test_ctr_cipher_burst(struct IMB_MGR *mb_mgr, + const void *expkey, + unsigned key_len, + 
const void *iv, + unsigned iv_len, + const uint8_t *in_text, + const uint8_t *out_text, + unsigned text_len, + const IMB_CIPHER_DIRECTION dir, + const IMB_CHAIN_ORDER order, + const IMB_CIPHER_MODE alg, + const uint32_t num_jobs) +{ + uint32_t text_byte_len, i, completed_jobs, jobs_rx = 0; + struct IMB_JOB *job, jobs[MAX_CTR_JOBS]; + uint8_t padding[16]; + uint8_t **targets = malloc(num_jobs * sizeof(void *)); + int ret = -1; + + if (targets == NULL) + goto end_alloc; + + /* Get number of bytes (in case algo is CNTR_BITLEN) */ + if (alg == IMB_CIPHER_CNTR) + text_byte_len = text_len; + else + text_byte_len = BYTE_ROUND_UP(text_len); + + memset(targets, 0, num_jobs * sizeof(void *)); + memset(padding, -1, sizeof(padding)); + + for (i = 0; i < num_jobs; i++) { + targets[i] = malloc(text_byte_len + (sizeof(padding) * 2)); + if (targets[i] == NULL) + goto end_alloc; + memset(targets[i], -1, text_byte_len + (sizeof(padding) * 2)); + } + + for (i = 0; i < num_jobs; i++) { + job = &jobs[i]; + job->cipher_direction = dir; + job->chain_order = order; + job->dst = targets[i] + sizeof(padding); + job->src = in_text; + job->cipher_mode = alg; + job->enc_keys = expkey; + job->dec_keys = expkey; + job->key_len_in_bytes = key_len; + job->iv = iv; + job->iv_len_in_bytes = iv_len; + job->cipher_start_src_offset_in_bytes = 0; + if (alg == IMB_CIPHER_CNTR) + job->msg_len_to_cipher_in_bytes = text_byte_len; + else + job->msg_len_to_cipher_in_bits = text_len; + job->hash_alg = IMB_AUTH_NULL; + job->user_data = targets[i]; + job->user_data2 = (void *)((uint64_t)i); + } + + + completed_jobs = IMB_SUBMIT_CIPHER_BURST(mb_mgr, jobs, num_jobs, + alg, dir, key_len); + if (completed_jobs != num_jobs) { + int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + goto end; + } else { + printf("submit_burst error: not enough " + "jobs returned!\n"); + goto end; + } + } + + for (i = 0; i < num_jobs; i++) { + job = &jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %u status not complete!\n", i+1); + goto end; + } + if (memcmp(out_text, targets[i] + sizeof(padding), + text_byte_len)) { + printf("mismatched\n"); + hexdump(stderr, "Target", targets[i] + sizeof(padding), + text_byte_len); + hexdump(stderr, "Expected", out_text, text_byte_len); + goto end; + } + if (memcmp(padding, targets[i], sizeof(padding))) { + printf("overwrite head\n"); + hexdump(stderr, "Target", targets[i], text_byte_len + + (sizeof(padding) * 2)); + goto end; + } + if (memcmp(padding, targets[i] + sizeof(padding) + + text_byte_len, sizeof(padding))) { + printf("overwrite tail\n"); + hexdump(stderr, "Target", targets[i], text_byte_len + + (sizeof(padding) * 2)); + goto end; + } + jobs_rx++; + } + + if (jobs_rx != num_jobs) { + printf("Expected %u jobs, received %u\n", num_jobs, jobs_rx); + goto end; + } + ret = 0; + end: + + end_alloc: + if (targets != NULL) { + for (i = 0; i < num_jobs; i++) + free(targets[i]); + free(targets); + } + + return ret; +} + static void test_ctr_vectors(struct IMB_MGR *mb_mgr, struct test_suite_context *ctx128, @@ -1572,9 +1827,232 @@ test_ctr_vectors(struct IMB_MGR *mb_mgr, printf("\n"); } +static void +test_ctr_vectors_burst(struct IMB_MGR *mb_mgr, + struct test_suite_context *ctx128, + struct test_suite_context *ctx192, + struct test_suite_context *ctx256, + const struct gcm_ctr_vector *vectors, + const uint32_t vectors_cnt, const IMB_CIPHER_MODE alg, + const uint32_t num_jobs) +{ + uint32_t vect; + DECLARE_ALIGNED(uint32_t 
expkey[4*15], 16); + DECLARE_ALIGNED(uint32_t dust[4*15], 16); + + printf("AES-CTR standard test vectors - Burst API:\n"); + for (vect = 0; vect < vectors_cnt; vect++) { + struct test_suite_context *ctx; +#ifdef DEBUG + if (alg == IMB_CIPHER_CNTR) + printf("Standard vector %u/%u Keylen:%d " + "IVlen:%d PTLen:%d (burst)\n", + vect, vectors_cnt - 1, + (int) vectors[vect].Klen, + (int) vectors[vect].IVlen, + (int) vectors[vect].Plen); + else + printf("Bit vector %u/%u Keylen:%d " + "IVlen:%d PTLen:%d (burst)\n", + vect, vectors_cnt - 1, + (int) vectors[vect].Klen, + (int) vectors[vect].IVlen, + (int) vectors[vect].Plen); +#else + printf("."); +#endif + + switch (vectors[vect].Klen) { + case IMB_KEY_128_BYTES: + IMB_AES_KEYEXP_128(mb_mgr, vectors[vect].K, + expkey, dust); + ctx = ctx128; + break; + case IMB_KEY_192_BYTES: + IMB_AES_KEYEXP_192(mb_mgr, vectors[vect].K, + expkey, dust); + ctx = ctx192; + break; + case IMB_KEY_256_BYTES: + IMB_AES_KEYEXP_256(mb_mgr, vectors[vect].K, + expkey, dust); + ctx = ctx256; + break; + default: + return; + } + + if (test_ctr_burst(mb_mgr, + expkey, vectors[vect].Klen, + vectors[vect].IV, + (unsigned) vectors[vect].IVlen, + vectors[vect].P, vectors[vect].C, + (unsigned) vectors[vect].Plen, + IMB_DIR_ENCRYPT, + IMB_ORDER_CIPHER_HASH, alg, num_jobs)) { + printf("error #%u encrypt burst\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + if (test_ctr_burst(mb_mgr, + expkey, vectors[vect].Klen, + vectors[vect].IV, + (unsigned) vectors[vect].IVlen, + vectors[vect].C, vectors[vect].P, + (unsigned) vectors[vect].Plen, + IMB_DIR_DECRYPT, + IMB_ORDER_HASH_CIPHER, alg, num_jobs)) { + printf("error #%u decrypt burst\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + if (vectors[vect].IVlen == 12 && + alg == IMB_CIPHER_CNTR) { + /* IV in the table didn't + * include block counter (12 bytes). + * Let's encrypt & decrypt the same but + * with 16 byte IV that includes block counter. 
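+			 * The expected ciphertext (C) stays the same, so this
+			 * also checks that a 16-byte IV carrying an explicit
+			 * big-endian block counter of 1 behaves like the
+			 * 12-byte IV form tested above.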
+ */ + const unsigned new_iv_len = 16; + const unsigned orig_iv_len = 12; + uint8_t local_iv[16]; + + memcpy(local_iv, vectors[vect].IV, orig_iv_len); + /* 32-bit 0x1 in BE == 0x01000000 in LE */ + local_iv[12] = 0x00; + local_iv[13] = 0x00; + local_iv[14] = 0x00; + local_iv[15] = 0x01; + + if (test_ctr_burst(mb_mgr, + expkey, vectors[vect].Klen, + local_iv, new_iv_len, + vectors[vect].P, + vectors[vect].C, (unsigned) + vectors[vect].Plen, + IMB_DIR_ENCRYPT, + IMB_ORDER_CIPHER_HASH, + alg, num_jobs)) { + printf("error #%u encrypt burst\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + if (test_ctr_burst(mb_mgr, + expkey, vectors[vect].Klen, + local_iv, new_iv_len, + vectors[vect].C, + vectors[vect].P, (unsigned) + vectors[vect].Plen, + IMB_DIR_DECRYPT, + IMB_ORDER_HASH_CIPHER, + alg, num_jobs)) { + printf("error #%u decrypt burst\n", vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + } + + /* skip bitlen cipher-only burst api tests */ + if (alg == IMB_CIPHER_CNTR_BITLEN) + continue; + + if (test_ctr_cipher_burst(mb_mgr, + expkey, vectors[vect].Klen, + vectors[vect].IV, + (unsigned) vectors[vect].IVlen, + vectors[vect].P, vectors[vect].C, + (unsigned) vectors[vect].Plen, + IMB_DIR_ENCRYPT, + IMB_ORDER_CIPHER_HASH, alg, + num_jobs)) { + printf("error #%u encrypt cipher-only burst\n", + vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + if (test_ctr_cipher_burst(mb_mgr, + expkey, vectors[vect].Klen, + vectors[vect].IV, + (unsigned) vectors[vect].IVlen, + vectors[vect].C, vectors[vect].P, + (unsigned) vectors[vect].Plen, + IMB_DIR_DECRYPT, + IMB_ORDER_HASH_CIPHER, alg, + num_jobs)) { + printf("error #%u decrypt cipher-only burst\n", + vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + if (vectors[vect].IVlen == 12 && + alg == IMB_CIPHER_CNTR) { + /* IV in the table didn't + * include block counter (12 bytes). + * Let's encrypt & decrypt the same but + * with 16 byte IV that includes block counter. 
+ */ + const unsigned new_iv_len = 16; + const unsigned orig_iv_len = 12; + uint8_t local_iv[16]; + + memcpy(local_iv, vectors[vect].IV, orig_iv_len); + /* 32-bit 0x1 in BE == 0x01000000 in LE */ + local_iv[12] = 0x00; + local_iv[13] = 0x00; + local_iv[14] = 0x00; + local_iv[15] = 0x01; + + if (test_ctr_cipher_burst(mb_mgr, + expkey, vectors[vect].Klen, + local_iv, new_iv_len, + vectors[vect].P, + vectors[vect].C, (unsigned) + vectors[vect].Plen, + IMB_DIR_ENCRYPT, + IMB_ORDER_CIPHER_HASH, + alg, num_jobs)) { + printf("error #%u encrypt cipher-only burst\n", + vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + + if (test_ctr_cipher_burst(mb_mgr, + expkey, vectors[vect].Klen, + local_iv, new_iv_len, + vectors[vect].C, + vectors[vect].P, (unsigned) + vectors[vect].Plen, + IMB_DIR_DECRYPT, + IMB_ORDER_HASH_CIPHER, + alg, num_jobs)) { + printf("error #%u decrypt cipher-only burst\n", + vect + 1); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + } + } + printf("\n"); +} + int ctr_test(struct IMB_MGR *mb_mgr) { + uint32_t i; int errors = 0; struct test_suite_context ctx128; struct test_suite_context ctx192; @@ -1590,6 +2068,11 @@ ctr_test(struct IMB_MGR *mb_mgr) &ctx128, &ctx192, &ctx256, ctr_vectors, ctr_vec_cnt, IMB_CIPHER_CNTR); + for (i = 1; i <= MAX_CTR_JOBS; i++) + test_ctr_vectors_burst(mb_mgr, + &ctx128, &ctx192, &ctx256, + ctr_vectors, ctr_vec_cnt, + IMB_CIPHER_CNTR, i); errors += test_suite_end(&ctx128); errors += test_suite_end(&ctx192); errors += test_suite_end(&ctx256); @@ -1601,6 +2084,11 @@ ctr_test(struct IMB_MGR *mb_mgr) test_ctr_vectors(mb_mgr, &ctx128, &ctx192, &ctx256, ctr_bit_vectors, ctr_bit_vec_cnt, IMB_CIPHER_CNTR_BITLEN); + for (i = 1; i <= MAX_CTR_JOBS; i++) + test_ctr_vectors_burst(mb_mgr, + &ctx128, &ctx192, &ctx256, + ctr_bit_vectors, ctr_bit_vec_cnt, + IMB_CIPHER_CNTR_BITLEN, i); errors += test_suite_end(&ctx128); errors += test_suite_end(&ctx192); errors += test_suite_end(&ctx256); diff --git a/test/direct_api_param_test.c b/test/direct_api_param_test.c index 7ecd78ec0228926c4716a611e718a71b87895a9f..0ff0c20c6cfc3b1842f47bf796e0d405c9a65eed 100644 --- a/test/direct_api_param_test.c +++ b/test/direct_api_param_test.c @@ -5248,14 +5248,15 @@ direct_api_param_test(struct IMB_MGR *mb_mgr) void *handler; #endif #endif - printf("Extended Invalid Direct API arguments test:"); + printf("Extended Invalid Direct API arguments test:\n"); test_suite_start(&ts, "INVALID-ARGS"); #ifndef DEBUG handler = signal(SIGSEGV, seg_handler); #endif if ((mb_mgr->features & IMB_FEATURE_SAFE_PARAM) == 0) { - printf("SAFE_PARAM feature disabled, skipping remaining tests"); + printf("SAFE_PARAM feature disabled, " + "skipping remaining tests\n"); goto dir_api_exit; } errors += test_IMB_AES_KEYEXP_128(mb_mgr); diff --git a/test/gcm_test.c b/test/gcm_test.c index c98519915995c7e3961496a66758775c21565fc3..01fcbda27f191377f8365fb3c4424628fe1d1149 100644 --- a/test/gcm_test.c +++ b/test/gcm_test.c @@ -43,6 +43,7 @@ #define IV_SZ 12 #define DIGEST_SZ 16 #define MAX_KEY_SZ 32 +#define GCM_MAX_JOBS 32 /* * 60-Byte Packet Encryption Using GCM-AES-128 @@ -1282,7 +1283,7 @@ static const struct gcm_ctr_vector gcm_vectors[] = { extra_vector(22), }; -/* Variable IV vectrors (not 12 bytes) */ +/* Variable IV vectors (not 12 bytes) */ static const struct gcm_ctr_vector gcm_iv_vectors[] = { /* * field order {K, Klen, IV, IVlen, A, Alen, P, Plen, C, T, Tlen}; @@ -1316,6 +1317,14 @@ typedef int (*gcm_enc_dec_fn_t)(IMB_MGR *, const struct 
gcm_key_data *, const uint8_t *, uint64_t, uint8_t *, uint64_t, IMB_KEY_SIZE_BYTES); +typedef int (*gcm_enc_dec_many_fn_t)(IMB_MGR *, const struct gcm_key_data *, + struct gcm_context_data **, + uint8_t **, const uint8_t *, + const uint64_t, const uint8_t *, + const uint64_t, const uint8_t *, + const uint64_t, uint8_t **, const uint64_t, + const IMB_KEY_SIZE_BYTES, const uint32_t); + static IMB_MGR *p_gcm_mgr = NULL; static int check_data(const uint8_t *test, const uint8_t *expected, @@ -1620,19 +1629,190 @@ sgl_aes_gcm_dec(IMB_MGR *p_mgr, return 0; } +/***************************************************************************** + * burst API + *****************************************************************************/ +static int +aes_gcm_burst(IMB_MGR *mb_mgr, + const IMB_CIPHER_DIRECTION cipher_dir, + const struct gcm_key_data *key, + const uint64_t key_len, + uint8_t **out, const uint8_t *in, const uint64_t len, + const uint8_t *iv, const uint64_t iv_len, const uint8_t *aad, + const uint64_t aad_len, uint8_t **auth_tag, + const uint64_t auth_tag_len, struct gcm_context_data **ctx, + const IMB_CIPHER_MODE cipher_mode, const IMB_SGL_STATE sgl_state, + const uint32_t num_jobs) +{ + IMB_JOB *job, *jobs[GCM_MAX_JOBS]; + uint32_t i; + + while (IMB_GET_NEXT_BURST(mb_mgr, num_jobs, jobs) < num_jobs) + IMB_FLUSH_BURST(mb_mgr, num_jobs, jobs); + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + + job->cipher_mode = cipher_mode; + job->chain_order = + (cipher_dir == IMB_DIR_ENCRYPT) ? + IMB_ORDER_CIPHER_HASH : + IMB_ORDER_HASH_CIPHER; + job->enc_keys = key; + job->dec_keys = key; + job->key_len_in_bytes = key_len; + job->src = in; + job->dst = out[i]; + job->msg_len_to_cipher_in_bytes = len; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->iv = iv; + job->iv_len_in_bytes = iv_len; + job->u.GCM.aad = aad; + job->u.GCM.aad_len_in_bytes = aad_len; + job->auth_tag_output = auth_tag[i]; + job->auth_tag_output_len_in_bytes = auth_tag_len; + job->cipher_direction = cipher_dir; + if (cipher_mode == IMB_CIPHER_GCM_SGL) { + job->u.GCM.ctx = ctx[i]; + job->sgl_state = sgl_state; + job->hash_alg = IMB_AUTH_GCM_SGL; + } else + job->hash_alg = IMB_AUTH_AES_GMAC; + } + + const uint32_t completed_jobs = + IMB_SUBMIT_BURST(mb_mgr, num_jobs, jobs); + + if (completed_jobs != num_jobs) { + int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + return -1; + } else { + printf("submit_burst error: not enough " + "jobs returned!\n"); + return -1; + } + } + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %u status not complete!\n", i+1); + return -1; + } + } + + return 0; +} + +static int +burst_aes_gcm_enc(IMB_MGR *p_mgr, + const struct gcm_key_data *key, + struct gcm_context_data **ctx, uint8_t **out, + const uint8_t *in, const uint64_t len, + const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t aad_len, + uint8_t **auth_tag, const uint64_t auth_tag_len, + const IMB_KEY_SIZE_BYTES key_len, const uint32_t num_jobs) +{ + return aes_gcm_burst(p_mgr, IMB_DIR_ENCRYPT, key, + key_len, out, in, len, iv, iv_len, aad, aad_len, + auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM, 0, + num_jobs); +} + +static int +burst_aes_gcm_dec(IMB_MGR *p_mgr, + const struct gcm_key_data *key, + struct gcm_context_data **ctx, uint8_t **out, + const uint8_t *in, const uint64_t len, + const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t 
aad_len, + uint8_t **auth_tag, const uint64_t auth_tag_len, + const IMB_KEY_SIZE_BYTES key_len, const uint32_t num_jobs) +{ + return aes_gcm_burst(p_mgr, IMB_DIR_DECRYPT, key, + key_len, out, in, len, iv, iv_len, aad, aad_len, + auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM, 0, + num_jobs); +} + +static int +burst_sgl_aes_gcm(IMB_MGR *p_mgr, + IMB_CIPHER_DIRECTION cipher_dir, + const struct gcm_key_data *key, + struct gcm_context_data **ctx, uint8_t **out, + const uint8_t *in, const uint64_t len, + const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t aad_len, + uint8_t **auth_tag, const uint64_t auth_tag_len, + const IMB_KEY_SIZE_BYTES key_len, const uint32_t num_jobs) +{ + if (aes_gcm_burst(p_mgr, cipher_dir, key, + key_len, out, in, len, iv, iv_len, aad, aad_len, + auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM_SGL, + IMB_SGL_INIT, num_jobs) < 0) + return -1; + if (aes_gcm_burst(p_mgr, cipher_dir, key, + key_len, out, in, len, iv, iv_len, aad, aad_len, + auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM_SGL, + IMB_SGL_UPDATE, num_jobs) < 0) + return -1; + if (aes_gcm_burst(p_mgr, cipher_dir, key, + key_len, out, in, len, iv, iv_len, aad, aad_len, + auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM_SGL, + IMB_SGL_COMPLETE, num_jobs) < 0) + return -1; + + return 0; +} + +static int +burst_sgl_aes_gcm_enc(IMB_MGR *p_mgr, + const struct gcm_key_data *key, + struct gcm_context_data **ctx, uint8_t **out, + const uint8_t *in, const uint64_t len, + const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t aad_len, + uint8_t **auth_tag, const uint64_t auth_tag_len, + const IMB_KEY_SIZE_BYTES key_len, const uint32_t num_jobs) +{ + return burst_sgl_aes_gcm(p_mgr, IMB_DIR_ENCRYPT, key, ctx, out, in, + len, iv, iv_len, aad, aad_len, auth_tag, + auth_tag_len, key_len, num_jobs); +} + +static int +burst_sgl_aes_gcm_dec(IMB_MGR *p_mgr, + const struct gcm_key_data *key, + struct gcm_context_data **ctx, uint8_t **out, + const uint8_t *in, const uint64_t len, + const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t aad_len, + uint8_t **auth_tag, const uint64_t auth_tag_len, + const IMB_KEY_SIZE_BYTES key_len, const uint32_t num_jobs) +{ + return burst_sgl_aes_gcm(p_mgr, IMB_DIR_DECRYPT, key, ctx, out, in, + len, iv, iv_len, aad, aad_len, auth_tag, + auth_tag_len, key_len, num_jobs); +} + /***************************************************************************** * job API *****************************************************************************/ static int aes_gcm_job(IMB_MGR *mb_mgr, IMB_CIPHER_DIRECTION cipher_dir, - const struct gcm_key_data *key, - uint64_t key_len, - uint8_t *out, const uint8_t *in, uint64_t len, + const struct gcm_key_data *key, const uint64_t key_len, + uint8_t *out, const uint8_t *in, const uint64_t len, const uint8_t *iv, const uint64_t iv_len, const uint8_t *aad, - uint64_t aad_len, uint8_t *auth_tag, uint64_t auth_tag_len, - struct gcm_context_data *ctx, IMB_CIPHER_MODE cipher_mode, - IMB_SGL_STATE sgl_state) + const uint64_t aad_len, uint8_t *auth_tag, + const uint64_t auth_tag_len, struct gcm_context_data *ctx, + const IMB_CIPHER_MODE cipher_mode, const IMB_SGL_STATE sgl_state) { IMB_JOB *job; @@ -1679,12 +1859,10 @@ aes_gcm_job(IMB_MGR *mb_mgr, static int job_aes_gcm_enc(IMB_MGR *p_mgr, const struct gcm_key_data *key, - struct gcm_context_data *ctx, - uint8_t *out, const uint8_t *in, uint64_t len, - const uint8_t *iv, const uint64_t iv_len, - const uint8_t *aad, uint64_t aad_len, - uint8_t *auth_tag, uint64_t 
auth_tag_len, - IMB_KEY_SIZE_BYTES key_len) + struct gcm_context_data *ctx, uint8_t *out, const uint8_t *in, + const uint64_t len, const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t aad_len, uint8_t *auth_tag, + const uint64_t auth_tag_len, const IMB_KEY_SIZE_BYTES key_len) { return aes_gcm_job(p_mgr, IMB_DIR_ENCRYPT, key, key_len, out, in, len, iv, iv_len, aad, aad_len, @@ -1694,12 +1872,10 @@ job_aes_gcm_enc(IMB_MGR *p_mgr, static int job_aes_gcm_dec(IMB_MGR *p_mgr, const struct gcm_key_data *key, - struct gcm_context_data *ctx, - uint8_t *out, const uint8_t *in, uint64_t len, - const uint8_t *iv, const uint64_t iv_len, - const uint8_t *aad, uint64_t aad_len, - uint8_t *auth_tag, uint64_t auth_tag_len, - IMB_KEY_SIZE_BYTES key_len) + struct gcm_context_data *ctx, uint8_t *out, const uint8_t *in, + const uint64_t len, const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t aad_len, uint8_t *auth_tag, + const uint64_t auth_tag_len, const IMB_KEY_SIZE_BYTES key_len) { return aes_gcm_job(p_mgr, IMB_DIR_DECRYPT, key, key_len, out, in, len, iv, iv_len, aad, aad_len, @@ -1707,26 +1883,27 @@ job_aes_gcm_dec(IMB_MGR *p_mgr, } static int -job_sgl_aes_gcm_enc(IMB_MGR *p_mgr, - const struct gcm_key_data *key, - struct gcm_context_data *ctx, - uint8_t *out, const uint8_t *in, uint64_t len, - const uint8_t *iv, const uint64_t iv_len, - const uint8_t *aad, uint64_t aad_len, - uint8_t *auth_tag, uint64_t auth_tag_len, - IMB_KEY_SIZE_BYTES key_len) +job_sgl_aes_gcm(IMB_MGR *p_mgr, + const IMB_CIPHER_DIRECTION cipher_dir, + const struct gcm_key_data *key, + struct gcm_context_data *ctx, uint8_t *out, + const uint8_t *in, const uint64_t len, + const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t aad_len, + uint8_t *auth_tag, const uint64_t auth_tag_len, + const IMB_KEY_SIZE_BYTES key_len) { - if (aes_gcm_job(p_mgr, IMB_DIR_ENCRYPT, key, + if (aes_gcm_job(p_mgr, cipher_dir, key, key_len, out, in, len, iv, iv_len, aad, aad_len, auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM_SGL, IMB_SGL_INIT) < 0) return -1; - if (aes_gcm_job(p_mgr, IMB_DIR_ENCRYPT, key, + if (aes_gcm_job(p_mgr, cipher_dir, key, key_len, out, in, len, iv, iv_len, aad, aad_len, auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM_SGL, IMB_SGL_UPDATE) < 0) return -1; - if (aes_gcm_job(p_mgr, IMB_DIR_ENCRYPT, key, + if (aes_gcm_job(p_mgr, cipher_dir, key, key_len, out, in, len, iv, iv_len, aad, aad_len, auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM_SGL, IMB_SGL_COMPLETE) < 0) @@ -1734,32 +1911,84 @@ job_sgl_aes_gcm_enc(IMB_MGR *p_mgr, return 0; } +static int +aes_gcm_single_job_sgl(IMB_MGR *mb_mgr, + IMB_CIPHER_DIRECTION cipher_dir, + const struct gcm_key_data *key, + const uint64_t key_len, + struct IMB_SGL_IOV *sgl_segs, + const unsigned num_sgl_segs, + const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t aad_len, + uint8_t *auth_tag, const uint64_t auth_tag_len, + struct gcm_context_data *ctx) +{ + IMB_JOB *job; + + job = IMB_GET_NEXT_JOB(mb_mgr); + if (!job) { + fprintf(stderr, "failed to get job\n"); + return -1; + } + + job->cipher_mode = IMB_CIPHER_GCM_SGL; + job->cipher_direction = cipher_dir; + job->hash_alg = IMB_AUTH_GCM_SGL; + job->chain_order = + (cipher_dir == IMB_DIR_ENCRYPT) ? 
IMB_ORDER_CIPHER_HASH : + IMB_ORDER_HASH_CIPHER; + job->enc_keys = key; + job->dec_keys = key; + job->key_len_in_bytes = key_len; + job->num_sgl_io_segs = num_sgl_segs; + job->sgl_io_segs = sgl_segs; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->iv = iv; + job->iv_len_in_bytes = iv_len; + job->u.GCM.aad = aad; + job->u.GCM.aad_len_in_bytes = aad_len; + job->auth_tag_output = auth_tag; + job->auth_tag_output_len_in_bytes = auth_tag_len; + job->u.GCM.ctx = ctx; + job->sgl_state = IMB_SGL_ALL; + job = IMB_SUBMIT_JOB(mb_mgr); + + if (job->status != IMB_STATUS_COMPLETED) { + fprintf(stderr, "failed job, status:%d\n", job->status); + return -1; + } + + return 0; +} + +static int +job_sgl_aes_gcm_enc(IMB_MGR *p_mgr, + const struct gcm_key_data *key, + struct gcm_context_data *ctx, + uint8_t *out, const uint8_t *in, const uint64_t len, + const uint8_t *iv, const uint64_t iv_len, + const uint8_t *aad, const uint64_t aad_len, + uint8_t *auth_tag, const uint64_t auth_tag_len, + const IMB_KEY_SIZE_BYTES key_len) +{ + return job_sgl_aes_gcm(p_mgr, IMB_DIR_ENCRYPT, key, ctx, + out, in, len, iv, iv_len, aad, aad_len, + auth_tag, auth_tag_len, key_len); +} + static int job_sgl_aes_gcm_dec(IMB_MGR *p_mgr, const struct gcm_key_data *key, struct gcm_context_data *ctx, - uint8_t *out, const uint8_t *in, uint64_t len, + uint8_t *out, const uint8_t *in, const uint64_t len, const uint8_t *iv, const uint64_t iv_len, - const uint8_t *aad, uint64_t aad_len, - uint8_t *auth_tag, uint64_t auth_tag_len, - IMB_KEY_SIZE_BYTES key_len) + const uint8_t *aad, const uint64_t aad_len, + uint8_t *auth_tag, const uint64_t auth_tag_len, + const IMB_KEY_SIZE_BYTES key_len) { - if (aes_gcm_job(p_mgr, IMB_DIR_DECRYPT, key, - key_len, out, in, len, iv, iv_len, aad, aad_len, - auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM_SGL, - IMB_SGL_INIT) < 0) - return -1; - if (aes_gcm_job(p_mgr, IMB_DIR_DECRYPT, key, - key_len, out, in, len, iv, iv_len, aad, aad_len, - auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM_SGL, - IMB_SGL_UPDATE) < 0) - return -1; - if (aes_gcm_job(p_mgr, IMB_DIR_DECRYPT, key, - key_len, out, in, len, iv, iv_len, aad, aad_len, - auth_tag, auth_tag_len, ctx, IMB_CIPHER_GCM_SGL, - IMB_SGL_COMPLETE) < 0) - return -1; - return 0; + return job_sgl_aes_gcm(p_mgr, IMB_DIR_DECRYPT, key, ctx, + out, in, len, iv, iv_len, aad, aad_len, + auth_tag, auth_tag_len, key_len); } /*****************************************************************************/ @@ -1916,6 +2145,166 @@ test_gcm_vectors(struct gcm_ctr_vector const *vector, free(T2_test); } +static void +test_gcm_vectors_burst(struct gcm_ctr_vector const *vector, + gcm_enc_dec_many_fn_t encfn, + gcm_enc_dec_many_fn_t decfn, + struct test_suite_context *ts) +{ + struct gcm_key_data gdata_key; + int is_error = 0; + /* Temporary array for the calculated vectors */ + struct gcm_context_data **gdata_ctx = NULL; + uint8_t **ct_test = NULL; + uint8_t **pt_test = NULL; + uint8_t **T_test = NULL; + const uint8_t *iv = vector->IV; + uint64_t iv_len = vector->IVlen; + uint32_t i, j; + const uint32_t num_jobs = GCM_MAX_JOBS; + + /* Allocate space for the calculated ciphertext */ + ct_test = malloc(num_jobs * sizeof(void *)); + if (ct_test == NULL) { + fprintf(stderr, "Can't allocate ciphertext memory\n"); + goto test_gcm_vectors_burst_exit; + } + memset(ct_test, 0, num_jobs * sizeof(void *)); + + /* Allocate space for the calculated plaintext */ + pt_test = malloc(num_jobs * sizeof(void *)); + if (pt_test == NULL) { + fprintf(stderr, "Can't allocate plaintext memory\n"); + 
goto test_gcm_vectors_burst_exit; + } + memset(pt_test, 0, num_jobs * sizeof(void *)); + + /* Allocate space for the GCM context data */ + gdata_ctx = malloc(num_jobs * sizeof(void *)); + if (gdata_ctx == NULL) { + fprintf(stderr, "Can't allocate GCM ctx memory\n"); + goto test_gcm_vectors_burst_exit; + } + memset(gdata_ctx, 0, num_jobs * sizeof(void *)); + + /* Allocate space for the calculated tag */ + T_test = malloc(num_jobs * sizeof(void *)); + if (T_test == NULL) { + fprintf(stderr, "Can't allocate tag memory\n"); + goto test_gcm_vectors_burst_exit; + } + memset(T_test, 0, num_jobs * sizeof(void *)); + + /* Zero buffers */ + for (i = 0; i < num_jobs; i++) { + if (vector->Plen != 0) { + ct_test[i] = malloc(vector->Plen); + if (ct_test[i] == NULL) + goto test_gcm_vectors_burst_exit; + memset(ct_test[i], 0, vector->Plen); + + pt_test[i] = malloc(vector->Plen); + if (pt_test[i] == NULL) + goto test_gcm_vectors_burst_exit; + memset(pt_test[i], 0, vector->Plen); + } + + gdata_ctx[i] = malloc(sizeof(struct gcm_context_data)); + if (gdata_ctx[i] == NULL) + goto test_gcm_vectors_burst_exit; + memset(gdata_ctx[i], 0, sizeof(struct gcm_context_data)); + + T_test[i] = malloc(vector->Tlen); + if (T_test[i] == NULL) + goto test_gcm_vectors_burst_exit; + memset(T_test[i], 0, vector->Tlen); + } + + /* This is only required once for a given key */ + switch (vector->Klen) { + case IMB_KEY_128_BYTES: + IMB_AES128_GCM_PRE(p_gcm_mgr, vector->K, &gdata_key); + break; + case IMB_KEY_192_BYTES: + IMB_AES192_GCM_PRE(p_gcm_mgr, vector->K, &gdata_key); + break; + case IMB_KEY_256_BYTES: + default: + IMB_AES256_GCM_PRE(p_gcm_mgr, vector->K, &gdata_key); + break; + } + + /* Test encrypt and decrypt */ + for (i = 0; i < num_jobs; i++) { + /* + * Encrypt + */ + is_error = encfn(p_gcm_mgr, &gdata_key, gdata_ctx, + ct_test, vector->P, vector->Plen, + iv, iv_len, vector->A, + vector->Alen, T_test, vector->Tlen, + vector->Klen, i + 1); + + for (j = 0; j <= i; j++) { + is_error |= check_data(ct_test[j], vector->C, + vector->Plen, + "encrypted cipher text (burst)"); + is_error |= check_data(T_test[j], vector->T, + vector->Tlen, "tag (burst)"); + } + if (is_error) + test_suite_update(ts, 0, 1); + else + test_suite_update(ts, 1, 0); + /* + * Decrypt + */ + is_error = decfn(p_gcm_mgr, &gdata_key, gdata_ctx, pt_test, + vector->C, vector->Plen, iv, iv_len, vector->A, + vector->Alen, T_test, vector->Tlen, + vector->Klen, i + 1); + + for (j = 0; j <= i; j++) { + is_error |= check_data(pt_test[j], vector->P, + vector->Plen, + "decrypted plain text (burst)"); + /* + * GCM decryption outputs a 16 byte tag value + * that must be verified against the expected tag value + */ + is_error |= check_data(T_test[j], vector->T, + vector->Tlen, + "decrypted tag (burst)"); + } + if (is_error) + test_suite_update(ts, 0, 1); + else + test_suite_update(ts, 1, 0); + } + + test_gcm_vectors_burst_exit: + if (NULL != ct_test) { + for (i = 0; i < num_jobs; i++) + free(ct_test[i]); + free(ct_test); + } + if (NULL != pt_test) { + for (i = 0; i < num_jobs; i++) + free(pt_test[i]); + free(pt_test); + } + if (NULL != gdata_ctx) { + for (i = 0; i < num_jobs; i++) + free(gdata_ctx[i]); + free(gdata_ctx); + } + if (NULL != T_test) { + for (i = 0; i < num_jobs; i++) + free(T_test[i]); + free(T_test); + } +} + static void test_gcm_std_vectors(struct test_suite_context *ts128, struct test_suite_context *ts192, @@ -1952,6 +2341,10 @@ test_gcm_std_vectors(struct test_suite_context *ts128, job_sgl_aes_gcm_enc, job_sgl_aes_gcm_dec, ts128); + 
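+				/*
+				 * Burst API coverage (sketch of the call below):
+				 * test_gcm_vectors_burst() re-runs the vector as
+				 * bursts of 1..GCM_MAX_JOBS identical jobs and
+				 * checks every returned text and tag; the
+				 * *_sgl_* wrappers submit each burst three
+				 * times with IMB_SGL_INIT, IMB_SGL_UPDATE and
+				 * IMB_SGL_COMPLETE.
+				 */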
test_gcm_vectors_burst(&vectors[vect], + burst_sgl_aes_gcm_enc, + burst_sgl_aes_gcm_dec, + ts128); } else { test_gcm_vectors(&vectors[vect], aes_gcm_enc, @@ -1961,6 +2354,10 @@ test_gcm_std_vectors(struct test_suite_context *ts128, job_aes_gcm_enc, job_aes_gcm_dec, ts128); + test_gcm_vectors_burst(&vectors[vect], + burst_aes_gcm_enc, + burst_aes_gcm_dec, + ts128); } break; case IMB_KEY_192_BYTES: @@ -1973,6 +2370,10 @@ test_gcm_std_vectors(struct test_suite_context *ts128, job_sgl_aes_gcm_enc, job_sgl_aes_gcm_dec, ts192); + test_gcm_vectors_burst(&vectors[vect], + burst_sgl_aes_gcm_enc, + burst_sgl_aes_gcm_dec, + ts192); } else { test_gcm_vectors(&vectors[vect], aes_gcm_enc, @@ -1982,6 +2383,10 @@ test_gcm_std_vectors(struct test_suite_context *ts128, job_aes_gcm_enc, job_aes_gcm_dec, ts192); + test_gcm_vectors_burst(&vectors[vect], + burst_aes_gcm_enc, + burst_aes_gcm_dec, + ts192); } break; case IMB_KEY_256_BYTES: @@ -1994,6 +2399,11 @@ test_gcm_std_vectors(struct test_suite_context *ts128, job_sgl_aes_gcm_enc, job_sgl_aes_gcm_dec, ts256); + test_gcm_vectors_burst(&vectors[vect], + burst_sgl_aes_gcm_enc, + burst_sgl_aes_gcm_dec, + ts256); + } else { test_gcm_vectors(&vectors[vect], aes_gcm_enc, @@ -2003,6 +2413,11 @@ test_gcm_std_vectors(struct test_suite_context *ts128, job_aes_gcm_enc, job_aes_gcm_dec, ts256); + test_gcm_vectors_burst(&vectors[vect], + burst_aes_gcm_enc, + burst_aes_gcm_dec, + ts256); + } break; default: @@ -2014,12 +2429,13 @@ test_gcm_std_vectors(struct test_suite_context *ts128, } static void -test_ghash(struct test_suite_context *ts) +test_ghash(struct test_suite_context *ts, const int use_job_api) { const int vectors_cnt = DIM(ghash_vectors); int vect; - printf("GHASH test vectors:\n"); + printf("GHASH test vectors (%s API):\n", + use_job_api ? 
"job" : "direct"); for (vect = 0; vect < vectors_cnt; vect++) { struct gcm_key_data gdata_key; struct gcm_ctr_vector const *vector = &ghash_vectors[vect]; @@ -2028,8 +2444,44 @@ test_ghash(struct test_suite_context *ts) memset(&gdata_key, 0, sizeof(struct gcm_key_data)); memset(T_test, 0, sizeof(T_test)); IMB_GHASH_PRE(p_gcm_mgr, vector->K, &gdata_key); - IMB_GHASH(p_gcm_mgr, &gdata_key, vector->P, vector->Plen, - T_test, vector->Tlen); + + if (!use_job_api) { + IMB_GHASH(p_gcm_mgr, &gdata_key, vector->P, + vector->Plen, T_test, vector->Tlen); + } else { + IMB_JOB *job = IMB_GET_NEXT_JOB(p_gcm_mgr); + + if (!job) { + fprintf(stderr, + "failed to get job for ghash\n"); + return; + } + + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_GHASH; + job->u.GHASH._key = &gdata_key; + job->u.GHASH._init_tag = T_test; + job->src = vector->P; + job->msg_len_to_hash_in_bytes = vector->Plen; + job->hash_start_src_offset_in_bytes = UINT64_C(0); + job->auth_tag_output = T_test; + job->auth_tag_output_len_in_bytes = vector->Tlen; + + job = IMB_SUBMIT_JOB(p_gcm_mgr); + while (job) { + if (job->status != IMB_STATUS_COMPLETED) + fprintf(stderr, + "failed job, status:%d\n", + job->status); + job = IMB_GET_COMPLETED_JOB(p_gcm_mgr); + } + while ((job = IMB_FLUSH_JOB(p_gcm_mgr)) != NULL) { + if (job->status != IMB_STATUS_COMPLETED) + fprintf(stderr, + "failed job, status:%d\n", + job->status); + } + } if (check_data(T_test, vector->T, vector->Tlen, "generated tag (T)")) @@ -2222,6 +2674,162 @@ test_gmac(struct test_suite_context *ts128, } } +static void +test_single_job_sgl(struct IMB_MGR *mb_mgr, + struct test_suite_context *ctx, + const uint32_t key_sz, + const uint32_t buffer_sz, + const uint32_t seg_sz, + const IMB_CIPHER_DIRECTION cipher_dir) +{ + uint8_t *in_buffer = NULL; + uint8_t **segments = NULL; + uint8_t linear_digest[DIGEST_SZ]; + uint8_t sgl_digest[DIGEST_SZ]; + uint8_t k[MAX_KEY_SZ]; + unsigned int i; + uint8_t aad[AAD_SZ]; + uint8_t iv[IV_SZ]; + struct gcm_context_data gcm_ctx; + struct gcm_key_data key; + uint32_t last_seg_sz = buffer_sz % seg_sz; + struct IMB_SGL_IOV *sgl_segs = NULL; + const uint32_t num_segments = DIV_ROUND_UP(buffer_sz, seg_sz); + + if (last_seg_sz == 0) + last_seg_sz = seg_sz; + + sgl_segs = malloc(sizeof(struct IMB_SGL_IOV) * num_segments); + if (sgl_segs == NULL) { + fprintf(stderr, "Could not allocate memory for SGL segments\n"); + test_suite_update(ctx, 0, 1); + goto exit; + } + + in_buffer = malloc(buffer_sz); + if (in_buffer == NULL) { + fprintf(stderr, "Could not allocate memory for input buffer\n"); + test_suite_update(ctx, 0, 1); + goto exit; + } + + /* + * Initialize tags with different values, to make sure the comparison + * is false if they are not updated by the library + */ + memset(sgl_digest, 0, DIGEST_SZ); + memset(linear_digest, 0xFF, DIGEST_SZ); + + generate_random_buf(in_buffer, buffer_sz); + generate_random_buf(k, key_sz); + generate_random_buf(iv, IV_SZ); + generate_random_buf(aad, AAD_SZ); + + if (key_sz == IMB_KEY_128_BYTES) + IMB_AES128_GCM_PRE(mb_mgr, k, &key); + else if (key_sz == IMB_KEY_192_BYTES) + IMB_AES192_GCM_PRE(mb_mgr, k, &key); + else /* key_sz == 32 */ + IMB_AES256_GCM_PRE(mb_mgr, k, &key); + + segments = malloc(num_segments * sizeof(*segments)); + if (segments == NULL) { + fprintf(stderr, + "Could not allocate memory for segments array\n"); + test_suite_update(ctx, 0, 1); + goto exit; + } + memset(segments, 0, num_segments * sizeof(*segments)); + + for (i = 0; i < (num_segments - 1); i++) { + segments[i] = 
malloc(seg_sz); + if (segments[i] == NULL) { + fprintf(stderr, + "Could not allocate memory for segment %u\n", + i); + test_suite_update(ctx, 0, 1); + goto exit; + } + memcpy(segments[i], in_buffer + seg_sz * i, seg_sz); + sgl_segs[i].in = segments[i]; + sgl_segs[i].out = segments[i]; + sgl_segs[i].len = seg_sz; + } + segments[i] = malloc(last_seg_sz); + if (segments[i] == NULL) { + fprintf(stderr, "Could not allocate memory for segment %u\n", + i); + test_suite_update(ctx, 0, 1); + goto exit; + } + memcpy(segments[i], in_buffer + seg_sz * i, last_seg_sz); + sgl_segs[i].in = segments[i]; + sgl_segs[i].out = segments[i]; + sgl_segs[i].len = last_seg_sz; + + /* Process linear (single segment) buffer */ + if (aes_gcm_job(mb_mgr, cipher_dir, &key, key_sz, + in_buffer, in_buffer, buffer_sz, iv, IV_SZ, aad, AAD_SZ, + linear_digest, DIGEST_SZ, + &gcm_ctx, IMB_CIPHER_GCM, 0) < 0) { + test_suite_update(ctx, 0, 1); + goto exit; + } else + test_suite_update(ctx, 1, 0); + + /* Process multi-segment buffer */ + aes_gcm_single_job_sgl(mb_mgr, cipher_dir, &key, key_sz, + sgl_segs, num_segments, + iv, IV_SZ, aad, AAD_SZ, + sgl_digest, DIGEST_SZ, &gcm_ctx); + + for (i = 0; i < (num_segments - 1); i++) { + if (memcmp(in_buffer + i*seg_sz, segments[i], + seg_sz) != 0) { + printf("ciphertext mismatched " + "in segment number %u " + "(segment size = %u)\n", + i, seg_sz); + hexdump(stderr, "Expected output", + in_buffer + i*seg_sz, seg_sz); + hexdump(stderr, "SGL output", segments[i], + seg_sz); + test_suite_update(ctx, 0, 1); + goto exit; + } + } + /* Check last segment */ + if (memcmp(in_buffer + i*seg_sz, segments[i], + last_seg_sz) != 0) { + printf("ciphertext mismatched " + "in segment number %u (segment size = %u)\n", + i, seg_sz); + hexdump(stderr, "Expected output", + in_buffer + i*seg_sz, last_seg_sz); + hexdump(stderr, "SGL output", segments[i], last_seg_sz); + test_suite_update(ctx, 0, 1); + } + if (memcmp(sgl_digest, linear_digest, 16) != 0) { + printf("hash mismatched (segment size = %u)\n", + seg_sz); + hexdump(stderr, "Expected digest", + linear_digest, DIGEST_SZ); + hexdump(stderr, "SGL digest", sgl_digest, DIGEST_SZ); + test_suite_update(ctx, 0, 1); + } else { + test_suite_update(ctx, 1, 0); + } + +exit: + free(sgl_segs); + free(in_buffer); + if (segments != NULL) { + for (i = 0; i < num_segments; i++) + free(segments[i]); + free(segments); + } +} + static void test_sgl(struct IMB_MGR *mb_mgr, struct test_suite_context *ctx, @@ -2275,16 +2883,16 @@ test_sgl(struct IMB_MGR *mb_mgr, else /* key_sz == 32 */ IMB_AES256_GCM_PRE(mb_mgr, k, &key); - segments = malloc(num_segments * 8); + segments = malloc(num_segments * sizeof(*segments)); if (segments == NULL) { fprintf(stderr, "Could not allocate memory for segments array\n"); test_suite_update(ctx, 0, 1); goto exit; } - memset(segments, 0, num_segments * 8); + memset(segments, 0, num_segments * sizeof(*segments)); - segment_sizes = malloc(num_segments * 4); + segment_sizes = malloc(num_segments * sizeof(*segment_sizes)); if (segment_sizes == NULL) { fprintf(stderr, "Could not allocate memory for array of sizes\n"); @@ -2503,6 +3111,11 @@ int gcm_test(IMB_MGR *p_mgr) IMB_DIR_ENCRYPT, 1); test_sgl(p_mgr, ctx, key_sz, buf_sz, seg_sz, IMB_DIR_DECRYPT, 1); + /* Single job SGL API */ + test_single_job_sgl(p_mgr, ctx, key_sz, buf_sz, seg_sz, + IMB_DIR_ENCRYPT); + test_single_job_sgl(p_mgr, ctx, key_sz, buf_sz, seg_sz, + IMB_DIR_DECRYPT); /* Direct API */ test_sgl(p_mgr, ctx, key_sz, buf_sz, seg_sz, IMB_DIR_ENCRYPT, 0); @@ -2524,7 +3137,8 @@ int 
gcm_test(IMB_MGR *p_mgr) errors += test_suite_end(&ts256); test_suite_start(&ts128, "GHASH"); - test_ghash(&ts128); + test_ghash(&ts128, 0); + test_ghash(&ts128, 1); errors += test_suite_end(&ts128); return errors; diff --git a/test/hmac_sha1_test.c b/test/hmac_sha1_test.c index 1d2fa686703ea9505b1341f2e650bb51e7efc8ea..8fe7cf1a1ffe3acf5ce95ffb2ec4fc6d7bf63ef6 100644 --- a/test/hmac_sha1_test.c +++ b/test/hmac_sha1_test.c @@ -39,6 +39,7 @@ int hmac_sha1_test(struct IMB_MGR *mb_mgr); #define block_size 64 #define digest_size 20 #define digest96_size 12 +#define max_burst_jobs 32 /* * Test vectors from https://tools.ietf.org/html/rfc2202 @@ -347,17 +348,18 @@ hmac_sha1_job_ok(const struct hmac_sha1_rfc2202_vector *vec, static int test_hmac_sha1(struct IMB_MGR *mb_mgr, const struct hmac_sha1_rfc2202_vector *vec, - const int num_jobs) + const uint32_t num_jobs) { struct IMB_JOB *job; uint8_t padding[16]; uint8_t **auths = malloc(num_jobs * sizeof(void *)); - int i = 0, jobs_rx = 0, ret = -1; + uint32_t i = 0, jobs_rx = 0; + int ret = -1; uint8_t key[block_size]; uint8_t buf[block_size]; DECLARE_ALIGNED(uint8_t ipad_hash[digest_size], 16); DECLARE_ALIGNED(uint8_t opad_hash[digest_size], 16); - int key_len = 0; + uint32_t key_len = 0; if (auths == NULL) { fprintf(stderr, "Can't allocate buffer memory\n"); @@ -455,7 +457,7 @@ test_hmac_sha1(struct IMB_MGR *mb_mgr, } if (jobs_rx != num_jobs) { - printf("Expected %d jobs, received %d\n", num_jobs, jobs_rx); + printf("Expected %u jobs, received %u\n", num_jobs, jobs_rx); goto end; } ret = 0; @@ -477,15 +479,285 @@ test_hmac_sha1(struct IMB_MGR *mb_mgr, return ret; } +static int +test_hmac_sha1_burst(struct IMB_MGR *mb_mgr, + const struct hmac_sha1_rfc2202_vector *vec, + const uint32_t num_jobs) +{ + struct IMB_JOB *job, *jobs[max_burst_jobs] = {NULL}; + uint8_t padding[16]; + uint8_t **auths = malloc(num_jobs * sizeof(void *)); + uint32_t i = 0, jobs_rx = 0; + int ret = -1, err; + uint8_t key[block_size]; + uint8_t buf[block_size]; + DECLARE_ALIGNED(uint8_t ipad_hash[digest_size], 16); + DECLARE_ALIGNED(uint8_t opad_hash[digest_size], 16); + uint32_t completed_jobs = 0, key_len = 0; + + if (auths == NULL) { + fprintf(stderr, "Can't allocate buffer memory\n"); + goto end2; + } + + memset(padding, -1, sizeof(padding)); + memset(auths, 0, num_jobs * sizeof(void *)); + + for (i = 0; i < num_jobs; i++) { + const size_t alloc_len = + vec->digest_len + (sizeof(padding) * 2); + + auths[i] = malloc(alloc_len); + if (auths[i] == NULL) { + fprintf(stderr, "Can't allocate buffer memory\n"); + goto end; + } + memset(auths[i], -1, alloc_len); + } + + /* prepare the key */ + memset(key, 0, sizeof(key)); + if (vec->key_len <= block_size) { + memcpy(key, vec->key, vec->key_len); + key_len = (int) vec->key_len; + } else { + IMB_SHA1(mb_mgr, vec->key, vec->key_len, key); + key_len = digest_size; + } + + /* compute ipad hash */ + memset(buf, 0x36, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA1_ONE_BLOCK(mb_mgr, buf, ipad_hash); + + /* compute opad hash */ + memset(buf, 0x5c, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA1_ONE_BLOCK(mb_mgr, buf, opad_hash); + + while (IMB_GET_NEXT_BURST(mb_mgr, num_jobs, jobs) < num_jobs) + IMB_FLUSH_BURST(mb_mgr, num_jobs, jobs); + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + job->enc_keys = NULL; + job->dec_keys = NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->dst = NULL; + job->key_len_in_bytes = 0; + 
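+		/*
+		 * Hash-only job: all cipher fields stay NULL/zero, the HMAC
+		 * key is supplied as the precomputed ipad/opad one-block
+		 * digests, and the tag is written past the leading guard
+		 * padding bytes.
+		 */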
job->auth_tag_output = auths[i] + sizeof(padding); + job->auth_tag_output_len_in_bytes = vec->digest_len; + job->iv = NULL; + job->iv_len_in_bytes = 0; + job->src = vec->data; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = vec->data_len; + job->u.HMAC._hashed_auth_key_xor_ipad = ipad_hash; + job->u.HMAC._hashed_auth_key_xor_opad = opad_hash; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_HMAC_SHA_1; + + job->user_data = auths[i]; + + } + + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, num_jobs, jobs); + err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + goto end; + } + +check_burst_jobs: + for (i = 0; i < completed_jobs; i++) { + job = jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %u status not complete!\n", i+1); + goto end; + } + + if (!hmac_sha1_job_ok(vec, job, job->user_data, + padding, sizeof(padding))) + goto end; + jobs_rx++; + } + + if (jobs_rx != num_jobs) { + completed_jobs = IMB_FLUSH_BURST(mb_mgr, + num_jobs - completed_jobs, + jobs); + if (completed_jobs == 0) { + printf("Expected %u jobs, received %u\n", + num_jobs, jobs_rx); + goto end; + } + goto check_burst_jobs; + } + ret = 0; + + end: + for (i = 0; i < num_jobs; i++) { + if (auths[i] != NULL) + free(auths[i]); + } + + end2: + if (auths != NULL) + free(auths); + + return ret; +} + +static int +test_hmac_sha1_hash_burst(struct IMB_MGR *mb_mgr, + const struct hmac_sha1_rfc2202_vector *vec, + const uint32_t num_jobs) +{ + struct IMB_JOB *job, jobs[max_burst_jobs] = {0}; + uint8_t padding[16]; + uint8_t **auths = malloc(num_jobs * sizeof(void *)); + uint32_t i = 0, jobs_rx = 0; + int ret = -1; + uint8_t key[block_size]; + uint8_t buf[block_size]; + DECLARE_ALIGNED(uint8_t ipad_hash[digest_size], 16); + DECLARE_ALIGNED(uint8_t opad_hash[digest_size], 16); + uint32_t completed_jobs = 0, key_len = 0; + + if (auths == NULL) { + fprintf(stderr, "Can't allocate buffer memory\n"); + goto end2; + } + + memset(padding, -1, sizeof(padding)); + memset(auths, 0, num_jobs * sizeof(void *)); + + for (i = 0; i < num_jobs; i++) { + const size_t alloc_len = + vec->digest_len + (sizeof(padding) * 2); + + auths[i] = malloc(alloc_len); + if (auths[i] == NULL) { + fprintf(stderr, "Can't allocate buffer memory\n"); + goto end; + } + memset(auths[i], -1, alloc_len); + } + + /* prepare the key */ + memset(key, 0, sizeof(key)); + if (vec->key_len <= block_size) { + memcpy(key, vec->key, vec->key_len); + key_len = (int) vec->key_len; + } else { + IMB_SHA1(mb_mgr, vec->key, vec->key_len, key); + key_len = digest_size; + } + + /* compute ipad hash */ + memset(buf, 0x36, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA1_ONE_BLOCK(mb_mgr, buf, ipad_hash); + + /* compute opad hash */ + memset(buf, 0x5c, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + IMB_SHA1_ONE_BLOCK(mb_mgr, buf, opad_hash); + + for (i = 0; i < num_jobs; i++) { + job = &jobs[i]; + job->enc_keys = NULL; + job->dec_keys = NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->dst = NULL; + job->key_len_in_bytes = 0; + job->auth_tag_output = auths[i] + sizeof(padding); + job->auth_tag_output_len_in_bytes = vec->digest_len; + job->iv = NULL; + job->iv_len_in_bytes = 0; + job->src = vec->data; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = 0; + 
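+		/*
+		 * Unlike the burst test above, these jobs live in a local
+		 * array and are handed directly to IMB_SUBMIT_HASH_BURST()
+		 * with IMB_AUTH_HMAC_SHA_1, so there is no
+		 * IMB_GET_NEXT_BURST()/IMB_FLUSH_BURST() reservation step
+		 * here.
+		 */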
job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = vec->data_len; + job->u.HMAC._hashed_auth_key_xor_ipad = ipad_hash; + job->u.HMAC._hashed_auth_key_xor_opad = opad_hash; + job->cipher_mode = IMB_CIPHER_NULL; + job->hash_alg = IMB_AUTH_HMAC_SHA_1; + + job->user_data = auths[i]; + + } + + completed_jobs = IMB_SUBMIT_HASH_BURST(mb_mgr, jobs, num_jobs, + IMB_AUTH_HMAC_SHA_1); + if (completed_jobs != num_jobs) { + int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + goto end; + } else { + printf("submit_burst error: not enough " + "jobs returned!\n"); + goto end; + } + } + + for (i = 0; i < num_jobs; i++) { + job = &jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %u status not complete!\n", i+1); + goto end; + } + + if (!hmac_sha1_job_ok(vec, job, job->user_data, + padding, sizeof(padding))) + goto end; + jobs_rx++; + } + + if (jobs_rx != num_jobs) { + printf("Expected %u jobs, received %u\n", num_jobs, jobs_rx); + goto end; + } + ret = 0; + + end: + for (i = 0; i < num_jobs; i++) { + if (auths[i] != NULL) + free(auths[i]); + } + + end2: + if (auths != NULL) + free(auths); + + return ret; +} + static void test_hmac_sha1_std_vectors(struct IMB_MGR *mb_mgr, - const int num_jobs, + const uint32_t num_jobs, struct test_suite_context *ts) { const int vectors_cnt = DIM(hmac_sha1_vectors); int vect; - printf("HMAC-SHA1 standard test vectors (N jobs = %d):\n", num_jobs); + printf("HMAC-SHA1 standard test vectors (N jobs = %u):\n", num_jobs); for (vect = 1; vect <= vectors_cnt; vect++) { const int idx = vect - 1; #ifdef DEBUG @@ -506,6 +778,20 @@ test_hmac_sha1_std_vectors(struct IMB_MGR *mb_mgr, } else { test_suite_update(ts, 1, 0); } + if (test_hmac_sha1_burst(mb_mgr, &hmac_sha1_vectors[idx], + num_jobs)) { + printf("error #%d - burst API\n", vect); + test_suite_update(ts, 0, 1); + } else { + test_suite_update(ts, 1, 0); + } + if (test_hmac_sha1_hash_burst(mb_mgr, &hmac_sha1_vectors[idx], + num_jobs)) { + printf("error #%d - hash-only burst API\n", vect); + test_suite_update(ts, 0, 1); + } else { + test_suite_update(ts, 1, 0); + } } printf("\n"); } @@ -514,10 +800,11 @@ int hmac_sha1_test(struct IMB_MGR *mb_mgr) { struct test_suite_context ts; - int num_jobs, errors = 0; + int errors = 0; + uint32_t num_jobs; test_suite_start(&ts, "HMAC-SHA1"); - for (num_jobs = 1; num_jobs <= 17; num_jobs++) + for (num_jobs = 1; num_jobs <= max_burst_jobs; num_jobs++) test_hmac_sha1_std_vectors(mb_mgr, num_jobs, &ts); errors = test_suite_end(&ts); diff --git a/test/hmac_sha256_sha512_test.c b/test/hmac_sha256_sha512_test.c index 10023346f4262aab416ba43980bc1670071b5120..c8ba3c3a55a57c3ae40f4d2c9ab9b96e3df90a05 100644 --- a/test/hmac_sha256_sha512_test.c +++ b/test/hmac_sha256_sha512_test.c @@ -34,6 +34,8 @@ #include "gcm_ctr_vectors_test.h" #include "utils.h" +#define max_burst_jobs 32 + int hmac_sha256_sha512_test(struct IMB_MGR *mb_mgr); /* @@ -818,18 +820,19 @@ hmac_shax_job_ok(const struct hmac_rfc4231_vector *vec, static int test_hmac_shax(struct IMB_MGR *mb_mgr, const struct hmac_rfc4231_vector *vec, - const int num_jobs, + const uint32_t num_jobs, const int sha_type) { struct IMB_JOB *job; uint8_t padding[16]; uint8_t **auths = malloc(num_jobs * sizeof(void *)); - int i = 0, jobs_rx = 0, ret = -1; + uint32_t i = 0, jobs_rx = 0; + int ret = -1; uint8_t key[IMB_SHA_512_BLOCK_SIZE]; uint8_t buf[IMB_SHA_512_BLOCK_SIZE]; DECLARE_ALIGNED(uint8_t ipad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); 
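+	/*
+	 * ipad_hash/opad_hash hold the one-block SHA-x digests of
+	 * (key XOR 0x36..36) and (key XOR 0x5c..5c); the jobs built below
+	 * pass these via u.HMAC._hashed_auth_key_xor_ipad/_opad instead of
+	 * the raw key.
+	 */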
DECLARE_ALIGNED(uint8_t opad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); - int key_len = 0; + uint32_t key_len = 0; size_t digest_len = 0; size_t block_size = 0; @@ -1019,7 +1022,465 @@ test_hmac_shax(struct IMB_MGR *mb_mgr, } if (jobs_rx != num_jobs) { - printf("Expected %d jobs, received %d\n", num_jobs, jobs_rx); + printf("Expected %u jobs, received %u\n", num_jobs, jobs_rx); + goto end; + } + ret = 0; + + end: + for (i = 0; i < num_jobs; i++) { + if (auths[i] != NULL) + free(auths[i]); + } + + end2: + if (auths != NULL) + free(auths); + + return ret; +} + +static int +test_hmac_shax_burst(struct IMB_MGR *mb_mgr, + const struct hmac_rfc4231_vector *vec, + const uint32_t num_jobs, + const int sha_type) +{ + struct IMB_JOB *job, *jobs[max_burst_jobs] = {NULL}; + uint8_t padding[16]; + uint8_t **auths = malloc(num_jobs * sizeof(void *)); + uint32_t i = 0, jobs_rx = 0, completed_jobs = 0; + int ret = -1, err; + uint8_t key[IMB_SHA_512_BLOCK_SIZE]; + uint8_t buf[IMB_SHA_512_BLOCK_SIZE]; + DECLARE_ALIGNED(uint8_t ipad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t opad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + uint32_t key_len = 0; + size_t digest_len = 0; + size_t block_size = 0; + + if (auths == NULL) { + fprintf(stderr, "Can't allocate buffer memory\n"); + goto end2; + } + + switch (sha_type) { + case 224: + digest_len = vec->hmac_sha224_len; + block_size = IMB_SHA_256_BLOCK_SIZE; + break; + case 256: + digest_len = vec->hmac_sha256_len; + block_size = IMB_SHA_256_BLOCK_SIZE; + break; + case 384: + digest_len = vec->hmac_sha384_len; + block_size = IMB_SHA_384_BLOCK_SIZE; + break; + case 512: + digest_len = vec->hmac_sha512_len; + block_size = IMB_SHA_512_BLOCK_SIZE; + break; + default: + fprintf(stderr, "Wrong SHA type selection 'SHA-%d'!\n", + sha_type); + goto end2; + } + + memset(padding, -1, sizeof(padding)); + memset(auths, 0, num_jobs * sizeof(void *)); + + for (i = 0; i < num_jobs; i++) { + const size_t alloc_len = + digest_len + (sizeof(padding) * 2); + + auths[i] = malloc(alloc_len); + if (auths[i] == NULL) { + fprintf(stderr, "Can't allocate buffer memory\n"); + goto end; + } + memset(auths[i], -1, alloc_len); + } + + /* prepare the key */ + memset(key, 0, sizeof(key)); + if (vec->key_len <= block_size) { + memcpy(key, vec->key, vec->key_len); + key_len = (int) vec->key_len; + } else { + switch (sha_type) { + case 224: + IMB_SHA224(mb_mgr, vec->key, vec->key_len, key); + key_len = IMB_SHA224_DIGEST_SIZE_IN_BYTES; + break; + case 256: + IMB_SHA256(mb_mgr, vec->key, vec->key_len, key); + key_len = IMB_SHA256_DIGEST_SIZE_IN_BYTES; + break; + case 384: + IMB_SHA384(mb_mgr, vec->key, vec->key_len, key); + key_len = IMB_SHA384_DIGEST_SIZE_IN_BYTES; + break; + case 512: + IMB_SHA512(mb_mgr, vec->key, vec->key_len, key); + key_len = IMB_SHA512_DIGEST_SIZE_IN_BYTES; + break; + default: + fprintf(stderr, "Wrong SHA type selection 'SHA-%d'!\n", + sha_type); + goto end; + } + } + + /* compute ipad hash */ + memset(buf, 0x36, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + + switch (sha_type) { + case 224: + IMB_SHA224_ONE_BLOCK(mb_mgr, buf, ipad_hash); + break; + case 256: + IMB_SHA256_ONE_BLOCK(mb_mgr, buf, ipad_hash); + break; + case 384: + IMB_SHA384_ONE_BLOCK(mb_mgr, buf, ipad_hash); + break; + case 512: + default: + IMB_SHA512_ONE_BLOCK(mb_mgr, buf, ipad_hash); + break; + } + + /* compute opad hash */ + memset(buf, 0x5c, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + + switch (sha_type) { + case 224: + 
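The key preparation repeated per digest size in these tests follows RFC 2104: a key longer than the hash block size is first hashed, the (zero-padded) key is XORed with the 0x36/0x5c pads, and one block of each padded key is hashed to seed the inner and outer states. A minimal sketch for the SHA-256 case is shown below; the `hmac_sha256_pads()` helper name is illustrative and an initialized `IMB_MGR` is assumed.
```
#include <stdint.h>
#include <string.h>
#include <intel-ipsec-mb.h>

/* Precompute HMAC-SHA-256 ipad/opad hashes for IMB_JOB::u.HMAC
 * (illustrative helper, not a library API). */
static void
hmac_sha256_pads(IMB_MGR *mgr, const uint8_t *key, const size_t key_len,
                 uint8_t ipad_hash[IMB_SHA256_DIGEST_SIZE_IN_BYTES],
                 uint8_t opad_hash[IMB_SHA256_DIGEST_SIZE_IN_BYTES])
{
        uint8_t k[IMB_SHA_256_BLOCK_SIZE];   /* key padded to block size */
        uint8_t buf[IMB_SHA_256_BLOCK_SIZE];
        size_t i, klen = key_len;

        memset(k, 0, sizeof(k));
        if (key_len <= IMB_SHA_256_BLOCK_SIZE) {
                memcpy(k, key, key_len);
        } else {
                /* RFC 2104: keys longer than the block size are hashed */
                IMB_SHA256(mgr, key, key_len, k);
                klen = IMB_SHA256_DIGEST_SIZE_IN_BYTES;
        }

        memset(buf, 0x36, sizeof(buf));      /* inner pad */
        for (i = 0; i < klen; i++)
                buf[i] ^= k[i];
        IMB_SHA256_ONE_BLOCK(mgr, buf, ipad_hash);

        memset(buf, 0x5c, sizeof(buf));      /* outer pad */
        for (i = 0; i < klen; i++)
                buf[i] ^= k[i];
        IMB_SHA256_ONE_BLOCK(mgr, buf, opad_hash);
}
```
The test XORs across the full padded block; since XORing the zero padding is a no-op, limiting the loop to `klen` is equivalent. Note also that the test keeps the resulting ipad/opad buffers 16-byte aligned via `DECLARE_ALIGNED`, which is worth preserving when wiring them into `IMB_JOB::u.HMAC`.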
IMB_SHA224_ONE_BLOCK(mb_mgr, buf, opad_hash); + break; + case 256: + IMB_SHA256_ONE_BLOCK(mb_mgr, buf, opad_hash); + break; + case 384: + IMB_SHA384_ONE_BLOCK(mb_mgr, buf, opad_hash); + break; + case 512: + default: + IMB_SHA512_ONE_BLOCK(mb_mgr, buf, opad_hash); + break; + } + + while (IMB_GET_NEXT_BURST(mb_mgr, num_jobs, jobs) < num_jobs) + IMB_FLUSH_BURST(mb_mgr, num_jobs, jobs); + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + job->enc_keys = NULL; + job->dec_keys = NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->dst = NULL; + job->key_len_in_bytes = 0; + job->auth_tag_output = auths[i] + sizeof(padding); + job->auth_tag_output_len_in_bytes = digest_len; + job->iv = NULL; + job->iv_len_in_bytes = 0; + job->src = vec->data; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = vec->data_len; + job->u.HMAC._hashed_auth_key_xor_ipad = ipad_hash; + job->u.HMAC._hashed_auth_key_xor_opad = opad_hash; + job->cipher_mode = IMB_CIPHER_NULL; + + switch (sha_type) { + case 224: + job->hash_alg = IMB_AUTH_HMAC_SHA_224; + break; + case 256: + job->hash_alg = IMB_AUTH_HMAC_SHA_256; + break; + case 384: + job->hash_alg = IMB_AUTH_HMAC_SHA_384; + break; + case 512: + default: + job->hash_alg = IMB_AUTH_HMAC_SHA_512; + break; + } + + job->user_data = auths[i]; + + } + + completed_jobs = IMB_SUBMIT_BURST(mb_mgr, num_jobs, jobs); + err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + goto end; + } + + check_burst_jobs: + for (i = 0; i < completed_jobs; i++) { + job = jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %u status not complete!\n", i+1); + goto end; + } + + if (!hmac_shax_job_ok(vec, job, sha_type, + job->user_data, + padding, sizeof(padding))) + goto end; + jobs_rx++; + } + + if (jobs_rx != num_jobs) { + completed_jobs = IMB_FLUSH_BURST(mb_mgr, + num_jobs - completed_jobs, + jobs); + if (completed_jobs == 0) { + printf("Expected %u jobs, received %u\n", + num_jobs, jobs_rx); + goto end; + } + goto check_burst_jobs; + } + ret = 0; + + end: + for (i = 0; i < num_jobs; i++) { + if (auths[i] != NULL) + free(auths[i]); + } + + end2: + if (auths != NULL) + free(auths); + + return ret; +} + +static int +test_hmac_shax_hash_burst(struct IMB_MGR *mb_mgr, + const struct hmac_rfc4231_vector *vec, + const uint32_t num_jobs, + const int sha_type) +{ + struct IMB_JOB *job, jobs[max_burst_jobs] = {0}; + uint8_t padding[16]; + uint8_t **auths = NULL; + uint32_t i = 0, jobs_rx = 0, completed_jobs = 0; + int ret = -1; + uint8_t key[IMB_SHA_512_BLOCK_SIZE]; + uint8_t buf[IMB_SHA_512_BLOCK_SIZE]; + DECLARE_ALIGNED(uint8_t ipad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t opad_hash[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + uint32_t key_len = 0; + size_t digest_len = 0; + size_t block_size = 0; + + if (num_jobs == 0) + return 0; + + auths = malloc(num_jobs * sizeof(void *)); + if (auths == NULL) { + fprintf(stderr, "Can't allocate buffer memory\n"); + goto end2; + } + + switch (sha_type) { + case 224: + digest_len = vec->hmac_sha224_len; + block_size = IMB_SHA_256_BLOCK_SIZE; + break; + case 256: + digest_len = vec->hmac_sha256_len; + block_size = IMB_SHA_256_BLOCK_SIZE; + break; + case 384: + digest_len = vec->hmac_sha384_len; + block_size = IMB_SHA_384_BLOCK_SIZE; + break; + case 512: + digest_len = vec->hmac_sha512_len; + block_size 
= IMB_SHA_512_BLOCK_SIZE; + break; + default: + fprintf(stderr, "Wrong SHA type selection 'SHA-%d'!\n", + sha_type); + goto end2; + } + + memset(padding, -1, sizeof(padding)); + memset(auths, 0, num_jobs * sizeof(void *)); + + for (i = 0; i < num_jobs; i++) { + const size_t alloc_len = + digest_len + (sizeof(padding) * 2); + + auths[i] = malloc(alloc_len); + if (auths[i] == NULL) { + fprintf(stderr, "Can't allocate buffer memory\n"); + goto end; + } + memset(auths[i], -1, alloc_len); + } + + /* prepare the key */ + memset(key, 0, sizeof(key)); + if (vec->key_len <= block_size) { + memcpy(key, vec->key, vec->key_len); + key_len = (int) vec->key_len; + } else { + switch (sha_type) { + case 224: + IMB_SHA224(mb_mgr, vec->key, vec->key_len, key); + key_len = IMB_SHA224_DIGEST_SIZE_IN_BYTES; + break; + case 256: + IMB_SHA256(mb_mgr, vec->key, vec->key_len, key); + key_len = IMB_SHA256_DIGEST_SIZE_IN_BYTES; + break; + case 384: + IMB_SHA384(mb_mgr, vec->key, vec->key_len, key); + key_len = IMB_SHA384_DIGEST_SIZE_IN_BYTES; + break; + case 512: + IMB_SHA512(mb_mgr, vec->key, vec->key_len, key); + key_len = IMB_SHA512_DIGEST_SIZE_IN_BYTES; + break; + default: + fprintf(stderr, "Wrong SHA type selection 'SHA-%d'!\n", + sha_type); + goto end; + } + } + + /* compute ipad hash */ + memset(buf, 0x36, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + + switch (sha_type) { + case 224: + IMB_SHA224_ONE_BLOCK(mb_mgr, buf, ipad_hash); + break; + case 256: + IMB_SHA256_ONE_BLOCK(mb_mgr, buf, ipad_hash); + break; + case 384: + IMB_SHA384_ONE_BLOCK(mb_mgr, buf, ipad_hash); + break; + case 512: + default: + IMB_SHA512_ONE_BLOCK(mb_mgr, buf, ipad_hash); + break; + } + + /* compute opad hash */ + memset(buf, 0x5c, sizeof(buf)); + for (i = 0; i < key_len; i++) + buf[i] ^= key[i]; + + switch (sha_type) { + case 224: + IMB_SHA224_ONE_BLOCK(mb_mgr, buf, opad_hash); + break; + case 256: + IMB_SHA256_ONE_BLOCK(mb_mgr, buf, opad_hash); + break; + case 384: + IMB_SHA384_ONE_BLOCK(mb_mgr, buf, opad_hash); + break; + case 512: + default: + IMB_SHA512_ONE_BLOCK(mb_mgr, buf, opad_hash); + break; + } + + for (i = 0; i < num_jobs; i++) { + job = &jobs[i]; + job->enc_keys = NULL; + job->dec_keys = NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->dst = NULL; + job->key_len_in_bytes = 0; + job->auth_tag_output = auths[i] + sizeof(padding); + job->auth_tag_output_len_in_bytes = digest_len; + job->iv = NULL; + job->iv_len_in_bytes = 0; + job->src = vec->data; + job->cipher_start_src_offset_in_bytes = 0; + job->msg_len_to_cipher_in_bytes = 0; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = vec->data_len; + job->u.HMAC._hashed_auth_key_xor_ipad = ipad_hash; + job->u.HMAC._hashed_auth_key_xor_opad = opad_hash; + job->cipher_mode = IMB_CIPHER_NULL; + + switch (sha_type) { + case 224: + job->hash_alg = IMB_AUTH_HMAC_SHA_224; + break; + case 256: + job->hash_alg = IMB_AUTH_HMAC_SHA_256; + break; + case 384: + job->hash_alg = IMB_AUTH_HMAC_SHA_384; + break; + case 512: + default: + job->hash_alg = IMB_AUTH_HMAC_SHA_512; + break; + } + + job->user_data = auths[i]; + + } + + completed_jobs = IMB_SUBMIT_HASH_BURST(mb_mgr, jobs, num_jobs, + job->hash_alg); + if (completed_jobs != num_jobs) { + int err = imb_get_errno(mb_mgr); + + if (err != 0) { + printf("submit_burst error %d : '%s'\n", err, + imb_get_strerror(err)); + goto end; + } else { + printf("submit_burst error: not enough " + "jobs returned!\n"); + goto end; + } + } + + for (i = 0; i < 
num_jobs; i++) { + job = &jobs[i]; + + if (job->status != IMB_STATUS_COMPLETED) { + printf("job %u status not complete!\n", i+1); + goto end; + } + + if (!hmac_shax_job_ok(vec, job, sha_type, + job->user_data, + padding, sizeof(padding))) + goto end; + jobs_rx++; + } + + if (jobs_rx != num_jobs) { + printf("Expected %u jobs, received %u\n", num_jobs, jobs_rx); goto end; } ret = 0; @@ -1040,13 +1501,13 @@ test_hmac_shax(struct IMB_MGR *mb_mgr, static void test_hmac_shax_std_vectors(struct IMB_MGR *mb_mgr, const int sha_type, - const int num_jobs, + const uint32_t num_jobs, struct test_suite_context *ts) { const int vectors_cnt = DIM(hmac_sha256_sha512_vectors); int vect; - printf("HMAC-SHA%d standard test vectors (N jobs = %d):\n", + printf("HMAC-SHA%d standard test vectors (N jobs = %u):\n", sha_type, num_jobs); for (vect = 1; vect <= vectors_cnt; vect++) { const int idx = vect - 1; @@ -1083,6 +1544,23 @@ test_hmac_shax_std_vectors(struct IMB_MGR *mb_mgr, } else { test_suite_update(ts, 1, 0); } + if (test_hmac_shax_burst(mb_mgr, + &hmac_sha256_sha512_vectors[idx], + num_jobs, sha_type)) { + printf("error #%d - burst API\n", vect); + test_suite_update(ts, 0, 1); + } else { + test_suite_update(ts, 1, 0); + } + if (test_hmac_shax_hash_burst(mb_mgr, + &hmac_sha256_sha512_vectors[idx], + num_jobs, sha_type)) { + printf("error #%d - hash-only burst API\n", vect); + test_suite_update(ts, 0, 1); + } else { + test_suite_update(ts, 1, 0); + } + } printf("\n"); } @@ -1096,19 +1574,16 @@ hmac_sha256_sha512_test(struct IMB_MGR *mb_mgr) static const char * const sha_names_tab[] = { "HMAC-SHA224", "HMAC-SHA256", "HMAC-SHA384", "HMAC-SHA512" }; - const int num_jobs_tab[] = { - 1, 3, 4, 5, 7, 8, 9, 15, 16, 17 - }; - unsigned i, j; + unsigned i, num_jobs; int errors = 0; for (i = 0; i < DIM(sha_types_tab); i++) { struct test_suite_context ts; test_suite_start(&ts, sha_names_tab[i]); - for (j = 0; j < DIM(num_jobs_tab); j++) + for (num_jobs = 1; num_jobs <= max_burst_jobs; num_jobs++) test_hmac_shax_std_vectors(mb_mgr, sha_types_tab[i], - num_jobs_tab[j], &ts); + num_jobs, &ts); errors += test_suite_end(&ts); } diff --git a/test/ipsec_xvalid.c b/test/ipsec_xvalid.c index cf112d27e31b2c1c7f79520deba6731014e16683..d905f03e468437378fb80464cc04916fde7d47e0 100644 --- a/test/ipsec_xvalid.c +++ b/test/ipsec_xvalid.c @@ -77,6 +77,7 @@ #define MAX_GCM_AAD_SIZE 1024 #define MAX_CCM_AAD_SIZE 46 #define MAX_AAD_SIZE 1024 +#define NUM_TAG_SIZES 7 #define MAX_IV_SIZE 25 /* IV size for ZUC-256 */ #define MAX_TAG_SIZE 16 /* Max tag size for ZUC-256 */ @@ -91,6 +92,10 @@ #define SEED 0xdeadcafe #define STACK_DEPTH 8192 +/* Max safe check retries to eliminate false positives */ +#define MAX_SAFE_RETRIES 100 +#define DEFAULT_SAFE_RETRIES 2 + static int pattern_auth_key; static int pattern_cipher_key; static int pattern_plain_text; @@ -99,8 +104,6 @@ static uint64_t pattern8_cipher_key; static uint64_t pattern8_plain_text; #define MAX_OOO_MGR_SIZE 8192 -#define OOO_MGR_FIRST aes128_ooo -#define OOO_MGR_LAST zuc_eia3_ooo /* Struct storing cipher parameters */ struct params_s { @@ -139,6 +142,7 @@ struct data { uint8_t auth_key[MAX_KEY_SIZE]; struct cipher_auth_keys enc_keys; struct cipher_auth_keys dec_keys; + uint8_t tag_size; }; struct custom_job_params { @@ -496,6 +500,12 @@ struct str_value_mapping hash_algo_str_map[] = { .hash_alg = IMB_AUTH_ZUC256_EIA3_BITLEN, } }, + { + .name = "GHASH", + .values.job_params = { + .hash_alg = IMB_AUTH_GHASH, + } + }, }; struct str_value_mapping aead_algo_str_map[] = { @@ -578,7 +588,7 @@ 
struct variant_s { struct params_s params; }; -const uint8_t auth_tag_length_bytes[] = { +const uint8_t auth_tag_len_bytes[] = { 12, /* IMB_AUTH_HMAC_SHA_1 */ 14, /* IMB_AUTH_HMAC_SHA_224 */ 16, /* IMB_AUTH_HMAC_SHA_256 */ @@ -611,7 +621,7 @@ const uint8_t auth_tag_length_bytes[] = { 16, /* IMB_AUTH_CHACHA20_POLY1305_SGL */ 4, /* IMB_AUTH_ZUC256_EIA3_BITLEN */ 16, /* IMB_AUTH_SNOW_V_AEAD */ - 16, /* IMB_AUTH_CRC32_ETHERNET_FCS */ + 16, /* IMB_AUTH_AES_GCM_SGL */ 4, /* IMB_AUTH_CRC32_ETHERNET_FCS */ 4, /* IMB_AUTH_CRC32_SCTP */ 4, /* IMB_AUTH_CRC32_WIMAX_OFDMA_DATA */ @@ -624,6 +634,7 @@ const uint8_t auth_tag_length_bytes[] = { 4, /* IMB_AUTH_CRC8_WIMAX_OFDMA_HCS */ 4, /* IMB_AUTH_CRC7_FP_HEADER */ 4, /* IMB_AUTH_CRC6_IUUP_HEADER */ + 16, /* IMB_AUTH_GHASH */ }; /* Minimum, maximum and step values of key sizes */ @@ -654,6 +665,7 @@ const uint8_t key_sizes[][3] = { uint8_t custom_test = 0; uint8_t verbose = 0; +uint32_t safe_retries = DEFAULT_SAFE_RETRIES; enum range { RANGE_MIN = 0, @@ -666,7 +678,7 @@ uint32_t job_sizes[NUM_RANGE] = {DEFAULT_JOB_SIZE_MIN, DEFAULT_JOB_SIZE_STEP, DEFAULT_JOB_SIZE_MAX}; /* Max number of jobs to submit in IMIX testing */ -uint32_t max_num_jobs = 16; +uint32_t max_num_jobs = 17; /* IMIX disabled by default */ unsigned int imix_enabled = 0; /* cipher and authentication IV sizes */ @@ -686,6 +698,37 @@ uint8_t dec_archs[IMB_ARCH_NUM] = {0, 0, 1, 1, 1, 1, 1}; uint64_t flags = 0; /* flags passed to alloc_mb_mgr() */ +/* 0 => not possible, 1 => possible */ +int is_avx_sse_check_possible = 0; + +#ifdef __x86_64__ +static void +avx_sse_check(const char *ctx_str, + const IMB_HASH_ALG hash_alg, + const IMB_CIPHER_MODE cipher_mode) +{ + if (!is_avx_sse_check_possible) + return; + + const uint32_t avx_sse_flag = avx_sse_transition_check(); + + if (!avx_sse_flag) + return; + + const char *hash_str = misc_hash_alg_to_str(hash_alg); + const char *cipher_str = misc_cipher_mode_to_str(cipher_mode); + + if (avx_sse_flag & MISC_AVX_SSE_ZMM0_15_ISSUE) + printf("ERROR: AVX-SSE transition after %s in ZMM0-ZMM15: " + "HASH=%s, CIPHER=%s\n", + ctx_str, hash_str, cipher_str); + else if (avx_sse_flag & MISC_AVX_SSE_YMM0_15_ISSUE) + printf("ERROR: AVX-SSE transition after %s in YMM0-YMM15: " + "HASH=%s, CIPHER=%s\n", + ctx_str, hash_str, cipher_str); +} +#endif + static void clear_data(struct data *data) { @@ -722,12 +765,12 @@ static void generate_patterns(void) pattern_cipher_key == 0 || pattern_plain_text == 0); - memset(&pattern8_auth_key, pattern_auth_key, - sizeof(pattern8_auth_key)); - memset(&pattern8_cipher_key, pattern_cipher_key, - sizeof(pattern8_cipher_key)); - memset(&pattern8_plain_text, pattern_plain_text, - sizeof(pattern8_plain_text)); + NOSIMD_MEMSET(&pattern8_auth_key, pattern_auth_key, + sizeof(pattern8_auth_key)); + NOSIMD_MEMSET(&pattern8_cipher_key, pattern_cipher_key, + sizeof(pattern8_cipher_key)); + NOSIMD_MEMSET(&pattern8_plain_text, pattern_plain_text, + sizeof(pattern8_plain_text)); printf(">>> Patterns: AUTH_KEY = 0x%02x, CIPHER_KEY = 0x%02x, " "PLAIN_TEXT = 0x%02x\n", @@ -745,6 +788,7 @@ search_patterns(const void *ptr, const size_t mem_size) { const uint8_t *ptr8 = (const uint8_t *) ptr; const size_t limit = mem_size - sizeof(uint64_t); + const char *err_str = ""; int ret = -1; size_t i; @@ -757,14 +801,13 @@ search_patterns(const void *ptr, const size_t mem_size) const uint64_t string = *((const uint64_t *) &ptr8[i]); if (string == pattern8_cipher_key) { - fprintf(stderr, "Part of CIPHER_KEY is present\n"); + err_str = "Part of CIPHER_KEY is present"; 
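The safe-check logic above relies on filling keys and plaintext with known single-byte patterns, expanding each byte into a 64-bit word, and scanning memory for any 8-byte window that matches. The self-contained sketch below illustrates that scan; the `scan_for_pattern()`/`expand_pattern()` names and the `memcpy`-based unaligned read are illustrative. The test itself deliberately uses its `nosimd_*` replacements (introduced later in this patch) rather than libc `memset`/`memcpy`, so that the search does not disturb SIMD register state.
```
#include <stdint.h>
#include <stddef.h>
#include <string.h>

/* Return the offset of the first 8-byte window equal to `pattern8`
 * (a byte value replicated into a 64-bit word), or -1 if not found. */
static ptrdiff_t
scan_for_pattern(const void *mem, const size_t mem_size,
                 const uint64_t pattern8)
{
        const uint8_t *p = (const uint8_t *) mem;
        size_t i;

        if (mem_size < sizeof(uint64_t))
                return -1;

        for (i = 0; i <= mem_size - sizeof(uint64_t); i++) {
                uint64_t w;

                memcpy(&w, &p[i], sizeof(w)); /* unaligned-safe read */
                if (w == pattern8)
                        return (ptrdiff_t) i;
        }
        return -1;
}

/* Expand a pattern byte the same way generate_patterns() does */
static uint64_t
expand_pattern(const uint8_t b)
{
        uint64_t w;

        memset(&w, b, sizeof(w));
        return w;
}
```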
ret = 0; } else if (string == pattern8_auth_key) { - fprintf(stderr, "Part of AUTH_KEY is present\n"); + err_str = "Part of AUTH_KEY is present"; ret = 0; } else if (string == pattern8_plain_text) { - fprintf(stderr, - "Part of plain/ciphertext is present\n"); + err_str = "Part of plain/ciphertext is present"; ret = 0; } @@ -773,15 +816,17 @@ search_patterns(const void *ptr, const size_t mem_size) } if (ret != -1) { - size_t len_to_print = mem_size - i; + static uint8_t tb[64]; + const size_t len_to_print = + (mem_size - i) > sizeof(tb) ? sizeof(tb) : mem_size - i; + NOSIMD_MEMCPY(tb, &ptr8[i], len_to_print); + + fprintf(stderr, "%s\n", err_str); fprintf(stderr, "Offset = %zu bytes, Addr = %p, RSP = %p\n", i, &ptr8[i], rdrsp()); - if (len_to_print > 64) - len_to_print = 64; - - hexdump_ex(stderr, NULL, &ptr8[i], len_to_print, &ptr8[i]); + hexdump_ex(stderr, NULL, tb, len_to_print, &ptr8[i]); return 0; } @@ -791,18 +836,20 @@ search_patterns(const void *ptr, const size_t mem_size) static size_t calculate_ooo_mgr_size(const void *ptr) { + const size_t max_size = MAX_OOO_MGR_SIZE - sizeof(uint64_t); size_t i; - for (i = 0; i <= (MAX_OOO_MGR_SIZE - sizeof(uint64_t)); i++) { - const uint64_t end_of_ooo_pattern = 0xDEADCAFEDEADCAFE; + for (i = 0; i <= max_size; i++) { + const uint64_t end_of_ooo_pattern = 0xDEADCAFEDEADCAFEULL; const uint8_t *ptr8 = (const uint8_t *) ptr; - const uint64_t string = *((const uint64_t *) &ptr8[i]); + const uint64_t *ptr64 = (const uint64_t *) &ptr8[i]; - if (string == end_of_ooo_pattern) + if (*ptr64 == end_of_ooo_pattern) return i + sizeof(uint64_t); } /* no marker found */ + fprintf(stderr, "No road-block marker found for %p manager!\n", ptr); return MAX_OOO_MGR_SIZE; } @@ -998,6 +1045,10 @@ fill_job(IMB_JOB *job, const struct params_s *params, job->u.GMAC._iv = auth_iv; job->u.GMAC.iv_len_in_bytes = 12; break; + case IMB_AUTH_GHASH: + job->u.GHASH._key = gdata_key; + job->u.GHASH._init_tag = auth_iv; + break; case IMB_AUTH_PON_CRC_BIP: case IMB_AUTH_NULL: case IMB_AUTH_AES_GMAC: @@ -1195,20 +1246,23 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, if (force_pattern) { switch (params->hash_alg) { case IMB_AUTH_AES_XCBC: - memset(k1_expanded, pattern_auth_key, + NOSIMD_MEMSET(k1_expanded, pattern_auth_key, sizeof(keys->k1_expanded)); + break; case IMB_AUTH_AES_CMAC: case IMB_AUTH_AES_CMAC_BITLEN: case IMB_AUTH_AES_CMAC_256: - memset(k1_expanded, pattern_auth_key, + NOSIMD_MEMSET(k1_expanded, pattern_auth_key, sizeof(keys->k1_expanded)); - memset(k2, pattern_auth_key, sizeof(keys->k2)); - memset(k3, pattern_auth_key, sizeof(keys->k3)); + NOSIMD_MEMSET(k2, pattern_auth_key, sizeof(keys->k2)); + NOSIMD_MEMSET(k3, pattern_auth_key, sizeof(keys->k3)); + break; case IMB_AUTH_POLY1305: - memset(k1_expanded, pattern_auth_key, + NOSIMD_MEMSET(k1_expanded, pattern_auth_key, sizeof(keys->k1_expanded)); + break; case IMB_AUTH_HMAC_SHA_1: case IMB_AUTH_HMAC_SHA_224: @@ -1216,14 +1270,18 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, case IMB_AUTH_HMAC_SHA_384: case IMB_AUTH_HMAC_SHA_512: case IMB_AUTH_MD5: - memset(ipad, pattern_auth_key, sizeof(keys->ipad)); - memset(opad, pattern_auth_key, sizeof(keys->opad)); + NOSIMD_MEMSET(ipad, pattern_auth_key, + sizeof(keys->ipad)); + NOSIMD_MEMSET(opad, pattern_auth_key, + sizeof(keys->opad)); + break; case IMB_AUTH_ZUC_EIA3_BITLEN: case IMB_AUTH_ZUC256_EIA3_BITLEN: case IMB_AUTH_SNOW3G_UIA2_BITLEN: case IMB_AUTH_KASUMI_UIA1: - memset(k3, pattern_auth_key, sizeof(keys->k3)); + NOSIMD_MEMSET(k3, 
pattern_auth_key, sizeof(keys->k3)); + break; case IMB_AUTH_AES_CCM: case IMB_AUTH_AES_GMAC: @@ -1256,7 +1314,8 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, case IMB_AUTH_AES_GMAC_128: case IMB_AUTH_AES_GMAC_192: case IMB_AUTH_AES_GMAC_256: - memset(gdata_key, pattern_auth_key, + case IMB_AUTH_GHASH: + NOSIMD_MEMSET(gdata_key, pattern_auth_key, sizeof(keys->gdata_key)); break; default: @@ -1268,7 +1327,7 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, switch (params->cipher_mode) { case IMB_CIPHER_GCM: - memset(gdata_key, pattern_cipher_key, + NOSIMD_MEMSET(gdata_key, pattern_cipher_key, sizeof(keys->gdata_key)); break; case IMB_CIPHER_PON_AES_CNTR: @@ -1279,20 +1338,20 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, case IMB_CIPHER_DOCSIS_SEC_BPI: case IMB_CIPHER_ECB: case IMB_CIPHER_CBCS_1_9: - memset(enc_keys, pattern_cipher_key, + NOSIMD_MEMSET(enc_keys, pattern_cipher_key, sizeof(keys->enc_keys)); - memset(dec_keys, pattern_cipher_key, + NOSIMD_MEMSET(dec_keys, pattern_cipher_key, sizeof(keys->dec_keys)); break; case IMB_CIPHER_DES: case IMB_CIPHER_DES3: case IMB_CIPHER_DOCSIS_DES: - memset(enc_keys, pattern_cipher_key, + NOSIMD_MEMSET(enc_keys, pattern_cipher_key, sizeof(keys->enc_keys)); break; case IMB_CIPHER_SNOW3G_UEA2_BITLEN: case IMB_CIPHER_KASUMI_UEA1_BITLEN: - memset(k2, pattern_cipher_key, 16); + NOSIMD_MEMSET(k2, pattern_cipher_key, 16); break; case IMB_CIPHER_ZUC_EEA3: case IMB_CIPHER_CHACHA20: @@ -1300,7 +1359,7 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, case IMB_CIPHER_CHACHA20_POLY1305_SGL: case IMB_CIPHER_SNOW_V: case IMB_CIPHER_SNOW_V_AEAD: - memset(k2, pattern_cipher_key, 32); + NOSIMD_MEMSET(k2, pattern_cipher_key, 32); break; case IMB_CIPHER_NULL: /* No operation needed */ @@ -1328,13 +1387,15 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, break; case IMB_AUTH_HMAC_SHA_1: /* compute ipad hash */ - memset(buf, 0x36, IMB_SHA1_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x36, IMB_SHA1_BLOCK_SIZE); + for (i = 0; i < IMB_SHA1_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA1_ONE_BLOCK(mb_mgr, buf, ipad); /* compute opad hash */ - memset(buf, 0x5c, IMB_SHA1_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x5c, IMB_SHA1_BLOCK_SIZE); + for (i = 0; i < IMB_SHA1_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA1_ONE_BLOCK(mb_mgr, buf, opad); @@ -1342,13 +1403,15 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, break; case IMB_AUTH_HMAC_SHA_224: /* compute ipad hash */ - memset(buf, 0x36, IMB_SHA_256_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x36, IMB_SHA_256_BLOCK_SIZE); + for (i = 0; i < IMB_SHA_256_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA224_ONE_BLOCK(mb_mgr, buf, ipad); /* compute opad hash */ - memset(buf, 0x5c, IMB_SHA_256_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x5c, IMB_SHA_256_BLOCK_SIZE); + for (i = 0; i < IMB_SHA_256_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA224_ONE_BLOCK(mb_mgr, buf, opad); @@ -1356,13 +1419,13 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, break; case IMB_AUTH_HMAC_SHA_256: /* compute ipad hash */ - memset(buf, 0x36, IMB_SHA_256_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x36, IMB_SHA_256_BLOCK_SIZE); for (i = 0; i < IMB_SHA_256_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA256_ONE_BLOCK(mb_mgr, buf, ipad); /* compute opad hash */ - memset(buf, 0x5c, IMB_SHA_256_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x5c, IMB_SHA_256_BLOCK_SIZE); for (i = 0; i < IMB_SHA_256_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA256_ONE_BLOCK(mb_mgr, buf, opad); @@ -1370,13 +1433,13 @@ 
prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, break; case IMB_AUTH_HMAC_SHA_384: /* compute ipad hash */ - memset(buf, 0x36, IMB_SHA_384_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x36, IMB_SHA_384_BLOCK_SIZE); for (i = 0; i < IMB_SHA_384_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA384_ONE_BLOCK(mb_mgr, buf, ipad); /* compute opad hash */ - memset(buf, 0x5c, IMB_SHA_384_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x5c, IMB_SHA_384_BLOCK_SIZE); for (i = 0; i < IMB_SHA_384_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA384_ONE_BLOCK(mb_mgr, buf, opad); @@ -1384,13 +1447,13 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, break; case IMB_AUTH_HMAC_SHA_512: /* compute ipad hash */ - memset(buf, 0x36, IMB_SHA_512_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x36, IMB_SHA_512_BLOCK_SIZE); for (i = 0; i < IMB_SHA_512_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA512_ONE_BLOCK(mb_mgr, buf, ipad); /* compute opad hash */ - memset(buf, 0x5c, IMB_SHA_512_BLOCK_SIZE); + NOSIMD_MEMSET(buf, 0x5c, IMB_SHA_512_BLOCK_SIZE); for (i = 0; i < IMB_SHA_512_BLOCK_SIZE; i++) buf[i] ^= auth_key[i]; IMB_SHA512_ONE_BLOCK(mb_mgr, buf, opad); @@ -1398,13 +1461,13 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, break; case IMB_AUTH_MD5: /* compute ipad hash */ - memset(buf, 0x36, 64); + NOSIMD_MEMSET(buf, 0x36, 64); for (i = 0; i < 64; i++) buf[i] ^= auth_key[i]; IMB_MD5_ONE_BLOCK(mb_mgr, buf, ipad); /* compute opad hash */ - memset(buf, 0x5c, 64); + NOSIMD_MEMSET(buf, 0x5c, 64); for (i = 0; i < 64; i++) buf[i] ^= auth_key[i]; IMB_MD5_ONE_BLOCK(mb_mgr, buf, opad); @@ -1414,7 +1477,7 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, case IMB_AUTH_ZUC256_EIA3_BITLEN: case IMB_AUTH_SNOW3G_UIA2_BITLEN: case IMB_AUTH_KASUMI_UIA1: - memcpy(k2, auth_key, sizeof(keys->k2)); + NOSIMD_MEMCPY(k2, auth_key, sizeof(keys->k2)); break; case IMB_AUTH_AES_GMAC_128: IMB_AES128_GCM_PRE(mb_mgr, auth_key, gdata_key); @@ -1425,6 +1488,9 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, case IMB_AUTH_AES_GMAC_256: IMB_AES256_GCM_PRE(mb_mgr, auth_key, gdata_key); break; + case IMB_AUTH_GHASH: + IMB_GHASH_PRE(mb_mgr, auth_key, gdata_key); + break; case IMB_AUTH_AES_CCM: case IMB_AUTH_AES_GMAC: case IMB_AUTH_NULL: @@ -1454,7 +1520,7 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, /* No operation needed */ break; case IMB_AUTH_POLY1305: - memcpy(k1_expanded, auth_key, 32); + NOSIMD_MEMCPY(k1_expanded, auth_key, 32); break; default: fprintf(stderr, "Unsupported hash algorithm %u, line %d\n", @@ -1525,7 +1591,7 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, break; case IMB_CIPHER_SNOW3G_UEA2_BITLEN: case IMB_CIPHER_KASUMI_UEA1_BITLEN: - memcpy(k2, ciph_key, 16); + NOSIMD_MEMCPY(k2, ciph_key, 16); break; case IMB_CIPHER_ZUC_EEA3: case IMB_CIPHER_CHACHA20: @@ -1534,12 +1600,12 @@ prepare_keys(IMB_MGR *mb_mgr, struct cipher_auth_keys *keys, case IMB_CIPHER_SNOW_V: case IMB_CIPHER_SNOW_V_AEAD: /* Use of: - * memcpy(k2, ciph_key, 32); + * NOSIMD_MEMCPY(k2, ciph_key, 32); * leaves sensitive data on the stack. * Copying data in 16 byte chunks instead. 
*/ - memcpy(k2, ciph_key, 16); - memcpy(k2 + 16, ciph_key + 16, 16); + NOSIMD_MEMCPY(k2, ciph_key, 16); + NOSIMD_MEMCPY(k2 + 16, ciph_key + 16, 16); break; #else case IMB_CIPHER_SNOW3G_UEA2_BITLEN: @@ -1605,7 +1671,7 @@ modify_docsis_crc32_test_buf(uint8_t *test_buf, if (buf_size >= (IMB_DOCSIS_CRC32_MIN_ETH_PDU_SIZE + IMB_DOCSIS_CRC32_TAG_SIZE)) { /* Set plaintext CRC32 in the test buffer */ - memcpy(&test_buf[buf_size - IMB_DOCSIS_CRC32_TAG_SIZE], + NOSIMD_MEMCPY(&test_buf[buf_size - IMB_DOCSIS_CRC32_TAG_SIZE], job->auth_tag_output, IMB_DOCSIS_CRC32_TAG_SIZE); } } @@ -1682,9 +1748,30 @@ perform_safe_checks(IMB_MGR *mgr, const IMB_ARCH arch, const char *dir) } /* search OOO managers */ - for (ooo_ptr = &mgr->OOO_MGR_FIRST, i = 0; - ooo_ptr <= &mgr->OOO_MGR_LAST; + for (ooo_ptr = &mgr->aes128_ooo, i = 0; + ooo_ptr < &mgr->end_ooo; ooo_ptr++, i++) { + static const char * const ooo_names[] = { + "aes128_ooo", "aes192_ooo", "aes256_ooo", + "docsis128_sec_ooo", "docsis128_crc32_sec_ooo", + "docsis256_sec_ooo", "docsis256_crc32_sec_ooo", + "des_enc_ooo", "des_dec_ooo", + "des3_enc_ooo", "des3_dec_ooo", + "docsis_des_enc_ooo", "docsis_des_dec_ooo", + "hmac_sha_1_ooo", + "hmac_sha_224_ooo", "hmac_sha_256_ooo", + "hmac_sha_384_ooo", "hmac_sha_512_ooo", + "hmac_md5_ooo", + "aes_xcbc_ooo", "aes_ccm_ooo", "aes_cmac_ooo", + "zuc_eea3_ooo", "zuc_eia3_ooo", + "aes128_cbcs_ooo", + "zuc256_eea3_ooo", "zuc256_eia3_ooo", + "aes256_ccm_ooo", "aes256_cmac_ooo", + "snow3g_uea2_ooo", "snow3g_uia2_ooo", + "sha_1_ooo", "sha_224_ooo", "sha_256_ooo", + "sha_384_ooo", "sha_512_ooo", + "end_ooo" /* add new ooo manager above this line */ + }; void *ooo_mgr_p = *ooo_ptr; if (ooo_mgr_p == NULL) continue; @@ -1692,8 +1779,8 @@ perform_safe_checks(IMB_MGR *mgr, const IMB_ARCH arch, const char *dir) if (search_patterns(ooo_mgr_p, get_ooo_mgr_size(ooo_mgr_p, i)) == 0) { fprintf(stderr, - "Pattern found in 000 MGR (%d) after %s data\n", - (int)(ooo_ptr - &mgr->OOO_MGR_FIRST), dir); + "Pattern found in OOO MGR (index=%u,\"%s\") after %s data\n", + i, ooo_names[i], dir); return -1; } } @@ -1701,38 +1788,6 @@ perform_safe_checks(IMB_MGR *mgr, const IMB_ARCH arch, const char *dir) return 0; } -static void -clear_scratch_simd(const IMB_ARCH arch) -{ - switch (arch) { -#ifdef __x86_64__ - case IMB_ARCH_NOAESNI: - case IMB_ARCH_SSE: - clr_scratch_xmms_sse(); - break; - case IMB_ARCH_AVX: - clr_scratch_xmms_avx(); - break; - case IMB_ARCH_AVX2: - clr_scratch_ymms(); - break; - case IMB_ARCH_AVX512: - clr_scratch_zmms(); - break; -#endif - -#ifdef __aarch64__ - case IMB_ARCH_NOAESNI: - case IMB_ARCH_AARCH64: - CLEAR_SCRATCH_SIMD_REGS(); - break; -#endif - default: - fprintf(stderr, "Invalid architecture\n"); - exit(EXIT_FAILURE); - } -} - /* Performs test using AES_HMAC or DOCSIS */ static int do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, @@ -1761,12 +1816,7 @@ do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, unsigned int num_processed_jobs = 0; uint8_t next_iv[IMB_AES_BLOCK_SIZE]; uint16_t pli = 0; - uint8_t tag_size; - - if (auth_tag_size == 0) - tag_size = auth_tag_length_bytes[params->hash_alg - 1]; - else - tag_size = auth_tag_size; + uint8_t tag_size = data->tag_size; if (num_jobs == 0) return ret; @@ -1779,8 +1829,8 @@ do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, generate_random_buf(auth_iv, MAX_IV_SIZE); generate_random_buf(aad, MAX_AAD_SIZE); if (safe_check) { - memset(ciph_key, pattern_cipher_key, MAX_KEY_SIZE); - memset(auth_key, pattern_auth_key, MAX_KEY_SIZE); + NOSIMD_MEMSET(ciph_key, 
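The out-of-order manager scan above assumes the OOO manager pointers are laid out contiguously inside `IMB_MGR`, with the `end_ooo` field acting as a terminator, and keeps a parallel name table purely for diagnostics. A reduced sketch of that walk follows; the `scan_ooo_managers()` name is illustrative, the full test uses its `get_ooo_mgr_size()` lookup rather than calling `calculate_ooo_mgr_size()` directly, and the snippet is assumed to live alongside those helpers in `ipsec_xvalid.c`.
```
/* Walk the contiguous OOO manager pointer table inside IMB_MGR and
 * scan each manager for leaked key/plaintext patterns.  Relies on the
 * layout assumption made above: &mgr->aes128_ooo is the first pointer
 * and &mgr->end_ooo the terminator (sketch; error reporting trimmed). */
static int
scan_ooo_managers(IMB_MGR *mgr, const char *dir)
{
        void **ooo_ptr;
        unsigned i;

        for (ooo_ptr = (void **) &mgr->aes128_ooo, i = 0;
             ooo_ptr < (void **) &mgr->end_ooo; ooo_ptr++, i++) {
                void *p = *ooo_ptr;

                if (p == NULL)
                        continue;

                /* search_patterns() returns 0 when a pattern is found */
                if (search_patterns(p, calculate_ooo_mgr_size(p)) == 0) {
                        fprintf(stderr,
                                "Pattern found in OOO MGR %u after %s data\n",
                                i, dir);
                        return -1;
                }
        }
        return 0;
}
```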
pattern_cipher_key, MAX_KEY_SIZE); + NOSIMD_MEMSET(auth_key, pattern_auth_key, MAX_KEY_SIZE); } else { generate_random_buf(ciph_key, MAX_KEY_SIZE); generate_random_buf(auth_key, MAX_KEY_SIZE); @@ -1857,7 +1907,8 @@ do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, } if (safe_check) - memset(test_buf[i], pattern_plain_text, buf_sizes[i]); + NOSIMD_MEMSET(test_buf[i], pattern_plain_text, + buf_sizes[i]); else generate_random_buf(test_buf[i], buf_sizes[i]); @@ -1888,7 +1939,6 @@ do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, /* Clear scratch registers before expanding keys to prevent * other functions from storing sensitive data in stack */ - clear_scratch_simd(enc_arch); if (prepare_keys(enc_mb_mgr, enc_keys, ciph_key, auth_key, params, 0) < 0) goto exit; @@ -1967,7 +2017,7 @@ do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, * Encrypt + generate digest from encrypted message * using architecture under test */ - memcpy(src_dst_buf[i], test_buf[i], buf_sizes[i]); + NOSIMD_MEMCPY(src_dst_buf[i], test_buf[i], buf_sizes[i]); if (fill_job(job, params, src_dst_buf[i], in_digest[i], aad, buf_sizes[i], tag_size, IMB_DIR_ENCRYPT, enc_keys, cipher_iv, auth_iv, i, next_iv) < 0) @@ -1978,10 +2028,11 @@ do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, /* Clear scratch registers before submitting job to prevent * other functions from storing sensitive data in stack */ - if (safe_check) - clear_scratch_simd(enc_arch); job = IMB_SUBMIT_JOB(enc_mb_mgr); - +#ifdef __x86_64__ + avx_sse_check("enc-submit", (unsigned) params->hash_alg, + (unsigned) params->cipher_mode); +#endif if (job) { unsigned idx = (unsigned)((uintptr_t) job->user_data); @@ -2016,6 +2067,10 @@ do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, /* Flush rest of the jobs, if there are outstanding jobs */ while (num_processed_jobs != num_jobs) { job = IMB_FLUSH_JOB(enc_mb_mgr); +#ifdef __x86_64__ + avx_sse_check("enc-flush", (unsigned) params->hash_alg, + (unsigned) params->cipher_mode); +#endif while (job != NULL) { unsigned idx = (unsigned)((uintptr_t) job->user_data); @@ -2099,10 +2154,11 @@ do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, /* Clear scratch registers before submitting job to prevent * other functions from storing sensitive data in stack */ - if (safe_check) - clear_scratch_simd(dec_arch); job = IMB_SUBMIT_JOB(dec_mb_mgr); - +#ifdef __x86_64__ + avx_sse_check("dec-submit", (unsigned) params->hash_alg, + (unsigned) params->cipher_mode); +#endif if (job != NULL) { unsigned idx = (unsigned)((uintptr_t) job->user_data); @@ -2128,6 +2184,10 @@ do_test(IMB_MGR *enc_mb_mgr, const IMB_ARCH enc_arch, /* Flush rest of the jobs, if there are outstanding jobs */ while (num_processed_jobs != num_jobs) { job = IMB_FLUSH_JOB(dec_mb_mgr); +#ifdef __x86_64__ + avx_sse_check("dec-flush", (unsigned) params->hash_alg, + (unsigned) params->cipher_mode); +#endif while (job != NULL) { unsigned idx = (unsigned)((uintptr_t) job->user_data); @@ -2240,30 +2300,27 @@ exit: return ret; } -/* Runs test for each buffer size */ static void -process_variant(IMB_MGR *enc_mgr, const IMB_ARCH enc_arch, - IMB_MGR *dec_mgr, const IMB_ARCH dec_arch, - struct params_s *params, struct data *variant_data, - const unsigned int safe_check) +test_single(IMB_MGR *enc_mgr, const IMB_ARCH enc_arch, + IMB_MGR *dec_mgr, const IMB_ARCH dec_arch, + struct params_s *params, struct data *variant_data, + const uint32_t buf_size, + const unsigned int safe_check) { -#ifdef PIN_BASED_CEC - const uint32_t sizes = job_sizes[RANGE_MAX]; -#else - const 
uint32_t sizes = params->num_sizes; -#endif - uint32_t sz; + unsigned int i; + unsigned int num_tag_sizes = 0; + uint8_t tag_sizes[NUM_TAG_SIZES]; uint64_t min_aad_sz = 0; uint64_t max_aad_sz, aad_sz; - if (verbose) { - printf("[INFO] "); - print_algo_info(params); + if (params->hash_alg >= IMB_AUTH_NUM) { + if (verbose) { + fprintf(stderr, "Invalid hash alg\n"); + printf("FAIL\n"); + } + exit(EXIT_FAILURE); } - /* Reset the variant data */ - clear_data(variant_data); - if (params->cipher_mode == IMB_CIPHER_GCM) max_aad_sz = MAX_GCM_AAD_SIZE; else if (params->cipher_mode == IMB_CIPHER_CCM) @@ -2271,13 +2328,29 @@ process_variant(IMB_MGR *enc_mgr, const IMB_ARCH enc_arch, else max_aad_sz = 0; - for (sz = 0; sz < sizes; sz++) { -#ifdef PIN_BASED_CEC - const uint32_t buf_size = job_sizes[RANGE_MIN]; -#else - const uint32_t buf_size = job_sizes[RANGE_MIN] + - (sz * job_sizes[RANGE_STEP]); -#endif + /* If tag size is defined by user, only test this size */ + if (auth_tag_size != 0) { + tag_sizes[0] = auth_tag_size; + num_tag_sizes = 1; + } else { + /* If CCM, test all tag sizes supported (4,6,8,10,12,14,16) */ + if (params->hash_alg == IMB_AUTH_AES_CCM) { + for (i = 4; i <= 16; i += 2) + tag_sizes[num_tag_sizes++] = i; + /* If ZUC-EIA3-256, test all tag sizes supported (4,8,16) */ + /* Todo, only 4 bytes tag is supported on arm arch */ + } else if (params->hash_alg == IMB_AUTH_ZUC256_EIA3_BITLEN) { + for (i = 4; i <= 4; i *= 2) + tag_sizes[num_tag_sizes++] = i; + } else { + tag_sizes[0] = auth_tag_len_bytes[params->hash_alg - 1]; + num_tag_sizes = 1; + } + } + + for (i = 0; i < num_tag_sizes; i++) { + variant_data->tag_size = tag_sizes[i]; + for (aad_sz = min_aad_sz; aad_sz <= max_aad_sz; aad_sz++) { params->aad_size = aad_sz; params->buf_size = buf_size; @@ -2315,14 +2388,22 @@ process_variant(IMB_MGR *enc_mgr, const IMB_ARCH enc_arch, dec_arch, params, variant_data, 1, 0, 1); if (result < 0) { - printf("=== Issue found. " - "Checking again...\n"); - generate_patterns(); - result = do_test(enc_mgr, enc_arch, - dec_mgr, dec_arch, - params, variant_data, - 1, 0, 1); - + uint32_t j; + + for (j = 0; j < safe_retries; j++) { + printf("=== Issue found. 
" + "Checking again...\n"); + generate_patterns(); + result = do_test(enc_mgr, + enc_arch, + dec_mgr, + dec_arch, + params, + variant_data, + 1, 0, 1); + if (result == 0) + break; + } if (result < 0) { if (verbose) printf("FAIL\n"); @@ -2337,14 +2418,49 @@ process_variant(IMB_MGR *enc_mgr, const IMB_ARCH enc_arch, params, variant_data, 0, 0, 1) < 0) exit(EXIT_FAILURE); } + } +} +/* Runs test for each buffer size */ +static void +process_variant(IMB_MGR *enc_mgr, const IMB_ARCH enc_arch, + IMB_MGR *dec_mgr, const IMB_ARCH dec_arch, + struct params_s *params, struct data *variant_data, + const unsigned int safe_check) +{ +#ifdef PIN_BASED_CEC + const uint32_t sizes = job_sizes[RANGE_MAX]; +#else + const uint32_t sizes = params->num_sizes; +#endif + uint32_t sz; + + if (verbose) { + printf("[INFO] "); + print_algo_info(params); + } + + /* Reset the variant data */ + clear_data(variant_data); + + for (sz = 0; sz < sizes; sz++) { +#ifdef PIN_BASED_CEC + const uint32_t buf_size = job_sizes[RANGE_MIN]; +#else + const uint32_t buf_size = job_sizes[RANGE_MIN] + + (sz * job_sizes[RANGE_STEP]); +#endif + + test_single(enc_mgr, enc_arch, dec_mgr, + dec_arch, params, variant_data, + buf_size, safe_check); } /* Perform IMIX tests */ if (imix_enabled) { unsigned int i, j; - params->aad_size = min_aad_sz; + params->aad_size = 0; for (i = 2; i <= max_num_jobs; i++) { for (j = 0; j < IMIX_ITER; j++) { @@ -2412,6 +2528,16 @@ run_test(const IMB_ARCH enc_arch, const IMB_ARCH dec_arch, exit(EXIT_FAILURE); } + if (enc_mgr->features & IMB_FEATURE_SELF_TEST) + if (!(enc_mgr->features & IMB_FEATURE_SELF_TEST_PASS)) + fprintf(stderr, "SELF-TEST: FAIL\n"); + + if (imb_get_errno(enc_mgr) != 0) { + fprintf(stderr, "Error initializing enc MB_MGR structure! %s\n", + imb_get_strerror(imb_get_errno(enc_mgr))); + exit(EXIT_FAILURE); + } + printf("Encrypting "); print_tested_arch(enc_mgr->features, enc_arch); @@ -2456,6 +2582,16 @@ run_test(const IMB_ARCH enc_arch, const IMB_ARCH dec_arch, exit(EXIT_FAILURE); } + if (dec_mgr->features & IMB_FEATURE_SELF_TEST) + if (!(dec_mgr->features & IMB_FEATURE_SELF_TEST_PASS)) + fprintf(stderr, "SELF-TEST: FAIL\n"); + + if (imb_get_errno(dec_mgr) != 0) { + fprintf(stderr, "Error initializing dec MB_MGR structure! 
%s\n", + imb_get_strerror(imb_get_errno(dec_mgr))); + exit(EXIT_FAILURE); + } + printf("Decrypting "); print_tested_arch(dec_mgr->features, dec_arch); @@ -2468,11 +2604,12 @@ run_test(const IMB_ARCH enc_arch, const IMB_ARCH dec_arch, goto exit; } - IMB_HASH_ALG hash_alg; IMB_CIPHER_MODE c_mode; for (c_mode = IMB_CIPHER_CBC; c_mode < IMB_CIPHER_NUM; c_mode++) { + IMB_HASH_ALG hash_alg; + /* Skip IMB_CIPHER_CUSTOM */ if (c_mode == IMB_CIPHER_CUSTOM) continue; @@ -2520,11 +2657,6 @@ run_test(const IMB_ARCH enc_arch, const IMB_ARCH dec_arch, if (c_mode != IMB_CIPHER_DOCSIS_SEC_BPI && hash_alg == IMB_AUTH_DOCSIS_CRC32) continue; - if (c_mode == IMB_CIPHER_GCM && - (hash_alg == IMB_AUTH_AES_GMAC_128 || - hash_alg == IMB_AUTH_AES_GMAC_192 || - hash_alg == IMB_AUTH_AES_GMAC_256)) - continue; if ((c_mode == IMB_CIPHER_CHACHA20_POLY1305 && hash_alg != IMB_AUTH_CHACHA20_POLY1305) || (c_mode != IMB_CIPHER_CHACHA20_POLY1305 && @@ -2661,6 +2793,8 @@ static void usage(const char *app_name) "--aesni-emu: Do AESNI_EMU (disabled by default)\n" "--shani-on: use SHA extensions, default: auto-detect\n" "--shani-off: don't use SHA extensions\n" + "--gfni-on: use Galois Field extensions, default: auto-detect\n" + "--gfni-off: don't use Galois Field extensions\n" "--cipher-iv-size: size of cipher IV.\n" "--auth-iv-size: size of authentication IV.\n" "--tag-size: size of authentication tag\n" @@ -2679,7 +2813,9 @@ static void usage(const char *app_name) "(maximum = %d)\n" "--safe-check: check if keys, IVs, plaintext or tags " "get cleared from IMB_MGR upon job completion (off by default; " - "requires library compiled with SAFE_DATA)\n", + "requires library compiled with SAFE_DATA)\n" + "--avx-sse: if XGETBV is available then check for potential " + "AVX-SSE transition problems\n", app_name, MAX_NUM_JOBS); } @@ -2877,7 +3013,7 @@ int main(int argc, char *argv[]) * Disable all the other architectures * and enable only the specified */ - memset(enc_archs, 0, sizeof(enc_archs)); + NOSIMD_MEMSET(enc_archs, 0, sizeof(enc_archs)); enc_archs[values->arch_type] = 1; i++; } else if (strcmp(argv[i], "--dec-arch") == 0) { @@ -2892,7 +3028,7 @@ int main(int argc, char *argv[]) * Disable all the other architectures * and enable only the specified */ - memset(dec_archs, 0, sizeof(dec_archs)); + NOSIMD_MEMSET(dec_archs, 0, sizeof(dec_archs)); dec_archs[values->arch_type] = 1; i++; } else if (strcmp(argv[i], "--cipher-algo") == 0) { @@ -2986,8 +3122,23 @@ int main(int argc, char *argv[]) } } else if (strcmp(argv[i], "--safe-check") == 0) { safe_check = 1; + } else if (strcmp(argv[i], "--safe-retries") == 0) { + i = get_next_num_arg((const char * const *)argv, i, + argc, &safe_retries, + sizeof(safe_retries)); + if (safe_retries > MAX_SAFE_RETRIES) { + fprintf(stderr, "Number of retries cannot be " + "higher than %d\n", MAX_SAFE_RETRIES); + return EXIT_FAILURE; + } } else if (strcmp(argv[i], "--imix") == 0) { imix_enabled = 1; +#ifdef __x86_64__ + } else if (strcmp(argv[i], "--avx-sse") == 0) { + is_avx_sse_check_possible = avx_sse_detectability(); + if (!is_avx_sse_check_possible) + fprintf(stderr, "XGETBV not available\n"); +#endif } else { usage(argv[0]); return EXIT_FAILURE; diff --git a/test/job_api_fuzz_test.c b/test/job_api_fuzz_test.c index 6e69c5fc9c3c34d53c5e5b82aaffc81f1a40400e..0fdf71921bc9b8997c349e807f4b0505a2bba0ad 100644 --- a/test/job_api_fuzz_test.c +++ b/test/job_api_fuzz_test.c @@ -31,7 +31,7 @@ #include #include #include - +#include #include #define BUFF_SIZE (32*1024*1024) @@ -225,6 +225,12 @@ static void 
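Both manager instances are now verified right after initialization: the self-test feature bits are inspected and any pending initialization error is reported before jobs are submitted. A minimal standalone version of that check is sketched below, assuming a library recent enough to expose the `IMB_FEATURE_SELF_TEST` bits (>= v1.3 per the notes in this patch).
```
#include <stdio.h>
#include <stdlib.h>
#include <intel-ipsec-mb.h>

int main(void)
{
        IMB_ARCH arch;
        IMB_MGR *mgr = alloc_mb_mgr(0);

        if (mgr == NULL) {
                fprintf(stderr, "alloc_mb_mgr() failed\n");
                return EXIT_FAILURE;
        }

        init_mb_mgr_auto(mgr, &arch);

        /* Self-test status is only reported by newer libraries */
        if (mgr->features & IMB_FEATURE_SELF_TEST)
                printf("SELF-TEST: %s\n",
                       (mgr->features & IMB_FEATURE_SELF_TEST_PASS) ?
                       "PASS" : "FAIL");
        else
                printf("SELF-TEST: N/A\n");

        if (imb_get_errno(mgr) != 0) {
                fprintf(stderr, "init error: %s\n",
                        imb_get_strerror(imb_get_errno(mgr)));
                free_mb_mgr(mgr);
                return EXIT_FAILURE;
        }

        free_mb_mgr(mgr);
        return EXIT_SUCCESS;
}
```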
fill_additional_hash_data(struct IMB_JOB *job, if (job->u.GMAC.iv_len_in_bytes > buffsize) job->u.GMAC.iv_len_in_bytes = buffsize; break; + case IMB_AUTH_GHASH: + if (job->u.GHASH._key != NULL) + job->u.GHASH._key = buff; + if (job->u.GHASH._init_tag != NULL) + job->u.GHASH._init_tag = buff; + break; case IMB_AUTH_POLY1305: if (job->u.POLY1305._key != NULL) job->u.POLY1305._key = buff; @@ -373,6 +379,8 @@ static IMB_HASH_ALG hash_selection(void) return IMB_AUTH_CRC7_FP_HEADER; else if (strcmp(a, "IMB_AUTH_CRC6_IUUP_HEADER") == 0) return IMB_AUTH_CRC6_IUUP_HEADER; + else if (strcmp(a, "IMB_AUTH_GHASH") == 0) + return IMB_AUTH_GHASH; else return 0; } @@ -441,24 +449,57 @@ int LLVMFuzzerTestOneInput(const uint8_t *data, size_t dataSize) { IMB_HASH_ALG hash; IMB_CIPHER_MODE cipher; - char *ar; + IMB_CIPHER_DIRECTION dir; IMB_MGR *p_mgr = NULL; IMB_ARCH arch; unsigned i; - const unsigned num_jobs = 20; + const char *ar = getenv("ARCH"); + const char *api = getenv("API"); + const char *n_jobs = getenv("NUM_JOBS"); + const char *key_length = getenv("KEY_LEN"); + const char *cipher_dir = getenv("DIR"); + unsigned num_jobs; + unsigned key_len; const size_t buffsize = BUFF_SIZE; + bool single = false, cipher_burst = false, + hash_burst = false, burst = false; + + if (n_jobs == NULL) + num_jobs = 10; + else + num_jobs = strtoul(n_jobs, NULL, 10); + if (key_length == NULL) + key_len = 16; + else + key_len = strtoul(key_length, NULL, 10); + /* Setting minimum datasize to always fill job structure */ if (dataSize < sizeof(IMB_JOB)) return 0; + if (num_jobs > 32 || num_jobs == 0 || key_len == 0) + return 0; + + if (cipher_dir != NULL) { + if (strcmp(cipher_dir, "ENCRYPT") == 0) + dir = IMB_DIR_ENCRYPT; + else if (strcmp(cipher_dir, "DECRYPT") == 0) + dir = IMB_DIR_DECRYPT; + else { + printf("Invalid cipher direction!\n"); + return EXIT_FAILURE; + } + } else { + dir = IMB_DIR_ENCRYPT; + } + /* allocate multi-buffer manager */ p_mgr = alloc_mb_mgr(0); if (p_mgr == NULL) { printf("Error allocating MB_MGR structure!\n"); return EXIT_FAILURE; } - ar = getenv("ARCH"); if (ar == NULL) { init_mb_mgr_auto(p_mgr, &arch); } else { @@ -482,32 +523,141 @@ int LLVMFuzzerTestOneInput(const uint8_t *data, size_t dataSize) } IMB_JOB *job = NULL; + /* create job array */ + IMB_JOB *jobs[32] = {NULL}; - for (i = 0; i < num_jobs; i++) { - hash = hash_selection(); - cipher = cipher_selection(); - job = IMB_GET_NEXT_JOB(p_mgr); - memcpy(job, data, sizeof(*job)); - /* - * setenv is invalid or unset - receive flag and fuzz random - * else a specific algo has been selected to fuzz. - */ - if (hash == 0) - job->hash_alg %= (IMB_AUTH_NUM + 1); - else - job->hash_alg = hash; - if (cipher == 0) - job->cipher_mode %= (IMB_CIPHER_NUM + 1); - else - job->cipher_mode = cipher; - clamp_lengths(job, buffsize); - static DECLARE_ALIGNED(uint8_t buff[2*BUFF_SIZE], 64); - - fill_job_data(job, buff); - fill_additional_cipher_data(job, buff, buffsize); - fill_additional_hash_data(job, buff, buffsize); - job = IMB_SUBMIT_JOB(p_mgr); + if (api == NULL || (strcmp(api, "SINGLE") == 0)) { + single = true; + } else if (strcmp(api, "BURST") == 0) { + burst = true; + } else if (strcmp(api, "CIPHER_BURST") == 0) { + cipher_burst = true; + } else if (strcmp(api, "HASH_BURST") == 0) { + hash_burst = true; + } else { + printf("Invalid API passed to application. 
Terminating\n"); + return 0; } + + if (single) { + for (i = 0; i < num_jobs; i++) { + hash = hash_selection(); + cipher = cipher_selection(); + job = IMB_GET_NEXT_JOB(p_mgr); + memcpy(job, data, sizeof(*job)); + /* + * setenv is invalid or unset - + * receive flag and fuzz random + * else a specific algo has been selected to fuzz. + */ + if (hash == 0) + job->hash_alg %= (IMB_AUTH_NUM + 1); + else + job->hash_alg = hash; + if (cipher == 0) + job->cipher_mode %= (IMB_CIPHER_NUM + 1); + else + job->cipher_mode = cipher; + clamp_lengths(job, buffsize); + static DECLARE_ALIGNED(uint8_t buff[2*BUFF_SIZE], 64); + + fill_job_data(job, buff); + fill_additional_cipher_data(job, buff, buffsize); + fill_additional_hash_data(job, buff, buffsize); + job = IMB_SUBMIT_JOB(p_mgr); + } + } else if (burst) { + while (IMB_GET_NEXT_BURST(p_mgr, num_jobs, jobs) + < (uint32_t)num_jobs) + IMB_FLUSH_BURST(p_mgr, num_jobs, jobs); + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + hash = hash_selection(); + cipher = cipher_selection(); + memcpy(job, data, sizeof(*job)); + /* + * setenv is invalid or unset - + * receive flag and fuzz random + * else a specific algo has been + * selected to fuzz. + */ + if (hash == 0) + job->hash_alg %= (IMB_AUTH_NUM + 1); + else + job->hash_alg = hash; + if (cipher == 0) + job->cipher_mode %= + (IMB_CIPHER_NUM + 1); + else + job->cipher_mode = cipher; + clamp_lengths(job, buffsize); + static DECLARE_ALIGNED + (uint8_t buff[2*BUFF_SIZE], 64); + + fill_job_data(job, buff); + fill_additional_cipher_data + (job, buff, buffsize); + fill_additional_hash_data + (job, buff, buffsize); + } + + IMB_SUBMIT_BURST(p_mgr, num_jobs, jobs); + } else if (cipher_burst) { + while (IMB_GET_NEXT_BURST(p_mgr, num_jobs, jobs) + < (uint32_t)num_jobs) + IMB_FLUSH_BURST(p_mgr, num_jobs, jobs); + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + cipher = cipher_selection(); + memcpy(job, data, sizeof(*job)); + /* + * setenv is invalid or unset - + * receive flag and fuzz random + * else a specific algo has been + * selected to fuzz. + */ + if (cipher == 0) + cipher = (job->cipher_mode % + (IMB_CIPHER_NUM + 1)); + clamp_lengths(job, buffsize); + static DECLARE_ALIGNED + (uint8_t buff[2*BUFF_SIZE], 64); + + fill_job_data(job, buff); + } + + IMB_SUBMIT_CIPHER_BURST(p_mgr, job, num_jobs, + cipher, dir, key_len); + } else if (hash_burst) { + while (IMB_GET_NEXT_BURST(p_mgr, num_jobs, jobs) + < (uint32_t)num_jobs) + IMB_FLUSH_BURST(p_mgr, num_jobs, jobs); + + for (i = 0; i < num_jobs; i++) { + job = jobs[i]; + hash = hash_selection(); + memcpy(job, data, sizeof(*job)); + /* + * setenv is invalid or unset - + * receive flag and fuzz random + * else a specific algo has + * been selected to fuzz. 
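The reworked fuzz harness is configured entirely through environment variables (ARCH, API, NUM_JOBS, KEY_LEN, DIR), applying defaults and range limits before any jobs are built. The sketch below reduces that parsing to a helper; the `struct fuzz_cfg`/`fuzz_cfg_from_env()` names are illustrative and, unlike the harness (which silently skips out-of-range inputs), it simply reports invalid values with a non-zero return.
```
#include <stdlib.h>
#include <string.h>

struct fuzz_cfg {
        unsigned num_jobs;   /* NUM_JOBS, default 10, range 1..32 */
        unsigned key_len;    /* KEY_LEN, default 16 */
        int encrypt;         /* DIR, default encrypt */
};

static int fuzz_cfg_from_env(struct fuzz_cfg *cfg)
{
        const char *n_jobs = getenv("NUM_JOBS");
        const char *key_length = getenv("KEY_LEN");
        const char *dir = getenv("DIR");

        cfg->num_jobs = (n_jobs == NULL) ? 10 :
                (unsigned) strtoul(n_jobs, NULL, 10);
        cfg->key_len = (key_length == NULL) ? 16 :
                (unsigned) strtoul(key_length, NULL, 10);

        /* reject values the burst paths cannot handle */
        if (cfg->num_jobs == 0 || cfg->num_jobs > 32 || cfg->key_len == 0)
                return -1;

        if (dir == NULL || strcmp(dir, "ENCRYPT") == 0)
                cfg->encrypt = 1;
        else if (strcmp(dir, "DECRYPT") == 0)
                cfg->encrypt = 0;
        else
                return -1;

        return 0;
}
```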
+ */ + if (hash == 0) + hash = (job->hash_alg % + (IMB_AUTH_NUM + 1)); + clamp_lengths(job, buffsize); + static DECLARE_ALIGNED + (uint8_t buff[2*BUFF_SIZE], 64); + + fill_job_data(job, buff); + } + + IMB_SUBMIT_HASH_BURST(p_mgr, jobs[0], num_jobs, hash); + } + free_mb_mgr(p_mgr); return 0; } diff --git a/test/main.c b/test/main.c index b548d4fd26fb0223e5cd8bff23c0551f1f670051..8dd4033495596b93acac9e8076a580d0316229c1 100644 --- a/test/main.c +++ b/test/main.c @@ -289,8 +289,10 @@ usage(const char *name) "--auto-detect: auto detects current architecture " "to run the tests\n Note: Auto detection " "option now run by default and will be removed in the future\n" - "--shani-on: use SHA extensions, default: auto-detect\n" - "--shani-off: don't use SHA extensions\n", name); + "--gfni-on: use Galois Field extensions, default: auto-detect\n" + "--gfni-off: don't use Galois Field extensions\n" + "--shani-on: use SHA extensions, default: auto-detect\n" + "--shani-off: don't use SHA extensions\n", name); } static void @@ -488,6 +490,20 @@ main(int argc, char **argv) #endif } + if (p_mgr->features & IMB_FEATURE_SELF_TEST) + printf("SELF-TEST: %s\n", + (p_mgr->features & IMB_FEATURE_SELF_TEST_PASS) ? + "PASS" : "FAIL"); + else + printf("SELF-TEST: N/A (requires library >= v1.3)\n"); + + if (imb_get_errno(p_mgr) != 0) { + printf("Error initializing MB_MGR structure! %s\n", + imb_get_strerror(imb_get_errno(p_mgr))); + free_mb_mgr(p_mgr); + return EXIT_FAILURE; + } + print_tested_arch(p_mgr->features, atype); for (test_idx = 0; test_idx < DIM(tests); test_idx++) { diff --git a/test/misc.asm b/test/misc.asm index dfdb8bd01b01825bda95296d3b8f9a56b27ef153..f5e5b54fd416a7d91f131c0056ae63f570b6c294 100644 --- a/test/misc.asm +++ b/test/misc.asm @@ -34,6 +34,12 @@ ;;; - type : function or data ;;; - scope : internal, private, default %define MKGLOBAL(name,type,scope) global name %+ : %+ type scope + +;;; ABI function arguments +%define arg1 rdi +%define arg2 rsi +%define arg3 rdx +%define arg4 rcx %endif %ifdef WIN_ABI @@ -42,12 +48,19 @@ ;;; - type : function or data ;;; - scope : internal, private, default (ignored in win64 coff format) %define MKGLOBAL(name,type,scope) global name + +;;; ABI function arguments +%define arg1 rcx +%define arg2 rdx +%define arg3 r8 +%define arg4 r9 %endif section .bss default rel MKGLOBAL(gps,data,) +align 8 gps: resq 14 MKGLOBAL(simd_regs,data,) @@ -56,14 +69,85 @@ simd_regs: resb 32*64 section .text +;; uint32_t avx_sse_transition_check(void) +MKGLOBAL(avx_sse_transition_check,function,) +align 16 +avx_sse_transition_check: + mov ecx, 1 + xgetbv + ;; result goes to edx:eax + ;; we care about bits 2 and 6 only + and eax, (1 << 2) | (1 << 6) + ret + +;; void *nosimd_memcpy(void *dst, const void *src, size_t n) +MKGLOBAL(nosimd_memcpy,function,) +align 16 +nosimd_memcpy: + pushfq + push arg1 + cld ;; increment dst/src pointers + +%ifdef WIN_ABI + push rdi + push rsi + mov rdi, arg1 ;; arg1 = rcx + mov rsi, arg2 ;; arg2 = rdx + mov rcx, arg3 ;; arg3 = r8 + rep movsb + pop rsi + pop rdi +%endif + +%ifdef LINUX + ;; rdi = arg1 + ;; rsi = arg2 + mov rcx, arg3 ;; arg3 = rdx + rep movsb +%endif + + pop rax ;; return `dst` + popfq + ret + +;; void *nosimd_memset(void *p, int c, size_t n) +MKGLOBAL(nosimd_memset,function,) +align 16 +nosimd_memset: + pushfq + push arg1 + cld ;; increment dst pointer + +%ifdef WIN_ABI + push rdi + mov rdi, arg1 ;; arg1 = rcx + mov rax, arg2 ;; arg2 = rdx + mov rcx, arg3 ;; arg3 = r8 + rep stosb + pop rdi +%endif + +%ifdef LINUX + ;; rdi = arg1 + mov 
rax, arg2 ;; arg2 = rsi + mov rcx, arg3 ;; arg3 = rdx + rep stosb +%endif + + pop rax ;; return `p` + popfq + ret + ;; Returns RSP pointer with the value BEFORE the call, so 8 bytes need ;; to be added MKGLOBAL(rdrsp,function,) +align 16 rdrsp: lea rax, [rsp + 8] ret MKGLOBAL(dump_gps,function,) +align 16 dump_gps: mov [rel gps], rax @@ -84,6 +168,7 @@ dump_gps: ret MKGLOBAL(dump_xmms_sse,function,) +align 16 dump_xmms_sse: %assign i 0 @@ -97,6 +182,7 @@ dump_xmms_sse: ret MKGLOBAL(dump_xmms_avx,function,) +align 16 dump_xmms_avx: %assign i 0 @@ -110,6 +196,7 @@ dump_xmms_avx: ret MKGLOBAL(dump_ymms,function,) +align 16 dump_ymms: %assign i 0 @@ -123,6 +210,7 @@ dump_ymms: ret MKGLOBAL(dump_zmms,function,) +align 16 dump_zmms: %assign i 0 @@ -140,6 +228,7 @@ dump_zmms: ; ; void clr_scratch_xmms_sse(void) MKGLOBAL(clr_scratch_xmms_sse,function,internal) +align 16 clr_scratch_xmms_sse: %ifdef LINUX @@ -167,6 +256,7 @@ clr_scratch_xmms_sse: ; ; void clr_scratch_xmms_avx(void) MKGLOBAL(clr_scratch_xmms_avx,function,internal) +align 16 clr_scratch_xmms_avx: %ifdef LINUX @@ -190,6 +280,7 @@ clr_scratch_xmms_avx: ; ; void clr_scratch_ymms(void) MKGLOBAL(clr_scratch_ymms,function,internal) +align 16 clr_scratch_ymms: ; On Linux, all YMM registers are scratch registers %ifdef LINUX @@ -219,6 +310,7 @@ clr_scratch_ymms: ; ; void clr_scratch_zmms(void) MKGLOBAL(clr_scratch_zmms,function,internal) +align 16 clr_scratch_zmms: ; On Linux, all ZMM registers are scratch registers diff --git a/test/misc.h b/test/misc.h index 1d1e2563e767771f042eff03093a5883c29485d3..daaa56644aec21f2c0897b8931d36414fe8002bc 100644 --- a/test/misc.h +++ b/test/misc.h @@ -25,6 +25,12 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. *****************************************************************************/ +#ifdef __WIN32 +#include +#endif + +#include + #ifndef XVALIDAPP_MISC_H #define XVALIDAPP_MISC_H @@ -56,9 +62,270 @@ void clr_scratch_xmms_avx(void); void clr_scratch_ymms(void); void clr_scratch_zmms(void); +/* custom replacement for memset() */ +void *nosimd_memset(void *p, int c, size_t n); + +/* custom replacement for memcpy() */ +void *nosimd_memcpy(void *dst, const void *src, size_t n); + +#define NOSIMD_MEMCPY nosimd_memcpy +#define NOSIMD_MEMSET nosimd_memset +/* + * Detects if SIMD registers are in the state that + * can cause AVX-SSE transition penalty + */ +uint32_t avx_sse_transition_check(void); + +#define MISC_AVX_SSE_YMM0_15_ISSUE (1 << 2) +#define MISC_AVX_SSE_ZMM0_15_ISSUE (1 << 6) +#define MISC_AVX_SSE_ISSUE (MISC_AVX_SSE_YMM0_15_ISSUE | \ + MISC_AVX_SSE_ZMM0_15_ISSUE) + +/* CPUID feature detection code follows here */ + +struct misc_cpuid_regs { + uint32_t eax; + uint32_t ebx; + uint32_t ecx; + uint32_t edx; +}; + +/** + * @brief C wrapper for CPUID opcode + * + * @param leaf[in] CPUID leaf number (EAX) + * @param subleaf[in] CPUID sub-leaf number (ECX) + * @param out[out] registers structure to store results of CPUID into + */ +static void +misc_cpuid(const unsigned leaf, const unsigned subleaf, + struct misc_cpuid_regs *out) +{ +#ifdef _WIN32 + /* Windows */ + int regs[4]; + + __cpuidex(regs, leaf, subleaf); + out->eax = regs[0]; + out->ebx = regs[1]; + out->ecx = regs[2]; + out->edx = regs[3]; +#else + /* Linux */ + asm volatile("mov %4, %%eax\n\t" + "mov %5, %%ecx\n\t" + "cpuid\n\t" + "mov %%eax, %0\n\t" + "mov %%ebx, %1\n\t" + "mov %%ecx, %2\n\t" + "mov %%edx, %3\n\t" + : "=g" (out->eax), "=g" (out->ebx), "=g" (out->ecx), + "=g" (out->edx) + : "g" (leaf), "g" (subleaf) + : 
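The assembly above implements memset/memcpy with plain `rep stosb`/`rep movsb`, so that clearing or copying sensitive buffers never touches SIMD registers: libc implementations routinely use XMM/YMM, which would both pollute register state and defeat the pattern-based safe check. Usage mirrors the libc functions, as in the small sketch below against the prototypes declared in `misc.h` (the `install_key()` helper is illustrative).
```
#include <stdint.h>
#include <stddef.h>
#include "misc.h"   /* nosimd_memset()/nosimd_memcpy() prototypes */

/* Copy a key into place and wipe the source without using SIMD,
 * so the safe-check pattern scan is not perturbed by libc internals. */
static void
install_key(uint8_t *dst, uint8_t *src, const size_t len)
{
        nosimd_memcpy(dst, src, len);   /* rep movsb under the hood */
        nosimd_memset(src, 0, len);     /* rep stosb under the hood */
}
```
On aarch64 the `NOSIMD_MEMCPY`/`NOSIMD_MEMSET` macros map straight back to libc `memcpy`/`memset`, since the x86-specific concern does not apply there.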
"%eax", "%ebx", "%ecx", "%edx"); +#endif /* Linux */ +} + +/** + * @brief Detects if XGETBV instruction is available to use. + * Call it before calling avx_sse_transition_check(). + * + * @retval 0 XGETBV NOT available + * @retval 1 XGETBV available + */ +static int avx_sse_detectability(void) +{ + struct misc_cpuid_regs r; + + /* Get highest supported CPUID leaf number */ + misc_cpuid(0x0, 0x0, &r); + + const unsigned hi_leaf_number = r.eax; + + if (hi_leaf_number < 0xd) + return 0; + + /* Get CPUID leaf 0xd subleaf 0x1 */ + misc_cpuid(0xd, 0x1, &r); + + /* return bit 2 from EAX */ + return (r.eax >> 2) & 1; +} + +/* decodes cipher mode to string */ +static const char *misc_cipher_mode_to_str(const IMB_CIPHER_MODE mode) +{ + static char cb[64]; + + switch (mode) { + case IMB_CIPHER_CBC: + return "aes-cbc"; + case IMB_CIPHER_CNTR: + return "aes-ctr"; + case IMB_CIPHER_NULL: + return "null"; + case IMB_CIPHER_DOCSIS_SEC_BPI: + return "aes-docsis"; + case IMB_CIPHER_GCM: + return "aead-aes-gcm"; + case IMB_CIPHER_CUSTOM: + return "custom"; + case IMB_CIPHER_DES: + return "des-cbc"; + case IMB_CIPHER_DOCSIS_DES: + return "des-docsis"; + case IMB_CIPHER_CCM: + return "aes-ccm"; + case IMB_CIPHER_DES3: + return "3des-cbc"; + case IMB_CIPHER_PON_AES_CNTR: + return "pon-aes-ctr"; + case IMB_CIPHER_ECB: + return "aes-ecb"; + case IMB_CIPHER_CNTR_BITLEN: + return "aes-ctr (bitlen)"; + case IMB_CIPHER_ZUC_EEA3: + return "zuc-eea3"; + case IMB_CIPHER_SNOW3G_UEA2_BITLEN: + return "snow3g-uea2"; + case IMB_CIPHER_KASUMI_UEA1_BITLEN: + return "kasumi-uea1"; + case IMB_CIPHER_CBCS_1_9: + return "aes-cbcs-1-9"; + case IMB_CIPHER_CHACHA20: + return "chacha20"; + case IMB_CIPHER_CHACHA20_POLY1305: + return "aead-chacha20-poly1305"; + case IMB_CIPHER_CHACHA20_POLY1305_SGL: + return "aead-chacha20-poly1305-sgl"; + case IMB_CIPHER_SNOW_V: + return "snow-v"; + case IMB_CIPHER_SNOW_V_AEAD: + return "aead-snow-v"; + case IMB_CIPHER_GCM_SGL: + return "aead-aes-gcm-sgl"; + case IMB_CIPHER_NUM: + default: + break; + } + + memset(cb, 0, sizeof(cb)); + snprintf(cb, sizeof(cb) - 1, "unknown<%u>", (unsigned) mode); + return cb; +} + +/* decodes hash algorithm to string */ +static const char *misc_hash_alg_to_str(const IMB_HASH_ALG mode) +{ + static char cb[64]; + + switch (mode) { + case IMB_AUTH_HMAC_SHA_1: + return "hmac-sha1"; + case IMB_AUTH_HMAC_SHA_224: + return "hmac-sha224"; + case IMB_AUTH_HMAC_SHA_256: + return "hmac-sha256"; + case IMB_AUTH_HMAC_SHA_384: + return "hmac-sha384"; + case IMB_AUTH_HMAC_SHA_512: + return "hmac-sha512"; + case IMB_AUTH_AES_XCBC: + return "aes-xcbc"; + case IMB_AUTH_MD5: + return "hmac-md5"; + case IMB_AUTH_NULL: + return "null"; + case IMB_AUTH_AES_GMAC: + return "aead-aes-gcm"; + case IMB_AUTH_CUSTOM: + return "custom"; + case IMB_AUTH_AES_CCM: + return "aes-ccm"; + case IMB_AUTH_AES_CMAC: + return "aes-cmac-128"; + case IMB_AUTH_SHA_1: + return "sha1"; + case IMB_AUTH_SHA_224: + return "sha224"; + case IMB_AUTH_SHA_256: + return "sha256"; + case IMB_AUTH_SHA_384: + return "sha384"; + case IMB_AUTH_SHA_512: + return "sha512"; + case IMB_AUTH_AES_CMAC_BITLEN: + return "aes-cmac (bitlen)"; + case IMB_AUTH_PON_CRC_BIP: + return "pon-crc-bip"; + case IMB_AUTH_ZUC_EIA3_BITLEN: + return "zuc-eia3"; + case IMB_AUTH_DOCSIS_CRC32: + return "docsis-crc32"; + case IMB_AUTH_SNOW3G_UIA2_BITLEN: + return "snow3g-uia2"; + case IMB_AUTH_KASUMI_UIA1: + return "kasumi-uia1"; + case IMB_AUTH_AES_GMAC_128: + return "aes-gmac-128"; + case IMB_AUTH_AES_GMAC_192: + return "aes-gmac-192"; + case 
IMB_AUTH_AES_GMAC_256: + return "aes-gmac-256"; + case IMB_AUTH_AES_CMAC_256: + return "aes-cmac-256"; + case IMB_AUTH_POLY1305: + return "poly1305"; + case IMB_AUTH_CHACHA20_POLY1305: + return "aead-chacha20-poly1305"; + case IMB_AUTH_CHACHA20_POLY1305_SGL: + return "aead-chacha20-poly1305-sgl"; + case IMB_AUTH_ZUC256_EIA3_BITLEN: + return "zuc256-eia3"; + case IMB_AUTH_SNOW_V_AEAD: + return "aead-snow-v"; + case IMB_AUTH_GCM_SGL: + return "aead-aes-gcm-sgl"; + case IMB_AUTH_CRC32_ETHERNET_FCS: + return "crc32-ethernet-fcs"; + case IMB_AUTH_CRC32_SCTP: + return "crc32-sctp"; + case IMB_AUTH_CRC32_WIMAX_OFDMA_DATA: + return "crc32-wimax-ofdma-data"; + case IMB_AUTH_CRC24_LTE_A: + return "crc24-lte-a"; + case IMB_AUTH_CRC24_LTE_B: + return "crc24-lte-b"; + case IMB_AUTH_CRC16_X25: + return "crc16-x25"; + case IMB_AUTH_CRC16_FP_DATA: + return "crc16-fp-data"; + case IMB_AUTH_CRC11_FP_HEADER: + return "crc11-fp-header"; + case IMB_AUTH_CRC10_IUUP_DATA: + return "crc10-iuup-data"; + case IMB_AUTH_CRC8_WIMAX_OFDMA_HCS: + return "crc8-wimax-ofdma-hcs"; + case IMB_AUTH_CRC7_FP_HEADER: + return "crc7-fp-header"; + case IMB_AUTH_CRC6_IUUP_HEADER: + return "crc6-iuup-header"; + case IMB_AUTH_GHASH: + return "ghash"; + case IMB_AUTH_NUM: + default: + break; + } + + memset(cb, 0, sizeof(cb)); + snprintf(cb, sizeof(cb) - 1, "unknown<%u>", (unsigned) mode); + return cb; +} #endif /* __x86_64__ */ #ifdef __aarch64__ +#define NOSIMD_MEMCPY memcpy +#define NOSIMD_MEMSET memset /* x0-x28 */ #define GP_MEM_SIZE 29*8 diff --git a/test/sha_test.c b/test/sha_test.c index 055d631515d480c4d63065d59dee043a6b3b56db..a96b871f1be40e0d5217e303b3d8f168772b6d81 100644 --- a/test/sha_test.c +++ b/test/sha_test.c @@ -597,14 +597,17 @@ sha_test(struct IMB_MGR *mb_mgr) struct test_suite_context sha1_ctx, sha224_ctx, sha256_ctx; struct test_suite_context sha384_ctx, sha512_ctx; int errors; + unsigned i; test_suite_start(&sha1_ctx, "SHA1"); test_suite_start(&sha224_ctx, "SHA224"); test_suite_start(&sha256_ctx, "SHA256"); test_suite_start(&sha384_ctx, "SHA384"); test_suite_start(&sha512_ctx, "SHA512"); - test_sha_vectors(mb_mgr, &sha1_ctx, &sha224_ctx, - &sha256_ctx, &sha384_ctx, &sha512_ctx, 1); + for (i = 1; i <= 17; i++) { + test_sha_vectors(mb_mgr, &sha1_ctx, &sha224_ctx, + &sha256_ctx, &sha384_ctx, &sha512_ctx, i); + } errors = test_suite_end(&sha1_ctx); errors += test_suite_end(&sha224_ctx); errors += test_suite_end(&sha256_ctx); diff --git a/test/snow3g_test.c b/test/snow3g_test.c index da2ec7adc2f50b5a57413e305d31712e703e5cc6..a8218bf63c0c1bcd1d9de0b8fc5888b2160f1e06 100644 --- a/test/snow3g_test.c +++ b/test/snow3g_test.c @@ -1,5 +1,6 @@ /***************************************************************************** Copyright (c) 2009-2022, Intel Corporation + Copyright (c) 2022, Nokia Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: @@ -70,6 +71,15 @@ validate_snow3g_f8_n_blocks(struct IMB_MGR *mb_mgr, uint32_t job_api, struct test_suite_context *uea2_ctx, struct test_suite_context *uia2_ctx); static void +validate_snow3g_f8_n_blocks_linear(struct IMB_MGR *mb_mgr, uint32_t job_api, + struct test_suite_context *uea2_ctx, + struct test_suite_context *uia2_ctx); +static void +validate_snow3g_f8_n_blocks_linear_mkeys(struct IMB_MGR *mb_mgr, + uint32_t job_api, + struct test_suite_context *uea2_ctx, + struct test_suite_context *uia2_ctx); +static void validate_snow3g_f8_n_blocks_multi(struct IMB_MGR *mb_mgr, uint32_t job_api, 
struct test_suite_context *uea2_ctx, @@ -99,6 +109,10 @@ struct { "validate_snow3g_f8_8_blocks_multi_key"}, {validate_snow3g_f8_n_blocks, "validate_snow3g_f8_n_blocks"}, + {validate_snow3g_f8_n_blocks_linear, + "validate_snow3g_f8_n_blocks_linear"}, + {validate_snow3g_f8_n_blocks_linear_mkeys, + "validate_snow3g_f8_n_blocks_linear_multi_keys"}, {validate_snow3g_f8_n_blocks_multi, "validate_snow3g_f8_n_blocks_multi"}, {validate_snow3g_f9, @@ -1885,21 +1899,21 @@ validate_snow3g_f8_n_blocks(struct IMB_MGR *mb_mgr, uint32_t job_api, for (i = 0; i < NUM_SUPPORTED_BUFFERS; i++) { /*Test the encrypt*/ - if (job_api) + if (job_api) { submit_uea2_jobs(mb_mgr, (uint8_t **)&pKeySched, pIV, pSrcBuff, pDstBuff, bitLens, bitOffsets, IMB_DIR_ENCRYPT, i + 1); - else + } else { IMB_SNOW3G_F8_N_BUFFER(mb_mgr, *pKeySched, (const void * const *)pIV, (const void * const *)pSrcBuff, (void **)pDstBuff, packetLen, i + 1); - - if (pDstBuff[0] == NULL) { - printf("N buffer failure\n"); - goto snow3g_f8_n_buffer_exit; + if (pDstBuff[0] == NULL) { + printf("N buffer failure\n"); + goto snow3g_f8_n_buffer_exit; + } } /*Compare the data in the pDstBuff with the cipher pattern*/ @@ -1913,20 +1927,21 @@ validate_snow3g_f8_n_blocks(struct IMB_MGR *mb_mgr, uint32_t job_api, } /*Test the Decrypt*/ - if (job_api) + if (job_api) { submit_uea2_jobs(mb_mgr, (uint8_t **)&pKeySched, pIV, pSrcBuff, pDstBuff, bitLens, bitOffsets, IMB_DIR_DECRYPT, i + 1); - else + } else { IMB_SNOW3G_F8_N_BUFFER(mb_mgr, *pKeySched, (const void * const *)pIV, (const void * const *)pDstBuff, (void **)pSrcBuff, packetLen, i + 1); - if (pSrcBuff[0] == NULL) { - printf("N buffer failure\n"); - goto snow3g_f8_n_buffer_exit; + if (pSrcBuff[0] == NULL) { + printf("N buffer failure\n"); + goto snow3g_f8_n_buffer_exit; + } } /*Compare the data in the pSrcBuff with the pDstBuff*/ @@ -1963,6 +1978,529 @@ snow3g_f8_n_buffer_exit: test_suite_update(uea2_ctx, 1, 0); } +static void +validate_snow3g_f8_n_blocks_linear(struct IMB_MGR *mb_mgr, uint32_t job_api, + struct test_suite_context *uea2_ctx, + struct test_suite_context *uia2_ctx) +{ + int length, numVectors, i, j, numPackets = 16; + size_t size = 0; + cipher_test_linear_vector_t *testVectors = + snow3g_cipher_test_vectors_linear[1]; + /* snow3g f8 test vectors are located at index 1 */ + numVectors = numSnow3gCipherTestVectorsLinear[1]; + + snow3g_key_schedule_t *pKeySched[NUM_SUPPORTED_BUFFERS]; + uint8_t *pKey[NUM_SUPPORTED_BUFFERS]; + uint8_t *pSrcBuff[NUM_SUPPORTED_BUFFERS]; + uint8_t *pDstBuff[NUM_SUPPORTED_BUFFERS]; + uint8_t *pSrcBuff_const[NUM_SUPPORTED_BUFFERS]; + uint8_t *pDstBuff_const[NUM_SUPPORTED_BUFFERS]; + uint8_t *pIV[NUM_SUPPORTED_BUFFERS]; + uint32_t packetLen[NUM_SUPPORTED_BUFFERS]; + uint32_t bitOffsets[NUM_SUPPORTED_BUFFERS]; + uint32_t bitLens[NUM_SUPPORTED_BUFFERS]; + int keyLen = MAX_KEY_LEN; + int status = 0; + + (void)uia2_ctx; +#ifdef DEBUG + printf("Testing IMB_SNOW3G_F8_N_BUFFER: (%s):\n", + job_api ? 
"Job API" : "Direct API"); +#endif + memset(pSrcBuff, 0, sizeof(pSrcBuff)); + memset(pSrcBuff_const, 0, sizeof(pSrcBuff_const)); + memset(pDstBuff, 0, sizeof(pDstBuff)); + memset(pDstBuff_const, 0, sizeof(pDstBuff_const)); + memset(pIV, 0, sizeof(pIV)); + memset(pKey, 0, sizeof(pKey)); + memset(packetLen, 0, sizeof(packetLen)); + memset(pKeySched, 0, sizeof(pKeySched)); + + if (!numVectors) { + printf("No Snow3G test vectors found !\n"); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + + size = IMB_SNOW3G_KEY_SCHED_SIZE(mb_mgr); + if (!size) { + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + + /* Create test Data for num Packets*/ + for (i = 0; i < numPackets; i++) { + /*vectors are in bits used to round up to bytes*/ + length = testVectors[0].dataLenInBytes[i]; + + packetLen[i] = length; + bitLens[i] = length * 8; + bitOffsets[i] = 0; + + pKey[i] = malloc(keyLen); + if (!pKey[i]) { + printf("malloc(pKey[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + pKeySched[i] = malloc(size); + if (!pKeySched[i]) { + printf("malloc(pKeySched[%d]): failed !\n", i); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + pSrcBuff[i] = malloc(length); + if (!pSrcBuff[i]) { + printf("malloc(pSrcBuff[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + pDstBuff[i] = malloc(length); + if (!pDstBuff[i]) { + printf("malloc(pDstBuff[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + pSrcBuff_const[i] = malloc(length); + if (!pSrcBuff_const[i]) { + printf("malloc(pSrcBuff_const[%d]): failed !\n", i); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + pDstBuff_const[i] = malloc(length); + if (!pDstBuff_const[i]) { + printf("malloc(pDstBuff_const[%d]): failed !\n", i); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + pIV[i] = malloc(SNOW3G_IV_LEN_IN_BYTES); + if (!pIV[i]) { + printf("malloc(pIV[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + + memcpy(pKey[i], testVectors[0].key[i], + testVectors[0].keyLenInBytes); + + memset(pSrcBuff[i], 0, length); + memcpy(pSrcBuff_const[i], testVectors[0].plaintext[i], length); + + memset(pDstBuff[i], 0, length); + memcpy(pDstBuff_const[i], testVectors[0].ciphertext[i], length); + + memcpy(pIV[i], testVectors[0].iv[i], + testVectors[0].ivLenInBytes); + + /* init key shed */ + if (IMB_SNOW3G_INIT_KEY_SCHED(mb_mgr, pKey[i], pKeySched[i])) { + printf("IMB_SNOW3G_INIT_KEY_SCHED(mb_mgr) error\n"); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + } + + for (i = 0; i < numPackets; i++) { + const char *fn_name = job_api ? 
+ "submit_uea2_jobs" : "IMB_SNOW3G_F8_N_BUFFER"; + + for (j = 0; j < i; j++) { + /* Cleanup previous values */ + memset(pSrcBuff[j], 0, length); + memset(pDstBuff[j], 0, length); + } + + /*Test the encrypt*/ + if (job_api) { + submit_uea2_jobs(mb_mgr, (uint8_t **)&pKeySched, pIV, + pSrcBuff_const, pDstBuff, bitLens, + bitOffsets, IMB_DIR_ENCRYPT, i + 1); + } else { + IMB_SNOW3G_F8_N_BUFFER(mb_mgr, *pKeySched, + (const void *const *)pIV, + (const void *const *) + pSrcBuff_const, + (void **)pDstBuff, packetLen, + i + 1); + if (pDstBuff[0] == NULL) { + printf("N buffer failure\n"); + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + } + + /*Compare the data in the pDstBuff with the cipher pattern*/ + for (j = 0; j < i; j++) { + if (memcmp(pDstBuff_const[j], pDstBuff[j], + packetLen[j]) != 0) { + printf("%s(Enc) %s nb_packets:%d vector:%d\n", + fn_name, __func__, i, j); + snow3g_hexdump("Actual:", pDstBuff[j], + packetLen[j]); + snow3g_hexdump("Expected:", pDstBuff_const[j], + packetLen[j]); + status = -1; + } + } + + /*Test the Decrypt*/ + if (job_api) { + submit_uea2_jobs(mb_mgr, (uint8_t **)&pKeySched, pIV, + pDstBuff_const, pSrcBuff, bitLens, + bitOffsets, IMB_DIR_DECRYPT, i + 1); + } else { + IMB_SNOW3G_F8_N_BUFFER(mb_mgr, *pKeySched, + (const void *const *)pIV, + (const void *const *) + pDstBuff_const, + (void **)pSrcBuff, packetLen, + i + 1); + if (pSrcBuff[0] == NULL) { + status = -1; + goto snow3g_f8_n_buffer_linear_exit; + } + } + + /*Compare the data in the pSrcBuff with the pDstBuff*/ + for (j = 0; j < i; j++) { + if (memcmp(pSrcBuff[j], pSrcBuff_const[j], + packetLen[j]) != 0) { + printf("%s(Dec) %s nb_packets:%d vector:%d\n", + fn_name, __func__, i, j); + snow3g_hexdump("Actual:", pSrcBuff[j], + packetLen[j]); + snow3g_hexdump("Expected:", pSrcBuff_const[j], + packetLen[j]); + status = -1; + } + } + } + +snow3g_f8_n_buffer_linear_exit: + for (i = 0; i < numPackets; i++) { + if (pKey[i] != NULL) + free(pKey[i]); + if (pKeySched[i] != NULL) + free(pKeySched[i]); + if (pSrcBuff[i] != NULL) + free(pSrcBuff[i]); + if (pDstBuff[i] != NULL) + free(pDstBuff[i]); + if (pSrcBuff_const[i] != NULL) + free(pSrcBuff_const[i]); + if (pDstBuff_const[i] != NULL) + free(pDstBuff_const[i]); + if (pIV[i] != NULL) + free(pIV[i]); + } + + if (status < 0) + test_suite_update(uea2_ctx, 0, 1); + else + test_suite_update(uea2_ctx, 1, 0); +} + +static void +validate_snow3g_f8_n_blocks_linear_mkeys(struct IMB_MGR *mb_mgr, + uint32_t job_api, + struct test_suite_context *uea2_ctx, + struct test_suite_context *uia2_ctx) +{ + int length, numVectors, i, j; + size_t size = 0; + cipher_test_vector_t *testVectors = snow3g_cipher_test_vectors[1]; + /* snow3g f8 random test vectors are located at index 1 */ + numVectors = numSnow3gCipherTestVectors[1]; + + snow3g_key_schedule_t **pKeySched; + uint8_t **pKey; + uint8_t **pSrcBuff; + uint8_t **pDstBuff; + uint8_t **pSrcBuff_const; + uint8_t **pDstBuff_const; + uint8_t **pIV; + uint32_t *packetLen; + uint32_t *bitOffsets; + uint32_t *bitLens; + int status = 0; + + (void)uia2_ctx; +#ifdef DEBUG + printf("Testing IMB_SNOW3G_F8_N_BUFFER_MULTI for usecase %s: (%s):\n", + __func__, job_api ? 
"Job API" : "Direct API"); +#endif + + pSrcBuff = malloc(sizeof(*pSrcBuff) * numVectors); + pSrcBuff_const = malloc(sizeof(*pSrcBuff_const) * numVectors); + pDstBuff = malloc(sizeof(*pDstBuff) * numVectors); + pDstBuff_const = malloc(sizeof(*pDstBuff_const) * numVectors); + pIV = malloc(sizeof(*pIV) * numVectors); + pKey = malloc(sizeof(*pKey) * numVectors); + pKeySched = malloc(sizeof(*pKeySched) * numVectors); + packetLen = malloc(sizeof(*packetLen) * numVectors); + bitOffsets = malloc(sizeof(*bitOffsets) * numVectors); + bitLens = malloc(sizeof(*bitLens) * numVectors); + + if (!pSrcBuff || !pSrcBuff_const || !pDstBuff || + !pDstBuff_const || !pIV || !pKey || !pKeySched || + !packetLen || !bitOffsets || !bitLens) + goto snow3g_f8_n_buff_linear_mkey_early_exit; + + memset(pSrcBuff, 0, sizeof(*pSrcBuff) * numVectors); + memset(pSrcBuff_const, 0, sizeof(*pSrcBuff_const) * numVectors); + memset(pDstBuff, 0, sizeof(*pDstBuff) * numVectors); + memset(pDstBuff_const, 0, sizeof(*pDstBuff_const) * numVectors); + memset(pIV, 0, sizeof(*pIV) * numVectors); + memset(pKey, 0, sizeof(*pKey) * numVectors); + memset(pKeySched, 0, sizeof(*pKeySched) * numVectors); + memset(packetLen, 0, sizeof(*packetLen) * numVectors); + memset(bitOffsets, 0, sizeof(*bitOffsets) * numVectors); + memset(bitLens, 0, sizeof(*bitLens) * numVectors); + + if (!numVectors) { + printf("No Snow3G test vectors found !\n"); + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + + size = IMB_SNOW3G_KEY_SCHED_SIZE(mb_mgr); + if (!size) { + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + + /* Create test Data for num Vectors */ + for (i = 0; i < numVectors; i++) { + /*vectors are in bits used to round up to bytes*/ + length = testVectors[i].dataLenInBytes; + + packetLen[i] = length; + bitLens[i] = length * 8; + bitOffsets[i] = 0; + + pKey[i] = malloc(testVectors[i].keyLenInBytes); + if (!pKey[i]) { + printf("malloc(pKey[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + pKeySched[i] = malloc(size); + if (!pKeySched[i]) { + printf("malloc(pKeySched[%d]): failed !\n", i); + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + pSrcBuff[i] = malloc(length); + if (!pSrcBuff[i]) { + printf("malloc(pSrcBuff[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + pDstBuff[i] = malloc(length); + if (!pDstBuff[i]) { + printf("malloc(pDstBuff[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + pSrcBuff_const[i] = malloc(length); + if (!pSrcBuff_const[i]) { + printf("malloc(pSrcBuff_const[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + pDstBuff_const[i] = malloc(length); + if (!pDstBuff_const[i]) { + printf("malloc(pDstBuff_const[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + pIV[i] = malloc(testVectors[i].ivLenInBytes); + if (!pIV[i]) { + printf("malloc(pIV[%d]):failed !\n", i); + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + + memcpy(pKey[i], testVectors[i].key, + testVectors[i].keyLenInBytes); + + memset(pSrcBuff[i], 0, length); + memcpy(pSrcBuff_const[i], testVectors[i].plaintext, length); + + memset(pDstBuff[i], 0, length); + memcpy(pDstBuff_const[i], testVectors[i].ciphertext, length); + + memcpy(pIV[i], testVectors[i].iv, testVectors[i].ivLenInBytes); + + /* init key shed */ + if (IMB_SNOW3G_INIT_KEY_SCHED(mb_mgr, pKey[i], pKeySched[i])) { + printf("IMB_SNOW3G_INIT_KEY_SCHED(mb_mgr) error\n"); + status = -1; + goto 
snow3g_f8_n_buff_linear_mkey_exit; + } + } + + for (i = 0; i < numVectors; i++) { + int nb_elem, nb_remain_elem = i + 1, idx = 0; + const char *fn_name = job_api ? + "submit_uea2_jobs" : "IMB_SNOW3G_F8_N_BUFFER_MULTIKEY"; + + for (j = 0; j <= i; j++) { + /* Cleanup previous values */ + memset(pSrcBuff[j], 0, packetLen[j]); + memset(pDstBuff[j], 0, packetLen[j]); + } + + /*Test the encrypt*/ + while (nb_remain_elem > 0) { + if (nb_remain_elem >= NUM_SUPPORTED_BUFFERS) { + nb_elem = NUM_SUPPORTED_BUFFERS; + nb_remain_elem -= NUM_SUPPORTED_BUFFERS; + } else { + nb_elem = nb_remain_elem; + nb_remain_elem = 0; + } + + if (job_api) { + submit_uea2_jobs(mb_mgr, + (uint8_t **)&pKeySched[idx], + &pIV[idx], + &pSrcBuff_const[idx], + &pDstBuff[idx], &bitLens[idx], + &bitOffsets[idx], + IMB_DIR_ENCRYPT, nb_elem); + } else { + IMB_SNOW3G_F8_N_BUFFER_MULTIKEY( + mb_mgr, + (const snow3g_key_schedule_t *const *) + &pKeySched[idx], + (const void *const *)&pIV[idx], + (const void *const *)&pSrcBuff_const[idx], + (void **)&pDstBuff[idx], &packetLen[idx], + nb_elem); + if (pDstBuff[idx] == NULL) { + printf("N buffer failure\n"); + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + } + + if (nb_elem == NUM_SUPPORTED_BUFFERS) + idx += NUM_SUPPORTED_BUFFERS; + } + + /*Compare the data in the pDstBuff with the cipher pattern*/ + for (j = 0; j <= i; j++) { + if (memcmp(pDstBuff_const[j], pDstBuff[j], + packetLen[j]) != 0) { + printf("%s(Enc) %s nb_packets:%d vector: %d\n", + fn_name, __func__, i, j); + snow3g_hexdump("Actual:", pDstBuff[j], + packetLen[j]); + snow3g_hexdump("Expected:", pDstBuff_const[j], + packetLen[j]); + status = -1; + } + } + + nb_remain_elem = i + 1; + idx = 0; + while (nb_remain_elem > 0) { + if (nb_remain_elem >= NUM_SUPPORTED_BUFFERS) { + nb_elem = NUM_SUPPORTED_BUFFERS; + nb_remain_elem -= NUM_SUPPORTED_BUFFERS; + } else { + nb_elem = nb_remain_elem; + nb_remain_elem = 0; + } + /*Test the Decrypt*/ + if (job_api) { + submit_uea2_jobs(mb_mgr, + (uint8_t **)&pKeySched[idx], + &pIV[idx], + &pDstBuff_const[idx], + &pSrcBuff[idx], &bitLens[idx], + &bitOffsets[idx], + IMB_DIR_DECRYPT, nb_elem); + } else { + IMB_SNOW3G_F8_N_BUFFER_MULTIKEY( + mb_mgr, + (const snow3g_key_schedule_t *const *) + &pKeySched[idx], + (const void *const *)&pIV[idx], + (const void *const *)&pDstBuff_const[idx], + (void **)&pSrcBuff[idx], &packetLen[idx], + nb_elem); + if (pSrcBuff[idx] == NULL) { + status = -1; + goto snow3g_f8_n_buff_linear_mkey_exit; + } + } + + if (nb_elem == NUM_SUPPORTED_BUFFERS) + idx += NUM_SUPPORTED_BUFFERS; + } + /*Compare the data in the pSrcBuff with the pDstBuff*/ + for (j = 0; j <= i; j++) { + if (memcmp(pSrcBuff[j], pSrcBuff_const[j], + packetLen[j]) != 0) { + printf("%s(Dec) %s nb_packets:%d vector: %d\n", + fn_name, __func__, i, j); + snow3g_hexdump("Actual:", pSrcBuff[j], + packetLen[j]); + snow3g_hexdump("Expected:", pSrcBuff_const[j], + packetLen[j]); + status = -1; + } + } + } + +snow3g_f8_n_buff_linear_mkey_exit: + for (i = 0; i < numVectors; i++) { + if (pKey[i] != NULL) + free(pKey[i]); + if (pKeySched[i] != NULL) + free(pKeySched[i]); + if (pSrcBuff[i] != NULL) + free(pSrcBuff[i]); + if (pDstBuff[i] != NULL) + free(pDstBuff[i]); + if (pSrcBuff_const[i] != NULL) + free(pSrcBuff_const[i]); + if (pDstBuff_const[i] != NULL) + free(pDstBuff_const[i]); + if (pIV[i] != NULL) + free(pIV[i]); + } +snow3g_f8_n_buff_linear_mkey_early_exit: + if (pKey != NULL) + free(pKey); + if (pSrcBuff != NULL) + free(pSrcBuff); + if (pSrcBuff_const != NULL) + free(pSrcBuff_const); + if 
(pDstBuff != NULL) + free(pDstBuff); + if (pDstBuff_const != NULL) + free(pDstBuff_const); + if (pIV != NULL) + free(pIV); + if (packetLen != NULL) + free(packetLen); + if (pKeySched != NULL) + free(pKeySched); + if (bitOffsets != NULL) + free(bitOffsets); + if (bitLens != NULL) + free(bitLens); + + if (status < 0) + test_suite_update(uea2_ctx, 0, 1); + else + test_suite_update(uea2_ctx, 1, 0); +} + static void validate_snow3g_f8_n_blocks_multi(struct IMB_MGR *mb_mgr, uint32_t job_api, diff --git a/test/snow3g_test_vectors.h b/test/snow3g_test_vectors.h index bd44431943a9103cca9f472ded46528d96ac27b2..a95a9e6b423458c0084cc11ded30cc874369dd0c 100644 --- a/test/snow3g_test_vectors.h +++ b/test/snow3g_test_vectors.h @@ -1,5 +1,6 @@ /***************************************************************************** Copyright (c) 2009-2022, Intel Corporation + Copyright (c) 2022, Nokia Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: @@ -56,6 +57,15 @@ typedef struct cipher_test_vector_s { uint8_t key[MAX_KEY_LEN]; uint8_t iv[MAX_IV_LEN]; } cipher_test_vector_t; +typedef struct cipher_test_linear_vector_s { + uint32_t dataLenInBytes[NUM_SUPPORTED_BUFFERS]; + uint32_t keyLenInBytes; + uint32_t ivLenInBytes; + uint8_t plaintext[NUM_SUPPORTED_BUFFERS][MAX_DATA_LEN]; + uint8_t ciphertext[NUM_SUPPORTED_BUFFERS][MAX_DATA_LEN]; + uint8_t key[NUM_SUPPORTED_BUFFERS][MAX_KEY_LEN]; + uint8_t iv[NUM_SUPPORTED_BUFFERS][MAX_IV_LEN]; +} cipher_test_linear_vector_t; typedef struct cipherbit_test_vector_s { uint32_t dataLenInBits; uint32_t keyLenInBytes; @@ -254,6 +264,176 @@ static cipherbit_test_linear_vector_t snow3g_f8_linear_bitvectors = { } }; +static cipher_test_linear_vector_t snow3g_f8_linear[] = { + { + /*dataLenInBytes*/ + { 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32 }, + /*keyLenInBytes*/ + 16, + /*ivLenInBytes*/ + 16, + { /*plaintext linear bit bit buffer*/ + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 } }, + { /*ciphertext linear bit buffer*/ + { 0xea, 0xe9, 0x11, 0x49, 0x21, 0x58, 0x44, 0x59, 0xc6, 0xec, 0x77, + 0x82, 0x71, 0x6f, 0x91, 0xd4, 0xe1, 0xf6, 0xbf, 0xfa, 0x45, 0xfc, + 0x80, 0x94, 0xc2, 0x3d, 0x5c, 0x5f, 0x17, 0x72, 0x44, 0x92 }, + { 0xe3, 0x96, 0x7f, 0x63, 0x88, 0x74, 0x2d, 0x7c, 0x3b, 0x3d, 0x5a, + 0x08, 0xc0, 0x8d, 0x1a, 0xc2, 0xa1, 0xf2, 0x38, 0x96, 0xc7, 0x41, + 0x1b, 0xc1, 0xde, 0xac, 0x24, 0x57, 0x9c, 0x45, 0x0d, 0xb0 }, + { 0x4d, 0x50, 0xd2, 0xe3, 0xa6, 0xe7, 0x97, 0xee, 0xf3, 0xe6, 0x2a, + 0x32, 0xf3, 0x66, 0xea, 0x99, 0xbc, 0x54, 0x24, 0xa6, 0x3f, 0xc3, + 0xbb, 0x27, 0xc7, 0xba, 0x42, 0xd8, 0x2f, 0x86, 0xfc, 0xdf }, + { 0x4a, 0x98, 0xa3, 0x04, 0xad, 0xcf, 0xb9, 0xfa, 0xdb, 0x3b, 0xe7, + 0x7c, 0xd1, 0x69, 0x59, 0x6c, 0x7e, 0x44, 0x73, 0x8d, 0x96, 0xd0, + 0x51, 0x1f, 0x55, 0x32, 0x55, 0x59, 0xc2, 0xe9, 0x0a, 0x21 }, + { 0xe4, 0xf5, 0x71, 0x42, 0xaf, 0x93, 0x5e, 0x6d, 0x7d, 0xb8, 0x44, + 0xf7, 0xcb, 0x0d, 0x08, 0xd3, 0x3a, 0xe6, 0x20, 0xcd, 0xc5, 0x33, + 0xa3, 0x5a, 0x5e, 0x5a, 0x41, 0x92, 0x96, 0x44, 0x28, 0xc1 }, + { 0x1b, 0x6e, 0x3a, 0x76, 0x18, 0x77, 0x0b, 0x77, 0xf2, 0xda, 0xca, + 0x1d, 0xd2, 0x9c, 0xa9, 0xc2, 0x10, 0x6b, 0xe7, 0x4b, 0xdb, 0x30, + 0x79, 0xc5, 0x56, 0xf4, 0xcb, 0xb7, 0x19, 0xdf, 0xe5, 0xcb }, + { 0x10, 0x4e, 0x19, 0x91, 0x09, 0x91, 0xd0, 0x9d, 0x41, 0x99, 0x33, + 0x1b, 0xfd, 0xa0, 0xe0, 0xf7, 0x80, 0x6c, 0x1f, 0x7b, 0x12, 0x78, + 0x3d, 0x46, 0x65, 0x86, 0x7e, 0xdf, 0x9f, 0xac, 0x31, 0x42 }, + { 0x59, 0x02, 0x9f, 0xe6, 0x98, 0x24, 0xe3, 0xc5, 0x1b, 0x19, 0x61, + 0x08, 0x37, 0x79, 0xd0, 0xfe, 0xe4, 0xa1, 0xd8, 0xd4, 0x04, 0xf2, + 0x43, 0x60, 0xa4, 0x68, 0x08, 0x9b, 0x06, 0x9c, 0xb3, 0x51 }, + { 0x60, 0x59, 0x72, 0x8f, 0xd0, 0x0b, 0x69, 0x67, 0xdf, 0x89, 0xb0, + 0xd1, 0xc3, 0x02, 0x3e, 0xe4, 0x5b, 0xde, 0xe2, 0x0a, 0xea, 0x67, + 0xc6, 0x05, 0x40, 0x4f, 0xe9, 0x38, 0xd4, 0x10, 0x1d, 0x5a }, + { 0x46, 0x63, 0x8d, 0x94, 0x5a, 0xcc, 0x87, 0x2d, 0x57, 0x4f, 0xcb, + 0x55, 0x71, 0xaa, 0x6c, 0x17, 0xcb, 0x7c, 0x7a, 0x6d, 0x19, 0xbf, + 0xba, 0xcd, 0xe1, 0x3c, 0x77, 0xed, 0x0f, 0x10, 0x4f, 0x38 }, + { 0xe5, 0xdd, 0xa1, 0x58, 
0xbe, 0x47, 0x6e, 0xa6, 0x3b, 0xea, 0x90, + 0x02, 0xf6, 0x87, 0xbd, 0x69, 0x1b, 0x15, 0xb7, 0x3c, 0x6f, 0xa1, + 0x28, 0xde, 0x0f, 0xf3, 0x80, 0xfb, 0xef, 0x89, 0xa5, 0xd5 }, + { 0xae, 0x68, 0x9e, 0x4e, 0x8d, 0x49, 0x2a, 0x59, 0x5c, 0x02, 0x53, + 0x73, 0x91, 0xb2, 0x92, 0x94, 0xc3, 0x7e, 0x09, 0xae, 0xe3, 0x6d, + 0x3c, 0xea, 0xac, 0x57, 0x34, 0x19, 0x82, 0x56, 0x10, 0x2e }, + { 0xd4, 0x35, 0x4b, 0x73, 0x1b, 0x6e, 0xa3, 0x4c, 0xd5, 0x9b, 0x10, + 0xd0, 0x93, 0x64, 0xc9, 0xdf, 0xe0, 0xe5, 0x2e, 0xa0, 0x8c, 0x2d, + 0x85, 0x38, 0x53, 0xaa, 0x79, 0x92, 0xad, 0xae, 0x36, 0xb3 }, + { 0xdb, 0x9b, 0x66, 0x28, 0xf7, 0x9d, 0x37, 0x32, 0xbe, 0x36, 0x78, + 0x8d, 0xa3, 0xd0, 0xc2, 0x73, 0x68, 0x0d, 0x47, 0xf7, 0x13, 0x8f, + 0x3d, 0x83, 0x2a, 0xcf, 0x5e, 0xe1, 0xb8, 0x6d, 0x86, 0xb8 }, + { 0xd5, 0xbf, 0x80, 0x07, 0x53, 0x33, 0xad, 0x71, 0x9a, 0x05, 0x70, + 0xd9, 0xe7, 0xdd, 0x45, 0xf0, 0xd0, 0x2d, 0xaf, 0xdc, 0xf1, 0x12, + 0x67, 0x8f, 0x46, 0x20, 0xac, 0xb4, 0xd7, 0xf8, 0x98, 0x41 }, + { 0xd5, 0xbf, 0x80, 0x07, 0x53, 0x33, 0xad, 0x71, 0x9a, 0x05, 0x70, + 0xd9, 0xe7, 0xdd, 0x45, 0xf0, 0xd0, 0x2d, 0xaf, 0xdc, 0xf1, 0x12, + 0x67, 0x8f, 0x46, 0x20, 0xac, 0xb4, 0xd7, 0xf8, 0x98, 0x41 } }, + { /*key buffers*/ + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00 } }, + { /* IV buffers*/ + { 0x00, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x01, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x02, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x02, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x03, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x03, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x04, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x04, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x05, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x05, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x06, 0x1C, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x06, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x07, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x07, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x08, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x08, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x09, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x09, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x0A, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0A, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x0B, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0B, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x0C, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0C, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x0D, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0D, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x0E, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0E, 0x1C, 0x00, 0x00, 0x00 }, + { 0x00, 0x00, 0x00, 0x0E, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0E, 0x1C, 0x00, 0x00, 0x00 } } } }; + static cipher_test_vector_t snow3g_f8_vectors[] = { {/* SCPM test vector */ /*dataLenInBytes*/ @@ -432,7 +612,254 @@ static cipher_test_vector_t snow3g_f8_vectors[] = { 0x2B, 0xE8, 0xE3, 0x65, 0x66}, /*iv*/ {0x72, 0xA4, 0xF2, 0x0F, 0x48, 0x00, 0x00, 0x00, 0x72, 0xA4, 0xF2, - 0x0F, 0x48, 0x00, 0x00, 0x00} } }; + 0x0F, 0x48, 0x00, 0x00, 0x00} }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0xea, 0xe9, 0x11, 0x49, 0x21, 0x58, 0x44, 0x59, 0xc6, 0xec, 0x77, + 0x82, 0x71, 0x6f, 0x91, 0xd4, 0xe1, 0xf6, 0xbf, 0xfa, 0x45, 0xfc, + 0x80, 0x94, 0xc2, 0x3d, 0x5c, 0x5f, 0x17, 0x72, 0x44, 0x92}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0xe3, 0x96, 0x7f, 0x63, 0x88, 0x74, 0x2d, 0x7c, 0x3b, 0x3d, 0x5a, + 0x08, 0xc0, 0x8d, 0x1a, 0xc2, 0xa1, 0xf2, 0x38, 0x96, 0xc7, 0x41, + 0x1b, 0xc1, 0xde, 0xac, 0x24, 0x57, 0x9c, 0x45, 0x0d, 0xb0}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x01, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x01, 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x4d, 0x50, 0xd2, 0xe3, 0xa6, 0xe7, 0x97, 0xee, 0xf3, 0xe6, 0x2a, + 0x32, 0xf3, 0x66, 0xea, 0x99, 0xbc, 0x54, 0x24, 0xa6, 0x3f, 0xc3, + 0xbb, 0x27, 0xc7, 0xba, 0x42, 0xd8, 0x2f, 0x86, 0xfc, 0xdf}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x02, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x02, 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x4a, 0x98, 0xa3, 0x04, 0xad, 0xcf, 0xb9, 0xfa, 0xdb, 0x3b, 0xe7, + 0x7c, 0xd1, 
0x69, 0x59, 0x6c, 0x7e, 0x44, 0x73, 0x8d, 0x96, 0xd0, + 0x51, 0x1f, 0x55, 0x32, 0x55, 0x59, 0xc2, 0xe9, 0x0a, 0x21}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x03, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x03, 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0xe4, 0xf5, 0x71, 0x42, 0xaf, 0x93, 0x5e, 0x6d, 0x7d, 0xb8, 0x44, + 0xf7, 0xcb, 0x0d, 0x08, 0xd3, 0x3a, 0xe6, 0x20, 0xcd, 0xc5, 0x33, + 0xa3, 0x5a, 0x5e, 0x5a, 0x41, 0x92, 0x96, 0x44, 0x28, 0xc1}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x04, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x04, 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x1b, 0x6e, 0x3a, 0x76, 0x18, 0x77, 0x0b, 0x77, 0xf2, 0xda, 0xca, + 0x1d, 0xd2, 0x9c, 0xa9, 0xc2, 0x10, 0x6b, 0xe7, 0x4b, 0xdb, 0x30, + 0x79, 0xc5, 0x56, 0xf4, 0xcb, 0xb7, 0x19, 0xdf, 0xe5, 0xcb}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x05, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x05, 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x10, 0x4e, 0x19, 0x91, 0x09, 0x91, 0xd0, 0x9d, 0x41, 0x99, 0x33, + 0x1b, 0xfd, 0xa0, 0xe0, 0xf7, 0x80, 0x6c, 0x1f, 0x7b, 0x12, 0x78, + 0x3d, 0x46, 0x65, 0x86, 0x7e, 0xdf, 0x9f, 0xac, 0x31, 0x42}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x06, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x06, 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x59, 0x02, 0x9f, 0xe6, 0x98, 0x24, 0xe3, 0xc5, 0x1b, 0x19, 0x61, + 0x08, 0x37, 0x79, 0xd0, 0xfe, 0xe4, 0xa1, 0xd8, 0xd4, 0x04, 0xf2, + 0x43, 0x60, 0xa4, 0x68, 0x08, 0x9b, 0x06, 0x9c, 0xb3, 0x51}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x07, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x07, + 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x60, 0x59, 0x72, 0x8f, 0xd0, 0x0b, 0x69, 0x67, 0xdf, 0x89, 0xb0, + 0xd1, 0xc3, 0x02, 0x3e, 0xe4, 0x5b, 0xde, 0xe2, 0x0a, 0xea, 0x67, + 0xc6, 0x05, 0x40, 0x4f, 0xe9, 0x38, 0xd4, 0x10, 0x1d, 0x5a}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x08, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x08, 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x46, 0x63, 0x8d, 0x94, 0x5a, 0xcc, 0x87, 0x2d, 0x57, 0x4f, 0xcb, + 0x55, 0x71, 0xaa, 0x6c, 0x17, 0xcb, 0x7c, 0x7a, 0x6d, 0x19, 0xbf, + 0xba, 0xcd, 0xe1, 0x3c, 0x77, 0xed, 0x0f, 0x10, 0x4f, 0x38}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x09, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x09, + 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0xe5, 0xdd, 0xa1, 0x58, 0xbe, 0x47, 0x6e, 0xa6, 0x3b, 0xea, 0x90, + 0x02, 0xf6, 0x87, 0xbd, 0x69, 0x1b, 0x15, 0xb7, 0x3c, 0x6f, 0xa1, + 0x28, 0xde, 0x0f, 0xf3, 0x80, 0xfb, 0xef, 0x89, 0xa5, 0xd5}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x0A, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0A, + 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0xae, 0x68, 0x9e, 0x4e, 0x8d, 0x49, 0x2a, 0x59, 0x5c, 0x02, 0x53, + 0x73, 0x91, 0xb2, 0x92, 0x94, 0xc3, 0x7e, 0x09, 0xae, 0xe3, 0x6d, + 0x3c, 0xea, 0xac, 0x57, 0x34, 0x19, 0x82, 0x56, 0x10, 0x2e}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x0B, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0B, 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0xd4, 0x35, 0x4b, 0x73, 0x1b, 0x6e, 0xa3, 0x4c, 0xd5, 0x9b, 0x10, + 0xd0, 0x93, 0x64, 0xc9, 0xdf, 0xe0, 0xe5, 0x2e, 0xa0, 0x8c, 0x2d, + 0x85, 0x38, 0x53, 0xaa, 0x79, 0x92, 0xad, 0xae, 0x36, 0xb3}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x0C, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0C, + 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0xdb, 0x9b, 0x66, 0x28, 0xf7, 0x9d, 0x37, 0x32, 0xbe, 0x36, 0x78, + 0x8d, 0xa3, 0xd0, 0xc2, 0x73, 0x68, 0x0d, 0x47, 0xf7, 0x13, 0x8f, + 0x3d, 0x83, 0x2a, 0xcf, 0x5e, 0xe1, 0xb8, 0x6d, 0x86, 0xb8}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x0D, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0D, + 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0xd5, 0xbf, 0x80, 0x07, 0x53, 0x33, 0xad, 0x71, 0x9a, 0x05, 0x70, + 0xd9, 0xe7, 0xdd, 0x45, 0xf0, 0xd0, 0x2d, 0xaf, 0xdc, 0xf1, 0x12, + 0x67, 0x8f, 0x46, 0x20, 0xac, 0xb4, 0xd7, 0xf8, 0x98, 0x41}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x0E, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0E, + 0x1C, 0x00, 0x00, 0x00} + }, + { + 32, + 16, + 16, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x62, 0x45, 0xff, 0xfa, 0x89, 0x47, 0x18, 0x63, 0x28, 0x4e, 0xd5, + 0xf2, 0x94, 0xb0, 0x54, 0x43, 0xd5, 0xae, 0xb5, 0x68, 0x2f, 0xf6, + 0x7c, 0x81, 0xe3, 0xc5, 0x81, 0x32, 0x0e, 0x59, 0xc5, 0x60}, + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00}, + {0x00, 0x00, 0x00, 0x0F, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x0F, 0x1C, 0x00, 0x00, 0x00} + } +}; static hash_test_vector_t snow_f9_vectors[] = { { @@ -794,6 +1221,15 @@ uint32_t numSnow3gCipherTestVectors[] = { sizeof(snow3g_f8_linear_bitvectors) / sizeof(cipherbit_test_vector_t) }; +cipher_test_linear_vector_t *snow3g_cipher_test_vectors_linear[] = { + snow3g_f8_linear, snow3g_f8_linear +}; + +uint32_t numSnow3gCipherTestVectorsLinear[] = { + sizeof(snow3g_f8_linear) / sizeof(cipher_test_linear_vector_t), + sizeof(snow3g_f8_linear) / sizeof(cipher_test_linear_vector_t) +}; + hash_test_vector_t *snow3g_hash_test_vectors[] = { snow_f9_vectors, snow_f9_vectors, snow_f9_vectors }; diff --git a/test/test_api.py b/test/test_api.py index e4376befba6f47db9e2ddce97f5c5cdb8ca38b67..005e8b04f3d012face25434aca3682b9949a172d 100755 --- a/test/test_api.py +++ b/test/test_api.py @@ -72,7 +72,7 @@ fixed_end_main = """test_suite_update(&ts, run - errors, errors); def ERR(err_str): print("ERROR: {}".format(err_str)) -# This serches for patterns in header file and returns dict with results +# This searches for patterns in header file and returns dict with results # Functions works only for 3 args exactly # lines : list of all lines from LIB_HEADER # keyword : position of keyword in pattern (0/1/2) @@ -399,7 +399,7 @@ if __name__ == "__main__": function_list.append((name, f_type)) # -------------------------------------------------------------------------- - # Match arg types with arg names and args with ivalid values + # Match arg types with arg names and args with invalid values full_data = prep_func_arg_type_matches(function_list, defines, prototypes) test_cases = assign_errors_to_inv_parameters_by_arg_name(full_data) diff --git a/test/utils.c b/test/utils.c index 46912677f37abd60ff2f3739955e193b0337fc30..a894d81f58949af64fcbfd5f6b9ebf8bc9a25991 100644 --- a/test/utils.c +++ b/test/utils.c @@ -191,6 +191,10 @@ update_flags_and_archs(const char *arg, *flags &= (~IMB_FLAG_SHANI_OFF); else if (strcmp(arg, "--shani-off") == 0) *flags |= IMB_FLAG_SHANI_OFF; + else if (strcmp(arg, "--gfni-on") == 0) + *flags &= (~IMB_FLAG_GFNI_OFF); + else if (strcmp(arg, "--gfni-off") == 0) + *flags |= IMB_FLAG_GFNI_OFF; else match = 0; return match; diff --git a/test/utils.h b/test/utils.h index f3727002743fc5eda2102fca889defb43bc19ebe..bd71380c340f44321f99c1156d3c926a13d7facd 100644 --- a/test/utils.h +++ b/test/utils.h @@ -32,6 +32,7 @@ #include #define DIM(_x) (sizeof(_x)/sizeof(_x[0])) +#define DIV_ROUND_UP(x, y) ((x + y - 1) / y) void hexdump(FILE *fp, const char *msg, const void *p, size_t len); void hexdump_ex(FILE *fp, const char *msg, const void *p, size_t len, diff --git a/test/win_x64.mak b/test/win_x64.mak index 1c3ce01b67f0c4fb8ec72c07240c3fd1900baa1c..896f9c49a36858cc921421cef19a36134c642d1e 100644 --- a/test/win_x64.mak +++ 
b/test/win_x64.mak @@ -59,13 +59,7 @@ DLFLAGS = # compiler CC = cl -# check for CET support -!if ([$(CC) /? 2>&1 | findstr /C:"guard:cf" > nul] == 0) -DCFLAGS = $(DCFLAGS) /guard:cf -DLFLAGS = $(DLFLAGS) /CETCOMPAT /GUARD:CF /DYNAMICBASE -!endif - -# _CRT_SECURE_NO_WARNINGS disables warning C4996 about unsecure snprintf() being used +# _CRT_SECURE_NO_WARNINGS disables warning C4996 about insecure snprintf() being used CFLAGS = /nologo /DNO_COMPAT_IMB_API_053 /D_CRT_SECURE_NO_WARNINGS $(DCFLAGS) /Y- /W3 /WX- /Gm- /fp:precise /EHsc $(EXTRA_CFLAGS) $(INCDIR) #linker diff --git a/test/wycheproof/Makefile b/test/wycheproof/Makefile new file mode 100644 index 0000000000000000000000000000000000000000..ef9be7ab4194eec53dee2f66737874cad7feb6de --- /dev/null +++ b/test/wycheproof/Makefile @@ -0,0 +1,128 @@ +# +# Copyright (c) 2022, Intel Corporation +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of Intel Corporation nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#
+
+APP := wycheproof
+INSTPATH ?= /usr/include/ipsec-mb.h
+LIB_DIR ?= ../../lib
+
+MINGW ?= $(shell $(CC) -dM -E - < /dev/null | grep -i mingw | wc -l | sed 's/^ *//')
+
+CFLAGS = -MMD -D_GNU_SOURCE -DNO_COMPAT_IMB_API_053 \
+	-W -Wall -Wextra -Wmissing-declarations -Wpointer-arith \
+	-Wcast-qual -Wundef -Wwrite-strings \
+	-Wformat -Wformat-security \
+	-Wunreachable-code -Wmissing-noreturn -Wsign-compare -Wno-endif-labels \
+	-Wstrict-prototypes -Wmissing-prototypes -Wold-style-definition \
+	-fno-delete-null-pointer-checks -fwrapv
+
+# -fno-strict-overflow is not supported by clang
+ifneq ($(CC),clang)
+CFLAGS += -fno-strict-overflow
+endif
+
+# if "-z ibt" is supported then assume "-z shstk, -z cet-report=error" are also supported
+# "-fcf-protection" needs to be checked separately
+ifeq ($(MINGW),0)
+CC_HAS_CET = $(and $(shell $(CC) --target-help 2> /dev/null | grep -m1 -e "-z ibt" | wc -l), \
+	$(shell $(CC) --help=common 2> /dev/null | grep -m1 -e "-fcf-protection" | wc -l))
+CET_LDFLAGS=-r -z ibt -z shstk
+endif
+
+ifeq ($(CC_HAS_CET),1)
+CFLAGS += -fcf-protection=full
+endif
+
+ifeq ($(MINGW),0)
+LDFLAGS = -fPIE -z noexecstack -z relro -z now
+else
+LDFLAGS = -fPIE
+endif
+
+ifeq ($(CC_HAS_CET),1)
+LDFLAGS += -fcf-protection=full -Wl,-z,ibt -Wl,-z,shstk -Wl,-z,cet-report=error
+endif
+
+LDLIBS = -lIPSec_MB
+
+ifeq ("$(shell test -r $(INSTPATH) && echo -n yes)","yes")
+# library installed
+CFLAGS +=
+else
+# library not installed
+CFLAGS += -I../../lib
+LDFLAGS += -L$(LIB_DIR)
+endif
+
+DEBUG_OPT ?= -O0
+ifeq ($(DEBUG),y)
+CFLAGS += $(DEBUG_OPT) -DDEBUG -g
+LDFLAGS += -g
+else
+ifeq ($(MINGW),0)
+CFLAGS += -O3
+else
+CFLAGS += -O2
+endif
+endif
+
+OBJ_FILES = aes_gcm_test.json.o aes_ccm_test.json.o \
+	chacha20_poly1305_test.json.o \
+	aes_cmac_test.json.o gmac_test.json.o \
+	hmac_sha1_test.json.o hmac_sha224_test.json.o hmac_sha256_test.json.o \
+	hmac_sha384_test.json.o hmac_sha512_test.json.o \
+	wycheproof.o
+
+all: $(APP)
+
+$(APP): $(OBJ_FILES)
+
+.PHONY: clean
+clean:
+	-rm -f $(OBJ_FILES) *.d $(APP)
+
+# style check section
+CHECKPATCH?=checkpatch.pl
+CHECKPATCH_FLAGS = --no-tree --no-signoff --emacs --no-color --ignore CODE_INDENT,INITIALISED_STATIC,LEADING_SPACE,SPLIT_STRING,UNSPECIFIED_INT,ARRAY_SIZE,BLOCK_COMMENT_STYLE,GLOBAL_INITIALISERS,AVOID_EXTERNS,COMPLEX_MACRO,USE_FUNC,CONSTANT_COMPARISON,MISSING_SPACE,NEW_TYPEDEFS
+
+%.c_style_check : %.c
+	$(CHECKPATCH) $(CHECKPATCH_FLAGS) -f $<
+
+%.h_style_check : %.h
+	$(CHECKPATCH) $(CHECKPATCH_FLAGS) -f $<
+
+SOURCES_ALL := $(wildcard *.[ch])
+SOURCES_STYLE := $(foreach infile,$(SOURCES_ALL),$(infile)_style_check)
+
+.PHONY: style
+style: $(SOURCES_STYLE)
+
+# if target not clean or style then make dependencies
+ifneq ($(MAKECMDGOALS),clean)
+ifneq ($(MAKECMDGOALS),style)
+-include $(wildcard *.d)
+endif
+endif
diff --git a/test/wycheproof/aead_test.h b/test/wycheproof/aead_test.h
new file mode 100644
index 0000000000000000000000000000000000000000..dd2f45d563f133c33974eab6ce4f6de04ea9bc0a
--- /dev/null
+++ b/test/wycheproof/aead_test.h
@@ -0,0 +1,50 @@
+/*******************************************************************************
+  Copyright (c) 2022, Intel Corporation
+
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are met:
+
+      * Redistributions of source code must retain the above copyright notice,
+        this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*******************************************************************************/ + +#ifndef AEAD_TEST_H +#define AEAD_TEST_H + +#include +#include + +struct aead_test { + size_t ivSize; /* bits */ + size_t keySize; /* bits */ + size_t tagSize; /* bits */ + size_t tcId; + const char *key; + const char *iv; + const char *aad; + const char *msg; + const char *ct; + const char *tag; + int resultValid; + size_t aadSize; /* bits */ + size_t msgSize; /* bits */ +}; + +#endif /* AEAD_TEST_H */ diff --git a/test/wycheproof/aes_ccm_test.json.c b/test/wycheproof/aes_ccm_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..da8f1791939b3ad8bec1967617b9d19cdb4a1113 --- /dev/null +++ b/test/wycheproof/aes_ccm_test.json.c @@ -0,0 +1,4168 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* AES-CCM, 0.8r12 */ +#include "aead_test.h" +const struct aead_test aes_ccm_test_json[] = { + { 96, 128, 128, 1, + "\xbe\xdc\xfb\x5a\x01\x1e\xbc\x84\x60\x0f\xcb\x29\x6c\x15\xaf\x0d", + "\x43\x8a\x54\x7a\x94\xea\x88\xdc\xe4\x6c\x6c\x85", "", "", "", + "\x25\xd1\xa3\x84\x95\xa7\xde\xa4\x5b\xda\x04\x97\x05\x62\x7d\x10", 1, + 0, 0 }, + { 96, 128, 128, 2, + "\x38\x4e\xa4\x16\xac\x3c\x2f\x51\xa7\x6e\x7d\x82\x26\x34\x6d\x4e", + "\xb3\x0c\x08\x47\x27\xad\x1c\x59\x2a\xc2\x1d\x12", "", "\x35", + "\xd7", + "\x6b\xe3\xfd\x13\xb7\x06\x5a\xfc\x19\xe3\xb8\xa3\xb9\x6b\x39\xfb", 1, + 0, 8 }, + { 96, 128, 128, 3, + "\xca\xe3\x1c\xd9\xf5\x55\x26\xeb\x03\x82\x41\xfc\x44\xca\xc1\xe5", + "\xb5\xe0\x06\xde\xd5\x53\x11\x0e\x6d\xc5\x65\x29", "", + "\xd1\x09\x89\xf2\xc5\x2e\x94\xad", + "\xe6\x4d\x0b\x64\xeb\xb3\x81\xec", + "\x25\x40\x9c\x79\x5d\x49\x1d\x80\x4e\x58\x39\x17\x22\x7b\x73\xc7", 1, + 0, 64 }, + { 96, 128, 128, 4, + "\xff\xdf\x42\x28\x36\x1e\xa1\xf8\x16\x58\x52\x13\x6b\x34\x80\xf7", + "\x0e\x16\x66\xf2\xdc\x65\x2f\x77\x08\xfb\x8f\x0d", "", + "\x25\xb1\x2e\x28\xac\x0e\xf6\xea\xd0\x22\x6a\x3b\x22\x88\xc8\x00", + "\xaa\xf5\x96\xfa\x5b\x00\xaa\xac\x27\x70\x01\x46\xae\xc9\x32\xa9", + "\x84\x8b\x67\x35\xd3\x2c\x96\xe4\xa0\x53\x2b\xcd\xfa\xf3\x35\x82", 1, + 0, 128 }, + { 96, 128, 128, 5, + "\xc1\x5e\xd2\x27\xdd\x2e\x23\x7e\xcd\x08\x7e\xaa\xaa\xd1\x9e\xa4", + "\x96\x5f\xf6\x64\x31\x16\xac\x14\x43\xa2\xde\xc7", "", + "\xfe\xe6\x2f\xde\x97\x3f\xe0\x25\xad\x6b\x32\x2d\xcd\xf3\xc6\x3f" + "\xc7", + "\x03\x33\xdf\x2a\x86\xd7\xf0\x94\xdd\x8b\xce\x75\xda\x6c\x38\xc5" + "\xc1", + "\x41\x7d\xa2\x9d\xf8\x5a\x1d\x13\x4f\xee\xe8\xaa\x35\x56\x90\x81", 1, + 0, 136 }, + { 96, 128, 128, 6, + "\xa8\xee\x11\xb2\x6d\x7c\xeb\x7f\x17\xea\xa1\xe4\xb8\x3a\x2c\xf6", + "\xfb\xbc\x04\xfd\x6e\x02\x5b\x71\x93\xeb\x57\xf6", "", + "\xc0\x8f\x08\x5e\x6a\x9e\x0e\xf3\x63\x62\x80\xc1\x1e\xcf\xad\xf0\xc1" + "\xe7\x29\x19\xff\xc1\x7e\xaf", + "\xfc\xaa\xa3\x8f\xed\xa3\xac\xa9\x75\xac\x76\x55\x3c\x3e\x7e\xf3\x6b" + "\x88\x7a\x8c\x4d\x82\x41\xf9", + "\xc2\xc6\xdc\xae\xae\xb9\xf3\x8a\x3a\x42\xd2\xf4\xe8\xa1\x7d\xe4", 1, + 0, 192 }, + { 96, 128, 128, 7, + "\x16\x55\xbf\x66\x2f\x7e\xe6\x85\x61\x57\x01\xfd\x37\x79\xd6\x28", + "\x42\xb5\x13\x88\xf6\xf9\x04\x7a\x2a\x99\x45\x75", "", + "\x85\x7b\x2f\x6c\xd6\x08\xc9\xce\xa0\x24\x6c\x74\x0c\xaa\x4c\xa1\x9c" + "\x5f\x1c\x7d\x71\xcb\x92\x73\xf0\xd8\xc8\xbb\x65\xb7\x0a", + "\xb3\xfb\x66\xd3\xf2\xcb\x75\x90\xad\x5e\xf5\x60\x48\x13\xc1\x25\x02" + "\x0e\xe3\xd7\x91\xcb\x0e\xc6\x7e\xb5\xeb\x86\x70\x9b\x6b", + "\x15\x55\x77\xb9\x8a\x81\x1e\x45\x32\x46\x16\x04\x39\x97\xbc\x03", 1, + 0, 248 }, + { 96, 128, 128, 8, + "\x3f\xd5\x07\x41\xec\x47\xdd\xbf\xc2\xfc\x09\x09\x75\xd1\x54\xf0", + "\xee\xf1\xa6\xe6\x51\x32\x18\x52\xf0\xb2\x5a\x31", "", + "\xd6\xf6\xa9\xa2\x4d\xb6\xa7\xa6\x17\x6d\x43\x62\x63\x9c\x4f\xd7\x7f" + "\x70\xf3\xe0\x89\xdd\x94\x00\x86\xe1\x2a\x9b\xec\xba\xf9\x7f\x82", + "\x53\x26\x94\xbb\x28\x51\xea\x7f\x3b\xdd\x37\xc4\xe8\x06\xbe\x5b\x95" + "\x3e\xa7\x9d\x08\x10\x0e\x74\xaf\x3f\xa6\x7e\xca\x88\x90\xdb\x28", + "\x9e\x1a\xce\xe6\x88\x84\x8b\xca\x45\x4c\x6d\x04\x75\x3d\x3c\x7d", 1, + 0, 264 }, + { 96, 128, 128, 9, + "\x42\xe3\x8a\xbe\xf2\xdd\x75\x73\x24\x8c\x5a\xef\xb3\xec\xca\x54", + "\x06\x4b\x3c\xfb\xe0\x4d\x94\xd4\xd5\xc1\x9b\x30", "", + "\x2c\x76\x3b\x9e\xc8\x49\x03\xbc\xbb\x8a\xec\x15\xe6\x78\xa3\xa9\x55" + 
"\xe4\x87\x0e\xdb\xf6\x2d\x9d\x3c\x81\xc4\xf9\xed\x61\x54\x87\x78\x75" + "\x77\x9c\xa3\x3c\xce\x8f\x73\xa5\x5c\xa7\xaf\x1d\x8d\x81\x7f\xc6\xba" + "\xac\x00\xef\x96\x2c\x5a\x0d\xa3\x39\xce\x81\x42\x7a\x3d\x59", + "\x4c\xa0\x1b\x5b\x2a\x5e\x57\xbc\xc1\xa4\xb7\xf6\x3f\x04\x9d\xc4\x77" + "\xe3\xee\x2e\x5c\x26\x8e\xfb\x34\x6f\xf9\x5b\x7d\xcd\x67\xf8\x6e\xd0" + "\xf1\x1b\xb1\x7c\x1d\xd7\xfb\x51\x1d\x2f\x37\xb9\x68\x45\x50\xc0\xd8" + "\x4b\xe0\xf1\x00\x30\xcc\xc4\xe0\xde\x5b\x74\xef\x92\xea\x54", + "\xc5\xa5\x7d\xd6\xfa\x16\xaa\x9d\xe8\xde\x20\xe6\xbd\x32\x13\x96", 1, + 0, 528 }, + { 96, 128, 128, 10, + "\x59\xab\x7e\xc1\xc0\x2b\xb2\x06\xaf\x5a\x91\x31\xf1\x13\x43\x11", + "\x55\x08\xf5\xce\xa1\x97\x38\x69\x86\xd9\x2d\xbe", + "\xa4\x3d\x39\xf7\x8a\x2e\x9a\x8a", "", "", + "\x09\xec\x70\xfa\xae\x33\x35\x37\xa7\x31\x49\x29\xdd\xfb\x52\x5b", 1, + 64, 0 }, + { 96, 128, 128, 11, + "\x94\x15\xf9\x25\xbc\xb4\x1d\xc2\x5e\x86\xc8\x26\xdb\xc8\xbf\x68", + "\xbd\xff\xaa\x76\x3b\x91\x6f\xf0\xee\x3f\x3c\xe4", + "\x70\x5d\x67\x6c\xd8\xa9\x44\x51", + "\xfe\xb3\x61\x67\xea\xfc\x02\xc8\xe2\xbd\x6e\x13\x81\x76\x86\xba", + "\x08\xdb\x32\x7a\x88\xbe\x7b\x48\xf4\x30\xfd\x7b\xfc\xcd\xf5\x02", + "\xb7\xc2\x49\xf8\x10\xad\xac\xf9\x9a\xbd\xed\x1f\x3b\x91\x30\xf2", 1, + 64, 128 }, + { 96, 128, 128, 12, + "\xd9\x7c\x9b\x04\x3b\xdc\xcf\xd5\x94\x91\xa9\x95\xe7\x8f\x16\x96", + "\xef\x42\x32\x40\x35\x88\x30\xdf\x91\x55\x06\xa3", + "\x3d\xdb\xa7\xb3\xab\x69\xc8\xb2", + "\xf0\x47\x59\x4a\x5c\xff\xda\x64\x30\x3a\x80\xb2\xfa\x6a\x95\x71" + "\x69", + "\xe0\xca\xf2\xa9\xd5\x0f\x70\xec\xaa\x43\xb4\xa2\x87\xc3\xb3\x4a" + "\x99", + "\xcf\xf4\xc6\x18\x82\xb4\x13\xb6\x86\xff\x35\xb6\x3a\x3a\x73\xde", 1, + 64, 136 }, + { 96, 128, 128, 13, + "\x16\xbe\x38\xc0\x5c\x7b\xc5\xc6\x8e\xe6\x20\x38\x71\x79\x92\x40", + "\xac\xca\x8a\xe9\x16\x11\x9e\x49\xd8\x7c\x33\xa7", "\x28", "", "", + "\x21\x7d\x40\xef\xd9\x72\x70\x1f\xcc\x33\xdf\x53\x62\xe1\xea\x9c", 1, + 8, 0 }, + { 96, 128, 128, 14, + "\x7c\x89\x68\x0b\x4b\xca\x11\xa6\x43\x14\xf4\xca\xc5\x7a\x95\xdf", + "\x07\xc8\xef\x98\x1b\xea\x99\x52\x57\xd3\xd6\x5a", "\xb8\xe8", "", + "", + "\xde\xa6\x36\xde\xd8\xb9\xef\x2a\x08\xff\xdf\x58\xa0\x5b\x78\x71", 1, + 16, 0 }, + { 96, 128, 128, 15, + "\x43\x9f\xd5\xc3\xb7\x65\x87\xd5\xa6\x01\xba\x6e\xf8\xfa\xd2\x14", + "\xed\x1d\x31\x6d\x08\x34\xd1\x74\xc1\xb5\xb4\x38", + "\xea\xe2\x52\xf4\x2d\x2c\x71", "", "", + "\xe8\x53\x04\x26\xcb\xab\xf6\x36\x33\xff\x37\x31\x59\x24\x7e\x38", 1, + 56, 0 }, + { 96, 128, 128, 16, + "\x1a\x44\xf3\x55\x06\x88\xfd\xdb\xc1\xe5\x04\x1d\xc9\x89\x52\xc0", + "\x5d\x29\x04\x29\x8f\x66\x8b\xa9\x5e\xaa\x17\x97", + "\xd5\x59\x08\x95\x8b\x70\xab\xee\x81\x05\x4c\xdf\x3d\x3d\xf5", "", + "", + "\x5c\x71\xb4\xf0\x69\xcf\xa1\x3b\x76\x34\xdb\x4b\x13\xe7\xbe\x7d", 1, + 120, 0 }, + { 96, 128, 128, 17, + "\x7d\xb6\xd8\xe5\x8e\x3c\x55\x2a\x64\x45\x20\xaa\x80\x5e\x2f\x48", + "\xe9\x86\x93\xe9\xf6\x63\x2d\x11\x5b\x5d\x5a\x74", + "\x6f\xc1\xca\x24\xe6\x97\x86\xaa\x26\xbf\xb5\xd4\x6e\xf8\xcb\x56", + "", "", + "\xb9\xa8\xa6\xd4\x61\xa4\x41\xfb\xd5\xbb\x6a\x8a\xc0\xd4\x7e\x9d", 1, + 128, 0 }, + { 96, 128, 128, 18, + "\xde\x6e\xd1\x69\xd3\x96\xcf\xb7\x37\x8e\x89\x2c\x7f\xaf\x1d\x5d", + "\xef\xc1\x87\x02\x82\xe7\x7c\xa8\x06\x3f\x1b\xeb", + "\xee\xdf\x6e\x77\x6a\xd3\x7d\xc6\x10\x82\x5a\x61\x68\xe2\x13\x56" + "\xc2", + "", "", + "\x0d\x65\x0a\x97\x4e\xbe\xa2\x2f\xed\x07\x7d\x22\x9e\x0c\x9e\x65", 1, + 136, 0 }, + { 96, 128, 128, 19, + "\x6c\xf0\x95\x99\x18\x1c\x07\xae\xb2\x1d\x78\x20\xbf\x70\x65\x95", + "\x4c\x4c\x52\x5a\x8c\x7e\xe6\x87\x9a\xef\xa7\x9e", + 
"\xbd\x91\x39\x67\xdb\x07\xb9\xeb\x59\x07\xf0\xbe\x71\xce\x88\x6c\x41" + "\xff\x92\x3c\x29\x6c\x0e\xf3\xf7\x04\xe9\x8f\x64\x9e\x59", + "", "", + "\x80\x6e\x48\xe7\xd4\x52\xb6\x3b\x61\x26\xf5\x76\xef\xbd\xf4\xc4", 1, + 248, 0 }, + { 96, 128, 128, 20, + "\xef\x35\xb5\xc7\x97\xbb\x6b\xee\xdb\x51\x3b\xa3\xd8\xae\xbd\x25", + "\x05\x76\xa1\x01\x7a\xc0\x0e\x49\x11\x0c\x4c\xac", + "\xa3\x86\xd5\xc4\x4d\xe8\xc6\xa5\x06\x3a\xdf\x5b\xa9\xf0\xb7\x5e\x9a" + "\xd1\xf2\x39\xa5\x30\xdd\x76\xd7\x97\x55\x4d\x7b\x03\x7d\x7d", + "", "", + "\x69\x66\xa1\xcf\x57\x29\x33\x2b\x26\xfd\x3e\x38\x50\xb7\x48\x65", 1, + 256, 0 }, + { 96, 128, 128, 21, + "\x64\x9f\x3d\xfd\xdb\xf1\xaf\x60\x87\x67\x45\x68\xe2\xe6\xd7\xc3", + "\x6c\xa6\xf8\x7b\x7a\x85\x84\xdf\x4f\x46\x87\xb9", + "\x35\x31\x2c\xa2\x3e\x4e\xb3\x6c\xb0\xa6\x6c\x6f\x38\x6b\x8e\xc2\x9f" + "\x6d\x11\xe8\x2f\xbf\xca\xad\xfd\x6c\xbc\x9b\x59\xd5\x1a\x6c\x02\x70" + "\x86\x82\x74\xd9\x1f\x60\x97\x8d\x1f\x0f\x37\x28\x09\x30\xd3\xfd\xcb" + "\x3e\x90\xea\x46\x1e\xcc\xc8\x3f\xa0\xd9\x75\x54\x88\x16", + "", "", + "\x3d\xaa\x00\x03\xde\x38\x4d\x78\x44\x3f\xfd\x3a\x5e\xa4\x81\x79", 1, + 520, 0 }, + { 96, 128, 128, 22, + "\xa5\xb5\xb6\xba\xe4\x5b\x74\x1f\xe4\x66\x38\x90\x09\x8f\x32\x6a", + "\x4b\xad\x10\xc6\xd8\x4f\xd4\x3f\xd1\x3a\xd3\x6f", "\x30", + "\x12\x7b\x15\x00\x80\xec\x0b\xc7\x70\x4e\x26\xf4\xab\x11\xab\xb6", + "\x75\xe6\xff\xcb\x61\x14\x83\x3b\x67\xcd\x93\xbd\xf2\xc2\x2b\x55", + "\xc9\x0e\x18\xea\xf8\x10\xb7\xbc\xef\xe7\xa5\x26\xb1\x78\x3b\x20", 1, + 8, 128 }, + { 96, 128, 128, 23, + "\x0c\xec\xb9\xf5\x12\x93\x2d\x68\xe2\xc7\xc0\xbc\x4b\xd6\x21\xc8", + "\x21\x86\xa3\x09\x12\x37\xad\xae\x83\x54\x0e\x24", "\x74\x3e", + "\x43\x7a\xeb\x94\xd8\x42\x28\x3b\xa5\x7b\xb7\x58\xe3\xd2\x29\xf0", + "\x64\x6c\xef\x72\x90\x6e\x2b\x8f\x69\xac\x31\x34\xb4\x96\x59\x8e", + "\x9d\xab\x1e\xe9\x31\x4a\x04\x30\xab\xf5\x4c\x37\xc8\x8c\x79\x0f", 1, + 16, 128 }, + { 96, 128, 128, 24, + "\xa3\xfd\x2f\xdc\xce\x8a\x63\xbf\xe4\xeb\x2d\xb7\xe4\x2a\xdb\xe1", + "\x69\x0e\x7a\xd1\xe0\x5d\x0d\x4a\xb4\x55\x2c\xf7", + "\xab\x91\xec\x8c\xc7\x33\x73", + "\xbe\x02\x31\xb5\xc7\x86\x1f\x0a\xf7\xb6\x38\x14\x79\xd2\x5b\x77", + "\xa8\x84\xf7\x69\xfc\xc7\x27\x83\x9d\x59\x71\x1f\xa3\xcb\x5e\xe0", + "\xf2\x01\x7e\x3b\xd1\x0b\xb1\xb4\x3f\xdc\xc0\xfe\xef\xfc\x9c\x68", 1, + 56, 128 }, + { 96, 128, 128, 25, + "\x55\xe0\x4c\x12\x27\x80\xbe\x52\xed\x93\x28\x92\x80\x39\x00\x8c", + "\x0c\x90\x8e\x58\xcd\xda\xd6\x9d\xea\x1a\x32\xc3", + "\x25\x59\x17\x07\xc0\x04\xf5\x06\xf4\xb5\x1e\x85\xe2\x9f\x6a", + "\x26\xeb\x70\x67\x2e\xef\x03\x66\x7b\x34\xcc\x7d\x0d\xf0\x58\x72", + "\x89\x16\x6d\xcd\x7d\x74\xa4\x45\xdf\xd3\x52\x6c\x51\x80\xd8\x25", + "\x8b\x8e\xd5\xf9\x7a\x16\x88\x81\xc3\xb6\xef\xe9\x1c\xfe\x70\x43", 1, + 120, 128 }, + { 96, 128, 128, 26, + "\x5f\x0a\x1b\x5f\x8f\x86\x73\xd5\x66\xec\x7f\x54\xe7\xdc\xa4\xf2", + "\xc3\x09\x68\xc9\x67\xe5\x35\x05\x62\x16\x28\xdb", + "\xc0\x70\x92\xd7\x99\xda\xc2\xb4\xc0\x5f\xbd\xdd\x04\x74\x3c\x34", + "\xf6\x53\x84\x76\xda\xf0\x45\x24\xcf\x13\x43\x09\xdd\x84\xe1\x87", + "\x23\x15\x11\x0f\x7e\xc6\x4e\x7a\x23\xe5\xa7\x62\x82\x2f\x71\xab", + "\xdc\x7b\x12\xfa\x2d\xbf\xbd\xc6\xd8\x5f\xaa\x77\xa2\xeb\x76\x7e", 1, + 128, 128 }, + { 96, 128, 128, 27, + "\x67\x1a\x70\xe8\x83\xfb\x06\x11\xdf\xfd\x0b\x1d\xd9\xb8\xcc\xa2", + "\xa5\x1c\x37\xf4\x67\x89\x3c\x16\x08\xe5\x62\x74", + "\x3e\xa1\x2d\x80\xf4\x0f\x34\xf8\x12\x47\x9d\x2e\xcc\x13\xd2\xd6" + "\xdf", + "\x3b\xaf\x3e\xdf\x04\xdc\x0c\x97\xaa\xe0\x81\xcd\xeb\x08\x02\x1d", + "\x5d\x56\x30\xfc\x72\x8f\xfb\x08\xce\x69\x3f\x72\x99\xe6\x72\x8b", + 
"\x00\x02\x3f\x11\xa0\x23\xc0\x78\x6c\x10\x5f\xe4\xc0\x03\xaf\x6e", 1, + 136, 128 }, + { 96, 128, 128, 28, + "\x20\xbb\xf7\x4c\x1e\x63\x98\x2c\x47\x2c\x47\x43\x56\x9e\x4c\x84", + "\x45\x9f\xc7\xc0\x04\xbf\x46\x32\x3a\x02\xd8\x46", + "\x4f\x22\x85\xce\x3d\xaf\xa5\x28\xc6\x94\xa5\x27\x2d\x3b\x7b\x92\x90" + "\x97\xdb\x39\x87\x72\x65\x3b\xd9\xbb\xbd\xb3\xb2\xc8\xe1", + "\x6d\xb5\x09\x92\xe8\xfb\xbe\xe1\x5d\x49\x79\xd3\xe3\x22\xda\xcd", + "\x87\x03\xe4\x46\x97\x13\x8c\x58\x53\x2d\x97\xee\x99\x23\x1d\x94", + "\xf1\x4c\x2f\x39\xa4\x87\x1a\x4a\x16\xc4\x2f\x6f\xe8\x78\xde\xef", 1, + 248, 128 }, + { 96, 128, 128, 29, + "\x63\xf0\x31\x72\x50\x5d\x90\xe9\x49\x00\x12\x5c\xb8\xa4\xb0\xdd", + "\x52\xc2\x09\x79\xcd\xaa\xad\xe5\x73\xdb\xa6\x50", + "\x51\x89\xea\x6f\x39\xb2\xa7\x8c\x02\x02\xfd\xff\x14\x6c\x5c\xc6\xbd" + "\xc7\x49\x1d\x47\x86\xf8\x0c\x6c\x6a\xef\x65\x63\x4c\x05\xda", + "\x60\x2c\x98\x99\x7e\xe0\x3f\xd1\x1c\xe0\x0e\x92\xde\x19\x39\x77", + "\x55\x90\x15\x5f\x3e\x70\x1b\x4a\x96\x09\x89\xd0\x25\x1b\xac\x65", + "\xfd\x6a\x2c\x92\x73\xd1\x24\xb5\x55\x3b\xe4\x2e\x78\x93\x14\x65", 1, + 256, 128 }, + { 96, 128, 128, 30, + "\x5b\xf0\x08\xf6\xf2\x7c\xc2\x1f\x5a\xe8\x2f\xb7\x90\x7b\x1d\x92", + "\x58\x0a\xf4\x8b\xc1\x10\x86\x04\xd5\x55\x13\x43", + "\x48\x2d\xa2\x4b\xb4\xfb\x9e\xaa\x0d\xbf\x40\x37\x33\x59\x7f\x5b\x3e" + "\xe8\x33\x8b\x5d\x09\xa1\xd6\xf9\x07\x0b\xb0\x69\x26\x4a\xbb\xca\xcc" + "\x56\x57\xaa\x63\x53\xf1\x79\xd1\xbb\x4c\x7f\xa0\x05\x26\x78\x9e\xaf" + "\x08\xe0\xda\x25\x8c\xbd\xb3\x9e\x98\x77\xc6\x8b\x4a\x75", + "\xca\x89\xd6\xae\x28\x4a\xfb\x67\x92\xcd\x89\x4e\x07\xaa\x83\x36", + "\x1b\x89\xc6\xbc\xdd\xef\xbe\x92\x33\xee\x40\x93\x46\x8a\x5f\x61", + "\xa4\x9c\x77\x47\xda\xd4\x2d\xf6\xd7\x29\xa0\x1f\x4c\x50\xcf\x34", 1, + 520, 128 }, + { 96, 128, 128, 31, + "\x30\xd8\x69\x2e\xb3\xb6\x2d\xb6\x14\x4f\x74\xee\x9d\xec\x52\x96", + "\xfe\x9f\x6f\xb4\x41\x5c\xfb\x41\x89\xf9\xc7\x6d", "", "", "", + "\x80\x4f\x91\x5f\xc7\xfe\xa2\xca\x7d\x8b\xaf\x13\x50\xc5\x22\x7b", 1, + 0, 0 }, + { 96, 128, 128, 32, + "\x21\x51\x7f\xd9\xeb\xfd\x38\x7d\xff\x2a\x0c\x05\x18\xab\x82\x67", + "\x61\xf6\xc4\xec\x9e\x20\x91\xd4\xa0\x31\x80\x4a", + "\x76\xd3\x32\xba\x08\x1b\x3d\x3c\xfb\xa2\x71\x16\x7b\xa1\x08\xcd", + "", "", + "\x2c\xc4\xb9\x05\xa4\xd3\x9e\x35\xd4\xbe\xae\xbd\xed\x9b\x59\x66", 1, + 128, 0 }, + { 96, 128, 128, 33, + "\xa7\x16\xf9\x31\xc8\xf9\xd9\x77\xf7\xda\x85\x73\xbc\x65\xf2\xbf", + "\x91\x77\x36\x59\xad\xac\x8f\x12\xe5\x52\x63\x16", "", + "\x9c\x98\x03\x8c\x5e\x8d\x1a\xf5\x97\xb3\xb9\x18\x8b\x36\x24\xfb", + "\x99\xae\x76\xcf\xff\x55\x2c\xe3\x7b\x21\x0e\x26\xe8\x10\x78\x7c", + "\x2b\xda\xb5\xe6\xf0\x08\xb0\xcc\x75\x1d\x5b\x06\x74\x87\xeb\x2b", 1, + 0, 128 }, + { 96, 128, 128, 34, + "\x87\x5b\x0b\x4a\x84\x15\x05\x24\xeb\x1f\x50\xf9\xd8\xde\x13\x88", + "\xfd\xc5\x01\x4e\xd1\xad\x70\x61\x29\xd5\x73\x22", + "\x35\xa6\xd9\x82\x9c\x84\x49\xc4\x40\x2e\x38\x5c\xc5\xc6\xfe\x98", + "\x2f\x57\x5d\xfb\x2d\xbe\x9d\x23\x8d\xe5\x76\xfc\x63\xe4\xac\x32", + "\x34\x69\xc3\xff\x73\x8a\xa3\x2a\xac\xc1\xac\x48\xd8\x9b\x1d\x75", + "\xfa\x68\x72\x0a\x31\x71\xa5\x4c\x4b\x36\x90\xbf\xfd\xe7\xb6\x10", 1, + 128, 128 }, + { 96, 128, 128, 35, + "\xd6\x29\x8f\xff\x67\xdb\xa1\xec\x25\x03\x08\xe0\xbc\x5f\x4f\xae", + "\xd9\x6f\x9b\xbb\xfa\x14\xe9\x61\x6c\x45\x8d\xf5", "", + "\x8e\xeb\x44\x45\xa3\x4c\x81\xfd\xbc\x47\x8b\x83\xdf\x71\x11\x6c" + "\xe6", + "\x47\x68\x69\xa3\xdd\xb3\x86\xbf\x42\x47\x8d\x0c\x84\x17\x90\x45" + "\xbe", + "\x91\x36\xd9\x94\xda\xa2\x2e\xad\x4d\x08\x27\xe5\x82\x50\x01\xac", 1, + 0, 136 }, + { 96, 128, 128, 36, + 
"\xe3\xd3\xec\x41\xf2\x8e\xb3\x5f\xb5\x3f\x5f\xa9\x18\x04\xe0\x51", + "\x89\xd2\x70\xfc\x8b\x58\x3b\xc6\x31\xce\xfd\x39", + "\x3d\x2f\x45\x8c\x67\xc5\xb6\xc7\x94\xb1\xf1\x2d\xad\x40\x9e\x0f", + "\x84\x7a\xcf\x52\x19\x95\xb3\x3f\x8b\xc4\x74\xc8\xbe\xfb\xca\x3b" + "\xb2", + "\x28\xaa\xec\x53\x49\x3c\xd6\x25\x2c\xf6\x41\x0e\xd1\x41\xbd\xaf" + "\xb7", + "\x47\xbf\x3e\x16\xc2\x27\xca\x11\xfd\x68\xa1\x6d\x40\x7c\x2c\xc3", 1, + 128, 136 }, + { 96, 128, 128, 37, + "\xfa\xf3\x6a\x66\xf8\xe5\x4f\x2f\xb2\xa0\x2f\x3a\x30\xf0\x18\x0b", + "\x2f\xfa\x98\x2a\x47\x84\x79\x7c\xf4\x6b\x07\xab", "", + "\x50\xa5\x9e\xdc\x01\xb7\xbd\x0d\xb6\xec\x43\xfe\x23\xf7\x2e\x70\xed" + "\x4d\x42\x33\x7a\xb1\x92\x6c\xc6\x95\x6a\xa4\x4d\xbe\xbf", + "\xa7\x89\x90\x7a\xeb\x23\x44\xf0\x25\xb1\xb4\x26\xc9\xde\xe5\x2b\x10" + "\x6f\xf2\x11\x0c\xb2\x00\xcf\xb8\x5a\xea\x60\xfd\xdf\x6a", + "\x72\x2e\x5c\x45\x0c\x5e\xd9\x49\x28\x59\xa3\x23\x6a\x22\x0f\x76", 1, + 0, 248 }, + { 96, 128, 128, 38, + "\x2c\x9b\x9f\xf4\x7d\x74\x2c\x4a\xb2\x24\xe9\xca\x1e\xd5\x7c\x4c", + "\x91\x79\x62\xca\xf3\x93\x24\x41\xc2\x59\x28\x2f", + "\x72\x17\x5b\xdf\xdb\x4a\x23\xe9\x7f\xdc\xbd\x26\x3b\xaf\x43\x16", + "\xb5\x42\xc2\xf3\xf8\x16\x70\xdd\xf7\x4f\x15\x18\x4a\xb7\xde\x17\xe0" + "\x57\xcd\xe9\xee\xf9\x2b\xab\xdb\x83\x75\x00\x77\x4c\x19", + "\x32\x0a\xe0\xc1\x1e\x92\xd1\x0d\x5b\xf5\x48\x5c\x85\x4b\x2d\x8f\x63" + "\x18\xe3\x3f\x16\xb5\x20\xcf\xfd\x35\xad\xa3\x81\xc9\x67", + "\xa4\x86\x69\x08\xe6\x64\xee\x14\x0c\x6a\xe2\xb9\xd2\xab\x84\x16", 1, + 128, 248 }, + { 96, 128, 128, 39, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3d\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 40, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3e\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 41, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\xbc\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 42, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6c\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 43, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\xe6\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 44, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + 
"\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x42\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 45, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x41\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 46, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\xdb\xb0\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 47, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb1\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 48, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\x30\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 49, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x14\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 50, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x06\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 51, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x96\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 52, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x95\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 53, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + 
"\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x17\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 54, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x9b", 0, + 0, 128 }, + { 96, 128, 128, 55, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x98", 0, + 0, 128 }, + { 96, 128, 128, 56, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\xda", 0, + 0, 128 }, + { 96, 128, 128, 57, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\x5b\xb0\x34\x07\x72\x97\xf0\x92\x1a", 0, + 0, 128 }, + { 96, 128, 128, 58, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3d\x6d\x5f\x66\x43\x0a\xd6\x5b\xb1\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 59, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\xe6\x43\x0a\xd6\xdb\xb0\x34\x07\x72\x97\xf0\x92\x9a", 0, + 0, 128 }, + { 96, 128, 128, 60, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3c\x6d\x5f\x66\x43\x0a\xd6\xdb\xb0\x34\x07\x72\x97\xf0\x92\x1a", 0, + 0, 128 }, + { 96, 128, 128, 61, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\xc3\x92\xa0\x99\xbc\xf5\x29\xa4\x4f\xcb\xf8\x8d\x68\x0f\x6d\x65", 0, + 0, 128 }, + { 96, 128, 128, 62, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, 128 }, + { 96, 128, 128, 63, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, 128 }, + { 96, 128, 128, 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\xbc\xed\xdf\xe6\xc3\x8a\x56\xdb\x30\xb4\x87\xf2\x17\x70\x12\x1a", 0, + 0, 128 }, + { 96, 128, 128, 65, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3e\xe9\xf3\x43\x0f\x3e\x80\x3c\x0a\x46\xb7\xa8\x4c\xd8\x03\xde", + "\x3d\x6c\x5e\x67\x42\x0b\xd7\x5a\xb1\x35\x06\x73\x96\xf1\x93\x9b", 0, + 0, 128 }, + { 96, 192, 128, 66, + "\x50\x19\xeb\x9f\xef\x82\xe5\x75\x0b\x63\x17\x58\xf0\x21\x3e\x3e\x5f" + "\xcc\xa1\x27\x48\xb4\x0e\xb4", + "\xff\x0d\xdb\x0a\x0d\x7b\x36\xd2\x19\xda\x12\xb5", "", "", "", + "\x2d\x03\xf5\xe8\xc2\xe5\xa1\xb4\x3c\x77\x08\xdd\x0c\xbf\x0a\xcd", 1, + 0, 0 }, + { 96, 192, 128, 67, + "\x21\x21\x8a\xf7\x90\x42\x8f\x80\x24\xd3\xe7\xe1\x42\x8c\x9f\xcf\x57" + "\x8c\x21\x66\x36\xd6\x0e\x73", + "\x34\x04\x7b\xc3\x9b\x9c\x60\x83\x84\xdf\xf5\xb8", "", "\xe3", + "\x39", + "\x74\x50\xf5\x5a\x21\xe7\x17\xa1\x10\x6e\xa0\xc1\x18\x71\xf5\xff", 1, + 0, 8 }, + { 96, 192, 128, 68, + "\x3a\x8b\xf5\x43\xc4\x80\x92\x56\x32\x11\x82\x45\xbc\xbf\x5d\x01\x52" + "\x2b\x98\x7a\x31\xa3\x3d\xa3", + "\x4e\xbc\x13\xcf\x46\x36\xcc\x7c\x45\xe5\x60\xa7", "", + "\x53\xfc\x72\xe7\x1b\x59\xee\xb3", + "\x5d\x24\xd0\xe1\xa2\xee\x9f\xce", + "\xe7\x70\xf9\x1a\x51\xf5\xb5\x87\xa4\x4c\xd9\xd3\x63\x4b\x97\x06", 1, + 0, 64 }, + { 96, 192, 128, 69, + "\xbc\xb6\xbc\x5e\xe6\x74\x3d\xf1\x39\x6a\x34\x63\x93\x27\xb2\x58\x09" + "\xec\x9c\x81\xdd\x6a\x0c\x0e", + "\xbe\x03\x26\xd2\x3b\xdc\x2c\x64\x64\x8d\x13\xf4", "", + "\x80\x47\x4a\x3a\x3b\x80\x95\x60\xee\xe2\xce\x7a\x7a\x33\xea\x07", + "\xdb\x58\x93\xdc\x8d\xa3\x36\x61\x4a\xa0\xff\x76\x8d\x46\x95\x35", + "\x90\x2c\x2a\x83\x25\xcb\x55\xbc\x95\xf0\xe1\x3c\xaf\xe9\xaa\x8d", 1, + 0, 128 }, + { 96, 192, 128, 70, + "\x5e\x1d\x28\x21\x3e\x09\x25\x36\x52\x5b\xba\xe0\x9e\x21\x4a\xf4\xc8" + "\x91\xe2\x02\xb2\xb4\xfa\x4f", + "\xb6\xbe\x6c\xd0\x68\x12\x35\xd8\x26\xaa\x28\xea", "", + "\x53\xd5\x94\x33\xa7\xdb\x7f\x41\xb3\x1c\xcb\x6d\x4a\x2d\x78\x99" + "\x65", + "\x20\xeb\xc1\xf5\xa2\xc9\xf8\x8d\x1c\xdb\x18\x2e\x81\x32\x9c\xc0" + "\x3e", + "\xbc\x54\x5e\x91\xc9\x74\xa7\x44\xba\xea\xb2\xdd\x8c\xe6\x09\x60", 1, + 0, 136 }, + { 96, 192, 128, 71, + "\x7f\x67\x2d\x85\xe1\x51\xaa\x49\x0b\xc0\xee\xc8\xf6\x6b\x5e\x5b\xee" + "\x74\xaf\x11\x64\x2b\xe3\xff", + "\xb0\x22\x06\x70\x48\x50\x5b\x20\x94\x62\x16\xef", "", + "\xef\x64\x12\xc7\x2b\x03\xc6\x43\xfa\x02\x56\x5a\x0a\xe2\x37\x8a\x93" + "\x11\xc1\x1a\x84\x06\x5f\x80", + "\x1b\xc9\x90\x29\xa0\x9c\x08\x01\x40\x60\x8a\x62\xc3\x3b\xc7\xae\x69" + "\xff\x81\x1f\xef\xb2\x0b\x2d", + "\x80\xf0\x91\x03\x48\x5f\x95\xf8\x6a\xd1\xf0\x72\xa2\x14\xc5\x5e", 1, + 0, 192 }, + { 96, 192, 128, 72, + "\xf7\xac\xe6\xc3\xc1\x0c\x3f\xf9\x77\xfe\xbe\x7d\xc8\x82\xb8\xe7\x79" + "\xef\x3a\x17\xef\x93\x24\xa8", + "\x6e\x2b\xa2\x83\x3c\x5d\xce\x6b\xec\xc4\xf6\xd8", "", + "\x2e\x11\xe4\x19\x51\xc2\x04\x60\xc7\x68\xb0\xd7\x1a\xd5\x6e\x77\xbe" + 
"\xc0\x5e\x04\x78\xf9\x9d\x5b\x62\xe7\x99\xf7\x32\xe4\x67", + "\xb3\xce\xc7\x77\xf8\x07\xd1\x6b\x69\x71\x63\xd0\xc6\xa4\x5d\x00\x29" + "\x36\x71\x4d\x60\x0a\x15\x6d\x7e\x53\x65\xd1\xaa\xca\xd0", + "\x11\xa5\x6e\xdb\xe2\xfb\xbb\xb2\xb0\x11\xc4\x3a\x62\x00\x08\x30", 1, + 0, 248 }, + { 96, 192, 128, 73, + "\xa9\x54\x1a\x96\xb8\x6d\x32\xb4\x52\x09\x2e\x8b\x92\x09\x9e\xa3\xf4" + "\x5f\x98\xca\x05\xce\x69\x2b", + "\x90\x15\xb4\xbc\xd6\x98\x90\x83\x04\x6b\xe8\x6d", "", + "\x9d\x35\x9a\xad\x3f\xf5\xce\x37\x35\xa8\xcf\xfe\x4f\x08\x71\x14\xd4" + "\xd6\xc5\xe0\x1d\xce\xb1\x96\x9f\x40\xc8\xe0\xdb\x6b\xb9\x02\x81", + "\x0a\x6b\x84\xde\x44\xcc\xe1\x42\x55\xa9\xcb\x19\x16\x96\x95\xcf\x46" + "\x60\x48\x9f\x1e\x3f\x60\x53\x34\x35\x58\x28\xc5\xc0\x9f\xb3\x0e", + "\xa1\x9a\x60\x10\x5f\xc7\xa0\x3b\xe1\x78\x3f\x55\x8e\xa2\x3e\x9c", 1, + 0, 264 }, + { 96, 192, 128, 74, + "\x23\x91\x95\xb5\x86\x68\xeb\x89\x63\x6b\x1e\xc2\xb3\x31\x33\x69\x46" + "\x36\x9f\xc6\xc8\x7b\x88\x49", + "\x14\xa6\x28\x1a\x43\xb4\xeb\x05\x6a\x67\xb9\xe6", "", + "\x39\xd8\x73\xd4\xca\xd7\x1c\xb2\x52\x78\x4b\xd1\x46\x48\xa4\x94\xce" + "\xb5\x17\xeb\x9e\x3e\x6f\x32\xd1\x9b\xd1\x8d\xfa\xf8\x77\xc7\xae\xc2" + "\x21\x03\xd2\x42\x99\x3e\xd7\xba\xb1\x23\x32\x61\x10\xdf\xdb\x72\x29" + "\x14\x3a\x0c\x60\x1e\x16\xaa\x4e\xcd\xde\x80\x8c\xd8\x3b\xb2", + "\x8f\xce\xa9\xe2\xfa\xa5\x23\x29\x84\x72\xb5\x58\x3e\x35\x6d\x18\x75" + "\x39\x3e\xa3\xbc\x1b\x4f\x8e\xa4\xaa\xd5\x97\x14\x7a\x7c\xa9\x4e\x26" + "\x09\xfe\x6b\xf0\xab\x86\x1e\x06\x31\xa3\x12\x4e\xb1\x5d\x0d\xe2\x65" + "\xef\x11\xa3\x3e\x45\x07\xe3\x07\x70\xce\x37\xbb\xb4\xb6\xc3", + "\xa4\x45\x68\x28\xb4\x9c\xdb\xf8\xf3\xc2\x00\x42\x9c\x33\x9a\x89", 1, + 0, 528 }, + { 96, 192, 128, 75, + "\x03\xb4\x67\x58\x51\xb7\x8b\x69\xfb\x7b\x55\x89\x88\x2e\x71\x8b\x07" + "\x5e\x9a\x54\x02\xb5\x20\xfb", + "\xc4\xca\x2d\x67\x8e\x51\x74\x2e\xc5\xe5\x60\xab", + "\x91\xe1\x0a\xc5\x63\x6f\xe9\x9b", "", "", + "\x93\x7f\x15\xff\xd1\xcc\xd6\x45\xd9\xc7\xcd\xd6\x67\x73\x11\xcd", 1, + 64, 0 }, + { 96, 192, 128, 76, + "\x40\x0e\xec\x9b\x06\xa8\x0a\x84\x03\xd4\x5d\xae\x5d\x58\xcc\x91\x7b" + "\xc8\x54\xf5\x1c\xd3\xce\x0d", + "\x44\x7d\xd0\x9a\x23\x70\x8f\x3b\x66\x64\xe1\x5b", + "\x73\x20\x36\x7d\x5b\x07\x05\x59", + "\xb7\x84\x92\x5a\x69\x5f\x0e\xd1\x4c\xa4\x02\x49\xc1\xfd\x5d\x1a", + "\x91\x2d\x05\xc4\x02\x38\x39\x50\xe1\xc5\xa5\x18\x8e\x62\x41\xd8", + "\xab\x30\x9b\xe2\xc0\x5c\x94\x1f\xbf\xb3\x38\xba\x06\x4b\x19\xa1", 1, + 64, 128 }, + { 96, 192, 128, 77, + "\xe2\x58\xb1\x17\xc2\xfd\xd7\x55\x87\xf0\x7b\x40\x0a\xe4\xaf\x3e\x67" + "\x3a\x51\xdc\xf7\x61\xe4\xca", + "\x5e\xad\x03\xaa\x8c\x72\x0d\x21\xb7\x70\x75\xdb", + "\x27\x70\x29\x50\x96\x0b\x9c\x79", + "\xaf\xe9\x61\x13\xa6\x84\xbc\x52\xa6\xd9\x62\xcf\x27\x24\xf6\x79" + "\x1d", + "\x78\x30\x44\x6f\x33\x30\x57\xd9\x96\xa1\xa7\x9b\x21\xc6\x8d\x8b" + "\x43", + "\x72\xac\x47\x8a\x66\xf5\x63\x75\x63\xf1\xf1\x2c\x1d\x02\x67\xca", 1, + 64, 136 }, + { 96, 192, 128, 78, + "\x6c\x2a\x03\xe9\xed\x8e\x42\x1e\x07\xdf\xc3\x6b\x99\xc0\xd0\xdc\x9b" + "\xb8\x74\xea\x3a\xf8\xa8\xb7", + "\x8f\x01\x5e\xce\x4e\x03\x38\xe7\x82\xfa\x3a\x2f", "\xf1", "", "", + "\x92\x26\xc4\xc3\x91\x66\xdf\x5a\xf4\xe0\xc9\x1b\x64\xb4\x63\xa2", 1, + 8, 0 }, + { 96, 192, 128, 79, + "\x14\x86\xb5\xf1\x50\x52\x4c\xc6\x01\xb2\xea\x7d\xa4\x7d\x7c\x8a\xfb" + "\x06\xd6\x42\x0d\xd3\x3f\x8d", + "\x93\x07\x31\x7d\x2f\x42\x3b\x57\xb3\x72\x0f\x8f", "\x3c\x09", "", + "", + "\x36\x00\xe0\x6d\xef\x58\x5e\x20\x12\x35\x0e\xfe\x04\x78\x26\xe9", 1, + 16, 0 }, + { 96, 192, 128, 80, + 
"\xe0\x9c\x83\xff\x0f\xc0\xb6\xa3\x0f\x93\x8e\x50\xe2\x66\x85\x24\x7e" + "\x9d\xed\x2e\x7d\x6d\xbc\x7c", + "\x3e\xc6\x1e\x9c\x16\x6d\x67\x83\x99\x23\x91\x52", + "\xc1\xa1\x3c\x74\xc1\x1c\xb8", "", "", + "\x9f\x5b\x3e\x48\xdd\xda\x9a\xf3\x75\x15\x01\x50\x9c\x94\x0a\xc1", 1, + 56, 0 }, + { 96, 192, 128, 81, + "\xd0\xc6\x88\xc5\x25\x80\xd8\xf8\x00\xac\xa3\x4f\xa7\x4c\xec\x48\x7b" + "\x67\x1a\xaf\x85\x02\x7b\x9b", + "\x9c\x46\x0a\xbf\x56\x29\x2d\xcb\x1b\x35\xb3\xb4", + "\x35\x96\xce\x98\x9f\xf9\x75\xf3\x25\x0e\x6c\x9e\xce\xd2\x5b", "", + "", + "\xcd\xa4\x34\xc2\x29\xe5\x4b\xf9\xbf\xd5\x4c\x8d\x8c\xe4\x73\x0c", 1, + 120, 0 }, + { 96, 192, 128, 82, + "\x18\x92\xdb\xd7\xe6\xb3\xfe\x18\xeb\xdc\x81\xbb\x27\x1a\xb0\x3a\x8f" + "\x32\xaf\x04\xf1\x33\x00\xd2", + "\x0e\x87\x2d\xe5\x8a\xd1\x0d\xa2\x48\x40\x3f\x21", + "\xe8\xb1\xc6\xcc\x6c\x45\x10\x5e\x0c\x32\x58\x7a\x0d\xe3\x69\xe3", + "", "", + "\x0c\x7e\x14\xdc\x49\xa8\x1e\x6b\xe1\x23\xb9\xcf\xbb\x28\x17\x87", 1, + 128, 0 }, + { 96, 192, 128, 83, + "\xef\x79\x92\xb0\xf8\xec\x7a\x10\x1d\x34\x00\x01\x00\xeb\x7d\x9b\x2e" + "\xae\xe3\x33\xd0\xaa\x2f\xf8", + "\x31\x6d\x38\xa9\x00\x19\xb9\xa3\x7a\xd0\x80\xb7", + "\x10\x14\xed\x78\x89\x69\x4c\xff\x76\x78\x76\xc0\x69\xae\x1f\x91" + "\x85", + "", "", + "\x17\xff\x8e\x79\x97\x60\x55\x8f\x1d\x4c\xf8\x92\x7d\x5e\xc6\x99", 1, + 136, 0 }, + { 96, 192, 128, 84, + "\xe4\x57\x08\x15\xa1\x49\x59\x9d\x13\xbd\x8d\xca\xad\xbe\xc9\x3c\xf0" + "\x90\x19\xba\xa2\xd4\x07\x0b", + "\x5a\x31\xa3\xa0\x26\x78\x6c\x49\xdb\x9d\x09\x58", + "\x90\xa7\x35\x75\x19\xe3\x5e\x8d\xbd\x89\x76\xd4\xb3\x67\x10\xff\xc1" + "\xeb\x0d\x9a\x4a\xe7\xd5\x31\x5a\xe7\x32\x4e\xb1\xd1\x8c", + "", "", + "\x06\xcc\xbf\x76\x7e\x0a\x63\xc8\x9d\x50\xb8\x14\x11\x87\xa5\x55", 1, + 248, 0 }, + { 96, 192, 128, 85, + "\x9f\x0c\x07\x6b\x06\x30\xca\xa1\x0e\x7b\xdc\x07\xdc\xdc\x89\xa2\x70" + "\xf0\x39\x30\x99\x7a\xde\x0b", + "\x3f\x5d\xef\x08\x80\xb8\x89\xdb\x0b\x3f\x2b\xf0", + "\xf5\x36\x8b\x9d\x8f\xdc\x1e\xfa\xb2\xb1\x7a\x45\xf4\x60\x42\x45\x98" + "\x35\x72\xf8\xc1\x67\xaa\x31\xfa\x3f\x53\x0f\x1c\x5e\x17\x81", + "", "", + "\x33\x4c\x13\x25\xfa\x96\x9a\x07\x17\x90\x11\xd2\xf8\x61\x36\x36", 1, + 256, 0 }, + { 96, 192, 128, 86, + "\x80\x3f\x4e\xbb\xed\x8b\x1a\x4f\x34\x87\x13\x46\x1c\x0e\xb0\xbd\x30" + "\xca\xec\x55\xa1\xe7\x16\x28", + "\xb0\x5e\xc4\x9b\xc4\x05\xeb\x7e\x97\x29\x4f\x19", + "\xdf\xb7\x1f\x25\xe7\xf1\x1c\xca\x17\x70\x2e\xb8\x9a\x18\x4e\x57\xf2" + "\x2e\x4e\xa4\x74\x1f\xf6\x03\xab\xc9\x01\xfa\x02\x6b\xde\x7c\xe1\x10" + "\x7e\x2f\xfb\xa0\xa0\xa0\xf2\x4f\x47\xee\x62\x78\x32\xee\x5b\xc2\x19" + "\x2c\x18\x84\x56\x30\x00\x99\x10\xc0\x7f\x8d\x0a\xb4\x51", + "", "", + "\x2b\x71\xd2\xa8\x1f\x4c\x6e\xa2\x67\xa9\x86\x50\x94\xfe\x20\xe9", 1, + 520, 0 }, + { 96, 192, 128, 87, + "\xda\x6e\x3d\xf6\x73\x5f\x63\x2e\x03\x5a\xb8\xf1\x0c\x37\xb5\xf0\x0a" + "\x40\xe1\x8b\x17\x77\x8a\x85", + "\x80\x76\x5e\xac\x22\x81\x96\x9c\xba\x56\x9c\xe7", "\x6d", + "\x3d\x87\x65\xdf\x3a\x06\xf5\x24\x8b\x1a\xaa\x54\x12\x3b\x86\xbd", + "\xa4\x28\x63\xb9\x5a\xbe\xa3\x91\x94\x0a\xda\xc7\xfe\x0c\x41\x43", + "\x12\x2b\x46\xf8\x1a\x0b\x6e\x92\xcd\xa1\x95\x0c\xe6\xfe\x02\x6e", 1, + 8, 128 }, + { 96, 192, 128, 88, + "\x54\xe1\x71\xcf\x90\x72\x9c\x77\xd5\x00\xe1\xd2\x53\x33\x60\xe8\x41" + "\xe2\x60\x89\x45\x76\xb1\x29", + "\x8b\x1e\x57\xf9\x8e\xa4\xe7\x7d\xea\xe4\x57\x6c", "\x27\xff", + "\x2d\x71\x63\x66\xf2\x87\x38\x60\xd5\x04\x37\x00\xf1\xe9\xa9\xd8", + "\x34\x18\x15\x4a\xfc\x95\xd0\x55\x69\xee\x6f\xe4\xdb\x82\xeb\x6e", + "\x0a\x73\x4b\xac\x17\x84\x3b\x85\x73\xfb\xfe\xba\x4a\x5f\xc5\xd4", 1, + 16, 
128 }, + { 96, 192, 128, 89, + "\x99\x2c\xab\xb6\x4f\x12\x21\x0c\x8b\x0b\x14\xd7\x3d\x39\x31\x7d\xdb" + "\x2b\x8a\x62\x8a\xc3\x51\x06", + "\x57\xc8\x78\x6e\x66\xd8\xb0\xbe\xc3\x36\x04\xb0", + "\x06\x8d\x94\x0e\x26\xb6\x78", + "\x9e\x76\x92\xf1\x21\x32\xcd\xd5\x3f\x50\x53\x16\x51\x41\x7b\xd2", + "\xba\x34\x09\x26\xde\x50\x0d\x01\xae\x3d\xff\x2e\x90\x56\x08\x16", + "\xdb\x9d\xba\x31\xa0\x30\x19\xce\x88\xce\x74\x1c\x03\x94\x06\x60", 1, + 56, 128 }, + { 96, 192, 128, 90, + "\x89\xbe\x64\x95\xc9\x17\xbd\x7a\xf0\xa3\xb7\xa6\xc8\xa4\xc6\xb5\xca" + "\xde\x76\x6d\x32\xde\x36\x04", + "\xf4\x7f\x6b\x65\xd6\x60\xf1\x0c\x04\x3e\xa6\x41", + "\xf6\xa2\x8d\x27\x68\x6a\xdc\xbf\x9f\xf8\xab\x80\xec\xc1\xc1", + "\xc3\xc5\x0f\x4b\x38\xaa\x37\x51\xf4\x91\x0a\x44\x67\x5d\x37\xe5", + "\xb1\x9b\xe8\xba\x6e\xbc\xdb\x74\xd3\x3c\x2d\xbd\xfd\xc0\x20\x74", + "\x93\xbb\x71\x70\x61\x33\x0a\xcc\xc7\x18\xb4\xa8\xef\x05\xab\x20", 1, + 120, 128 }, + { 96, 192, 128, 91, + "\x8e\xa9\x54\x55\x24\x17\x51\x6c\x97\x2e\x43\x11\x69\x2d\x65\x8d\xd7" + "\xac\x9a\x7f\xd6\xf3\xd0\x2c", + "\xea\x16\xc1\x04\xbc\xe5\xb7\xed\xd5\xa2\x5a\x46", + "\x7d\x4d\x7c\x27\x3a\x9a\xa0\xf3\x5d\x1f\x91\x57\x01\x41\xdb\x54", + "\x0f\xc6\xcc\x80\x0a\x57\x86\xe6\x3a\x45\x46\xfb\x33\x88\x7a\xf9", + "\xeb\x45\xae\x29\x80\xd9\x96\xf5\x02\x35\x93\xf7\x62\xd3\x70\x51", + "\x4a\xcc\x06\x24\x2c\xc4\xec\x4b\xa3\x6e\x8b\xfb\xe8\x4f\x3d\x5d", 1, + 128, 128 }, + { 96, 192, 128, 92, + "\xdf\xa7\xef\x72\x30\x2d\xfb\xcd\x26\x48\xb8\x89\x58\xfe\x0f\x04\x9f" + "\x1d\x60\x14\x3d\x86\xe3\x95", + "\xf3\x72\x3b\x9a\xb7\x28\xc9\x9b\xbd\x6f\x23\x04", + "\xcf\x75\xdd\x45\x36\xd0\x0f\x11\xed\xa4\x0d\xb4\xd2\x52\xe1\x72" + "\xe3", + "\x20\xb3\x88\x32\x44\x30\x0a\x82\x09\x4d\xdb\x9b\x3d\x1e\xfb\x81", + "\x5a\x83\x74\x39\x1a\xdc\x22\xe8\xc0\x66\x55\x7d\x9f\xf5\x86\xcf", + "\xd4\xa1\x11\xd6\x11\xef\xa6\x3f\x0c\x3f\x08\xb2\xfb\xb3\xb0\xac", 1, + 136, 128 }, + { 96, 192, 128, 93, + "\xb6\x3b\x52\xd1\xd1\x59\xa1\x75\x96\xdf\xbd\x9b\xe5\xc5\x08\x86\x99" + "\xd9\x4b\x9c\x5d\x95\xc2\x2d", + "\xb1\xbe\xee\x8a\xfb\x00\xe0\x1a\x9c\xbf\x59\x73", + "\x90\x20\xb5\x62\x56\xbc\xb0\x2c\x69\x07\x20\xe3\x23\x9d\x32\x5d\x25" + "\x9f\x18\x98\xea\x05\x17\x0e\x31\x5c\x14\x49\x60\xd2\x63", + "\xdf\xb4\x17\x90\x3c\x6d\x48\x27\x50\x0a\x3e\xca\x21\x84\xfe\x1c", + "\xe8\x44\x71\x60\x1f\xb8\xb4\xdb\xbd\xc8\x0d\x56\xe3\x7f\x69\xb8", + "\x52\x08\x9c\xb4\xe6\xc1\x1b\xd7\x64\xae\x7d\x44\x38\xcf\xd1\xf8", 1, + 248, 128 }, + { 96, 192, 128, 94, + "\xaa\xf6\x40\x23\xf5\x76\x2c\x4a\x54\xc3\xeb\x0e\xf3\xbb\x2e\xbd\x23" + "\xce\xaf\x38\xb3\xe0\x28\x5f", + "\x2a\x56\x23\x5f\x07\x9d\x53\xa3\x10\x07\x02\xd4", + "\x2d\x68\x8b\x3b\x33\x11\x77\x05\x79\xdd\x06\x46\x14\xa7\xd1\x1b\x17" + "\xa1\x69\x53\xbd\x97\x00\x75\x9b\x35\xa5\x03\x1a\x2d\x8b\xd0", + "\x34\xae\xc5\xa6\x57\x95\xcb\xee\xda\xc2\xee\x6f\xd7\x76\x5c\x6f", + "\xd1\x3c\xce\xb8\x37\x6e\xb4\x23\xf5\xb2\xea\x25\x7c\x11\x8f\xb5", + "\x7b\x6a\x3c\x76\xfd\x52\x93\x0a\x98\x9f\x03\x4e\x5d\xfe\x07\x4b", 1, + 256, 128 }, + { 96, 192, 128, 95, + "\xb5\x21\xe4\xfa\x92\xeb\x46\xfd\x49\x16\xc7\x1e\x3f\x99\x90\x27\xaa" + "\xf2\x14\x66\xfa\xd5\xf9\x6e", + "\xb2\xb4\x2f\xa6\x0a\x2a\x80\x41\x2e\xec\xc7\xfc", + "\xb4\x97\x22\x1c\x7f\xad\x55\xa0\x6e\xa9\xf5\x6f\x39\xb3\x60\x93\x30" + "\xed\xc4\x67\xb7\x9c\xbf\x33\x53\x63\x6b\xce\xb7\x84\xb6\x0e\xc6\x3a" + "\x83\x60\x74\xce\xb4\x86\x24\xa4\xa4\x1a\xc0\x49\x6d\x5a\xdf\xe2\x98" + "\x93\x13\xd7\x41\x2b\x9c\x2d\x89\xca\xfd\x9c\xd5\xa7\x34", + "\xa3\xa8\x0a\xc0\xf5\xb4\x65\x97\xa7\xf4\xa5\x83\xdd\xa0\x21\x24", + 
"\x99\x4b\xb3\x44\x70\xd4\xdd\xb7\xbb\x7a\x3c\x3a\xbb\x5d\xa3\xc5", + "\x02\x55\x0e\x2b\x27\x8c\x72\x36\x72\xa0\x1e\x4b\x6a\x46\xaf\xc7", 1, + 520, 128 }, + { 96, 192, 128, 96, + "\x77\x57\x75\x4a\xec\xcf\x85\xc9\x1e\x48\xe4\xd4\x97\x0d\x4d\x62\xda" + "\xe9\x4c\xf4\x4f\x9f\xf0\x26", + "\x3d\x92\x81\xc4\xac\xfc\x72\x38\x73\x46\xfd\x92", "", "", "", + "\xda\x9a\xe6\x0a\x12\xaa\x6f\x92\x6c\xf4\x6d\x2a\x33\x5f\xaa\xc7", 1, + 0, 0 }, + { 96, 192, 128, 97, + "\xa0\xca\xc7\xe8\x3c\x7e\xba\x22\x36\x52\x56\xcb\x8f\x23\x70\x39\xb9" + "\x4f\x1a\x26\x92\x63\x64\x8e", + "\x2d\x2a\x5c\x8b\x17\x21\x2d\x4c\x44\xce\xd4\x59", + "\x8b\x38\x8e\x2e\x72\x25\xc0\x87\x30\x00\x42\xf6\x02\x4a\x11\x1f", + "", "", + "\x84\x04\x34\x98\xce\x07\xd8\x74\x23\x30\xc6\x05\xd9\x6d\x96\x6c", 1, + 128, 0 }, + { 96, 192, 128, 98, + "\xfa\x07\xaa\x39\x32\xb9\x01\x69\x62\x69\xc8\xf8\xbf\x56\x66\x2f\x82" + "\xdf\xf2\x95\x7a\x4a\xca\x35", + "\xec\x1b\x6d\x7f\x09\x7a\x2c\xad\x8c\xbc\x9f\xe9", "", + "\xd9\xa6\x89\x79\x3c\x94\x79\x68\xf0\x7d\x4b\xa2\xeb\x1c\x25\xeb", + "\xa3\x22\xf7\x58\x00\xfc\xaf\xf6\x91\x25\x17\x62\xed\x39\xfb\x39", + "\x8a\x73\x48\x92\x21\x33\x83\xac\x54\xdc\x2c\x1f\x48\x84\x19\x38", 1, + 0, 128 }, + { 96, 192, 128, 99, + "\xf9\x97\xa7\x9b\x63\xb1\xcf\x64\x14\x83\x70\x60\x97\xff\x4a\xbe\xeb" + "\xa1\x39\x62\xdb\x05\x62\x06", + "\xba\x95\x38\xad\x15\x75\xa1\xdf\x78\x79\x78\x2b", + "\xaa\x13\xc1\x09\xb2\xf5\x7f\x70\x0a\x89\x93\x1d\xe7\x5b\x70\x80", + "\xf3\x76\x94\x34\x59\xb6\x04\x1e\xd5\x23\x2d\x7b\x9f\xb5\xe9\xe6", + "\x98\x2b\x14\xf6\x64\x82\xd0\xc9\x83\x71\xe0\x80\x78\xef\xa0\x12", + "\x14\xf1\xb2\xb2\x45\x75\xa1\xa3\x32\x06\xac\xdf\x50\x0e\x9d\x46", 1, + 128, 128 }, + { 96, 192, 128, 100, + "\xd7\xc6\xea\x0a\x28\x5a\x5d\x8c\x59\x64\x77\x30\x80\x48\x89\x67\xe7" + "\xe6\x59\x35\x89\x0c\x32\x65", + "\xf5\x11\xd1\x6e\x97\x2e\x13\x8d\x5a\xe8\xdd\xac", "", + "\x50\x34\xfa\x6d\xa3\xa9\xee\x38\x0b\xe7\xe8\xd0\x26\x05\xac\x20" + "\x23", + "\x34\x50\xa9\x92\xa6\xfb\xce\x9c\xe2\x9f\x6c\x4f\x9f\x41\xc3\x6e" + "\xf6", + "\xed\x6c\xa1\xff\x3e\xa1\xa7\xca\x88\x19\x50\x11\x39\xf8\xa0\xb8", 1, + 0, 136 }, + { 96, 192, 128, 101, + "\xb2\x93\x46\xa9\x5c\x3b\x65\x3c\x9b\xed\x02\x3d\xf2\xe0\x3b\x6d\xe4" + "\x5b\x8d\xe1\xa4\x06\x7d\x86", + "\xc5\xb4\x5d\xf3\xa5\xbf\x4e\xf5\x39\xc3\xdb\xd8", + "\x40\x40\x59\x18\x9f\x1e\xaf\x31\xb2\xe5\x05\xfe\xc0\x8c\x70\x53", + "\x22\xe6\x28\x1f\xba\x3e\x5b\x05\x68\x71\xa9\x8d\xd2\xef\x0e\x16" + "\x4d", + "\x6c\x26\x39\x28\x99\xe3\x65\x45\x00\x54\xbf\x0a\xb3\x3f\x98\x3f" + "\x27", + "\xc4\x72\x40\xbb\x8d\x1d\xbb\x68\x7f\xab\x77\x7f\x72\xad\xbd\x2a", 1, + 128, 136 }, + { 96, 192, 128, 102, + "\xa6\x1e\xd3\xb8\x1d\x78\x56\x06\x02\x77\x74\x07\x75\x9b\x1f\x2c\xa3" + "\x4c\xd7\x0c\x6b\x57\x79\x1b", + "\x07\xb8\x24\x97\xb8\x15\xd1\x61\x82\x48\x10\x45", "", + "\xdd\xc1\x86\x2e\x35\x31\x62\x2e\x69\x83\x22\xf0\xb1\xca\x6d\x22\x22" + "\x31\xef\x14\xdb\xee\xa3\x36\x79\xd3\x1c\x48\x77\x7c\x88", + "\xa0\xc1\x52\x6c\x88\xdc\xc2\x65\xf7\x5d\x4e\xf9\xb2\xa0\x00\xfb\x3c" + "\xce\x9e\x5d\x99\x4c\x47\x2c\x46\xbf\xac\x38\x21\xd6\x11", + "\x9a\xa6\xcd\xb8\x5b\x12\x6e\x1f\x21\xd0\x66\xa3\xc0\x5e\x82\xf6", 1, + 0, 248 }, + { 96, 192, 128, 103, + "\x49\xf3\x3b\xc3\xc1\xa4\x0e\x1c\xa3\xb5\x6a\x49\x9e\x4c\x91\x37\xc1" + "\x48\xd1\x25\x61\x55\xfd\xb6", + "\x47\xbc\x33\xd9\x13\x49\x05\x68\x38\xb6\x24\x74", + "\x44\x9c\x8c\xbb\x9a\x67\xad\xb0\x3f\x60\x64\x6e\x5b\x90\x46\x20", + "\xa9\x20\xb4\xfe\xa9\x08\xb1\x77\x1d\x58\xd4\xc1\x08\x83\x8f\x3a\xf7" + "\xb8\x41\x54\x97\x06\x3d\xd9\x69\x1a\x55\x23\x44\xd6\x42", + 
"\x67\xad\xef\x99\x61\x1f\x34\x1d\x14\xea\x27\xe7\x2d\xa9\xb6\x58\xc9" + "\xa7\x9e\x3b\x32\x8e\x79\x75\x8c\x9d\x34\xdb\x0b\xed\x06", + "\xb2\xa4\x4d\x0f\xc9\x46\x06\xc4\xe2\xb6\xc3\x9b\x24\x2b\x3a\xca", 1, + 128, 248 }, + { 96, 192, 128, 104, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x74\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 105, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x77\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 106, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\xf5\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 107, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x54\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 108, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\x65\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 109, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb4\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 110, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb7\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 111, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\x36\x7a\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 112, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7b\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 113, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\xfa\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 114, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\x9e\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 115, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x94\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 116, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbf\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 117, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbc\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 118, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\x3e\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 119, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x85", 0, + 0, 128 }, + { 96, 192, 128, 120, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + 
"\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x86", 0, + 0, 128 }, + { 96, 192, 128, 121, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\xc4", 0, + 0, 128 }, + { 96, 192, 128, 122, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7a\xbe\x95\x75\xbe\x8e\x47\x04", 0, + 0, 128 }, + { 96, 192, 128, 123, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x74\x55\xdf\xe5\xb5\xe6\xf2\xb6\x7b\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 124, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\x65\xb5\xe6\xf2\x36\x7a\xbe\x95\x75\xbe\x8e\x47\x84", 0, + 0, 128 }, + { 96, 192, 128, 125, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x75\x55\xdf\xe5\xb5\xe6\xf2\x36\x7a\xbe\x95\x75\xbe\x8e\x47\x04", 0, + 0, 128 }, + { 96, 192, 128, 126, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x8a\xaa\x20\x1a\x4a\x19\x0d\x49\x85\x41\x6a\x8a\x41\x71\xb8\x7b", 0, + 0, 128 }, + { 96, 192, 128, 127, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, 128 }, + { 96, 192, 128, 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, 128 }, + { 96, 192, 128, 129, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\xf5\xd5\x5f\x65\x35\x66\x72\x36\xfa\x3e\x15\xf5\x3e\x0e\xc7\x04", 0, + 0, 128 }, + { 96, 192, 128, 130, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xef\x67\x20\x50\x90\x35\x75\x0e\xca\xfa\xc4\xb3\xe4\xfa\xc3\x56", + "\x74\x54\xde\xe4\xb4\xe7\xf3\xb7\x7b\xbf\x94\x74\xbf\x8f\x46\x85", 0, + 0, 128 }, + { 96, 256, 128, 131, + "\x80\xba\x31\x92\xc8\x03\xce\x96\x5e\xa3\x71\xd5\xff\x07\x3c\xf0\xf4" + "\x3b\x6a\x2a\xb5\x76\xb2\x08\x42\x6e\x11\x40\x9c\x09\xb9\xb0", + "\x4d\xa5\xbf\x8d\xfd\x58\x52\xc1\xea\x12\x37\x9d", "", "", "", + "\x6d\xc4\xef\x59\xa7\x3e\xbc\xab\xb5\xe3\x4c\x0d\x34\xd9\xf2\xd7", 1, + 0, 0 }, + { 96, 256, 128, 132, + "\xcc\x56\xb6\x80\x55\x2e\xb7\x50\x08\xf5\x48\x4b\x4c\xb8\x03\xfa\x50" + "\x63\xeb\xd6\xea\xb9\x1f\x6a\xb6\xae\xf4\x91\x6a\x76\x62\x73", + "\x99\xe2\x3e\xc4\x89\x85\xbc\xcd\xee\xab\x60\xf1", "", "\x2a", + "\x06", + "\x85\xca\x1f\x6c\x46\x28\x3c\xb5\xdd\x59\x60\xbd\x34\xa8\xdc\x36", 1, + 0, 8 }, + { 96, 256, 128, 133, + "\x51\xe4\xbf\x2b\xad\x92\xb7\xaf\xf1\xa4\xbc\x05\x55\x0b\xa8\x1d\xf4" + "\xb9\x6f\xab\xf4\x1c\x12\xc7\xb0\x0e\x60\xe4\x8d\xb7\xe1\x52", + "\x4f\x07\xaf\xed\xfd\xc3\xb6\xc2\x36\x18\x23\xd3", "", + "\xbe\x33\x08\xf7\x2a\x2c\x6a\xed", + "\x7f\xd4\xb5\xd3\x09\x52\x35\xa3", + "\xf6\x4d\x10\xb4\x1e\x3d\x69\x28\x74\x19\x47\xc5\x0c\xa0\x39\x1f", 1, + 0, 64 }, + { 96, 256, 128, 134, + "\x59\xd4\xea\xfb\x4d\xe0\xcf\xc7\xd3\xdb\x99\xa8\xf5\x4b\x15\xd7\xb3" + "\x9f\x0a\xcc\x8d\xa6\x97\x63\xb0\x19\xc1\x69\x9f\x87\x67\x4a", + "\x2f\xcb\x1b\x38\xa9\x9e\x71\xb8\x47\x40\xad\x9b", "", + "\x54\x9b\x36\x5a\xf9\x13\xf3\xb0\x81\x13\x1c\xcb\x6b\x82\x55\x88", + "\x48\xdd\x95\x89\xa4\x7e\x63\x8b\xbb\xc2\xaa\x3e\x23\x2f\xa5\x29", + "\xdf\x69\xfb\xe1\xf0\x99\xf0\x13\x4f\xe2\x86\x91\x56\xab\x07\xdb", 1, + 0, 128 }, + { 96, 256, 128, 135, + "\x3b\x24\x58\xd8\x17\x6e\x16\x21\xc0\xcc\x24\xc0\xc0\xe2\x4c\x1e\x80" + "\xd7\x2f\x7e\xe9\x14\x9a\x4b\x16\x61\x76\x62\x96\x16\xd0\x11", + "\x45\xaa\xa3\xe5\xd1\x6d\x2d\x42\xdc\x03\x44\x5d", "", + "\x3f\xf1\x51\x4b\x1c\x50\x39\x15\x91\x8f\x0c\x0c\x31\x09\x4a\x6e" + "\x1f", + "\x20\x22\x97\xd3\x6c\xa6\x2c\x5a\x1d\x44\x37\xfa\xfc\x7b\x50\xe7" + "\x64", + "\x66\x5f\x05\xa9\x6b\xf8\xde\x45\x36\x1d\xbf\x33\xc9\x8b\x09\x05", 1, + 0, 136 }, + { 96, 256, 128, 136, + "\x02\x12\xa8\xde\x50\x07\xed\x87\xb3\x3f\x1a\x70\x90\xb6\x11\x4f\x9e" + "\x08\xce\xfd\x96\x07\xf2\xc2\x76\xbd\xcf\xdb\xc5\xce\x9c\xd7", + "\xe6\xb1\xad\xf2\xfd\x58\xa8\x76\x2c\x65\xf3\x1b", "", + "\x10\xf1\xec\xf9\xc6\x05\x84\x66\x5d\x9a\xe5\xef\xe2\x79\xe7\xf7\x37" + "\x7e\xea\x69\x16\xd2\xb1\x11", + "\x3c\x0a\x0b\x34\x94\xd7\x5c\xcb\xcf\xfa\xa9\x17\xd6\x15\x92\x94\xfd" + "\x93\xe8\xa2\xee\x66\x44\x7a", + "\xb0\xb0\x7b\xa0\x51\x67\xe8\x8c\x24\xe5\x48\x24\xa0\x70\x61\xb9", 1, + 0, 192 }, + { 96, 256, 128, 137, + "\x2e\xb5\x1c\x46\x9a\xa8\xeb\x9e\x6c\x54\xa8\x34\x9b\xae\x50\xa2\x0f" + "\x0e\x38\x27\x11\xbb\xa1\x15\x2c\x42\x4f\x03\xb6\x67\x1d\x71", + "\x04\xa9\xbe\x03\x50\x8a\x5f\x31\x37\x1a\x6f\xd2", "", + "\xb0\x53\x99\x92\x86\xa2\x82\x4f\x42\xcc\x8c\x20\x3a\xb2\x4e\x2c\x97" + "\xa6\x85\xad\xcc\x2a\xd3\x26\x62\x55\x8e\x55\xa5\xc7\x29", + 
"\x0e\x29\xb2\x33\x5b\x90\x07\x58\xfa\xd2\x78\xae\xfb\x9b\x3a\xfa\x07" + "\xfd\x42\xb5\xd2\xf7\xd3\x87\xe3\xea\x0e\x0c\xa4\x16\xe0", + "\x6e\xd7\xe1\xe1\x27\x8c\x40\xce\x2e\x78\x1d\x10\x05\xde\x88\xdd", 1, + 0, 248 }, + { 96, 256, 128, 138, + "\x51\x55\xde\xe9\xaa\xde\x1c\xc6\x1e\xe7\xe3\xf9\x26\x60\xf7\x59\x0f" + "\x5e\x5b\xa8\x2f\x1b\x59\xb8\x50\xe3\xfa\x45\x3d\x2f\xa6\xb3", + "\xc2\x6c\x4b\x3b\xfd\xb9\x7e\xe6\xb0\xf6\x3c\xa1", "", + "\x27\x34\xe0\x8e\xff\x8f\x5c\x4f\x84\xfa\x0c\x20\x7f\x49\xc7\xfd\x78" + "\xaf\x1a\xd5\x12\x3f\xf8\x1f\x83\xf5\x00\xed\xf4\xed\xa0\x9e\xdf", + "\xae\xd2\x4e\x00\x82\xe1\x3e\xe1\x5b\xa0\x50\x6a\x83\x6c\x78\xb9\x7e" + "\xf2\xfa\xa3\xc6\xe8\xeb\x37\x8d\xc6\x4d\xd4\xad\xc9\x98\xad\x68", + "\x5b\xa1\xb4\x8a\x70\x16\x84\xd9\x40\xbe\x24\x4c\x3d\xe9\x38\xd2", 1, + 0, 264 }, + { 96, 256, 128, 139, + "\x95\xe8\x7e\xda\x64\xd0\xdc\x2d\x4e\x85\x10\x30\xc3\xe1\xb2\x7c\xca" + "\x22\x65\xb3\x46\x4c\x2c\x57\x2b\xd8\xfc\x8c\xfb\x28\x2d\x1b", + "\xce\x03\xbb\xb5\x67\x78\xf2\x5d\x45\x28\x35\x0b", "", + "\x2e\x5a\xcc\x19\xac\xb9\x94\x0b\xb7\x4d\x41\x4b\x45\xe7\x13\x86\xa4" + "\x09\xb6\x41\x49\x0b\x13\x94\x93\xd7\xd6\x32\xcb\xf1\x67\x4f\xdf\x25" + "\x11\xc3\xfa\xd6\xc2\x73\x59\xe6\x13\x7b\x4c\xd5\x2e\xfc\x4b\xf8\x71" + "\xe6\x62\x34\x51\x51\x7d\x6a\x3c\x68\x24\x0f\x2a\x79\x91\x6a", + "\x2f\xa0\xbd\x2c\xdf\xe9\xe8\xd7\x91\x9b\x97\xba\x05\xd5\x9e\x33\x89" + "\xfd\xcb\xa7\x28\xec\x12\x4d\x0f\x28\x49\x48\x4f\x63\x5a\x00\x0e\x73" + "\x4c\x8c\x80\x94\x1b\x3d\xa3\x2d\x23\xee\xa5\x1e\xdc\xe8\xd6\x61\x7b" + "\x16\xeb\xb4\x3a\xc8\x11\x3a\x09\x2e\x9d\xda\xa3\x72\x1a\xe9", + "\xa9\x25\x2a\x50\xdd\xbb\xf1\x8c\xd3\xe4\x3a\xdc\x0b\xa1\xa4\x81", 1, + 0, 528 }, + { 96, 256, 128, 140, + "\x7a\x4c\xd7\x59\x17\x2e\x02\xeb\x20\x4d\xb2\xc3\xf5\xc7\x46\x22\x7d" + "\xf5\x84\xfc\x13\x45\x19\x63\x91\xdb\xb9\x57\x7a\x25\x07\x42", + "\xa9\x2e\xf0\xac\x99\x1d\xd5\x16\xa3\xc6\xf6\x89", + "\xbd\x50\x67\x64\xf2\xd2\xc4\x10", "", "", + "\x56\x9d\x82\x69\x18\x92\xe1\x03\xe6\x27\x40\x7c\x95\xf0\x8a\x0e", 1, + 64, 0 }, + { 96, 256, 128, 141, + "\xb9\x07\xa4\x50\x75\x51\x3f\xe8\xa8\x01\x9e\xde\xe3\xf2\x59\x14\x87" + "\xb2\xa0\x30\xb0\x3c\x6e\x1d\x77\x1c\x86\x25\x71\xd2\xea\x1e", + "\x11\x8a\x69\x64\xc2\xd3\xe3\x80\x07\x1f\x52\x66", + "\x03\x45\x85\x62\x1a\xf8\xd7\xff", + "\x55\xa4\x65\x64\x4f\x5b\x65\x09\x28\xcb\xee\x7c\x06\x32\x14\xd6", + "\xab\x01\xf9\x2d\xb4\xf2\x10\xbd\xb5\xed\xaf\x0a\x1b\xd1\x9e\xba", + "\x62\x16\x30\xc5\x05\xd2\x4e\x3b\x29\x29\x49\x77\xd8\xff\xa4\xb4", 1, + 64, 128 }, + { 96, 256, 128, 142, + "\xf6\x0c\x6a\x1b\x62\x57\x25\xf7\x6c\x70\x37\xb4\x8f\xe3\x57\x7f\xa7" + "\xf7\xb8\x7b\x1b\xd5\xa9\x82\x17\x6d\x18\x23\x06\xff\xb8\x70", + "\xf0\x38\x4f\xb8\x76\x12\x14\x10\x63\x3d\x99\x3d", + "\x9a\xaf\x29\x9e\xee\xa7\x8f\x79", + "\x63\x85\x8c\xa3\xe2\xce\x69\x88\x7b\x57\x8a\x3c\x16\x7b\x42\x1c" + "\x9c", + "\xf0\x5e\x29\x0b\xbb\xc6\x19\x27\xfa\x65\x76\x06\x48\xdc\xca\x88" + "\xb0", + "\xb7\x21\xbe\x96\xa6\xb9\x5c\x09\x31\xfb\x24\x3d\xd1\x28\x7c\x70", 1, + 64, 136 }, + { 96, 256, 128, 143, + "\xaf\x1a\xc1\x9b\x3b\x84\xea\xaf\x26\x03\x37\x9c\xdd\x1d\xc1\xae\xe4" + "\xa4\x84\xfd\xc2\xc1\x90\x69\x1a\xfc\xc5\xb7\x62\xf9\xb5\x26", + "\xda\xf9\x8f\x1b\xd4\xc0\x71\xc6\xb1\x00\xf9\xc4", "\x14", "", "", + "\xe7\x72\xcc\x77\x14\xef\xce\xfb\xd1\x15\x08\xde\x48\x9f\x7c\x61", 1, + 8, 0 }, + { 96, 256, 128, 144, + "\x95\xfb\x78\x61\xf8\xc7\x5e\x14\x24\xd8\x40\x1c\xa3\xb3\x45\x2c\x56" + "\x3b\x99\xb0\x02\xc2\x4a\xfa\xd4\xcf\x5e\x82\x8f\x23\x53\xed", + "\xc1\xac\x60\x8d\x1f\xda\x28\xeb\x40\x34\x07\x9c", "\xfb\xc8", "", 
+ "", + "\x54\xf2\xd2\xa5\x4c\xbe\x6e\x95\x9d\x51\xba\x4f\xfa\x8e\x0e\x9e", 1, + 16, 0 }, + { 96, 256, 128, 145, + "\xd3\x0e\x68\x2b\x58\x4d\x41\x60\x88\xa8\x1b\xd6\xf8\x55\x51\xec\x1f" + "\x2e\x11\x89\x38\x8a\x7a\x9c\x05\x21\xe2\x5b\x72\x5f\x7d\xbe", + "\x92\x72\x14\xf6\x43\x36\x70\x1a\x3b\x4d\xb6\x03", + "\x5c\xdb\x70\x70\x08\xb0\x65", "", "", + "\x2c\x6b\x6b\xef\x6b\xa0\x82\xba\xa7\x24\x15\xaa\xaa\x88\x3c\x75", 1, + 56, 0 }, + { 96, 256, 128, 146, + "\xac\x5a\x03\x8c\xae\xa8\x47\x5e\x71\xca\x41\x03\x93\x88\xb8\x61\xf0" + "\x08\xb6\x0c\x62\xff\x2e\x91\x4f\xf0\x83\x39\x86\x2f\xb8\x50", + "\xac\x38\x11\x7b\x39\x6a\xa0\x68\x43\x31\xfe\x74", + "\x02\xd1\xd0\x0a\x8f\x1f\x05\x2c\x08\x35\x75\xeb\x0c\x2a\x09", "", + "", + "\xd2\xe3\xaa\xdf\x9e\xd6\x0d\x91\xda\x5a\x1d\xc1\x21\xdb\xfd\x24", 1, + 120, 0 }, + { 96, 256, 128, 147, + "\xef\xa5\xc5\x05\x3b\x45\x20\x02\xfc\x34\x53\x12\xa3\xbe\x65\x0e\x9f" + "\xf4\x24\x4a\x1e\x44\x55\x7d\x8a\x41\x55\x70\xd2\xdb\xe9\x02", + "\x1a\xbf\xb6\xe3\x18\x99\x5e\xa0\x22\xb1\xd3\x69", + "\x01\xa1\x0b\xc7\x1a\x88\xc9\x4a\x3f\xf9\x24\xfe\x74\xcc\xa2\x29", + "", "", + "\x8e\x97\x80\xcc\xa8\x6d\x3c\xa4\x02\xe1\xdf\xaa\x03\xa7\x2d\x77", 1, + 128, 0 }, + { 96, 256, 128, 148, + "\xbd\xf2\x99\x4d\xd0\xfe\xb3\xc8\x70\xb3\x9f\x52\xbc\xc7\x6d\x28\xee" + "\xd7\x12\xd9\x11\xd9\x56\xc0\x42\xe4\x70\x1c\x4b\x20\xe5\xd5", + "\x0a\x82\x3c\x80\x1d\x05\x7e\x84\x3a\xf7\xca\x55", + "\x03\xf3\xd0\xfc\x23\xdd\x8f\x3e\x20\x88\x4d\x3c\x6f\xff\x26\x08" + "\xb1", + "", "", + "\x36\x88\x6d\x89\xcd\xcc\xe1\x57\x49\x7f\xd0\x9d\xcd\x67\xf3\x29", 1, + 136, 0 }, + { 96, 256, 128, 149, + "\x5a\xe3\xc3\xff\x78\xce\xdc\x19\x2c\xa7\x04\x4b\x3f\x41\xa2\x42\x43" + "\x2b\x0e\xa7\xd3\x48\x8c\x68\x0c\xd4\x22\x51\x5b\x09\x3b\x5b", + "\x7c\x62\x04\x28\x69\xa2\xe5\x97\x01\x48\x16\x14", + "\x7b\xa3\xf3\xbb\xba\x5d\xff\x63\x74\x88\x06\x4b\x6a\x52\x49\xd2\xad" + "\x46\x17\x17\x27\x87\x19\xfe\x71\xfe\xbf\x71\x00\x82\x8e", + "", "", + "\x3f\x8b\x8f\x90\x4c\xeb\x30\x45\x05\xf9\x42\xf3\x6c\xca\xc5\xf8", 1, + 248, 0 }, + { 96, 256, 128, 150, + "\xa8\x10\x8c\x33\xda\x05\x9f\xed\xf6\x02\x2a\x6e\xc4\x95\x27\xbe\x0a" + "\xb6\x40\x02\x47\x2c\xb2\xf7\x03\xb9\x7e\x01\x79\xa3\x43\x12", + "\xbc\xe6\x36\xca\x40\x1a\x88\xfa\xc2\x36\x1e\xd1", + "\x35\xea\xc1\x65\x26\xc2\xf1\x0a\x12\x71\xb3\xa8\xf8\x10\xbb\xf2\x39" + "\xee\xb9\x61\xe1\xa7\xe9\x20\x5b\xea\xe6\x00\x45\xf0\x08\xe6", + "", "", + "\x43\x0c\xcb\xb1\xf7\x5d\xe0\x6b\x71\x63\x7d\x1a\x76\xb3\x5c\xf5", 1, + 256, 0 }, + { 96, 256, 128, 151, + "\x4c\x8e\x95\xa7\x36\x1b\xb3\x78\x49\xb1\x6f\x0e\x5f\x9a\x6e\xab\x87" + "\x39\x13\x39\xd9\x51\xd7\x40\x4f\xf5\xcd\x82\x9c\x08\x7a\x6b", + "\xb4\xa8\xde\x71\xfb\x0f\xe1\x72\xff\x6d\x89\xb6", + "\x94\xd0\x6e\xdc\xfa\x5a\xe3\xd2\x7b\x99\x53\xfe\x5d\xf0\xcc\xa6\x19" + "\x4f\xf6\xdf\xa9\x4d\x82\xb7\x35\x9c\xb3\x87\xdd\x5e\x80\xc6\x18\x6f" + "\xbf\x17\x48\xc1\x92\xbb\x0c\x68\x8e\xbb\x47\x1b\x90\x20\xfe\x8f\xbf" + "\xae\xe3\xde\xe8\x78\x7a\xce\x3c\x20\xdd\x50\xbe\x08\x3e", + "", "", + "\xda\x7f\xe2\xa4\x46\x9e\x39\x1c\x20\x5b\x6d\xe8\xe1\x82\xa9\x14", 1, + 520, 0 }, + { 96, 256, 128, 152, + "\x7d\x00\xb4\x80\x95\xad\xfa\x32\x72\x05\x06\x07\xb2\x64\x18\x50\x02" + "\xba\x99\x95\x7c\x49\x8b\xe0\x22\x77\x0f\x2c\xe2\xf3\x14\x3c", + "\x87\x34\x5f\x10\x55\xfd\x9e\x21\x02\xd5\x06\x56", "\x02", + "\xe5\xcc\xaa\x44\x1b\xc8\x14\x68\x8f\x8f\x6e\x8f\x28\xb5\x00\xb2", + "\x6f\xe8\x78\x84\xb9\x4e\xac\x04\x1c\xb4\xc7\x8c\x23\xf2\x83\xa3", + "\x7e\xeb\x30\x08\x25\x49\x63\x72\x24\xe9\x26\xe5\x27\xb6\x9a\xea", 1, + 8, 128 }, + { 96, 256, 128, 153, + 
"\x64\x32\x71\x7f\x1d\xb8\x5e\x41\xac\x78\x36\xbc\xe2\x51\x85\xa0\x80" + "\xd5\x76\x2b\x9e\x2b\x18\x44\x4b\x6e\xc7\x2c\x3b\xd8\xe4\xdc", + "\x87\xa3\x16\x3e\xc0\x59\x8a\xd9\x5b\x3a\xa7\x13", "\xb6\x48", + "\x02\xcd\xe1\x68\xfb\xa3\xf5\x44\xbb\xd0\x33\x2f\x7a\xde\xad\xa8", + "\xe0\x17\xbf\x1d\xdd\x27\x98\x86\xf7\x54\x53\x65\xf1\x46\x5c\xc7", + "\x6d\xca\xb7\x9d\x1d\xda\xb4\xf3\xad\x8b\x4a\xf7\x23\x18\xeb\x1b", 1, + 16, 128 }, + { 96, 256, 128, 154, + "\xda\xcd\x51\xa8\xa8\xe4\xd5\x90\x5b\x4c\xbb\x94\x7e\xf4\x01\x3e\xb2" + "\x96\x88\x93\x53\xf3\xc9\xee\x35\xf5\x57\x7b\x26\x73\x7a\x51", + "\x3f\xa3\x78\xa1\xbe\xfd\xdd\xd6\x1a\xe6\x8c\xf4", + "\xbb\x5a\x38\x12\xf0\xae\xfd", + "\xe1\x48\x31\x38\x83\xa7\x7d\xa1\x21\x12\x4d\x06\xb1\xc7\x7d\xca", + "\xdd\xf8\xad\xe1\x3d\x69\xf3\x64\x9e\x36\xc6\x69\xd2\x5b\x4d\x81", + "\x01\x86\x15\x57\xd4\x3a\xb0\x14\xc4\xed\xe1\x9f\xcd\x75\x48\xea", 1, + 56, 128 }, + { 96, 256, 128, 155, + "\x8e\x34\xcf\x73\xd2\x45\xa1\x08\x2a\x92\x0b\x86\x36\x4e\xb8\x96\xc4" + "\x94\x64\x67\xbc\xb3\xd5\x89\x29\xfc\xb3\x66\x90\xe6\x39\x4f", + "\x6f\x57\x3a\xa8\x6b\xaa\x49\x2b\xa4\x65\x96\xdf", + "\xbd\x4c\xd0\x2f\xc7\x50\x2b\xbd\xbd\xf6\xc9\xa3\xcb\xe8\xf0", + "\x16\xdd\xd2\x3f\xf5\x3f\x3d\x23\xc0\x63\x34\x48\x70\x40\xeb\x47", + "\xc6\x0d\x2a\x92\xe6\x0a\x1a\x73\xa9\xce\x4b\x22\x69\xe1\x3a\x45", + "\x71\xfa\x66\x5b\x61\x1f\xed\x6e\xf5\xe6\x7e\xe8\x27\xac\x20\x6d", 1, + 120, 128 }, + { 96, 256, 128, 156, + "\xcb\x55\x75\xf5\xc7\xc4\x5c\x91\xcf\x32\x0b\x13\x9f\xb5\x94\x23\x75" + "\x60\xd0\xa3\xe6\xf8\x65\xa6\x7d\x4f\x63\x3f\x2c\x08\xf0\x16", + "\x1a\x65\x18\xf0\x2e\xde\x1d\xa6\x80\x92\x66\xd9", + "\x89\xcc\xe9\xfb\x47\x44\x1d\x07\xe0\x24\x5a\x66\xfe\x8b\x77\x8b", + "\x62\x3b\x78\x50\xc3\x21\xe2\xcf\x0c\x6f\xbc\xc8\xdf\xd1\xaf\xf2", + "\x72\x2a\xc6\xa2\x26\xf4\x9c\x90\xab\x22\x52\x7a\x51\x38\xb4\x01", + "\x2e\x4e\xe9\x97\xc7\x52\x78\x3e\x74\x3b\x36\x6b\xb6\xb3\x50\xa5", 1, + 128, 128 }, + { 96, 256, 128, 157, + "\xa5\x56\x9e\x72\x9a\x69\xb2\x4b\xa6\xe0\xff\x15\xc4\x62\x78\x97\x43" + "\x68\x24\xc9\x41\xe9\xd0\x0b\x2e\x93\xfd\xdc\x4b\xa7\x76\x57", + "\x56\x4d\xee\x49\xab\x00\xd2\x40\xfc\x10\x68\xc3", + "\xd1\x9f\x2d\x98\x90\x95\xf7\xab\x03\xa5\xfd\xe8\x44\x16\xe0\x0c" + "\x0e", + "\x87\xb3\xa4\xd7\xb2\x6d\x8d\x32\x03\xa0\xde\x1d\x64\xef\x82\xe3", + "\x2f\xad\xf1\x6a\xd1\x6a\x21\xc3\x17\xaf\x9d\x0b\xc1\x87\xf1\x36", + "\x88\xca\xf7\x0f\x6b\x5d\x8f\x3e\xf6\xa3\x9d\x1a\xe4\x13\x77\x2b", 1, + 136, 128 }, + { 96, 256, 128, 158, + "\x07\x74\x33\x02\x2a\xb3\x4d\x38\x0f\xc1\x92\xfc\x24\xc2\xed\xc6\x30" + "\x1f\xec\x6f\x24\x44\x2f\x57\x2a\x10\x87\xff\x2e\x05\xb3\x9a", + "\x28\xad\xcb\xc7\x43\x64\xf2\x6d\xd4\xb3\x10\x8b", + "\xe0\x10\x0e\xb1\x16\xcd\xc5\xe2\x2a\x3b\x9f\x9b\x41\x26\xc1\x49\x59" + "\x5e\x75\x10\x7f\x6e\x23\x7c\x69\xe8\x29\x60\x05\x22\x70", + "\x03\xc8\x74\xee\xaa\xa6\xfa\x9f\x0d\xa6\x2c\x75\x8f\xb0\xad\x04", + "\x13\x6f\x04\x9e\xa8\x51\xc6\xdf\xd2\xe8\x73\x12\xd8\x2f\x08\x82", + "\xfd\xf9\xbc\x04\x12\xcf\xca\x2b\x03\x5c\x5a\xe6\x8a\xc6\xda\x79", 1, + 248, 128 }, + { 96, 256, 128, 159, + "\x39\x37\x98\x6a\xf8\x6d\xaf\xc1\xba\x0c\x46\x72\xd8\xab\xc4\x6c\x20" + "\x70\x62\x68\x2d\x9c\x26\x4a\xb0\x6d\x6c\x58\x07\x20\x51\x30", + "\x8d\xf4\xb1\x5a\x88\x8c\x33\x28\x6a\x7b\x76\x51", + "\xba\x44\x6f\x6f\x9a\x0c\xed\x22\x45\x0f\xeb\x10\x73\x7d\x90\x07\xfd" + "\x69\xab\xc1\x9b\x1d\x4d\x90\x49\xa5\x55\x1e\x86\xec\x2b\x37", + "\xdc\x9e\x9e\xaf\x11\xe3\x14\x18\x2d\xf6\xa4\xeb\xa1\x7a\xec\x9c", + "\x2c\xed\x0d\x04\x32\x3d\xb2\x0a\xa8\xb8\xb9\x00\x0d\x2c\x33\xa5", + 
"\x48\xb8\x1c\xb7\x0d\x55\x5b\x7c\x38\x49\x2b\xa2\xc0\xa3\xb1\xb8", 1, + 256, 128 }, + { 96, 256, 128, 160, + "\x3f\x49\x4f\xd8\xf1\xb5\x06\x92\xde\x9c\xe3\x3f\x6d\x45\x1e\xf0\xc5" + "\x8c\x6b\x2c\x6f\xfb\xa3\xb4\x17\x10\xff\x63\xe6\x7e\xda\x68", + "\x9e\x62\x8e\x58\x03\x51\x92\x90\xe6\xb2\xc6\x8a", + "\xd0\x2e\xc8\x92\xd3\xb0\x3e\xac\xb2\xf1\xd8\xa6\x0a\x02\x84\x85\x77" + "\x6a\xf0\xfd\x66\x65\xcb\x6f\x74\xfb\xa5\xef\x89\x7e\x2c\xf5\x4b\x32" + "\xe8\x0b\xdd\xee\xc9\x38\xab\x53\x0b\x45\xed\x97\x12\x34\x80\x4f\xa8" + "\x4a\x19\x1d\xc1\x1a\xe6\x60\xf5\xa8\x66\x2a\x46\x51\xe9", + "\xf2\xc4\x1a\x26\xa4\x38\xe9\xff\x73\x3b\x78\x28\xf2\x4a\x24\x49", + "\x1d\x9f\x42\x56\xe0\x84\x66\x56\x0a\xb2\x71\xde\x36\x21\xa0\x3f", + "\x81\xdb\xe3\x8a\xc7\x51\x12\x7e\xfc\x11\xca\xed\xec\xa9\xc9\x3b", 1, + 520, 128 }, + { 96, 256, 128, 161, + "\xc1\x80\xc1\x2e\x6a\xf8\xcc\x07\x19\x04\x9e\xfe\x99\xd4\xdf\x2d\xe2" + "\x41\xef\xec\x5a\x01\x31\x45\xb2\xb7\x5e\x15\xdb\xa1\x6f\xe5", + "\x12\x6f\xbb\xd6\x99\xbe\xb3\x74\xf6\x7b\xaa\x7b", "", "", "", + "\x15\xdc\x4b\x81\x22\xb0\xe5\xad\x13\xdb\xea\x70\x96\xe8\x18\x68", 1, + 0, 0 }, + { 96, 256, 128, 162, + "\x80\x03\xe6\x54\x7a\x96\x4c\xd5\xc2\x84\x41\xc9\xb1\xa3\xc0\x83\xcc" + "\xb9\x6c\x7e\x13\x83\x85\xa5\x29\x4a\x1c\x30\x6b\x05\xf9\xf4", + "\xf2\x51\xc4\xa2\x62\x5c\x61\x2f\x86\xec\x16\x50", + "\x8c\x6e\xea\x97\x56\x15\x5d\x6e\xa9\x59\x5c\xc4\x9e\x8a\x74\xe1", + "", "", + "\xe6\xe9\x1b\xfe\x55\x18\xb7\x6e\x0a\x2d\xca\x79\xc6\x4d\x23\x2b", 1, + 128, 0 }, + { 96, 256, 128, 163, + "\xa4\x20\xe4\xf9\xa6\x16\xbf\x63\x1a\x94\x9b\x32\x4a\xe9\x01\x6a\x77" + "\xd9\x43\xa0\xfd\x1d\xa2\xb3\xe9\x74\x2e\x2e\xe5\x0f\xa0\x97", + "\x77\xf6\x71\x99\xab\x7b\x96\xf6\xf4\x83\x2c\x01", "", + "\xfc\xaf\x04\xe3\x33\xd8\x76\xae\x34\xfc\xab\x93\xaf\xd7\xba\xa7", + "\x13\x55\xcd\x0e\x1b\x42\xb1\x78\x81\x83\x00\x9a\x11\xca\xd5\x38", + "\x59\xd3\x5f\x9a\x12\xbe\xd5\xb5\xca\x87\x0b\x28\xba\xdb\x27\xf4", 1, + 0, 128 }, + { 96, 256, 128, 164, + "\x69\x05\xf2\xc4\xd6\x3a\x7b\xa7\xe9\x36\x6b\xa6\xc3\xbb\xf6\xe3\x55" + "\x2d\x56\x9e\xb5\x53\x21\xbe\xba\x3f\x8d\x98\xa1\x82\xb9\x7d", + "\x49\xe0\x8c\x06\xdb\xae\x3f\xf5\xcb\x7d\x9e\xcd", + "\xab\xa0\xc4\x4e\x9f\x93\x99\x74\x8f\x4a\x7e\x91\x9c\xeb\x8a\x62", + "\xb5\x62\x54\xe2\x21\xcf\x55\x8e\xa7\xd9\x19\x4e\xec\xd6\x3e\xd5", + "\x68\xe6\x6f\x43\x3c\x3c\xf9\xe4\x24\x72\x25\x68\x2f\x4c\x73\xa4", + "\x07\xf5\x3a\x76\xb8\x44\x60\xec\xf4\xaa\x18\x13\xe5\x17\x0b\x2f", 1, + 128, 128 }, + { 96, 256, 128, 165, + "\x77\x0b\x39\x74\x1c\x56\xd4\x67\x00\xa9\xf3\xcc\x23\x1d\x1a\xcb\x17" + "\x44\x98\x70\x2c\x0f\x2d\x0e\xec\x20\xdb\x57\x49\x4b\xb4\x9e", + "\x93\x84\xa1\xec\xbe\xe1\xde\x2b\x5a\xe7\x06\x84", "", + "\xea\x0b\x32\x28\xb8\x3c\xa6\x61\x50\xa7\x9a\xba\x15\x9e\x50\x6b" + "\x75", + "\x40\x52\xc0\xfc\x81\x63\x46\xc8\x69\x21\xdb\x57\x64\x6f\xeb\x29" + "\x43", + "\xa1\x9a\x58\x53\x10\xce\xb2\xdf\x76\x7e\xc1\x72\x4d\x52\xe3\x9d", 1, + 0, 136 }, + { 96, 256, 128, 166, + "\x41\xd6\xc6\xba\xbb\x72\x41\x53\x9a\xc1\x66\x47\x48\xdd\x1c\xf2\x9c" + "\xe7\x94\x0e\x29\x15\x3c\xd8\x18\x0e\xd1\x97\xda\xb5\xc7\x3f", + "\x00\x05\xde\xa1\x2e\xb6\x98\x50\x64\x7c\x7a\xd9", + "\x67\x5f\x31\xd7\x6b\xf4\x83\xd2\xd2\xab\x57\xcb\xe9\x3c\xf2\xf1", + "\xe5\xc4\x44\xa0\x45\x8d\xca\xf7\x89\xc8\xf3\x56\x66\xf1\x5b\xcc" + "\xb4", + "\x99\xa4\x9b\xde\x03\x72\x8c\x47\x9d\xaf\x4c\x67\xd3\x07\xf1\x28" + "\x5e", + "\xf0\xf3\x85\x9d\x12\xcd\x41\x48\xb9\xe8\x4d\x22\xba\x7a\xd9\x66", 1, + 128, 136 }, + { 96, 256, 128, 167, + "\xbc\x53\x6d\x8d\x9b\x43\x40\xcd\x14\x14\x7f\xca\x7c\xa3\x65\x73\xba" + 
"\x45\xbf\xf5\xb0\xa7\xcb\x80\x91\xa5\x50\xcf\x2b\x4b\xb9\x45", + "\x17\x86\x94\xeb\x62\xd7\x77\x3b\x0f\x0f\xbe\x8b", "", + "\xfb\x82\x20\x33\xc4\x43\x76\x80\x30\x1f\x72\xff\xc7\x4b\xa3\xbd\x46" + "\x7a\x9a\xb7\x46\x5a\xe4\x5e\xc8\x7a\xb4\xbe\xfd\x7c\xc9", + "\xb6\xce\x6d\x86\xab\x7e\xee\x1f\xdd\x7d\xcb\x95\x5b\x83\x24\x36\x08" + "\x39\xeb\x2b\xdd\xd8\x21\xec\xc1\xef\xe1\x29\x18\x26\x89", + "\x72\xfd\x99\x7b\xf0\x58\x9a\xa2\xd6\x07\x21\x65\x4f\x55\x2e\x3a", 1, + 0, 248 }, + { 96, 256, 128, 168, + "\xf5\x9a\xbc\xbf\x42\x18\xbd\x5c\x76\x01\xf0\x80\xb5\xfb\xd3\xae\x08" + "\x87\x33\x70\x2c\x8f\xbe\xf0\xc5\x29\x6a\x40\x6f\x56\x38\x27", + "\xa5\xeb\x0e\x6f\xe6\x69\xe6\x82\x39\xac\xe5\x50", + "\xd6\x03\x49\x1f\xbf\x09\x50\xd3\x64\x89\xab\xb4\x0d\xd8\xd4\x2b", + "\x97\xdc\xba\xcd\x70\xa6\x78\xcf\xae\xd1\x3c\x94\x2c\xf9\x20\xe8\x51" + "\xec\x3e\x6f\xb1\xf6\xc6\xeb\x95\xf1\xc9\x65\xfb\x1a\x13", + "\xc0\xb2\x7e\xdd\x65\x33\xcf\xba\x81\x32\x3a\xc7\x8d\x0a\xeb\x03\x71" + "\xb1\xd7\xb8\x99\x38\xe0\x4c\x31\x91\x48\x96\x15\x13\xfb", + "\x56\xaa\xbb\xde\x47\xab\x2c\x53\xdb\x48\x70\x30\x33\xf8\xca\x68", 1, + 128, 248 }, + { 96, 256, 128, 169, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8a\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 170, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x89\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 171, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x0b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 172, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc3\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 173, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\xf7\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 174, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + 
"\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0c\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 175, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0f\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 176, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x8b\x92\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 177, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x93\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 178, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x12\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 179, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\xa0\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 180, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2e\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 181, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb8\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 182, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xbb\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 183, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\x39\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 184, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x11", 0, + 0, 128 }, + { 96, 256, 128, 185, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x12", 0, + 0, 128 }, + { 96, 256, 128, 186, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x50", 0, + 0, 128 }, + { 96, 256, 128, 187, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x0b\x92\x80\x2f\xdc\xb9\xd7\xd4\x90", 0, + 0, 128 }, + { 96, 256, 128, 188, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8a\xc2\xdd\x77\x0d\xe5\x8f\x0b\x93\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 189, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\xf7\x0d\xe5\x8f\x8b\x92\x80\x2f\xdc\xb9\xd7\xd4\x10", 0, + 0, 128 }, + { 96, 256, 128, 190, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8b\xc2\xdd\x77\x0d\xe5\x8f\x8b\x92\x80\x2f\xdc\xb9\xd7\xd4\x90", 0, + 0, 128 }, + { 96, 256, 128, 191, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x74\x3d\x22\x88\xf2\x1a\x70\xf4\x6d\x7f\xd0\x23\x46\x28\x2b\xef", 0, + 0, 128 }, + { 96, 256, 128, 192, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, 128 }, + { 96, 256, 128, 193, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, 128 }, + { 96, 256, 128, 194, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x0b\x42\x5d\xf7\x8d\x65\x0f\x8b\x12\x00\xaf\x5c\x39\x57\x54\x90", 0, + 0, 128 }, + { 96, 256, 128, 195, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x9d\x2a\x30\xab\xc5\xe1\x78\xf7\xc6\x31\x7e\xc9\x49\x8d\xac\x39", + "\x8a\xc3\xdc\x76\x0c\xe4\x8e\x0a\x93\x81\x2e\xdd\xb8\xd6\xd5\x11", 0, + 0, 128 }, + { 56, 128, 128, 196, + "\x19\xd5\x32\xdb\xcc\x93\x4a\x00\x9c\xe1\xb9\x4a\x0b\x31\xdd\xc7", + "\xf0\xbf\x6a\x9b\xcf\x6c\x0d", "", "", "", + "\x7b\x12\x26\x3a\xaf\x1e\x4c\xb6\xe4\xb4\x06\xe0\x26\x69\x82\x09", 1, + 0, 0 }, + { 56, 128, 128, 197, + "\x8c\xdb\x7f\x67\x89\x27\x1a\x6e\xf3\xe0\x64\x61\xe9\x0e\xaa\x0e", + "\x7c\x0d\x6b\xce\xba\x28\x2e", "\xfb\xc4\xf4\xa5\x2e\xcb\x4c\xaa", + "", "", + "\x50\xb1\x2c\x1f\xa4\xdc\x4b\x2d\xc4\xdd\x0e\xb1\x52\xdb\x41\x9e", 1, + 64, 0 }, + { 56, 128, 128, 198, + "\x6b\xd7\x36\x3b\xe8\x1b\x3f\x80\x3c\x7f\xae\xe6\x07\x05\x02\x74", + "\x30\x3d\xa6\x78\xd1\x67\x9e", "", + "\x53\x9c\x7d\x6f\xcc\x0a\x69\x1b\xd3\x9b\xc4\x34\x22\xd4\xe1\x3c", + "\x38\x33\x8e\x92\x4b\xf2\xec\xc3\xae\x0f\x5f\x75\xf2\xaf\x2d\x30", + "\xe4\x0b\xbb\xa6\x73\x49\x55\x22\x3f\xab\x6d\xdb\x3c\x7b\xba\x83", 1, + 0, 128 }, + { 56, 128, 128, 199, + 
"\x5b\x49\xd6\x7b\x0a\x74\xe3\xf3\x9e\x8d\x0b\xac\x6a\x00\x50\x40", + "\xb9\xbb\xb9\xae\x00\x3b\x08", "\xaa\xdc\xed\xa4\x4e\x5d\x23\x23", + "\xfe\x66\xe3\x59\xd3\x40\xec\x00\x24\x17\x36\xc2\xa6\x78\x90\x02", + "\xa4\x41\xe2\xeb\x45\x8f\x8a\x6f\x2a\xc7\x56\x27\xab\x10\x85\xef", + "\x5c\x54\x51\x81\x68\x52\x79\x65\x21\x87\x81\x9a\xa6\xd1\x5b\x86", 1, + 64, 128 }, + { 56, 128, 128, 200, + "\x6a\x5b\x3b\x57\xf8\x3c\xac\x23\xeb\xbb\x97\xa6\x0f\x9c\x13\xc3", + "\x64\xc0\x18\x42\xe7\x3e\x74", "", + "\x7d\xab\x0c\x47\x34\x73\xdf\x8d\x30\x12\xc3\xfd\xf0\x93\xf0\x07" + "\x09", + "\xfb\x8f\x96\x40\x65\x71\x8f\x93\x90\x10\xea\x5e\x5d\xa3\x27\xcd" + "\xdb", + "\xed\xd3\x49\xc3\x74\xc3\xd0\xdb\x1a\xc3\x6f\x11\xb1\x50\x6d\x2c", 1, + 0, 136 }, + { 56, 128, 128, 201, + "\xeb\x26\x3b\x3a\x87\xfc\xf2\x32\x32\x7a\x05\xb2\x07\x92\x92\xab", + "\x95\x54\x02\x3b\xad\xf3\xe2", "\xbe\x0d\xd7\x00\x2e\x2f\xe3\x58", + "\x0c\xac\x1a\xfd\x57\x08\xab\x03\xc8\xd3\xfe\x1d\x7c\xc8\x3b\x26" + "\xff", + "\x81\x86\x0e\x45\xcb\x00\x9f\x57\x28\xf8\x0f\xd1\xdf\x21\x4f\x84" + "\x49", + "\xa2\xcb\x64\x54\xa2\xa4\x9e\x96\xed\xca\x89\xb9\x4e\x49\xc5\x0c", 1, + 64, 136 }, + { 64, 128, 128, 202, + "\xf3\x43\x47\x25\xc8\x2a\x7f\x8b\xb0\x7d\xf1\xf8\x12\x2f\xb6\xc9", + "\x28\xe9\xb7\x85\x17\x24\xba\xe3", "", "", "", + "\x1c\x64\x58\x30\xe6\xee\x05\x58\x9b\x70\xf0\x23\x47\xe1\x1c\x93", 1, + 0, 0 }, + { 64, 128, 128, 203, + "\xea\xf5\xc7\xe3\x5b\x61\xc6\x4f\xd8\x99\xbf\x26\x50\x6c\xb8\x3c", + "\x07\x1f\xfe\xd7\x58\x5e\xb0\xb7", + "\xf0\xaf\x44\x31\xf3\x3e\x7e\x15", "", "", + "\xe5\xe1\x54\xd4\x3f\x32\x98\x89\x6b\x34\xbb\x4f\x76\xb7\x39\x9f", 1, + 64, 0 }, + { 64, 128, 128, 204, + "\xde\xb6\x22\x33\x55\x9b\x57\x47\x66\x02\xb5\xad\xac\x57\xc7\x7f", + "\xd0\x84\x54\x7d\xe5\x5b\xbc\x15", "", + "\xd8\x98\x6d\xf0\x24\x1e\xd3\x29\x75\x82\xc0\xc2\x39\xc7\x24\xcb", + "\x97\xbc\x3c\x09\xd5\xe3\x71\x78\xe7\xfd\xd3\x5d\x53\x23\x91\x80", + "\x3a\x2d\xc0\xbf\xde\x10\x24\x70\x29\xf5\xc4\x89\xe3\x06\xa3\x96", 1, + 0, 128 }, + { 64, 128, 128, 205, + "\x9e\xbe\x10\xef\x15\xeb\xcc\x60\x00\xed\x72\xd9\x74\x21\x9b\x97", + "\x28\xa8\x40\x39\xf2\xda\xe6\x51", + "\x1b\xab\x91\x6d\x21\xbc\xbb\x35", + "\xd0\x73\xa8\x8d\x45\x36\x41\x51\x40\x87\x18\x78\x69\x30\xed\xfb", + "\x6a\xdf\x7f\x96\xa3\x20\x22\x71\xb0\x37\x87\x37\x2f\x4c\xf3\xce", + "\x50\xbc\xdd\x15\x2a\xe3\x31\x55\x48\x78\xf9\xa2\xb8\x14\x0e\x72", 1, + 64, 128 }, + { 64, 128, 128, 206, + "\x74\xdf\xd2\x96\x3b\xc8\x14\x83\x38\x09\x44\x14\xe3\xfc\x2b\x8d", + "\xd1\xef\x66\xef\x2e\xb7\x65\xb8", "", + "\x17\x9c\x18\x65\xe2\xbc\x0f\x70\x24\x87\xc4\xe5\x4f\x83\x74\x45" + "\x7e", + "\xa4\xb7\xb9\xc9\xc9\x02\xed\xdb\x02\xfb\x64\x87\x31\x40\x25\x6e" + "\x94", + "\x4b\xc1\xe9\x91\x48\xf6\xa4\x72\x2d\x4b\x92\x73\x4b\x08\x8d\x43", 1, + 0, 136 }, + { 64, 128, 128, 207, + "\x5b\xf0\x96\x5f\x14\x39\xed\x83\xee\xde\xaa\xd9\x46\x7f\x5f\x60", + "\xb9\x26\x8f\xec\xa7\x29\x68\x0f", + "\x31\xfb\x02\xa7\xf4\xeb\xe9\xaa", + "\xc8\x44\xb6\xc4\x57\xe1\xe5\xf4\x3f\x82\xb4\xe4\x84\xb4\x70\x9e" + "\xf1", + "\x61\x1e\xe7\xdf\x91\xb0\x62\xb7\x5d\xf8\x6b\x10\xa4\xce\xeb\x01" + "\x34", + "\x9a\x56\x7c\x20\x65\xd7\x83\x2c\x35\xf6\x14\x3a\x41\x41\x46\x62", 1, + 64, 136 }, + { 72, 128, 128, 208, + "\x2e\xc7\xa4\x68\xe3\x64\x91\x86\xe1\xf9\xde\xcc\xdf\x95\xa2\x29", + "\x99\xfd\xb1\x58\xfb\x8e\xbc\xce\x64", "", "", "", + "\x65\x8f\x01\xf9\x0c\x35\x33\x1c\xdd\x69\x86\xf7\x36\xcc\x37\xda", 1, + 0, 0 }, + { 72, 128, 128, 209, + "\xc6\xbc\x09\x87\xb5\xdc\x94\x32\xda\x66\xbd\xb1\x53\x85\x9f\xda", + "\xd7\x68\xd1\xb8\x0a\x09\x45\x06\xb4", + 
"\x47\x13\xf8\x6a\x53\xce\xdd\x50", "", "", + "\x4e\xc3\x1d\xfe\xbf\xf4\xa9\x0d\x5e\xfb\xdb\x85\x04\xcd\x8c\x79", 1, + 64, 0 }, + { 72, 128, 128, 210, + "\x6a\x1c\x98\xe4\xd2\x0b\xc6\xad\x59\x48\x33\xd6\xe9\xaa\x47\x94", + "\x3d\xe2\x9d\x92\xd3\x01\x8e\xaa\xfc", "", + "\xf6\x23\x32\x2f\xef\x6d\x49\xcf\x7a\xbf\xa1\x6b\x5f\xd8\x39\x51", + "\xaf\xcb\xf0\x38\x5f\x26\x89\x5b\xcd\x61\x26\x60\x06\xdc\x1d\x98", + "\x79\x14\xe3\x2d\x18\x10\x43\x32\x12\x34\xdc\x16\xd7\x9b\x35\x76", 1, + 0, 128 }, + { 72, 128, 128, 211, + "\x02\x2b\x66\x9b\x7d\x39\x1f\x0f\xf5\xfa\xb1\x23\xc2\xba\x88\x17", + "\x2e\x02\xb0\x4c\xe6\xb3\x48\xef\x86", + "\xf9\xc8\x62\x29\x17\x05\x51\x9b", + "\xab\xb1\x8f\x46\x62\xc1\xbf\xa7\x98\x45\x60\xde\xac\x4a\x41\x5a", + "\x45\xc3\xab\x70\xb1\xc8\x83\xc9\x8b\x53\xfc\x9c\x0b\xe7\x7e\xcb", + "\x2e\xce\xd5\xeb\x7d\x60\x43\x1c\x09\xd3\xba\x49\x23\x0b\x23\xd0", 1, + 64, 128 }, + { 72, 128, 128, 212, + "\x89\x65\xe6\x41\xf4\x6d\xbb\xb1\x6a\xa8\x3f\x94\x59\x37\x0d\xc9", + "\x31\x0c\x4e\xe0\x82\xc4\x87\x0f\xc2", "", + "\x5d\x22\x78\xc8\xb4\xed\x8a\x37\xc9\x54\x88\x93\x5c\x1d\xb0\x6e" + "\x68", + "\xdb\x91\xb9\x8d\xa5\x78\xd8\xe4\xeb\xf1\x8f\x65\xf2\x44\x57\x9e" + "\xb7", + "\x21\x25\xef\x21\xfc\xba\x47\x10\x39\xc1\x31\x78\x6c\x99\xe7\x4e", 1, + 0, 136 }, + { 72, 128, 128, 213, + "\x22\xc9\x42\xb1\x21\x2a\x3c\xfa\x19\x6e\x9a\xd0\x6b\x03\xfb\x2b", + "\x71\xa2\xb8\x75\x40\xb1\x1d\x76\xa6", + "\xc0\xe0\x59\x60\xb8\x31\xe8\x75", + "\x4e\x3b\x6e\x3e\x2b\xa5\x66\x3e\xeb\xc5\xd8\x3d\xc2\x49\xea\xbc" + "\xe1", + "\x6f\xfa\x87\x5b\x85\x3c\xc4\xf6\xa4\x1a\xfd\x43\x0b\xd7\xc3\xce" + "\xee", + "\x0b\x49\x64\x85\x6b\xb2\xb2\x7e\xda\x38\x26\xae\x12\x8f\x67\xb3", 1, + 64, 136 }, + { 80, 128, 128, 214, + "\x1e\x6c\x62\x14\xa6\xa5\xdd\x5b\x62\x8c\x71\xde\x07\x78\x81\x37", + "\x40\xbc\xc3\x15\xde\xc8\x8b\xf3\x26\xcc", "", "", "", + "\xdf\xd7\x0e\x3e\x5a\x13\x16\x6b\x46\x06\x13\xab\xab\x92\x8f\x26", 1, + 0, 0 }, + { 80, 128, 128, 215, + "\xe4\x13\x43\xe5\xff\xe2\x0f\xe4\x8f\xf0\x10\xb1\x46\xce\xae\xad", + "\x5f\x17\x2f\xbe\x9f\x8e\xec\x0f\xbf\x79", + "\x9b\x46\x67\x59\x01\xa4\xbe\x0f", "", "", + "\x21\x04\xbc\x9e\xcb\x79\xb7\x1f\x32\xf2\x7c\x9e\xe4\xfe\xc6\x40", 1, + 64, 0 }, + { 80, 128, 128, 216, + "\xfc\x93\x58\x2f\xa1\xf8\xb5\x8c\xc9\xe8\x0d\xd5\x83\xe9\xbf\x8b", + "\x5d\x4b\xf5\x87\x98\xfa\xc3\x51\xa3\x99", "", + "\x86\x6d\x5e\x1b\x0a\xa2\x90\x04\xe5\x1e\xa8\x7d\xe8\x6e\x3c\x05", + "\xb6\x46\x50\xb6\x93\x5f\xb0\x4b\x97\x42\xf5\x72\x9f\x28\x6e\x03", + "\xb9\xd3\x94\x7c\x16\x05\xa2\xd5\x8e\xc3\xf3\x22\x18\x46\xc7\xda", 1, + 0, 128 }, + { 80, 128, 128, 217, + "\xdf\x2d\xb4\x8b\x19\x44\xfd\x9e\x24\x58\x9d\x14\x35\x7d\x0f\x80", + "\xf9\xa1\xbb\x32\xf5\x79\xb5\xf0\x27\x28", + "\x03\x92\x26\x00\xd7\xd0\x33\xdd", + "\xea\xfb\x69\xe4\x02\x38\xa3\x4e\x39\x85\x23\xfb\x35\xbd\x66\x12", + "\x9d\xe6\xac\x37\xf5\x2b\x81\x35\x04\x7a\xf8\xd5\xe5\x7f\xb3\x6e", + "\x05\x43\x35\x1a\xa8\x6f\xf8\xca\x58\x25\xbd\x7b\xb7\xc0\x25\x4e", 1, + 64, 128 }, + { 80, 128, 128, 218, + "\x3b\xf8\xc3\xc3\x01\x19\x0d\x23\xf7\x1a\xc8\x2c\x0c\x5b\x0f\x9b", + "\xd5\x27\x44\x06\xa4\xf5\xa2\xe2\xd1\x01", "", + "\x03\xca\x74\xe5\x8b\x8b\x38\x50\x0e\x1e\x65\xb8\x33\x2f\x41\xf0" + "\x6c", + "\x99\x9f\x47\x6b\x51\xce\x68\x6a\xf5\x9b\x0b\xbe\x22\x1c\xe4\xe8" + "\xa4", + "\x2a\xc4\x83\x77\xd2\x39\xfa\x7f\xfb\xe3\xc5\x03\xe0\x27\x8f\x98", 1, + 0, 136 }, + { 80, 128, 128, 219, + "\x16\x94\x2e\xb8\x9d\x4f\x7f\xe6\x5b\xf9\xb4\x9c\x16\xf8\x30\xab", + "\x2b\xa7\x6d\x03\x99\x5c\x62\xdc\x7e\xd2", + "\x32\xc7\xc6\x07\x2d\xbd\x73\x5f", + 
"\xb2\x38\x0e\x9e\xb5\x96\xd5\xaf\x69\x7c\x0b\xa1\xd3\x01\xa8\x33" + "\xd9", + "\xf5\x63\xca\xb1\xf5\xe5\x6f\x23\x7a\x60\xc2\xc2\x95\x0a\xb7\xa5" + "\xc2", + "\x25\xc8\x9e\x41\x0c\x0a\x53\x5b\x94\xa3\x83\xad\x3c\x01\x44\x64", 1, + 64, 136 }, + { 88, 128, 128, 220, + "\x9d\x2f\xa7\x59\x29\x61\x2e\x12\x13\x46\x0f\x99\x89\x46\xdc\xec", + "\x0e\x94\x8a\x03\xdb\xfa\x10\x81\x7e\x88\x26", "", "", "", + "\x2f\x1f\xa3\x58\x5b\x9a\x11\xff\x47\xbd\x48\x6f\x95\x57\x22\x46", 1, + 0, 0 }, + { 88, 128, 128, 221, + "\x7a\x97\x04\x06\xa7\x47\xc2\x32\x7e\xcb\xc8\xb1\x07\xa1\x90\xcd", + "\x99\xb0\x79\xde\x95\x2d\x60\xda\x0d\x03\x4b", + "\x2f\x08\x0b\x2d\xeb\x66\x44\xef", "", "", + "\x9d\xca\x9b\xa4\x41\x01\x3c\xe9\xfc\x0a\xc3\xdb\xf4\x14\x62\x6d", 1, + 64, 0 }, + { 88, 128, 128, 222, + "\xac\xab\x31\x48\x3d\x9d\xe4\xad\x77\xf4\xe6\x3f\xe4\x1b\x57\xae", + "\x5c\x09\x39\xe7\x1b\xae\x1a\x9d\xe1\x67\xd4", "", + "\xe4\xd7\x2b\x2f\x7c\xb6\x9b\xc5\x4a\x49\xf4\xd5\xce\xa4\xf2\x3a", + "\x0c\x50\x68\x43\xba\x7b\xd2\xdc\x45\x78\xe6\xbb\x83\xfd\x0c\x8d", + "\xc2\x75\x37\x30\x37\x63\x5b\xae\x29\x4f\x99\xc8\xe4\x69\x64\xb3", 1, + 0, 128 }, + { 88, 128, 128, 223, + "\x82\x84\x1e\xf7\xfb\xae\x35\x54\x65\x25\xfb\xbe\xbf\x47\x18\xfb", + "\xb5\xcd\x81\x8f\x73\xa3\x6e\xd0\x25\xb6\xcf", + "\x44\xf4\x8c\x2a\x20\x45\x63\x58", + "\x8c\x2c\x82\x3b\xb3\x99\x41\xb1\xc6\xb7\x5b\xbc\x82\xf0\x5b\xa4", + "\xb2\x87\xc6\x37\xa7\x55\x43\x62\xc8\x0d\x6b\x24\xd5\x0d\xdf\xb3", + "\x39\x67\x27\x7d\xa0\xf8\x56\xf8\xf0\xad\x49\x28\x28\x94\xd2\xbb", 1, + 64, 128 }, + { 88, 128, 128, 224, + "\xf9\xc6\xd9\x62\x7f\xd2\xe7\x31\xe2\xf1\x15\xb3\xd0\xa5\x3b\xfd", + "\x08\x45\x58\xeb\xfb\x65\x82\xf3\xd1\x87\x9a", "", + "\x74\x63\xaf\x94\x62\x62\x79\xce\x01\x12\xf6\x70\xc3\x11\x50\x99" + "\xfd", + "\xd4\x6b\x6e\x96\x2b\x6b\x7a\x23\x52\xfc\x43\x79\x14\xd9\xc0\xd9" + "\xc0", + "\xeb\xe3\x36\x17\x6e\x27\x04\x1a\xe7\x87\x91\xac\xa3\x45\x48\xc8", 1, + 0, 136 }, + { 88, 128, 128, 225, + "\x1d\x28\x6e\x52\x5e\xc2\x86\x4d\x9e\xa6\xe7\xad\xfb\xdc\x49\x70", + "\x1a\xd4\x48\x79\xf1\x94\x7a\xbd\x50\x3d\xce", + "\x1d\xb0\xea\xaa\xa1\xe2\xc8\x48", + "\x7b\x40\xe6\xc9\x87\x69\x2d\x02\x02\xcb\x6f\x44\xb4\x23\xc2\x67" + "\xdd", + "\x84\xf9\x1f\x9d\x35\xb9\x98\x59\x8e\x50\xfe\xb3\x4d\xca\xa0\xd2" + "\x60", + "\xae\x4e\x38\xcc\x4b\x73\x09\x44\xbc\x45\x9d\xf8\x5a\x53\x6f\x4e", 1, + 64, 136 }, + { 104, 128, 128, 226, + "\x69\x4a\x2a\xe9\x4c\xc2\xfc\x6c\x82\xdc\xd1\x6c\x58\xa3\x41\x95", + "\x8b\x4d\xe9\x49\x7e\x78\xd9\xc7\x3b\xdc\xb3\x74\xde", "", "", "", + "\x36\xb2\x0b\xcb\x06\x46\x09\xcb\xc0\x3a\xe3\x27\x86\xf7\x2e\xb5", 1, + 0, 0 }, + { 104, 128, 128, 227, + "\x26\x8c\x96\x1a\x4f\xa5\x4c\x21\x4a\x2a\xf8\xfe\x76\xa2\x77\xcc", + "\xf1\x34\x43\xda\x0e\x41\x2f\x1c\xc7\xa9\x01\x65\xc6", + "\x4f\xad\x12\xb4\x02\xc5\x80\x29", "", "", + "\xbe\x58\x07\x13\x42\xc9\x34\x8b\xaf\x78\x10\x4e\x22\x58\xe6\x16", 1, + 64, 0 }, + { 104, 128, 128, 228, + "\x7c\x5a\xa1\xcd\x2f\xc1\x71\xd5\xbb\x91\xee\x74\xf3\x1e\x1a\x63", + "\x70\xa2\xa5\x30\x37\x34\x51\x78\x27\xfa\xa7\xee\x78", "", + "\x4b\xd1\xd3\x42\x7e\x07\x35\xa0\x8f\x47\x5b\xc7\x3e\xc3\x64\x8c", + "\x9a\xb6\x12\x14\xea\x66\x11\x51\x07\x06\x31\x5f\x0f\xc9\xbd\x65", + "\xa2\xb3\xfc\x3c\x13\x49\xcb\xfd\x9d\xed\x7c\xc3\x2e\xff\xb3\x89", 1, + 0, 128 }, + { 104, 128, 128, 229, + "\xe7\x4b\x73\xc2\xad\x93\xd3\x8d\xd4\x43\x2d\x6e\x51\xd3\xe3\xec", + "\x06\xee\x28\xea\x53\x2f\xf5\xaa\xe6\xb0\xf6\xa2\x8a", + "\x10\xbc\x98\x64\xf1\x33\x2e\x41", + "\xaa\xd5\xd7\x58\x04\x1e\x54\x43\xed\xe7\xe9\xbb\xac\x1d\xb4\x90", + 
"\xd3\xed\x6b\xb5\x5d\x98\xb0\x0e\x1b\x76\x93\x8a\x1c\x6b\xd5\xed", + "\x22\x20\x1e\x4e\xb2\xa4\x22\x91\xa7\xd5\x7e\x35\x70\x82\xd7\x7e", 1, + 64, 128 }, + { 104, 128, 128, 230, + "\xbe\x70\x77\x74\xd9\xea\xb3\x70\xdb\x4e\x82\x50\x29\x7a\x74\x37", + "\x79\x44\xb4\x87\xd5\x9b\x6f\xfc\xc9\x6c\x9d\xf6\x2d", "", + "\x28\xea\xb5\x68\x85\xe1\xe1\x2b\xd7\x2d\xef\x11\x38\x23\x7f\x0d" + "\xbc", + "\xee\x05\xff\xea\xab\x17\xbb\x4d\xe9\x45\x27\xe0\x29\x70\x58\xc4" + "\x8b", + "\xff\x89\x9f\x92\x9b\x37\x16\x39\xd4\x8c\x3c\x44\x7f\x51\xc5\x77", 1, + 0, 136 }, + { 104, 128, 128, 231, + "\x30\x82\x94\x16\xd8\x0d\x4b\x6d\xd9\x1a\x16\xc4\x69\x4c\x5a\xcd", + "\x8b\xc7\xec\xd5\xbd\xe1\x96\xb7\x23\x19\xde\x6b\x77", + "\x97\x0f\xe6\x5b\x77\x89\xa5\x55", + "\x4a\x31\x22\xf8\x01\xd6\x63\x82\x28\xfa\x0e\x30\xaf\x3f\x36\x62" + "\x7a", + "\x51\x25\xed\x68\xaf\xbd\x34\xbc\x00\xc7\x31\x71\xad\xa3\x1e\xe8" + "\x4a", + "\x1d\xa3\x20\xc8\xba\xb5\x25\xe3\x75\xf3\x7a\x3b\xba\x3e\x0e\xb9", 1, + 64, 136 }, + { 56, 192, 128, 232, + "\xb4\x54\x4c\xae\x6b\x60\xb7\x72\x0f\x3a\xde\x71\xe9\x0e\x58\xc2\x1d" + "\x6e\x48\x71\x83\xd4\x66\x6a", + "\x2b\xa3\xda\x11\x2c\xf5\xe6", "", "", "", + "\x34\x15\x51\xf2\xc0\x5e\xe9\x31\x4f\x0e\xb5\x52\x93\x9e\x54\x86", 1, + 0, 0 }, + { 56, 192, 128, 233, + "\x26\xbd\xdb\x2e\xb2\xa7\x27\xe2\x91\x0d\xf9\x4a\xd3\xe1\x2a\xc1\x30" + "\xa4\x9a\x8f\x7f\x41\x95\x1c", + "\xc6\xc0\xef\x48\x15\x1b\x32", "\x74\x43\xb9\x1e\x73\x47\x5d\xe1", + "", "", + "\xfd\x7a\xab\x59\x5d\xfe\xb3\xc8\xa1\x66\x0e\xab\x04\x3b\x3d\x01", 1, + 64, 0 }, + { 56, 192, 128, 234, + "\x80\x6e\xd9\xcf\x33\xa1\xc2\xfa\x6a\x8b\xff\xad\x79\x37\xc3\xea\x22" + "\x64\x08\xeb\xf2\x48\xd1\x76", + "\x2b\xeb\xcc\x0a\xf6\x72\xbf", "", + "\xbe\x17\x02\x41\x48\x68\xc9\x4a\xeb\x99\xc1\xa0\x88\xba\x8c\x48", + "\x6e\x6f\x3d\x3b\x36\x48\x2c\x39\xa9\x9d\x59\x7e\xa5\x82\xf4\x30", + "\x7d\x11\xf5\x54\x9d\x87\xdd\xa7\xb0\x76\x22\x02\x27\x0a\x7e\x28", 1, + 0, 128 }, + { 56, 192, 128, 235, + "\x09\x5f\xa6\x78\xa1\x04\xe9\xc3\xd2\x46\x30\x4c\x5d\xdd\xee\x04\x5d" + "\xda\xb3\xd7\x9e\xa8\xa7\x26", + "\xfe\xbf\x6b\xf7\xdd\x16\xa7", "\x6a\x44\x90\xba\x9f\x61\xdb\x88", + "\xd2\x5e\xcf\xa8\x77\x89\x60\x30\x05\x8d\xca\xca\xb3\x15\x9c\xb3", + "\xf3\x57\x8a\xca\x6d\x3c\xcb\xc9\x16\xf5\xc1\xd7\x1a\x45\x87\x8e", + "\x8c\x73\x2f\x4a\x57\x1b\xf1\x05\xc6\xed\x1c\xef\x6f\xab\x28\x76", 1, + 64, 128 }, + { 56, 192, 128, 236, + "\x98\x98\x8d\xa4\x62\xa4\x6a\xb3\xdd\x61\x3b\xd3\x70\x69\xf4\xf4\x29" + "\xa9\xa8\x18\x41\xe7\x6d\xd3", + "\xeb\xad\x9a\xf5\xf8\x69\xf5", "", + "\xaa\x3f\xc0\x55\x74\xee\x10\x1e\xd7\x52\x7d\xe5\xda\x4a\xc3\x78" + "\x60", + "\x9b\x7b\xb3\x5d\xb7\x23\x71\x8c\x7f\x9f\xd8\xcd\x5c\x83\x12\x4e" + "\x78", + "\xa5\x95\x67\x30\x75\x77\xc8\xe8\x31\xc2\x3a\xc0\x9a\x92\xc6\xa1", 1, + 0, 136 }, + { 56, 192, 128, 237, + "\x71\xde\x00\x8c\xd8\x20\xfc\x03\x39\x74\xb6\xb1\x30\x8f\x66\x28\x74" + "\x25\x9b\x19\x56\x2e\x70\xf3", + "\xe7\x25\xd3\x1d\xbf\x5b\x99", "\xd7\x67\xf4\x0e\x91\xc4\xf1\x5c", + "\x83\x1a\x38\xcb\xea\xa9\xf2\x2e\xdf\x91\x8e\x97\x19\x56\xc1\x5f" + "\xa3", + "\xc8\xea\xf9\x54\x6a\xf7\x22\x61\x72\x3c\xeb\x3a\xe3\xbb\xb7\x30" + "\x3c", + "\x68\xd7\x28\x74\x4e\x59\x77\x34\x2d\x93\xaf\x81\x44\x58\x57\xed", 1, + 64, 136 }, + { 64, 192, 128, 238, + "\xd7\x45\x99\xb3\xd2\xdb\x81\x65\x3d\xe4\x3b\x52\xfc\x99\x4c\x50\xd0" + "\xbe\x75\x9f\xab\x87\xc3\x3a", + "\xd1\xc6\x1c\xf8\x53\x25\x31\xb5", "", "", "", + "\xbd\x78\xdf\xc8\x04\xa4\x20\xc1\x9f\xb1\x3b\x2f\x58\xd8\x2c\x5c", 1, + 0, 0 }, + { 64, 192, 128, 239, + 
"\x8f\xc2\x69\xef\x34\xd2\xc2\x12\x7c\x89\x49\x3c\x09\x60\xee\x08\x49" + "\xfa\xdf\x76\x66\x78\x85\xd5", + "\x62\x6b\xf0\x0a\xcb\x93\x04\x80", + "\x13\xaa\x17\x48\xae\xc4\x10\x42", "", "", + "\x9c\x45\x32\x75\xaf\xb0\x06\xc7\x8f\x6f\x29\x07\x9c\x7c\x3e\xf1", 1, + 64, 0 }, + { 64, 192, 128, 240, + "\x0b\x17\x71\x98\xc8\xb4\x19\xbf\x74\xac\xc3\xbc\x65\xb5\xfb\x3d\x09" + "\xa9\x15\xff\x71\xad\xd7\x54", + "\x8f\x07\x5c\xbc\xda\x98\x31\xc3", "", + "\xc4\xb1\xe0\x5c\xa3\xd5\x91\xf9\x54\x3e\x64\xde\x3f\xc6\x82\xac", + "\x01\x48\xcd\xf9\x0d\x56\x6a\x8e\xb6\x51\x40\x99\x56\xc3\x69\x5e", + "\xb1\x0d\x57\xdf\x83\xc4\xc7\x9b\x9f\x59\x0e\x3e\x5a\xa9\xe9\xb6", 1, + 0, 128 }, + { 64, 192, 128, 241, + "\xdb\x86\x9e\x55\x57\x6f\x57\xc8\xf9\x26\x49\x65\x9e\x3c\xb8\xbe\x10" + "\x65\x6b\xbf\xf4\xb6\x94\x60", + "\x67\xc0\x30\x53\x32\xe1\x31\x7b", + "\x93\xf5\x3a\xb3\x6f\x45\xcb\xa3", + "\x00\x2f\xbd\x2e\x0f\x39\xd4\x9f\x25\x8b\x3f\x73\x98\x39\x1e\x2c", + "\x0c\xe4\xb1\xf5\x39\x22\xea\x14\x8f\x26\xa6\x38\xd1\xc9\xe7\x85", + "\x9b\xe9\x41\x7b\x3f\xf9\xe8\xd5\xb2\x4e\x04\x14\x39\xb0\x2c\x86", 1, + 64, 128 }, + { 64, 192, 128, 242, + "\x2a\xf9\x6f\x8c\xca\x1b\x56\x3d\x17\xe7\x96\x9e\x01\x64\x5e\xe7\xb9" + "\xf5\x41\x3a\xc9\x3e\xa5\x70", + "\x95\x8d\x1f\xaf\x8c\x12\x67\xd8", "", + "\xc7\x1e\xd8\x02\x7c\x74\x56\x26\xea\x03\xbd\x25\x62\x8b\x99\xe1" + "\xdd", + "\xfd\x66\xcd\x92\x6b\xea\xdb\xee\x33\xcd\xae\x43\x82\x4f\xba\xcf" + "\xc0", + "\x8c\x8d\xaf\xb2\x02\x7f\x2b\x0c\x03\xf6\x2b\x5f\x9f\xcc\x0a\xd8", 1, + 0, 136 }, + { 64, 192, 128, 243, + "\x5e\xd7\x70\x86\xec\x0d\xa8\xf6\xac\x00\x56\x3a\xd6\xfc\xb8\x50\x05" + "\xac\x40\xf3\x92\x11\xb0\xe4", + "\x4e\x70\xdf\xd4\x9c\xc2\xac\x76", + "\x29\xbf\x75\x6a\x2f\x77\x06\x6b", + "\xc0\x49\xdc\xf1\x5a\xf3\xf9\x75\x98\x7d\x5f\x12\x50\xfe\xf5\x41" + "\x4f", + "\x36\x96\x0d\xa0\x29\xe6\x7a\xee\xb1\x45\xd5\x7d\xc0\xda\x68\xac" + "\x27", + "\x09\x50\x5b\xad\xdf\x28\xcd\x84\x2f\xd7\xfa\x7c\x54\x4d\x0c\x48", 1, + 64, 136 }, + { 72, 192, 128, 244, + "\xf1\x62\xa1\x09\x40\x12\xf6\xbf\xb1\x02\x70\xcd\x56\x09\xa2\x0d\xc2" + "\x4d\xec\x37\x27\xf8\xe5\x98", + "\x05\x62\xf0\x3f\x51\x24\x64\x2f\x40", "", "", "", + "\xa9\xee\xf8\xc9\xf9\x46\x00\x06\xb7\x3f\x2d\xa2\x31\x7c\x7b\x7d", 1, + 0, 0 }, + { 72, 192, 128, 245, + "\x0f\xa5\x86\x1e\xf4\x39\x18\x42\x65\x11\x2c\xa6\xea\x78\x5d\x21\x4a" + "\x5b\xb1\x2d\xd1\x08\xe4\x34", + "\x04\x1f\xfc\xd9\x55\xeb\x49\x39\xff", + "\xbe\xed\x0c\x76\x3b\x56\xc5\x82", "", "", + "\x39\xfd\x1a\x21\x07\x54\x0f\x9e\x6d\x33\xad\x23\xb4\x25\xdd\xef", 1, + 64, 0 }, + { 72, 192, 128, 246, + "\x4f\x58\x9a\xaf\x03\xe1\x21\x95\x85\xf4\x11\x63\x1a\x2b\x28\x7f\x20" + "\xe9\xcc\xa9\x33\x04\xd0\x04", + "\xf0\xbd\x78\x63\xd3\x4b\x6c\x96\x3f", "", + "\xca\x80\xf9\x13\x29\xf1\xcf\xd8\x78\x4b\xdb\x97\xdc\x0d\x5b\x01", + "\x0c\x3f\x7b\x1e\x05\x85\xde\xaa\x80\x0a\x71\x05\xfc\x14\x13\x64", + "\x94\x27\xfd\x74\x87\x0e\x29\xdb\x52\x7f\x7d\xf2\x47\x47\x79\x39", 1, + 0, 128 }, + { 72, 192, 128, 247, + "\x07\x87\x9e\x22\xe8\xc3\xcb\x5b\x5f\xc2\x05\x7c\x39\x85\x90\x6c\x39" + "\xaf\xf4\xe4\x0a\xae\x4e\x20", + "\x4e\xde\x0c\x3a\xf9\xc0\xde\xbb\x8a", + "\xcb\x33\x3d\x66\xbd\xe2\x47\x5d", + "\xb9\xac\x42\xc5\xd3\x16\x90\x87\xa7\x21\x87\x9c\x19\x86\x59\x08", + "\x6c\xf2\xcd\x3a\x10\x61\xd9\xb6\xfb\xe3\x62\x33\x77\xc6\xb4\x43", + "\xf7\x29\x7a\x72\x66\xd2\xf7\xf7\xaa\x7e\xc0\x5e\x0f\x9b\xf9\xa9", 1, + 64, 128 }, + { 72, 192, 128, 248, + "\x0b\x14\x4f\x06\x68\xff\xd1\xa9\x7f\xf2\xd2\xbf\x93\x44\xef\x0e\x28" + "\x48\x96\x4a\xec\xb2\x85\x0f", + "\x3b\x7f\x06\xb4\xba\x5b\x0b\x71\xec", "", + 
"\x60\x0b\x5c\xa3\xe8\xcf\x20\xa0\x9f\xf7\x52\xec\x2e\x73\x78\xac" + "\x1f", + "\x58\xd5\x76\x23\x17\xcf\x50\x24\x62\x71\x59\xac\xe6\xb4\x8f\x79" + "\x7f", + "\x16\xee\x7c\x4a\xad\xf2\x58\x45\x80\x30\xe5\xaf\x1b\xdb\xed\x2d", 1, + 0, 136 }, + { 72, 192, 128, 249, + "\xa3\xbd\xd0\x65\xfe\x64\x75\xdf\x94\xa2\x09\x2c\x3f\x72\xb1\xdc\xd3" + "\xd0\xf0\x41\x3b\x4f\x34\xdc", + "\xfe\xeb\x53\xf7\xcd\x16\xad\xc8\xe4", + "\x6b\xa7\x08\x2e\x39\x8b\xab\x61", + "\xd1\xe7\x61\x64\x72\xca\x17\x01\x5e\xea\xea\xc3\x0b\x5b\x22\xf0" + "\x07", + "\x95\x38\xe5\xa7\x0a\xc3\x3a\xd0\x92\x4f\x03\x8b\x34\xd1\x99\x5b" + "\x7b", + "\xcb\x45\x9d\x32\xbb\xfd\x20\x93\xeb\x4d\x79\x33\xd5\x0f\xfa\x27", 1, + 64, 136 }, + { 80, 192, 128, 250, + "\x17\x2f\x22\xf2\xe5\x93\x64\xdc\x41\x8c\xd7\x51\xdf\xa8\x44\x4a\xe1" + "\x86\x44\xc0\xf9\xa2\xbe\x84", + "\xbf\x90\x26\xd3\xdd\xaa\x37\xe7\xf1\x80", "", "", "", + "\xb0\x77\xff\x4f\xad\x9f\xf4\xa9\x4b\x6d\xe8\xa6\x6b\xa5\xb1\x6c", 1, + 0, 0 }, + { 80, 192, 128, 251, + "\x14\x3e\xfb\xf8\xe0\x29\x3d\xd4\xc1\x31\x59\xcf\x26\x0e\xc5\x91\xf5" + "\xf9\x2b\xb3\xaf\x8d\xd8\x63", + "\x11\x1a\x95\xbb\xb6\x0f\x9a\x3b\xba\x53", + "\x51\xc1\x46\x78\xc4\x54\x47\x77", "", "", + "\x09\x9a\x7c\x50\x90\x44\x3c\xd4\x00\x0f\x97\x0d\x42\xbc\xd1\xd5", 1, + 64, 0 }, + { 80, 192, 128, 252, + "\x4c\x41\x10\x4d\x3f\x40\x26\x5f\x9e\x35\xc3\x20\xa0\x1e\x78\x76\xc3" + "\x14\x00\xa0\xbd\x4d\x70\x92", + "\x85\xfd\xc8\x1a\xfd\x9f\x48\x28\x17\x7e", "", + "\xba\x7c\xd0\x7d\xfd\x8b\x5c\xf6\xff\xd3\xdd\xb7\x63\x56\x12\xc6", + "\x38\x6b\x63\x4a\x5d\xef\x89\xdc\x73\x02\x72\x4a\xd1\x19\x21\xfe", + "\x4d\x79\x22\x01\xa9\x98\x88\x94\x57\xb4\xc8\x3c\xab\x0e\x5c\x35", 1, + 0, 128 }, + { 80, 192, 128, 253, + "\x8b\xcd\xe5\x17\xdd\xc6\x3f\xed\xcc\xe8\xe3\x41\x81\xf2\x35\x30\xf4" + "\x71\xd6\x85\x8c\x48\xdb\xf9", + "\xbc\x6d\x51\xde\x0c\x0b\xe7\xc4\x59\x11", + "\xe0\xb3\xfb\x36\xc7\xb1\x63\x41", + "\x1b\x42\x19\x8b\x4a\xc0\x82\x24\xe1\xe7\x61\xa7\x72\x05\xe3\x92", + "\xe8\x73\x99\x72\xe4\x18\x0e\x2e\x52\x01\x21\xd8\xe9\xaa\xd7\xc5", + "\x6c\xee\xbc\x4d\x20\x29\x45\x38\x3e\x51\x1b\x7c\xad\xde\x56\x95", 1, + 64, 128 }, + { 80, 192, 128, 254, + "\x37\xf0\xb2\xd5\x3d\x52\x40\x7e\xb9\xff\x33\x53\x0e\x74\xb4\xed\xf5" + "\x82\x5a\x7b\xb3\x7c\x3d\xc5", + "\x30\x3c\x76\x67\x53\x01\x1b\x63\x55\x44", "", + "\x1e\xd6\xab\xb2\xf2\x83\xca\x7f\xde\x5d\xe6\x62\xbd\x70\x58\xa1" + "\xea", + "\x75\xa6\x0d\xf0\x77\x8e\xb9\x3a\x34\x07\x2e\x74\xfa\x3d\x6b\x02" + "\x24", + "\xf2\x7b\x8c\xfc\x5b\xf5\xd2\xb4\xb4\xd9\x3f\xb5\x84\xd7\x19\xff", 1, + 0, 136 }, + { 80, 192, 128, 255, + "\x10\xa7\x79\xb2\x45\x74\x1f\x1a\xb3\x12\x4e\x0e\x50\x4f\xdc\xd3\x15" + "\x78\x4c\x67\xd0\x13\x6f\xcb", + "\x75\xa4\x46\x16\xee\x96\xb3\x0c\x9e\xca", + "\x4c\x5d\x64\x71\xac\x20\xdf\x18", + "\x57\xd7\x58\xf9\x24\xa6\xea\xef\xe4\xd2\x62\x59\x31\xfc\x84\x71" + "\x07", + "\xab\x7b\x01\xba\x57\xed\xc0\xa4\x1b\x19\x0f\xb0\xf1\xd5\x18\x6c" + "\xd5", + "\x3e\x93\xa4\x5f\x5c\xc5\xea\xca\x21\x50\xdb\x35\x34\xa8\x90\x3e", 1, + 64, 136 }, + { 88, 192, 128, 256, + "\x87\xc5\x5b\x2f\x18\x5f\x17\x7f\xaa\xf4\xb1\x6d\x93\xaf\x6d\xad\x47" + "\x71\x46\x34\x5d\x0e\xa9\x92", + "\x09\x46\xc6\x99\x53\xf4\xb9\x52\xbc\x7c\x23", "", "", "", + "\x25\x7b\x3c\x59\x7e\xcd\x1d\x67\xc3\xdd\x35\xdc\x70\xc6\x8e\x48", 1, + 0, 0 }, + { 88, 192, 128, 257, + "\x82\x07\xe8\xd5\x7d\xcc\xdf\x54\x80\xf7\x02\xc1\xfa\x72\xd0\xc6\xd0" + "\x2f\x1b\xad\xc6\xfc\x08\xc5", + "\xc1\x8e\x46\xa7\x0c\x59\x29\x80\xa2\xcc\xc2", + "\xbd\x2e\x2a\x9d\xa3\x2a\x9d\x67", "", "", + 
"\x11\x45\xc2\x0b\x7f\x31\xd5\x7d\x45\x8a\xfc\x65\x0a\x6d\x45\x90", 1, + 64, 0 }, + { 88, 192, 128, 258, + "\x2c\x2f\x7e\x8b\xb7\x5b\xa9\x31\xa7\x11\xec\xa4\xd3\x19\xe1\x9a\xd8" + "\x97\x67\x24\x8f\xee\x53\x60", + "\x21\xa1\x04\x56\x47\x0d\x08\x3c\xa7\xbd\x7c", "", + "\x1e\x9f\x46\x74\x41\xe4\x87\xbf\x68\xd1\x0b\xe8\x53\xb2\x44\x79", + "\x18\x6b\x50\xf0\xed\xf7\xb5\x23\x02\x13\x84\xf5\xd8\xc0\x90\x49", + "\x8d\xb0\x12\x36\xb7\x15\xa7\x64\x32\xfc\xb0\x2c\xff\x2f\x6b\xa6", 1, + 0, 128 }, + { 88, 192, 128, 259, + "\x0a\x3b\xf0\xa9\x26\xfb\x14\xa3\xb7\x16\xbf\xa0\x21\xa2\x08\xda\x03" + "\x30\xe5\x7b\xed\x36\x82\x8a", + "\xa0\xba\x36\xed\xc4\x3d\x93\x5e\xe9\x42\x13", + "\x3f\xcd\x93\xcc\xb8\xe9\x79\x56", + "\x9a\xb6\xc1\x09\xc8\x06\x9d\x05\x4c\xcb\xb5\xc3\x3c\x6e\x70\xd0", + "\x27\xd1\xe3\x53\x23\x37\x55\xa2\xfe\x28\x23\x16\x37\x73\x9c\x46", + "\xb9\xb7\x47\x65\xed\x3d\x53\x03\x1b\xf3\xc7\x34\x9e\x74\x34\x0e", 1, + 64, 128 }, + { 88, 192, 128, 260, + "\xa6\x8d\xfe\x22\xce\xf2\xbf\xf0\xf2\x8d\x4b\x68\xc2\xa6\x93\x8b\x16" + "\xdc\x21\x09\xba\xb0\x9c\x38", + "\xfe\x76\xa0\x3b\x77\x0b\x43\x1d\xc6\x87\x2b", "", + "\xf5\x56\x91\x55\x30\x58\x00\xbc\x94\x18\x4b\x1e\xf1\xc1\x52\xe1" + "\x97", + "\x34\x28\x2b\x16\x48\x9e\x7b\xc7\x13\x6a\x24\x98\x32\x8b\xd2\x2e" + "\x76", + "\xe9\xb7\xae\x57\xe2\xb7\xf6\x0d\x09\xf5\x0b\xed\x23\xb9\x34\x38", 1, + 0, 136 }, + { 88, 192, 128, 261, + "\xff\xe5\xd2\xad\x71\xd4\x32\xd6\xcd\x5f\x10\x72\xec\x2a\xcb\x7d\x7c" + "\xde\x9c\x5c\x61\x5d\x0e\xb9", + "\xea\x21\x98\x30\x74\x02\xa1\x06\xea\x42\x93", + "\xf9\x3d\xb4\xf4\xae\xc8\xaf\xe8", + "\xc9\xdb\x4d\x10\xd4\x23\x40\xac\x73\x62\x71\xed\xf9\xf6\x58\x1c" + "\xe8", + "\xbd\x96\x0c\x7f\x60\x39\x2e\x8d\x0a\xfb\x28\xba\x16\xea\x63\xaf" + "\xd3", + "\xf2\xca\x88\xd5\x49\xdf\xf2\x07\xd9\x79\x75\x6d\x13\xe8\x65\xd9", 1, + 64, 136 }, + { 104, 192, 128, 262, + "\xd4\x65\x54\x4b\x5d\xb9\xeb\x1c\x49\x5c\xaf\xaf\x5d\x9a\xc7\xe1\x0f" + "\xaa\xe7\x45\x41\xa0\xa7\x18", + "\x95\x82\xaf\xc3\x05\x56\xca\x12\xd1\x54\xc4\x2f\x03", "", "", "", + "\x37\xf6\x18\xf8\xbb\x7f\xf8\x5e\xc6\x44\xb1\xcb\xcc\xa4\xc2\x8a", 1, + 0, 0 }, + { 104, 192, 128, 263, + "\x6f\x1c\xce\x6a\x35\x3a\xa4\x5f\x92\x6f\xac\xbb\x68\x65\xd3\x59\x82" + "\x60\xdb\x53\x90\xe9\x37\xad", + "\xae\x0f\xe0\x77\x39\x85\x87\x74\x7a\x64\x2e\x54\x22", + "\x32\x66\x99\xf5\x6a\xc2\x8d\xef", "", "", + "\x07\x52\x0b\x38\x4c\xcc\xbb\x80\xb9\x67\x9a\x0e\xf4\x8c\xb6\xa8", 1, + 64, 0 }, + { 104, 192, 128, 264, + "\x36\xf9\x7a\x97\xd1\xdd\x67\xe5\xf8\x3c\xcb\x52\x9d\xa2\x5a\x60\x4b" + "\x68\xb8\xda\x90\x4f\xe3\xf6", + "\x23\x93\xa0\xa0\xe0\xb8\xef\xdd\x59\xdb\x34\x36\xdc", "", + "\xc0\x2f\x28\x77\x32\x33\xff\xca\x81\x2e\xaf\x1c\x94\x6c\xd8\xd1", + "\xd4\x12\x86\xc4\x61\xfb\x65\xd4\x10\x66\xa1\x03\x88\xeb\x69\xc8", + "\x07\x36\x96\xdf\xfa\x20\x71\x44\x00\x14\xdf\xac\x4c\x6c\xad\xb2", 1, + 0, 128 }, + { 104, 192, 128, 265, + "\xf2\x58\xd3\x3f\x1f\x1f\x3a\xad\xe5\x10\x3d\x56\xc4\x35\x7b\x7a\x4f" + "\x8d\xd2\x05\xe4\x60\x65\x8e", + "\x14\xa9\xed\x95\x39\x52\x5f\x54\x0d\x9a\x46\xaf\x69", + "\xb6\x71\x96\xee\x87\x89\x0f\x55", + "\xf2\x22\xfd\xfd\x34\x3b\x57\xa7\x0d\x00\x2d\x14\xa3\x9c\xae\x59", + "\xd2\x7c\xda\x7a\xda\x56\x38\xdb\x59\x94\x5a\x31\xd9\x3e\xf2\x43", + "\xc2\xc6\x4d\xd8\xb0\x8e\x7b\x4b\x1c\xe6\x0d\x5b\x96\x83\x29\x89", 1, + 64, 128 }, + { 104, 192, 128, 266, + "\xee\x79\x41\x97\xf2\x0e\x64\x3c\x38\x77\xad\x08\x5f\x03\x1c\x75\x0f" + "\xf2\x32\x56\x8e\x53\xd7\xc3", + "\x31\xea\xac\xed\x4a\x01\x42\xb6\x45\x5c\xf7\x16\xe9", "", + 
"\x77\x2c\x05\xb2\x37\x7b\xe0\xb3\xbd\xfd\x9a\x35\x7c\x27\x66\x08" + "\xb0", + "\xe0\x6e\x98\xbf\x61\x2f\x13\x44\xfa\xc0\xc3\xef\x8d\x3a\x65\x6b" + "\xa0", + "\xfc\x26\xa2\x6f\x23\x14\xe5\xa2\x79\xc2\xc7\xd0\x7c\x04\x45\x85", 1, + 0, 136 }, + { 104, 192, 128, 267, + "\xb1\xbb\xea\xe5\xfb\xa3\x04\x41\xe1\x2b\x1a\xd2\xf7\x4e\x27\x2b\xc2" + "\x05\x22\x1f\xe3\x4a\x34\x95", + "\x99\x6c\xf4\xb0\xed\xed\x6a\xf6\x6c\xef\xfb\xe8\xb7", + "\xb3\xc3\x37\xb6\x58\x59\x6f\x4c", + "\x39\x11\x39\x00\xd2\x87\xd9\x0c\x54\x01\xd2\x19\xaa\x52\x82\xb9" + "\x1e", + "\xd1\x92\x8c\xe8\x58\x77\xf1\xd1\xfd\x56\x96\xe5\x6b\xb5\x05\x91" + "\xe7", + "\x7b\x85\x27\xe9\x81\x92\xd7\x11\x1d\xaf\xff\x55\x17\x82\xf7\x01", 1, + 64, 136 }, + { 56, 256, 128, 268, + "\x49\x18\x28\xf2\xdd\xdc\xff\x5f\x96\x6e\x66\x27\xf4\xb6\xa8\x5a\x2e" + "\xa7\x6f\xd1\xe0\xb6\x11\x7a\x13\xe9\x4d\x0e\x81\xc0\x63\xa5", + "\x4f\xee\xdf\x9d\x9c\x07\xe0", "", "", "", + "\x1c\xa0\xa4\x18\xf3\x37\xa4\xc0\x4f\x21\x23\xfe\xfd\x31\x79\x6d", 1, + 0, 0 }, + { 56, 256, 128, 269, + "\xa5\x79\x05\xb9\xeb\x31\xfb\xb1\xcc\x53\x96\x39\xe6\x70\xb2\xf1\xd1" + "\x2e\x27\x71\x39\xb5\x1a\x09\x8c\xfe\xbc\x18\x20\xfb\xa1\xa4", + "\x27\xcf\xf7\x6e\x28\xc6\x13", "\xdc\xd2\xf8\x4e\xd0\xea\xfa\xd0", + "", "", + "\x0b\x21\xf5\x0e\x20\x6c\x07\x21\xc6\xc0\x59\xf9\x20\x7e\x6d\x3a", 1, + 64, 0 }, + { 56, 256, 128, 270, + "\x55\xfa\x96\xeb\x3c\x94\x5c\xab\x67\x6c\x42\xb8\xca\xac\x34\xa1\x71" + "\x7d\x43\x37\xf4\xc9\x08\x06\xb2\x26\xd5\x68\x12\x1e\xc5\xe0", + "\x20\x97\x98\x00\x6d\x01\x2c", "", + "\x34\xef\x60\x3e\x3c\x8f\x93\xa0\xe4\xa4\x77\x3f\x7b\x57\xac\xea", + "\x79\x7c\xa0\x5b\x20\xa1\x49\xd4\x2e\x5a\xb3\x38\x35\x85\x5b\x5c", + "\x81\x91\xbd\x25\x4a\x6e\x98\x6e\x3c\x22\xe8\x10\x68\x94\xd6\x4f", 1, + 0, 128 }, + { 56, 256, 128, 271, + "\xe0\xd8\x2f\x60\x88\xec\x67\x5d\x92\xec\x6b\x44\xa6\x7d\xc6\xeb\x66" + "\x00\xf1\xb7\x42\xbd\xd5\xa8\x51\xb0\x36\xaf\x02\xee\xf8\x25", + "\x06\xed\xf6\xab\x0c\x7a\x92", "\xe9\x8f\xdd\x29\x22\x91\xdd\x01", + "\x5b\xb3\x63\x92\x65\xc8\x56\x3e\x6f\xb7\x38\xbe\xd8\xc8\x53\x2c", + "\xcb\x25\x13\x41\x7f\x9c\xb5\x46\xd7\x38\x30\xb9\x19\xb2\xcb\x33", + "\xd3\xc0\x6c\x16\x14\xf7\xca\x3b\x09\x52\xd6\x7a\x5b\xd0\xd0\x17", 1, + 64, 128 }, + { 56, 256, 128, 272, + "\x12\x36\x80\xa3\x5c\x43\xcf\x61\x8c\x69\xf2\x81\x29\x81\x99\xe5\x4e" + "\x40\x08\x0e\x16\x57\x7f\x31\x0f\x09\x6e\x36\x7e\xe3\xcd\x40", + "\xfe\xea\x3f\x0f\x2d\x0e\xca", "", + "\x33\xee\x63\x0f\x34\x58\x8d\xc6\x8f\x8f\x43\x9f\xa3\x19\xf4\xef" + "\x1e", + "\x0b\x9f\x38\xde\xa9\xdd\x82\x65\x6f\xc1\xc2\xe1\x65\x1b\x12\xe1" + "\xc1", + "\x03\x79\x8b\x2b\xaa\xf5\xaf\x45\xe6\x7a\x71\x6c\x7b\x2a\x2a\x17", 1, + 0, 136 }, + { 56, 256, 128, 273, + "\x7b\xeb\x1e\x06\xb5\x85\xfa\xda\x87\x5f\xc6\x10\xc3\xcb\xfb\x97\x88" + "\xfe\xa2\x91\x43\x64\x10\x48\x7d\x8a\x84\x4c\x21\x7d\xfb\xb7", + "\x37\xaf\x09\x74\xce\x28\x51", "\x23\x30\x13\xd9\x17\xf3\xad\x76", + "\xe0\x39\x63\x76\xc6\xe7\x4a\xaf\x27\xf9\x33\xb6\xd5\x9f\x1b\xcf" + "\x8c", + "\x81\xf8\x49\x9c\x64\xed\x65\xe4\xd9\x96\xf8\xb2\xc6\x48\x4d\xe1" + "\xe2", + "\x6d\xb4\xb7\xc5\x5f\xcc\xa5\xfe\xde\xe9\x71\xa4\xa1\x22\xbb\xfe", 1, + 64, 136 }, + { 64, 256, 128, 274, + "\x61\xba\x69\x48\x97\x92\x5d\x1b\x41\x74\xd4\x04\x01\x46\x9c\x3e\xf2" + "\x67\xcd\xb9\xf8\x29\xed\xb1\xa1\x06\x18\xc1\x6d\x66\x60\x59", + "\x0d\x10\xc5\xc8\x4b\x88\xd6\x88", "", "", "", + "\x8e\xc7\x1f\x7d\xaf\x93\x5e\xdf\xac\x9d\xe9\x68\xf1\xd7\x64\x77", 1, + 0, 0 }, + { 64, 256, 128, 275, + "\x30\xb7\x84\x51\x11\x93\x55\x5f\x16\x11\x23\xac\xad\x2f\x18\xae\x3b" + 
"\xde\x91\x2e\xa9\xcc\x4a\x9e\x55\x31\x6d\x82\x2e\xce\x96\x52", + "\x0f\x1d\x38\xc6\xf3\x0b\x44\x75", + "\xd2\xcd\xd6\x22\x80\x88\x8f\xe5", "", "", + "\x02\xfc\xa4\x1f\x06\xb8\xc5\x43\x80\x84\x44\x0f\xf4\xce\xa5\xc8", 1, + 64, 0 }, + { 64, 256, 128, 276, + "\x11\x58\x84\xf6\x93\xb1\x55\x56\x3e\x9b\xfb\x3b\x07\xca\xcb\x2f\x7f" + "\x7c\xaa\x9b\xfe\x51\xf8\x9e\x23\xfe\xb5\xa9\x46\x8b\xfd\xd0", + "\x04\x10\x21\x99\xef\x21\xe1\xdf", "", + "\x82\xe3\xe6\x04\xd2\xbe\x8f\xca\xb7\x4f\x63\x8d\x1e\x70\xf2\x4c", + "\x74\xc3\xb0\x03\x22\xc0\x91\x60\x80\x37\xd4\xa8\xeb\x5a\xfb\xec", + "\xa0\x98\xb6\x7a\x2c\x79\xdd\x93\x94\x72\xa1\x85\x02\x63\x27\x01", 1, + 0, 128 }, + { 64, 256, 128, 277, + "\xa6\x74\x2d\xd3\x38\x7b\x1e\x11\xdc\x00\x48\x34\x71\x20\xf9\x17\x6d" + "\xff\x30\x29\x5c\x03\x41\xd6\x9b\xc2\xde\xac\xe1\x93\x3f\xd8", + "\xbd\x3a\xbd\x10\x1a\x6c\x62\x5e", + "\x61\x51\x54\x63\xb6\x84\x95\xbd", + "\xb9\xbe\x89\xba\x08\xc5\x5a\xc0\x44\xb6\x10\x9b\xc4\xa1\xeb\x6b", + "\x2f\xbb\xa1\xef\x88\x55\x54\x5c\x67\xcf\xc5\x3e\xd4\x9b\x37\x24", + "\x1a\x82\xcc\x39\x05\x01\xd2\x99\x15\xc2\xc1\x9a\xf0\xb5\xae\x53", 1, + 64, 128 }, + { 64, 256, 128, 278, + "\xc4\x3a\x2f\xa6\xd3\x71\x17\xc1\xad\x70\xcf\x07\xcd\x5d\x60\x7c\x91" + "\x3c\xa8\xfa\x55\x84\x80\xaa\x0a\x24\x13\xe3\xd6\xe8\xb1\xaf", + "\x83\x9a\xe2\x4f\x13\xf2\xea\xba", "", + "\x2c\xf8\x40\x59\x46\xbb\x72\x3d\x40\x66\x62\xa3\x1d\xff\xfd\x51" + "\x41", + "\xdb\x8c\x02\xd3\x79\x87\x60\xbb\x40\x38\xd3\x70\xab\x6a\x93\xe4" + "\x51", + "\x3d\xd4\x24\xa6\x17\x50\x2b\x64\x48\x4a\x88\x95\x7f\xf0\x94\xa7", 1, + 0, 136 }, + { 64, 256, 128, 279, + "\xf9\x48\x9d\xda\x8a\x08\xab\x83\x3f\x2a\x65\x8f\x3e\x42\x5a\xd6\x77" + "\x07\xb0\xa5\x29\x11\x08\x16\x22\xe0\xe7\xef\x90\xa3\x3e\x84", + "\x6a\x1b\x55\x7a\x0f\x47\x08\x22", + "\xec\x6c\x76\xbc\xee\x1e\xbc\x6b", + "\xa5\xd3\x97\xbe\xbe\x7a\xc5\x70\xd2\x39\x93\x90\xe8\xf0\xec\xb2" + "\xb6", + "\xa4\xe3\xe0\x95\xa2\x00\x41\xae\x21\x7a\xcf\xfd\x45\x5a\x74\x2d" + "\xb5", + "\x31\x7b\xc9\xf1\xb5\x20\xe9\x8e\xd8\x82\x0d\xd2\x40\x29\xab\x52", 1, + 64, 136 }, + { 72, 256, 128, 280, + "\x66\xf6\xd7\x9b\x72\x3c\xcd\x31\x36\xd2\xcf\x78\x8f\xc5\xb1\xc2\xf4" + "\xb9\x84\x63\xa5\x7a\xe4\xdd\x29\xf3\x88\x8a\xba\x37\xd0\x86", + "\x0a\x0a\xab\x42\x30\xfc\x3e\xe8\xca", "", "", "", + "\x2e\x59\xd7\xb1\xd1\xad\xa4\xc5\xf4\xc7\x4b\x35\x39\x66\x87\x99", 1, + 0, 0 }, + { 72, 256, 128, 281, + "\xdf\x89\xe9\x4e\x19\x79\x57\x6e\xb8\x6b\x48\x19\xc9\x02\xaa\x5c\xdd" + "\xfd\x14\xe0\x22\x45\x48\xc0\x35\x31\xea\xa7\x9e\x9a\x22\x64", + "\x39\xd9\x12\xcc\x1c\xd3\xde\x7f\x18", + "\xfb\x13\x08\xe9\x08\x2d\xba\x57", "", "", + "\xdd\x8b\x28\x4b\x1b\xa7\x18\xff\x14\x9b\x29\xc0\xbe\x62\xe7\x08", 1, + 64, 0 }, + { 72, 256, 128, 282, + "\x3e\x67\x83\x07\x50\x9e\xa3\x1e\xd5\xf3\xbe\x53\x2b\xa6\x1a\x4f\x03" + "\xbc\x8e\x13\x75\x11\x36\x41\xd1\x09\x98\xb5\x0d\x1e\x42\xe3", + "\xc0\x63\x66\x67\xb3\x31\xa0\x81\x13", "", + "\x32\x0d\x03\x28\xd2\x16\x4a\xfc\xfb\x89\x92\x65\x93\x8b\xae\x67", + "\x7c\x56\x07\x16\x00\xb1\xa3\xc6\xd8\x7a\x4e\xd8\xbe\x56\x18\x7a", + "\x58\xc8\x51\x26\xd5\xa5\x29\x1b\x48\xa9\x39\x55\x6c\xa8\xf3\xd6", 1, + 0, 128 }, + { 72, 256, 128, 283, + "\x7c\x9b\x18\x43\x5f\x55\x63\xe0\x35\x05\xa6\xf5\xed\xfc\xb1\x04\xde" + "\xda\x40\xec\x89\x99\x8f\x68\x16\xe1\x08\xda\x97\x04\xcd\xda", + "\x98\x63\xce\x13\x79\xa0\x6a\x5d\xef", + "\xf0\x0d\xc0\x5b\xd0\x00\xfc\x70", + "\xb0\x37\x68\x45\xc0\x26\x97\x93\x5f\x91\x43\x98\x55\x5e\xc4\x27", + "\x49\xd7\xd0\xb7\x96\xd1\x6d\xff\x6d\x0f\x7a\xab\x8c\x02\x27\x76", + 
"\x6a\x25\x90\xae\xc1\x3a\x76\x5e\xd7\x73\xf4\xde\xd0\xf1\x21\x86", 1, + 64, 128 }, + { 72, 256, 128, 284, + "\x94\x33\x11\xd4\xa1\xf7\xd2\x11\x08\xcc\xce\xe9\x40\x35\xdd\x71\x7f" + "\xc3\xab\x41\xd7\x3c\x36\xc2\xff\xbc\x01\x7f\x82\x22\xe8\x57", + "\x82\x33\x9e\x77\x61\x51\x3c\x74\xa7", "", + "\xb6\x8f\x03\x3c\x45\xc6\x72\xb6\x96\xc0\x32\x07\x67\x4b\x39\x5b" + "\x89", + "\xc7\xa2\xfe\x3a\x9f\xcf\x3b\x0b\x5a\xd8\xdc\xc3\x00\xc4\x9b\xa4" + "\x85", + "\x26\xa6\xc3\x71\x43\x86\xa9\x70\x56\x02\x0d\x4b\xf2\x4f\x7a\xa4", 1, + 0, 136 }, + { 72, 256, 128, 285, + "\xb3\x6a\x33\x80\xf9\xbf\xce\x99\x2d\x15\x5e\x18\x47\x3e\xef\x8c\x7e" + "\xee\xd8\xc4\xfe\x8f\x54\x47\xa5\x5f\xfe\x88\xdd\xf3\xbb\x9e", + "\xa2\x15\x98\x49\xb3\x9d\x86\x28\x52", + "\xbd\x75\x19\x2f\xba\xcb\xef\xe6", + "\x3e\x27\x32\x60\x92\x43\x55\xf5\x94\x89\x64\x60\x80\x87\x0f\x19" + "\xda", + "\x19\x5b\xcb\x64\xad\x94\x74\xf8\x3d\xd1\x65\x9d\x47\xc2\x2a\x02" + "\x82", + "\x80\xa8\x3e\x5a\x8c\xc7\x44\xbc\xb3\x22\xab\x07\x17\x39\x52\x93", 1, + 64, 136 }, + { 80, 256, 128, 286, + "\x44\xab\x20\x4d\x15\x0a\xdb\x17\xf8\x3d\x1e\x52\x05\xb6\xe1\x41\x96" + "\x73\xfa\xde\xe6\x10\xfb\x9a\x38\x18\x5a\x96\x74\x10\x21\xeb", + "\xff\x39\x14\x98\x2b\xe3\x0b\x3b\x21\x12", "", "", "", + "\xf5\x00\xce\xf3\x10\x41\x0d\x89\x40\xcf\x34\x90\xf5\xf3\xb5\xd7", 1, + 0, 0 }, + { 80, 256, 128, 287, + "\xdd\xee\x6a\x7b\x13\x1d\x31\x27\x5e\xc1\xcb\x35\x65\x4f\x9d\x25\xc3" + "\x94\x98\x0a\x1d\xda\x37\xf7\x0a\xf0\xfb\x62\xdd\x77\xa9\xde", + "\x06\xd8\x4b\xae\x11\x70\x8c\x42\x80\x23", + "\x04\xc1\x27\x1e\xf5\x2c\x04\x1b", "", "", + "\xba\xc4\x36\xba\x98\x5f\xdf\x3f\x14\x44\x6b\x92\xdd\xf3\x5d\xd1", 1, + 64, 0 }, + { 80, 256, 128, 288, + "\xd3\xb4\x4b\x8d\xfc\x35\x30\x40\x4a\x63\xb3\xca\x04\xcc\x71\xcf\xc7" + "\x1a\x55\x38\x44\x8b\x26\x25\xc9\x81\x85\x6c\xb7\xda\xed\x0f", + "\x7c\x3c\x42\xfa\x17\x34\x7e\x1d\xf7\x97", "", + "\x1d\x17\x75\x57\x96\x56\xf7\xf6\xc6\x89\x14\x01\xd7\x33\xe2\xab", + "\x61\xd6\xde\xba\x72\xb4\x1e\x9d\xa6\x25\x9f\xa8\x05\xd7\x7e\xb9", + "\x51\x3f\x57\x31\xbd\x00\x0c\xe6\x8a\x6e\xaa\xdf\x3c\x92\x53\x5a", 1, + 0, 128 }, + { 80, 256, 128, 289, + "\xfe\xec\xec\x22\x5f\xcf\x20\x09\x38\x18\x88\x09\x94\xfe\xda\xd5\x3d" + "\xc0\xc1\x74\x3a\xa9\x96\x71\xce\xfe\x29\x29\xa5\x03\xe0\xc6", + "\xab\x8d\x23\x83\x0b\x91\xdc\x6a\x89\x8e", + "\x24\x88\x9b\x2e\xf1\x2a\x31\x8a", + "\x7b\x70\xe1\xbf\xe1\xa7\x76\xe8\xf4\x4c\xa4\x32\xdd\x9e\xf9\x99", + "\x46\xf6\x07\x30\xdb\x04\x13\x36\xcc\x05\x1d\x4e\xf4\xde\x02\x9d", + "\x77\x17\xad\xcf\x7d\x3e\xd0\xf7\x87\x8e\x1a\xc3\x3a\x35\xcb\x47", 1, + 64, 128 }, + { 80, 256, 128, 290, + "\xcf\xb7\x33\x08\xa8\x30\x90\x16\x1f\xed\x74\x33\x68\xf5\x48\x08\x72" + "\xea\xa6\x2d\xf5\xa8\xea\x07\x7d\xee\x54\x0f\xd5\xa2\xef\x15", + "\x1d\x37\x5b\x8e\x07\xc3\xc4\xde\x82\xf8", "", + "\xfa\x0b\xc3\xe2\xcb\x70\x18\x3c\xd5\x6f\x47\xfa\x12\x91\x30\x1f" + "\x47", + "\xd9\x2c\x03\x75\xa4\xdc\xb1\x84\xbc\x90\x25\x15\x85\x06\x1d\xb0" + "\x36", + "\xa0\xdd\xef\x0c\x6b\xd9\x4f\xcd\xec\x39\xa7\xf0\x7f\x0a\x2c\x13", 1, + 0, 136 }, + { 80, 256, 128, 291, + "\x81\x5b\xda\xab\x3c\x18\x7a\xd7\x3b\x12\x7c\x8d\x39\xa1\x33\xb4\x1b" + "\x66\xe2\x99\xba\x24\xfb\x44\x6e\x35\xe3\xb1\x12\xdb\x8e\x66", + "\x0c\xd0\x55\x1f\x0c\x74\x17\x60\xb7\x47", + "\xc1\xd0\x1e\x47\x31\xf3\x60\x66", + "\x55\x87\x69\xc6\xd4\xf5\x0a\x0d\xb6\x20\xc2\x3f\xe1\x07\xa7\xfb" + "\xe4", + "\xc1\xce\x19\x2f\xb6\x71\x89\x2b\xb8\x3b\xd2\x2d\xbc\x82\xd6\x40" + "\x82", + "\x4e\x0a\x0b\xe1\xaa\x0e\x75\xcd\x7b\xb1\xdc\xb2\x7a\x01\x09\x10", 1, + 64, 136 }, + { 88, 256, 128, 292, + 
"\x46\x94\x8c\x6d\x69\x84\x54\x99\x10\x4d\x5d\xc2\xfa\xe4\x48\x80\xcf" + "\xe7\xd0\xbc\xbc\xee\x57\xef\xc0\x13\x3c\x26\x6b\x6d\x26\x21", + "\x05\xf7\x15\xfd\x0a\x56\x03\xdd\x84\xaf\x76", "", "", "", + "\x11\xe6\x78\xc0\xb2\x60\xc5\xa3\xea\x70\xf6\xa4\x6e\x4e\xf4\x36", 1, + 0, 0 }, + { 88, 256, 128, 293, + "\xe6\x89\xeb\x77\xa5\x78\x39\x9f\xa1\x7a\x75\x08\x3d\x25\x01\x8f\xfb" + "\xb6\x8f\x24\xd7\x7a\x02\x97\x57\x54\x1d\x65\x39\xbf\xfb\xdf", + "\x4a\x41\x96\x18\xb2\x83\x2d\xe7\xa4\xf9\x9b", + "\xec\xf4\x6e\xac\xa8\x41\x76\x9f", "", "", + "\x4a\x26\x15\xd2\xd5\xf8\xe9\x7b\x92\x74\x3b\x0e\xf2\xf4\x86\xab", 1, + 64, 0 }, + { 88, 256, 128, 294, + "\xaa\xcb\x13\x36\xd6\xee\x4d\x96\xa9\xa1\x2e\x5b\x8f\x25\xf0\x48\x00" + "\xd4\xaa\xba\x55\xf3\x79\x21\x8d\x64\xed\xb3\x46\x0f\xe2\x15", + "\x99\x4c\x19\x1e\x7a\x29\xc0\xef\xc1\xeb\x4d", "", + "\x7b\xea\xd5\xab\xcc\xc8\x76\xef\xb0\x10\x9e\x41\x2f\x06\xc7\x51", + "\x5a\xa3\x1a\xa5\xeb\x10\x36\x55\xb7\x8c\x4f\x7b\xcf\x08\xc9\x17", + "\x64\xe4\x1d\x31\xeb\x0d\xf8\x0a\xdc\xee\x32\x8f\x08\x1c\x4a\xeb", 1, + 0, 128 }, + { 88, 256, 128, 295, + "\x60\xd6\x84\x1e\x9e\x62\x18\xa2\xc8\x60\x5a\x77\x94\xe7\x4f\xb2\x15" + "\xdc\xf3\xa7\x0a\x00\x15\xd4\x97\xed\x16\x56\x4f\x2a\x83\xa0", + "\x4c\x93\xf5\x91\xaf\x92\xf1\x65\x96\x55\x4e", + "\x7c\xb0\xeb\x9a\xa2\x1f\xe8\x59", + "\xc0\xd1\xe6\x35\x58\x6b\x0e\xf8\x35\xc0\x14\x79\xa3\x21\x75\xa3", + "\x0c\x26\x61\x13\x54\x4d\x7a\x90\x1c\xe7\x21\xe1\xea\xd6\xd8\xf9", + "\x8a\x14\x9e\xaa\x05\xc8\x72\x2b\x26\x63\xc3\x45\xa6\xa5\x41\x8c", 1, + 64, 128 }, + { 88, 256, 128, 296, + "\x9d\xcd\xe5\x7c\xec\x27\xde\x6b\x58\x4d\xb4\xbd\x81\x09\x35\xbd\x3b" + "\x3c\x4e\xa8\xf2\x2a\x16\xa7\xb2\xa6\x2e\xf5\x16\x79\xa1\x3b", + "\x5c\x34\x39\xbd\xca\x45\x7b\x02\xb0\x49\x25", "", + "\x40\x2c\xb1\xfd\x3b\x07\x96\x20\x0d\x88\x8f\x7b\x39\x92\x35\xf5" + "\xb5", + "\x43\xf7\xd9\x9c\x1a\x0e\x50\x4a\xad\xd8\xf8\xb2\x98\x1b\x4a\xea" + "\x52", + "\x95\x8e\x19\xb5\xc1\x4e\xeb\xd2\xd2\x55\x09\x33\x6a\xa6\xd4\xc2", 1, + 0, 136 }, + { 88, 256, 128, 297, + "\xb3\xa0\x6e\x00\x10\x0f\xfc\x42\xdb\xdd\x53\x17\xf4\x3d\x2b\x48\xf8" + "\xc1\x1e\x4b\xd6\xe9\xe3\xed\xab\x58\xe9\x94\x4c\x55\x92\x78", + "\xf3\xd2\x4b\x36\x2b\xf1\x2b\x84\xb8\xa6\x6a", + "\x02\x42\xb5\xe8\x04\xa7\x91\x88", + "\x03\xb0\x3b\x45\xf6\xf3\x20\xb9\x9d\x81\x58\xff\x8b\x00\xf0\xad" + "\x92", + "\xd1\x27\xfd\x42\xb2\xca\x4b\xec\xa9\xf9\xac\x86\xb6\x3a\x16\x22" + "\xef", + "\x8a\x49\x9a\xab\x9e\x8f\x40\x96\xda\x60\x3c\x6c\xcf\xb9\xac\x4a", 1, + 64, 136 }, + { 104, 256, 128, 298, + "\xa4\x99\x4b\x65\x14\x35\x36\x70\x7b\x15\x1e\xe6\xe7\x9e\x69\xab\x9c" + "\x6d\x73\x00\x08\x19\xfd\x29\x91\xdd\x28\xab\xed\xe6\xb3\xe8", + "\xac\x64\x44\x49\x72\xd7\x78\xd5\x2f\x55\x31\xae\x88", "", "", "", + "\x8f\x35\x30\xc8\xad\xf8\x6e\xbc\x6c\x44\x97\xce\xde\x15\xcc\xd9", 1, + 0, 0 }, + { 104, 256, 128, 299, + "\xf9\x9f\x27\x20\xf0\x3e\xc3\xa9\xd0\xda\xd3\x7e\x3a\x91\x5b\xea\x3a" + "\x11\xcb\xa4\xbb\x0f\x60\xcd\x8f\x54\x2b\x33\x01\x63\xbc\xd0", + "\x7e\x0f\x99\xa0\x48\xb6\xe2\x87\x97\x20\xfe\x43\x18", + "\x94\xba\x97\x7e\x74\x45\x5e\xd8", "", "", + "\xcd\xc3\xbe\xf3\x9e\xa5\x3a\xf6\x80\x19\x9e\x36\x26\x09\xdc\x29", 1, + 64, 0 }, + { 104, 256, 128, 300, + "\x50\x5b\x26\xd1\x66\xa6\xeb\xd3\xdb\x69\xcf\x12\xbe\xe2\x5b\x73\x65" + "\x1d\x0d\x33\x2d\x0f\xb2\x48\xb5\x0c\xe9\xa1\xfb\x3a\x13\xf1", + "\xfb\x04\xf7\xbb\x3c\xd3\x82\xcb\xc0\x89\x37\x19\xaa", "", + "\xae\x8b\xb1\xcb\xc9\x2c\x73\xe7\x3e\x59\xa0\xd7\xa9\xd7\xf5\x28", + "\x43\x35\xb7\x08\xf2\x7d\x1c\x1b\x4d\x6e\x98\x5f\x18\xab\xa7\xc5", + 
"\x06\x9e\xe6\xf5\x27\x9d\xab\x73\x59\x3e\x11\x44\x0d\x23\x9e\xb8", 1, + 0, 128 }, + { 104, 256, 128, 301, + "\xa6\x93\x8b\x2e\x56\xd5\xdc\x55\x66\x59\x56\x84\x0a\xc6\x90\xc8\xac" + "\x9f\xf4\x21\xcc\x06\x2f\xc3\x42\x09\xf7\x71\x5f\x2d\x52\x6e", + "\xad\x37\xde\x72\xd3\x52\x15\x46\xd5\xff\x51\x46\x2b", + "\x0a\x38\x09\xbc\x56\x3c\x66\x75", + "\x9d\x28\x6b\xcc\x11\x5f\x10\xb2\xca\xa8\xc5\xd8\xda\xa9\x1e\xc7", + "\x4e\xd4\xdb\xc8\xaa\x8c\xf6\x37\x50\x21\xd1\x5e\x43\xc1\xf6\xc3", + "\xbf\xba\x9c\x41\xec\x63\xaa\x29\x6b\x14\x46\xb8\x88\xb6\x25\x1c", 1, + 64, 128 }, + { 104, 256, 128, 302, + "\x45\xe5\x9f\x44\x29\x08\x73\x60\xb1\xb2\x40\xfb\x9b\x59\x1d\x86\x1a" + "\xd4\x93\x68\x8b\x0a\x5e\x8f\x85\xff\xea\x2a\xcf\xf8\x39\x3a", + "\x20\xe8\x93\xf4\x56\x2b\xc1\xc5\x6c\x32\xc0\x0c\xc3", "", + "\x3f\xbc\x33\x8a\xd7\xbb\xd6\x77\x8c\xab\xe1\x34\xa0\x2c\x68\xe5" + "\x3c", + "\x67\x63\x7f\x9c\x9f\x7d\xa4\x1d\x4b\x36\x37\xe3\xa0\x54\x36\x2b" + "\x30", + "\xb3\x46\xd7\x00\xcf\xec\x0f\x2d\x8e\x17\x6d\x3e\xb9\xc5\xbe\xc9", 1, + 0, 136 }, + { 104, 256, 128, 303, + "\x00\x4c\x6c\xa0\x40\x78\xbd\xfd\x55\x7f\x91\x50\x25\xa8\xec\x93\xb5" + "\x36\x8b\x86\xca\xf3\xd6\x57\x43\x2a\x5e\x1d\x1c\xef\x99\x17", + "\x99\x9d\x95\xed\xb9\x25\xe7\x74\x4e\x32\x87\x40\x09", + "\x08\x2c\x14\x33\xbb\x64\xe1\x10", + "\x05\x52\xfc\xb5\x2c\x49\x8d\x91\xb8\x98\x97\xae\x6f\x64\x0e\x1f" + "\x5a", + "\x5b\x47\x08\xb7\x2b\x68\x46\x66\x39\xe4\xb5\xf3\xd3\xda\x1d\x1e" + "\x84", + "\xf7\xf0\xc0\xe0\xc0\x1b\xf7\x72\xcb\xd9\x0c\xb9\x80\x93\xdf\xb8", 1, + 64, 136 }, + { 96, 128, 32, 304, + "\x10\x3e\x85\x9d\x3a\x23\x87\x24\xbf\x85\xb2\x10\x0f\x44\x2f\x1d", + "\xf9\x1d\x64\x78\x41\x61\xfa\xbd\x6c\x96\x2e\x50", "", "", "", + "\x2b\x31\xc2\x1b", 1, 0, 0 }, + { 96, 128, 32, 305, + "\x71\x46\x6d\xc3\x04\x6b\x1e\x6c\x08\x38\xba\x6c\x9e\xf4\x1e\x79", + "\x29\x28\x09\x5b\xd7\x96\x2e\x9e\x60\x24\xa2\xb9", + "\xa6\x17\xcc\xe7\x4d\x04\x39\x90\x05\x97\xcb\x3d\xdc\xfc\x25\xfb", + "", "", "\x38\x3f\x8a\xbc", 1, 128, 0 }, + { 96, 128, 32, 306, + "\x3c\xc9\x38\x04\xe2\xd6\x99\x61\x92\x78\xa9\x41\x38\x9c\xec\x3c", + "\xc7\x75\xdd\xa3\x14\xaf\x64\xc3\x10\xa7\xc1\xd3", "", + "\x12\x4c\xe7\x1e\x08\xc1\x32\x4f\x91\x65\x70\xd5\x33\x03\x29\x19", + "\xf2\x46\x75\x4c\xd3\x2a\x99\x60\xd3\xd5\xe5\x35\x2f\x1d\x73\xc7", + "\x60\xdb\xd6\x76", 1, 0, 128 }, + { 96, 128, 32, 307, + "\x90\x89\xe1\x78\xf3\xf9\x0b\xfc\x0f\x68\xe5\x59\xd3\x38\xc3\x9d", + "\x8b\x2b\xfc\xa6\x47\x75\xb5\x09\x35\xb4\x82\x21", + "\xca\x9b\x40\x50\xb6\xbd\x0f\x0e\xba\xef\xfb\x78\xf2\x4a\x41\x1f", + "\x33\xd9\x02\x09\x3b\xa5\x21\x69\x33\x23\x6c\x08\xfa\x5c\x0c\xb2", + "\xe0\x57\x18\x08\xbf\x38\x9c\x1a\x07\xca\x7e\x5b\xbf\x49\xa1\xff", + "\xcc\x34\x6e\x6d", 1, 128, 128 }, + { 96, 128, 32, 308, + "\x33\xcf\xf6\x80\x61\xe3\xf5\xf9\x41\xc8\xc2\x0c\x89\x60\x8b\x77", + "\xeb\x58\x1f\x66\xcc\xc7\xf1\xda\xa2\x35\xbf\x27", "", + "\x24\xa9\xd8\x95\xf6\x04\x6b\x93\x68\xb0\xb6\xb0\xfb\x39\x6c\xd1" + "\x0e", + "\x12\x1f\x88\xe8\x1d\x27\xda\x5c\x5d\x5c\x9a\xb3\x97\xc7\xb2\x05" + "\xf7", + "\x2f\xa4\x51\x6d", 1, 0, 136 }, + { 96, 128, 32, 309, + "\xc0\x1b\x91\x5d\x2d\x21\x12\x28\x8e\xd0\x4d\x2c\xdd\x38\x9b\xd7", + "\x98\xac\xed\x46\x74\xfa\xeb\xe3\xfd\x48\x81\xcc", + "\x20\x90\xbd\x59\x34\xb2\x0e\x26\xc7\x04\xaf\x9f\x85\xc9\xc4\x10", + "\x63\xda\xb2\xe2\xa2\x2a\x63\xa7\xe5\x50\x46\x67\x63\x45\x55\x93" + "\x4d", + "\x13\xef\xab\xa6\x39\x13\xe7\xa9\xd8\xa4\xfd\x89\xe3\x49\xc4\xa0" + "\xa0", + "\x65\x53\xc6\x47", 1, 128, 136 }, + { 96, 128, 32, 310, + "\xa9\x68\xcf\xd7\xf6\x3f\x3a\x27\x68\x71\xe3\x03\x83\x07\x7d\xe8", + 
"\x54\xbf\xc8\x2e\x1c\xb5\xc0\xb9\xa6\x5b\x25\x2c", "", + "\x03\x0f\xa6\xb0\xed\xde\x45\xcb\x65\x88\x13\xd0\xd7\x97\xae\xdf\x1b" + "\xa2\x7f\x43\x5d\xf4\xf4\x43\xa3\x46\x9e\xa0\xe4\x1e\x63", + "\x60\xd5\x64\x55\x60\xb0\xfa\x0b\x29\x57\x0b\xaf\xd2\xb6\xe1\x8d\x83" + "\x9c\xea\xf8\x82\x42\xcb\x6c\x7c\x60\x8d\x30\x01\xb7\xcd", + "\xa9\x2b\x60\xf6", 1, 0, 248 }, + { 96, 128, 32, 311, + "\x76\x45\x87\x4c\xc7\xa9\xf0\xe7\x44\x32\x03\xab\xec\x23\x45\x5b", + "\xce\x56\x6b\x86\x6e\xf0\xfd\x3b\x09\x6f\x3e\x9e", + "\x1e\x00\x46\x9a\xd4\x5b\x2c\x24\xcd\xdb\xa5\x29\x85\x16\x9a\xab", + "\xb6\xb0\x50\x21\xae\x99\xe4\xaf\xe0\xec\x92\xc0\x09\xd0\x6c\x42\x86" + "\x02\x0f\xab\xca\x1c\x1a\xc7\x68\xfa\xf1\x84\x50\x61\x91", + "\xea\x62\xf4\x28\x31\x70\x94\x24\xf8\xa8\x13\x83\x02\x47\x7d\x51\x6b" + "\x05\xe3\x1f\x23\xb4\x5e\xf3\x77\x03\x3b\x79\x23\x29\x2f", + "\x77\x4a\xae\x96", 1, 128, 248 }, + { 96, 128, 48, 312, + "\x1a\x85\x2b\x34\x56\x35\x3c\xfd\x21\x72\x6d\x11\x22\x10\x9f\x1d", + "\xbd\xe9\x16\x5d\x65\xf3\x01\xa2\xe4\xff\x1d\x4a", "", "", "", + "\xbd\x22\xf7\x19\x5c\x49", 1, 0, 0 }, + { 96, 128, 48, 313, + "\x44\xb5\x29\x8a\x67\x7b\xaf\xf5\xc3\xa6\x5d\x51\x2a\x65\x19\x92", + "\xcb\xb2\x50\x28\x3f\x75\xa6\x60\x82\xf1\xa7\x85", + "\x19\x03\x1c\x68\x8c\xee\xe8\x4e\x2d\x25\x25\x3a\xcc\xba\xe6\x8e", + "", "", "\x93\x10\xab\x0e\x0d\x1c", 1, 128, 0 }, + { 96, 128, 48, 314, + "\x63\xc7\x47\xbe\x2f\x30\x69\xd5\x00\x15\xf6\x9d\xba\xe0\x98\x76", + "\xbc\x2c\x94\x05\x25\xe5\x14\x40\x98\x15\xab\x19", "", + "\xad\x5c\xa7\x0a\x32\x53\x63\xc3\x4b\x2f\x3d\x5a\x85\x76\xb9\x64", + "\xac\xb6\x2f\x8c\x47\x81\x27\x9d\x5c\x81\xcc\xae\xe4\xf6\x1e\xbe", + "\xcb\xbc\xa0\x32\x69\x50", 1, 0, 128 }, + { 96, 128, 48, 315, + "\xa4\x65\xbe\x21\xf5\xb4\x20\xcd\x39\x00\x9b\x0e\xf8\x9d\xbe\xc2", + "\xd8\xb2\x87\xca\xee\x5a\xf6\x9b\xc8\x95\x45\xe9", + "\xb4\x52\xe6\xc1\x12\x64\x7d\xe6\x74\x24\x9d\x1e\xec\x10\x9f\xfc", + "\xc1\x90\xd1\x27\x03\x34\x01\x6d\xae\xeb\x12\xf0\xdd\xd5\x59\x05", + "\xcd\x9e\x9c\xb0\x1e\x77\x37\xcf\x87\xa7\x36\xa5\x0a\x40\x69\x4b", + "\xae\x86\xc7\x00\x51\x83", 1, 128, 128 }, + { 96, 128, 48, 316, + "\x1c\xb1\x73\xba\x47\x85\xbc\x6b\x72\x8c\x86\x29\x29\xda\xea\x5f", + "\xd7\x6c\xdf\xb5\x95\xc3\xfe\x3d\x7c\xc0\x65\x4f", "", + "\xee\xf6\x69\x1b\xa8\xe2\x28\xb7\xbd\xa4\xb2\x6f\xd3\x53\x95\x07" + "\x57", + "\xe8\xad\x83\x31\x15\x84\xd4\xb4\xbd\xb2\x1b\xa0\xf6\x2f\xbb\x13" + "\xd2", + "\x0f\x8f\x63\x95\x41\x3d", 1, 0, 136 }, + { 96, 128, 48, 317, + "\xa4\xa1\x07\xff\x7a\x4c\x09\x78\xaa\x17\xc5\x51\xff\x9f\x8a\x04", + "\x50\x2d\x4b\xc4\x40\xc3\xf6\x6d\xb3\x9a\x09\xf4", + "\x20\xd6\x43\xe0\xaf\x5c\x67\x3b\xe4\x54\xe5\x31\xd9\x29\x95\xc1", + "\x51\x0d\x64\x55\x1a\x78\xca\x2c\xd8\xd3\x22\xf8\x2f\x6e\x2c\xd6" + "\x17", + "\x57\x85\x87\x6e\x6f\xd0\x45\xa0\xce\xa1\x85\xec\xb0\x75\x10\x2f" + "\x97", + "\x6c\x21\x04\x66\x57\xd9", 1, 128, 136 }, + { 96, 128, 48, 318, + "\xb7\x8d\xd7\x5d\x16\xec\xe4\x9b\xfd\x01\xe8\xf4\xbc\xd0\xd5\x2e", + "\x1f\xff\x0b\x5a\x56\x6f\x3d\x1b\x25\x2e\x51\x66", "", + "\xf4\x38\x00\x03\x59\x44\x8e\xd5\xd7\x91\xbe\xab\x63\x72\x99\xa1\x8c" + "\x9d\xf4\x5e\x6a\x03\x04\x28\xcc\xa6\xcc\x05\xb2\xc2\x5e", + "\x65\xc3\x63\xa0\xcf\x88\xf9\xea\x74\xc4\x7f\x46\x98\x1f\xc9\xa8\x45" + "\x40\x2c\x52\x05\xb1\xd0\xc1\xbd\xb4\x24\x9c\x78\x87\xfb", + "\x39\xa8\x32\xac\x3b\x9f", 1, 0, 248 }, + { 96, 128, 48, 319, + "\x22\x8b\xf7\x86\xa9\x37\x1d\x98\x75\x18\x96\x78\xa4\x0f\x55\xa0", + "\x41\x48\xce\x9b\x64\x72\x28\x75\x1f\x31\x3c\x2d", + "\x33\x90\xa6\x5d\x1a\xce\x02\xbf\x67\x26\x52\x54\xbe\x9c\x34\xd7", + 
"\xa6\x0a\x55\x5b\xa4\x8a\x06\x5d\xa2\x99\x9a\x45\x26\xce\xce\x66\xe8" + "\x60\x0f\xe1\x20\x96\xdb\x76\x67\x71\xe4\x0f\xcf\x40\xd9", + "\xfd\x06\x73\x7f\x69\x5a\xd8\x7d\x70\x35\x4b\x67\xc2\x40\xcc\x80\xe4" + "\x1e\xea\x60\xf3\x58\x34\xfa\x1c\x86\x43\x9a\x3a\x26\x93", + "\x22\x28\x51\xc9\x6f\xc9", 1, 128, 248 }, + { 96, 128, 64, 320, + "\x5a\xfb\x73\xf3\x7d\x05\x14\x75\x66\xa7\xac\x97\x34\xeb\xa3\xff", + "\x02\x6d\xd1\x25\xc9\x8e\xf1\x50\x7f\x6d\x1d\x15", "", "", "", + "\xa4\xc4\xb1\x36\x62\x5f\x02\x43", 1, 0, 0 }, + { 96, 128, 64, 321, + "\x3c\xf9\x38\x73\x3c\xb7\x6e\x43\x3a\x5b\x5c\xcb\x06\xbe\x34\x21", + "\xa5\xbc\x5b\xd3\x83\xce\x11\x08\x10\x2c\x3c\x7b", + "\xbe\xfd\xff\x73\x13\xd3\x3c\xa6\x39\x8f\x84\xb3\x2e\xf7\x7c\x65", + "", "", "\xd6\x65\xa6\xea\x1a\xc4\x64\x9a", 1, 128, 0 }, + { 96, 128, 64, 322, + "\x22\xed\x64\xb5\xb9\x4a\x3c\x41\x16\xd0\x2b\x4f\xbd\x4e\x58\x81", + "\xf4\x98\xfd\x65\xda\xb2\x34\x52\x0d\xe5\x29\x20", "", + "\x94\xb0\x3b\x07\x77\x2b\x70\x56\x2b\xc7\x29\x50\x5b\x4a\xd4\x26", + "\x4c\x4d\xfe\x97\x11\xb3\x20\x26\x4f\x3a\x57\xec\xdc\xd5\x98\x50", + "\xb1\x3a\xea\x29\x80\x76\x7f\xd7", 1, 0, 128 }, + { 96, 128, 64, 323, + "\xea\x5a\x91\x5f\xd7\xbe\x0a\xaf\x14\xb8\x8f\x5d\xc4\xfd\x71\x9a", + "\xae\xec\xf1\x9f\x7d\x33\x79\xee\x55\xba\x64\x68", + "\x13\x79\x1a\xad\x58\x12\xa3\x62\x29\x1a\x4f\x6d\x63\x68\x7d\x33", + "\xd3\x13\xe0\x9c\xd4\x8b\x06\xf1\x6e\xf9\x17\x8e\x42\x62\x4b\xd0", + "\xf9\xbc\x9a\x66\x18\x6b\x6a\x60\x03\x5d\x14\x4d\xfb\x34\xc4\xaf", + "\x2f\xb6\x37\xff\x91\xd6\xfd\x9e", 1, 128, 128 }, + { 96, 128, 64, 324, + "\x89\x12\x11\x03\xc3\x50\xe2\x9f\x7c\xd5\x80\xf0\x5b\xbf\xea\xac", + "\xf6\xd6\xe8\x02\xab\xdf\x43\x23\x00\x30\xa8\x96", "", + "\x63\x68\x40\xff\xbc\x66\x19\x1b\xc3\x7b\xf2\xe6\xbd\xdf\x28\xbd" + "\xa9", + "\xc6\x91\x20\x62\x54\x8d\xba\x55\xe6\x18\x4e\x8f\x50\x7d\x7f\x9c" + "\x7d", + "\x1b\x30\x0d\xe3\x55\x38\xc2\x52", 1, 0, 136 }, + { 96, 128, 64, 325, + "\x03\xad\x5f\x47\x2b\x97\x8c\x5f\x72\xb7\xb1\xc2\x90\x80\x37\x4c", + "\x77\x0d\xcc\x2e\xa1\xc2\xd9\xf6\xc9\x04\x94\x7d", + "\x97\x2c\x90\xe3\x87\xf0\xaf\x93\x6b\x1c\x9d\xb0\xeb\xfe\xbb\xe9", + "\x78\x47\x05\x11\xca\xf1\x2c\xb8\x82\x62\x80\x92\xbb\x57\x3b\xde" + "\x8c", + "\x3f\xb2\x2c\x2c\x36\x6c\x0a\x46\xba\x16\x40\xec\xcb\x54\x4d\xbd" + "\xd2", + "\x3e\xc7\xc4\x88\x8a\x12\x88\xfc", 1, 128, 136 }, + { 96, 128, 64, 326, + "\x48\x56\xb1\x07\xdb\xbc\xe7\x02\xc7\xcd\xaa\x7e\xc1\x74\x0f\x35", + "\x6f\x41\xac\xab\xda\x1e\x03\x48\xc4\x29\x0f\x0f", "", + "\xd3\x2d\xec\xc5\x5d\xbd\x0c\x08\x91\x6c\x9a\x9e\x3d\x08\x46\xae\x2c" + "\xac\xae\xb1\xba\x0e\x04\xeb\x02\x77\x2c\xf6\xa5\x0e\x46", + "\x2f\x3f\x13\x3c\xa5\x44\xea\xa5\x15\xa1\x6f\x8b\x1c\xf1\x2e\x17\x4a" + "\xa8\x0d\xb6\x08\x26\x8e\xad\x25\xac\xe1\xca\x4e\xef\xed", + "\x2f\xfa\x78\x6a\xdc\x94\xae\x2a", 1, 0, 248 }, + { 96, 128, 64, 327, + "\xc0\x83\x39\xa6\xf8\x0b\x84\xe2\x01\xe3\xd6\x03\x0c\xdb\x3f\x02", + "\x1c\xbf\x2c\xa3\x13\x30\xab\xe7\x49\xdb\x58\x8b", + "\xb5\x35\xa8\x47\xdf\xc9\x62\x01\x2d\x91\x3a\x40\x76\xf5\x8f\x9f", + "\x4f\x9f\xd6\xad\x16\x56\xcc\xe9\x9a\xf7\x46\x99\x60\x07\x3a\x24\x15" + "\x69\xce\x32\xda\xd5\x58\x11\x1b\x50\x30\x60\x53\xa0\xb6", + "\xc9\x1d\x4c\x8b\xf7\xfd\xba\x49\xb8\x70\x01\xfc\x3e\xc9\x5f\x45\x5b" + "\xa3\x2b\xc0\x5b\xa3\x36\xbc\x3d\x58\xf4\xad\x08\xb5\xbc", + "\x34\xd6\x22\xfe\x4b\xa3\xca\xc5", 1, 128, 248 }, + { 96, 128, 80, 328, + "\x1f\xaf\x80\x05\xf7\x75\x53\xf5\xee\x26\x86\x5e\x31\xf5\x08\x7b", + "\x40\xdf\x77\xe5\x37\xc8\x95\xab\x71\x46\x4a\xcc", "", "", "", + "\x22\xff\xed\x9c\x2d\xca\x19\xfa\x32\xef", 1, 0, 0 
}, + { 96, 128, 80, 329, + "\xf2\x0c\x79\xf8\x45\xbe\xd4\x06\x46\x9c\xf1\xcd\x3f\x7d\xaa\xc5", + "\xf7\xbc\xa6\x6e\xcc\xd7\xd4\x94\xde\xc7\x58\xf5", + "\x6e\x45\x36\x57\x58\x83\x92\x5a\x92\x9c\xed\x31\xad\x8f\xb6\xdd", + "", "", "\x2f\x25\xa1\xd0\x02\x61\x58\x9f\x3f\x00", 1, 128, 0 }, + { 96, 128, 80, 330, + "\xb8\xba\xe0\x12\x60\xce\xd6\x19\x4e\xf8\xdf\x72\x2d\x65\x9b\xe6", + "\x71\xd1\x0b\x7c\xbb\xbe\xcb\x84\x3e\x67\x8a\xb5", "", + "\x38\x7c\x03\x24\xcd\x47\xd3\xf2\x2c\xc9\xd9\x68\xa7\x2e\x43\x4d", + "\x0c\x36\xe3\x03\xe2\x95\xa2\x89\xbb\x13\x47\x40\xe2\x1a\x66\x64", + "\xd3\x58\x7e\x21\x86\x55\x3f\xd9\xd4\x09", 1, 0, 128 }, + { 96, 128, 80, 331, + "\xb8\x00\x30\xb8\x3c\x4b\xca\xfd\x1b\x7e\xc9\xc7\x0a\xb9\x22\x4c", + "\xa7\x89\x45\x7f\x80\xbd\xc5\xb8\xf1\x5f\xea\x91", + "\x12\x30\x97\x7b\x9a\x5b\x12\xc8\xee\x10\xa3\xb4\xab\xb4\xf0\x6e", + "\x19\x7a\x27\xed\xfc\x49\x95\x3b\x6d\xad\xfb\xe7\x17\x0f\xc7\x50", + "\x2b\xc4\x76\x3b\xa5\xb4\x24\xa1\xf2\x6b\xb6\x25\xd9\xf6\xd5\x15", + "\xd5\xbd\x4f\xa2\x3a\x45\x39\x5c\x71\x6f", 1, 128, 128 }, + { 96, 128, 80, 332, + "\x95\xe5\x17\x9e\xb1\x97\xf1\xa5\x15\xe1\x99\xbd\x93\x79\x48\xcd", + "\x49\xd4\x07\x7a\xd5\xd8\xbb\x84\xee\xcc\xf7\x11", "", + "\x45\xd9\x09\x5c\xf3\x20\xc5\x82\xc8\x97\xf0\xab\xb5\x3e\x3a\xed" + "\xec", + "\x36\xb3\xc9\xe7\xc2\x54\x39\xf2\x05\xff\x0e\x38\xff\x46\x79\x61" + "\xb8", + "\xc6\xb8\x39\xab\x6e\xe9\x97\x8e\xae\xdb", 1, 0, 136 }, + { 96, 128, 80, 333, + "\xfa\xc8\xd9\x8a\x8a\xf9\x32\x39\xb0\xd9\x55\x16\x57\xc5\x95\x1d", + "\xff\x1f\x41\x9b\xed\x64\xbf\x4a\x02\xc3\x57\xe5", + "\x2e\x3f\x10\x2d\xe4\x45\xb4\xec\x11\x7b\x63\xfb\xa7\x08\x9d\xe8", + "\x11\x97\xd7\x6a\x46\x9c\x17\x43\x82\x01\xef\x40\x00\xfa\x05\xf0" + "\xa9", + "\x96\xe2\x39\x10\xda\xa8\x64\xeb\x12\x68\xdb\xf2\x33\x9e\xd4\xbb" + "\x62", + "\xeb\xd1\x52\xd5\xf2\xd0\x0c\x60\xec\xd4", 1, 128, 136 }, + { 96, 128, 80, 334, + "\xde\x54\x50\x44\xb8\x14\xf3\x13\xc2\x3b\x5c\xb8\x54\xf7\x39\xa2", + "\x99\xfc\xac\xe8\xe5\x9b\xdd\x6b\x88\xdd\x96\x0c", "", + "\x83\x52\x5d\xbb\xb5\x4d\xe0\xfc\x1d\x24\x87\x49\xa7\x16\xd9\xde\xbc" + "\x65\xfe\x44\xc7\x9b\x16\x3b\x36\x14\xfb\x8d\x62\xee\x2e", + "\xa0\xe6\xe3\xd5\x31\xb8\x63\xb9\xe6\xf3\x8c\xf0\x3d\x60\xf1\xd6\x93" + "\x0c\xb1\x7a\xa4\x1a\x78\xa6\x6d\x5b\x94\x9c\x5f\x7e\xc7", + "\x01\x38\xc4\x33\x9b\xfe\xd8\x18\x96\x4b", 1, 0, 248 }, + { 96, 128, 80, 335, + "\xbb\xbf\xa9\x44\x44\x93\xdd\x2f\xbf\x72\xba\xf3\x87\xa4\x09\x00", + "\x29\x35\x56\x7a\xa5\x72\x90\x8e\x49\x91\x71\x30", + "\xc0\xdb\x66\x6f\x38\x14\xfd\xc2\xcf\x7c\xb3\xd4\xce\xfa\xf2\xd3", + "\x59\xfc\x37\x65\x4b\x0a\x5e\x3b\x86\x87\xa3\xd8\x5b\x32\x64\x4d\xc7" + "\xa1\x56\xb6\x0d\xd7\xa6\x4d\x22\x98\x37\x3e\x15\x8f\x21", + "\x46\x4f\x1d\x04\x17\x28\x0f\x22\xf0\x60\x53\xcb\xea\x16\xe2\x8e\xb0" + "\xf7\x90\x82\xa6\x82\xb5\x8c\xb7\x19\x42\x36\x93\xe6\x6c", + "\x19\x87\x01\x52\x03\x23\xf4\x61\x3b\x59", 1, 128, 248 }, + { 96, 128, 96, 336, + "\x0a\xd9\xda\x99\x4d\xb2\xed\x7b\x9e\x35\xe9\x88\x95\x19\x4c\x4b", + "\x57\xe3\x64\xc1\x6b\x36\x89\xbc\x15\x6b\x31\x15", "", "", "", + "\x73\x58\x6e\xab\x8c\xed\x75\x40\x62\x0f\xea\x72", 1, 0, 0 }, + { 96, 128, 96, 337, + "\x97\x04\x95\xf7\x0d\xc6\x4f\x0f\xe4\xe8\xc1\x09\x46\xdf\x2e\xd1", + "\x69\xb1\xa3\x19\x5c\x16\x55\x17\xfe\xd6\x65\x95", + "\xbf\xe8\x73\x6a\x21\x13\xf7\x74\xc6\x82\x8e\x5b\x93\x0f\x1c\xb9", + "", "", "\x18\xd7\xc5\x4f\x8f\xcb\xec\x44\x2b\x31\x39\x87", 1, 128, + 0 }, + { 96, 128, 96, 338, + "\xf3\x63\xf1\xa7\xd3\x3c\x96\x94\x9f\xd0\x8f\x44\x0c\xfb\xa0\x00", + "\x67\xb9\x20\x07\xf5\x7b\x83\xfd\x9f\x3e\xe6\xfa", "", + 
"\xa6\x51\xd2\xca\x4b\x16\x98\x0b\x0e\x4a\x7a\x10\xc7\x5c\x47\xed", + "\x20\xc2\xa2\xf1\x8d\x07\x53\xac\xd3\x6e\x20\x49\x85\x14\x95\x28", + "\x4a\x44\x22\xd3\xb9\x9c\x8d\x77\xdb\xde\x2a\xb2", 1, 0, 128 }, + { 96, 128, 96, 339, + "\x6b\x74\x89\xd1\x58\xf3\x77\xe6\x69\x2d\x84\xa9\x77\x27\xff\x41", + "\x9a\xc0\x91\xef\x05\xd0\xce\x74\x28\x82\x7e\xd3", + "\x38\x46\x9f\x10\x49\xa7\xea\x3d\xa0\x55\x1c\xfb\x34\x01\x0b\xf6", + "\x35\x2d\xbd\x5b\xb8\xab\xf0\xa0\x97\xb9\x29\x16\x0b\x8f\x8c\xec", + "\x00\xd3\x94\x8b\xac\x85\x72\xed\x1e\xd5\x9c\x26\x55\xb7\x69\xf7", + "\x32\x3b\xa1\xc8\x06\xf3\xea\xc6\x73\x01\x5a\x88", 1, 128, 128 }, + { 96, 128, 96, 340, + "\x38\xd4\xd2\x7c\x30\x83\x49\x68\xb5\x28\x5b\x99\xac\x18\x73\x4a", + "\x18\x3a\xe3\x52\xd9\xc3\x40\xee\x61\x67\xc3\xb6", "", + "\xf1\x53\xd0\x1c\x5b\x9a\xb2\x02\x45\x56\x87\x53\x7e\x83\x52\xd2" + "\x94", + "\x3c\xa2\x62\xd9\x2d\xb8\x40\x4d\x5d\xb0\xe5\x5c\xcc\xdd\xff\x06" + "\x5b", + "\xd2\x7a\xd6\x86\x6e\xa9\x2b\xa2\x68\x0d\xce\xef", 1, 0, 136 }, + { 96, 128, 96, 341, + "\x74\xb9\x75\x6c\xb2\xac\x63\x61\xce\x9d\x68\x44\x77\xb8\xd0\xc9", + "\xc2\x60\x6f\xc9\x64\xb6\x13\xa0\xb1\x53\xfc\x0f", + "\xdc\xe9\x30\x9c\xf7\x1c\xed\x35\xeb\x22\x0c\x70\x9f\xdd\xc4\x14", + "\x66\xf3\x21\x69\x11\x74\x80\x38\xf9\x14\x32\x34\x49\x14\xee\x8a" + "\x35", + "\xc8\xae\xe8\x33\x0a\x37\xca\x70\x6f\x47\x6f\x77\x4f\xf3\x57\x00" + "\xca", + "\xb3\x7e\x7e\x62\xd0\xb1\xae\xab\x26\x78\xbc\xf3", 1, 128, 136 }, + { 96, 128, 96, 342, + "\xe1\xe4\x5f\x35\x00\xa4\x05\xdf\x5a\xbd\xcb\x3b\x86\xbe\xa1\x4d", + "\x59\x28\x9d\xa2\xd5\xf1\x3e\xea\x49\x95\x61\x1f", "", + "\x5c\xda\x60\x60\xa7\xe1\x05\xcc\x57\xc7\x75\xa0\x2a\xf9\x21\x75\x73" + "\x50\xc9\x69\x2b\xc4\xfa\x40\x4a\xce\x98\xeb\x1e\x61\x71", + "\xf2\x67\x01\x4a\xc2\x64\x66\x05\x8b\x80\xd2\x8c\x0d\x82\x52\x1d\x69" + "\xb2\x30\x2c\x36\x56\x74\x0c\x23\x78\x31\x85\x9a\x0f\x24", + "\x9e\xdc\x28\x31\x74\x36\xd6\x6d\x75\x2a\xd9\xb7", 1, 0, 248 }, + { 96, 128, 96, 343, + "\xbc\xe5\x99\xcb\x75\xa2\x27\x10\x70\xe6\x19\x9c\xb0\x96\x65\x6b", + "\xf9\xa8\x33\x3f\x46\x73\x68\x9e\x39\x59\xc9\xe0", + "\xf6\x1e\x10\x35\x17\x1c\x92\xb0\x22\xae\x55\x9e\x86\x57\x93\x0e", + "\x41\x89\x4a\xcc\x83\x8d\x4a\x8f\x62\xe6\xcc\x92\x71\xf1\xd6\x5d\xf7" + "\xf3\x65\xa3\x8e\x9a\x94\x11\x0f\x4c\x8d\x57\xb8\xbe\x18", + "\x0e\xb5\xc0\x3d\x69\x15\x3d\xbe\x79\x4c\x53\xcb\x29\x3b\x25\xd3\x8c" + "\xaf\xa1\x36\x72\xc9\x15\x60\x68\xa4\x02\x6d\xb0\xd7\x08", + "\xd4\x06\x38\x9c\xe2\x22\x8e\xbb\xbc\x40\x0b\xbf", 1, 128, 248 }, + { 96, 128, 112, 344, + "\xf2\x1f\x07\x00\xf1\x6a\xa0\x98\xd6\x61\x7c\xc3\x68\x30\x12\xc1", + "\x42\x39\x4a\x30\xfc\xd2\x52\x55\x6b\xf2\xcb\x36", "", "", "", + "\x39\xd0\x70\x3d\xcf\x7d\x0c\x31\x62\x22\xd7\x16\xaf\xec", 1, 0, 0 }, + { 96, 128, 112, 345, + "\x7d\x28\x27\x73\x9a\xd3\xce\x2d\xc7\xf2\x7e\x35\xf6\xcd\x83\x7f", + "\x09\xa2\x49\x07\x7d\xb1\xf8\x4e\x98\x4a\x98\x29", + "\xb5\xe5\x9d\x8c\x3f\x81\xda\xe7\x78\x9a\x82\x6a\x0d\x32\x00\xf9", + "", "", "\x45\x55\x3d\x58\x83\x9d\x45\xa3\x77\xbe\x85\xe9\x5a\x41", 1, + 128, 0 }, + { 96, 128, 112, 346, + "\x1e\x70\xde\x0c\xba\x8f\x88\x48\xdb\xc8\xdd\x9c\xfa\x53\xc1\x61", + "\xd4\xe6\x77\xbd\xb0\x4b\xf9\x35\xd1\x30\xce\x15", "", + "\x71\x02\xb7\x71\x0b\x1d\xb1\xa0\x74\x84\x74\xf8\xe3\x7b\x6d\xd8", + "\x55\xdf\xe0\xe8\x8c\x81\xbf\xc5\x61\x97\x5d\xfa\xba\xa2\x1a\x12", + "\x02\x4e\x3b\xf1\x98\x5a\x7f\x7e\xcc\xda\xa0\xee\x2a\x18", 1, 0, + 128 }, + { 96, 128, 112, 347, + "\x0b\xb3\x80\xf7\x25\x73\xe6\xd4\x97\x17\x23\x81\xf5\xf4\xeb\x6a", + "\xe7\x9f\x20\x84\x0e\x41\x82\xac\x6b\xf0\xf8\x48", + 
"\x44\x9d\xce\xa2\x7c\xd6\x10\x31\xf9\xbf\xaf\x87\xd3\xbc\xf9\xc4", + "\x4a\xbf\xe6\x0e\xd6\xbf\x24\x19\x0e\x41\x6e\x68\x09\x71\x8f\xa0", + "\x12\xe6\x3d\x6d\x51\xc1\x4a\x2e\xe7\x81\x0a\x24\x0f\xfe\xbc\x13", + "\x5c\xfc\x7d\xf5\xf7\x0a\x81\xf9\x3a\x8f\x47\x14\xe1\x43", 1, 128, + 128 }, + { 96, 128, 112, 348, + "\xc4\x8d\xe4\xc9\x1e\xfd\xf7\xb2\x4c\x8f\x80\x07\x3f\x6f\x17\xd2", + "\xdd\xa7\x45\x36\x01\xd5\x16\xe0\x87\x32\x0e\xbf", "", + "\x01\xbe\x64\x0c\xaf\xc9\xeb\x72\x88\x27\xfc\xf1\xc9\xcb\xa5\xe0" + "\xcc", + "\x23\x5b\x7e\xdd\x4b\x3d\xf0\x3c\x4a\xd8\xff\x11\x12\xcc\xb3\xd9" + "\x28", + "\x7b\x95\xbc\x04\x20\xce\x86\xac\x2f\x2a\x37\x5d\x5f\xe0", 1, 0, + 136 }, + { 96, 128, 112, 349, + "\x21\xf8\x4c\x57\xb6\xd4\xa2\xd2\xd3\x0d\x4c\x37\xec\xf1\x10\x30", + "\xd3\xce\x63\xa1\xaf\x0b\xef\x6c\x9e\x0b\xbd\x81", + "\x0f\x78\xdd\x2b\x4e\x56\x6c\x9d\x15\xd0\x52\xb0\x1f\x6f\x85\xba", + "\x33\x58\x54\x3c\x39\xc1\x00\x25\xe0\x1c\xe8\x90\x06\xba\x00\x43" + "\x26", + "\xcb\xb9\x21\x40\x6c\xe9\xe4\x4c\x19\xb0\x19\xca\x26\x9a\x6b\x7b" + "\x13", + "\xf6\xba\x3a\x6a\x43\x3a\x50\xc4\xab\xe0\x0d\xa2\x41\x1f", 1, 128, + 136 }, + { 96, 128, 112, 350, + "\xa0\x8b\xe6\x8d\x04\x4e\x76\xe4\x7d\x04\xd0\x93\xee\x54\x8e\x59", + "\xf1\x7b\x6f\x94\xa8\x86\x42\x05\xc7\x57\xa6\x35", "", + "\x05\xe4\x54\x20\x55\xfa\x84\xe2\xd3\x49\x83\x7d\xef\x40\x23\x53\xfe" + "\xa7\xec\x56\xd3\xfd\x81\xa4\x48\x31\x40\x3f\x1f\x7f\x72", + "\x60\x19\x8b\xdf\x8b\x37\x8d\xbe\x55\x32\xdb\x73\x29\xaa\xcc\x58\xba" + "\x32\x5b\x82\x79\x65\xd4\x69\xb4\xec\x7d\x06\x98\xb9\xdf", + "\x05\x99\xec\x3c\x8c\x56\xae\x5d\x1e\xe6\xea\xc6\xca\x05", 1, 0, + 248 }, + { 96, 128, 112, 351, + "\xd9\x5f\x7f\x6a\x07\x53\x0e\xe3\xf9\x67\xfd\x1f\xf9\x90\x8a\xfe", + "\xba\xbe\x02\x81\xf3\xcb\xa8\x06\x67\xc3\x65\xaf", + "\xf4\x9c\x18\xd6\x5a\x19\x7b\x97\x3d\x26\xfd\x29\xa1\x43\x74\x60", + "\xe0\x2f\xec\x4c\x11\x8b\xcc\x96\x67\x01\x58\x72\xd8\x96\xc8\x86\x8c" + "\x15\x90\xf8\x47\x34\xce\xc6\x5c\xe9\x0b\x3d\xc0\x76\xd3", + "\xbc\xd3\x79\xde\x4b\xd6\xea\x47\xf2\x93\xa2\x5c\x41\x1d\x14\xd0\x1f" + "\x5d\xe7\x3a\x99\xc5\xe2\xe2\xe5\x93\x0f\xd3\xbb\x3b\x23", + "\x43\xb9\x4d\xf5\x7d\xf5\xf7\x6a\xc5\xa0\x1f\x52\x51\x38", 1, 128, + 248 }, + { 96, 192, 32, 352, + "\x85\xe0\x17\xfb\xc8\x60\x56\xc8\xc1\x89\x15\xb3\x69\xc0\xc9\x2d\xd3" + "\xaf\x3f\xc6\x77\x78\x2f\x8c", + "\x3d\x57\x51\x1e\xaf\xfb\xe4\xe9\xe9\x0d\x6e\xc3", "", "", "", + "\x4f\xc4\x19\x2c", 1, 0, 0 }, + { 96, 192, 32, 353, + "\x0d\xf9\x7b\xa4\x0e\x5f\x24\xcd\x5f\x1b\xd0\xec\xf4\x74\xac\x9a\x4a" + "\x8b\x4c\xf1\x38\x80\x65\x49", + "\x70\x5b\xc1\xa6\xcb\x54\xc1\x43\xd4\xfa\x10\x02", + "\xdf\xf5\xca\xd8\xf5\xb6\xcc\x65\xdf\x4e\x4e\x12\x80\x2b\xd0\xe6", + "", "", "\x48\xde\xd0\x1f", 1, 128, 0 }, + { 96, 192, 32, 354, + "\x79\x50\x63\x24\x8c\x61\x9c\x9c\xe6\x1b\x56\xc1\x7d\xb6\xc0\x23\xfa" + "\xb1\x2c\xa6\x10\x31\x92\x5c", + "\xd3\xaa\x2d\xfb\x01\x9b\x56\x46\x7f\xdb\x36\x8b", "", + "\x01\x7e\x69\x0c\x00\x69\xbf\x92\xd6\x9f\x27\x0d\x32\xaf\x15\xef", + "\x3b\xf2\x4b\x2a\xda\x60\x4a\xd0\xad\x9f\xa5\x38\xe3\xb4\xe3\x8a", + "\x84\xd2\xcf\x30", 1, 0, 128 }, + { 96, 192, 32, 355, + "\xa7\xac\xdc\x89\xa8\x6a\xda\x31\x90\xda\x95\x4e\x02\x9b\xd3\xd5\x8e" + "\x05\xbb\xee\x02\x72\xcc\x94", + "\x4a\xca\x59\x38\xa8\x8a\x69\x8e\xc7\x45\xb4\x43", + "\x9c\xfc\xfd\x28\x4e\xd3\x5f\xa1\x11\x04\xe5\x28\x56\xfa\x3d\x08", + "\x64\x18\x73\x87\xc7\xcf\x3b\x56\x20\x63\xab\x35\x45\xca\x71\xaa", + "\x97\x02\xe5\x11\x9a\x1b\x3a\x09\xe7\xc8\x0e\x65\xe8\x2b\xb8\xc8", + "\x46\x12\xd0\xbf", 1, 128, 128 }, + { 96, 192, 32, 356, + 
"\x63\x69\xc4\x28\x16\x2c\xd7\xc8\x61\xaa\xf2\x8c\x4c\x36\xb8\xe5\x38" + "\x89\x5e\x46\x9e\x0d\x1f\x48", + "\xc5\xf0\x9c\xb9\xd0\x30\x8a\x13\xfc\x73\x19\x12", "", + "\xb1\x98\xa9\xce\x48\x23\xd7\x47\x79\x36\xf5\xcf\x9c\x73\x9a\x83" + "\x0c", + "\x1a\x39\xea\x1b\xec\xc6\x94\x11\x65\x66\x98\x7a\x67\xd8\xff\x3e" + "\xd7", + "\x3c\x31\x93\xc5", 1, 0, 136 }, + { 96, 192, 32, 357, + "\x12\x40\xf2\x45\x5c\x1e\xf9\xa7\xe7\x6f\xe9\x3f\x61\x79\xd9\xbd\x6a" + "\x24\x9b\x66\xba\x26\xbb\x0a", + "\xc4\xf3\xa0\x72\xf2\x0f\xc2\x2a\x9f\xeb\x74\xc8", + "\x41\xda\x67\xad\x57\x37\xcd\x4d\x60\x1b\x37\x8d\x31\x2f\x87\x40", + "\xb4\xf3\xa0\x29\x07\x6a\x1b\xce\x99\xe8\x36\x5b\x1b\x12\x70\x5f" + "\x17", + "\xaa\xd2\xfa\xc7\x98\xf0\xf4\x3c\x70\x30\xaa\xa3\xae\xe1\xfa\x50" + "\x8f", + "\x09\x73\x36\x97", 1, 128, 136 }, + { 96, 192, 32, 358, + "\xc3\x35\xb7\x6e\xa5\x97\xd4\x44\xa1\xe6\x63\x50\xbd\xce\xfe\x5c\x96" + "\x84\xaf\x17\xee\x9e\x39\xa4", + "\xab\xd4\xd1\x25\x1f\x48\x1d\xfe\xe3\x5c\xfc\x71", "", + "\xa8\xca\xad\x10\xe1\x60\x20\x41\xa0\xa2\x92\x76\x3c\xe5\xf9\x03\x23" + "\xec\xfd\x3c\x93\x17\x05\x33\x3f\x3b\x00\xe6\xfb\xe2\x62", + "\x6e\xaf\xde\xed\xbe\x5b\x36\x7b\xaf\x14\x06\x43\x68\xe6\xf3\x2e\x2e" + "\xd0\x7b\x0e\xa6\x22\x18\xcf\x50\x56\x9f\x79\x6b\xb6\xb4", + "\x45\x16\x64\x97", 1, 0, 248 }, + { 96, 192, 32, 359, + "\x6a\xcc\xd4\xbd\x6b\x18\xb9\xf6\x59\x36\xd8\x7f\x5b\x3f\x83\x39\xd8" + "\xae\x08\xa2\xa8\x6b\x67\x05", + "\xa3\x9d\xd2\xe2\xc2\xe2\x15\xce\xbd\xa0\x0e\x13", + "\xdf\x01\x44\xcb\x65\xec\x35\x29\x9d\x30\x45\x8b\xd6\x1a\x60\xcd", + "\x97\xe6\xba\x8a\x7e\x71\x7f\x8c\x16\x0b\x9b\x4b\xf5\x2e\x5b\xa0\x39" + "\x89\xd1\xfb\x17\xe0\x80\x78\xd7\x7f\x7c\x26\xa6\x53\x00", + "\x3a\x66\x3f\x78\x97\xc8\xf7\x74\xac\x8d\x74\xbb\xf0\x53\x04\xe4\xd7" + "\xfe\xf9\x2b\xb5\x96\x1d\x0c\x88\xe4\x13\xae\xb4\x7f\x36", + "\x04\xd5\x07\x14", 1, 128, 248 }, + { 96, 192, 48, 360, + "\x21\xac\xb0\x9b\xb4\x8d\xc6\x41\x7e\x4d\x87\xa3\x16\x8f\xcb\x84\xe3" + "\x19\x50\x51\x93\x31\xdb\x93", + "\x5d\x3e\x03\x63\x37\x46\xd3\x72\x9b\x60\x9d\xd1", "", "", "", + "\x40\xd0\x42\x96\xcf\x7e", 1, 0, 0 }, + { 96, 192, 48, 361, + "\xeb\x47\x0c\x2b\xe6\xb3\x25\x75\xc4\x2c\xf9\x08\x53\xbd\xbe\xd1\xe6" + "\x41\x2c\xae\x16\x15\x33\x0f", + "\x9e\xd7\x46\x01\x5a\xd7\x2f\x1f\x4a\x86\x88\x37", + "\xe8\x15\x4f\xb5\x03\xcb\xa6\x64\x91\xa7\xa9\xad\x2f\x31\x02\x82", + "", "", "\x8d\x8f\x8d\x39\x46\xb8", 1, 128, 0 }, + { 96, 192, 48, 362, + "\xb7\x85\x8c\x86\xb3\x55\x19\xe9\xc4\x28\x62\x4c\x41\xf4\xda\x83\x79" + "\xd0\xf8\x55\xb3\xdc\xd6\x22", + "\xd6\xcb\xa3\x5f\x02\x78\xd6\x67\xd9\x3d\x43\xe3", "", + "\x77\xae\x65\x5c\x3d\x5f\x9a\x6e\xc0\x6c\xce\xc7\x14\x82\x7d\x87", + "\xd6\x74\xd8\x3e\x11\x21\xbe\x22\x6b\xd7\x33\x55\xdd\x33\x65\x7e", + "\x57\x82\x10\x5b\x45\xbd", 1, 0, 128 }, + { 96, 192, 48, 363, + "\xa9\x98\x8d\xf0\xc0\x01\x13\x2d\xee\x87\x30\x6d\xaa\x4a\x48\x06\x2c" + "\xa0\xa7\x3a\x61\xd3\x8b\x2e", + "\xd0\x2d\x4c\x03\x6d\x75\xb4\xc2\xbd\x03\x86\x05", + "\x6d\x9d\xf5\x3d\xc7\x1e\x44\x76\x61\xb5\xd6\x4b\x31\xc2\xa6\x6d", + "\x66\xda\x05\xe7\xd6\xdd\x8f\xb9\x99\x82\x7f\xd5\xcc\xe8\xa1\xe8", + "\x65\x70\x4e\x76\x07\x60\xfd\xcb\xed\x42\x8a\x29\xef\x60\x48\x84", + "\xe8\x9d\x18\xb4\x39\xc6", 1, 128, 128 }, + { 96, 192, 48, 364, + "\x50\xa0\x22\xe8\xd8\x56\x41\x33\x7e\x86\xd1\x4d\x75\xf5\x37\x74\x78" + "\xaf\x29\x7d\x20\x91\xf5\xcc", + "\xad\x59\x6a\xfb\x65\x49\x09\x81\x62\xca\x53\xed", "", + "\x0d\x7d\xe7\x6a\xf7\x7e\x8d\x11\x8e\x97\x19\xd5\x42\x9b\x3b\xe4" + "\x5b", + "\xf2\x0d\x2d\xa7\x45\xeb\x30\xc5\x16\x63\xa8\x4e\x9e\x1e\x00\x27" + "\x84", + 
"\x90\xe8\x41\x8f\x11\x3d", 1, 0, 136 }, + { 96, 192, 48, 365, + "\x97\xc5\xaf\x8e\x0f\x0c\xa6\x9d\x77\x13\x7f\xec\x21\xdb\x36\xc9\xde" + "\xa6\xc8\x36\xa9\x2a\x29\xd2", + "\x7b\x4e\x8a\x9a\x66\xf6\x82\xbd\x9a\x2f\xd5\xe1", + "\xbd\xa1\xce\xb6\x3c\x2c\x5f\x54\xee\x92\x6a\x83\x20\x94\xe8\x87", + "\xcf\x14\xe1\xfd\x8c\x85\x7c\x3b\xa2\x74\xaf\xd4\x23\xec\xc1\xd8" + "\xd9", + "\x67\x21\x31\x0f\xca\x31\x2d\x96\x14\xe4\xe2\x9d\xff\xc7\x3f\xdf" + "\xdb", + "\x80\xab\x89\x81\x90\xdd", 1, 128, 136 }, + { 96, 192, 48, 366, + "\x44\x33\xa4\x40\x38\x99\x57\x49\xfc\x7b\x84\xf3\x76\x4f\x9f\xc9\xf8" + "\xf9\x1d\x20\xed\x19\x00\xa6", + "\x6a\xac\x37\x28\x40\x93\xec\x85\x9b\x3c\x0a\xf4", "", + "\x33\xd5\x96\x7e\x20\xad\x5c\x2d\x98\x8c\x6c\xd2\x6a\x92\x15\xb5\x2e" + "\x0f\xb4\xdf\xbb\x37\x53\x0a\xd4\x4f\x4b\x0e\xc4\x1e\x12", + "\xcb\xe3\x7d\x72\xb0\xc3\xea\xf2\x81\xab\x34\xac\x47\xb6\x39\xf4\x40" + "\xd2\x18\xe0\x9b\x14\x80\x8b\xc3\xa8\xe2\xf6\x48\x4b\xa5", + "\x69\xe1\x9b\xaa\x18\xe3", 1, 0, 248 }, + { 96, 192, 48, 367, + "\x2b\x05\x07\xb1\x6f\xa4\x9e\x08\x8b\x18\x4e\xb0\xd3\xe1\xd3\xf0\x53" + "\xea\x3f\x8e\xaa\xf3\xe5\x3d", + "\xb0\xac\xcf\x6f\x28\x26\x2e\x0e\x1f\xce\x23\xfd", + "\xa4\xbd\x9d\xa3\xad\x1f\x44\xf5\xdc\x19\x71\x8f\x67\x8d\xe5\xbf", + "\x3e\x83\x20\xfe\x8a\xbf\xbc\xbf\x29\xd7\x24\xdc\x33\x07\x15\x6e\x6b" + "\xe5\x3b\x21\xe9\xbc\xc3\xcd\xa9\x1b\x38\x0a\xd5\x80\xda", + "\x44\x51\x76\x14\x12\x89\x25\xef\xdb\x7f\xb1\x3a\x6c\x4a\x75\x9b\x73" + "\x7c\x82\xd3\x09\x86\xd2\xaf\xde\x97\x3c\x88\xd6\x01\x3c", + "\xea\x19\xec\xad\x57\x16", 1, 128, 248 }, + { 96, 192, 64, 368, + "\x5c\x04\x9b\x3e\xdd\x2b\x92\x6f\xcd\x34\x34\xc4\x21\x53\x2b\x7e\x79" + "\x08\x71\x2a\x85\x05\x72\x26", + "\xb2\xb7\x93\x46\x9d\x4a\xc1\xdf\xc3\x75\x6c\x8f", "", "", "", + "\xc2\x78\x16\xbb\x97\xe9\x8d\x11", 1, 0, 0 }, + { 96, 192, 64, 369, + "\x5b\x04\xc3\x42\xef\xd5\xe8\x9a\xa5\xd3\x8e\xf3\x2e\xed\xea\xf2\xac" + "\x03\x5f\x43\xb9\xb4\x20\x1d", + "\x14\xd4\x78\x1e\x21\x59\x2e\xfc\x44\x09\xb9\x44", + "\x3f\xd3\xb6\x91\xd0\x51\x1d\x71\xf5\xdb\xec\x4f\x13\x20\xfc\x8c", + "", "", "\x2f\x84\xac\x2d\x50\xbe\xf7\x5e", 1, 128, 0 }, + { 96, 192, 64, 370, + "\x1a\x2f\x92\x13\xe6\x6c\x96\x93\x06\xb9\x8c\xe3\x3d\x32\xca\x91\x26" + "\xe7\x65\x78\x35\x5a\x67\xab", + "\x6e\xda\xe6\x28\x13\x3c\x51\x0f\x00\x96\x58\x5a", "", + "\xd4\xa9\x42\x70\x12\x40\x3f\x9c\x51\x8c\x7b\x23\x60\xce\x0a\xb3", + "\x92\xbd\x38\xe1\xfd\xfa\x11\x75\xdc\x23\x0b\xe5\xf5\x41\x76\x0b", + "\x99\x02\xe9\xda\x26\x90\x82\x95", 1, 0, 128 }, + { 96, 192, 64, 371, + "\xa5\x93\x74\x68\xdd\xde\x3c\x31\x2b\x6f\xce\xc7\xd5\xd1\x9a\x92\x85" + "\x3d\x2c\x66\xad\xa9\x7a\x18", + "\x5b\xf9\x54\x7b\x27\x53\xdd\x71\x2a\x5d\x8f\x95", + "\xe5\x6b\xc3\x35\x6c\xbd\xdb\x3e\xf0\x99\xca\xd5\x89\xbb\xe6\x84", + "\x58\x13\xc3\xc7\x56\xa8\xf2\x72\x1a\x08\xbe\x97\xc4\x43\x92\x69", + "\x1b\x77\xaa\x30\x34\x01\x89\xfe\x4f\xbb\xd7\xea\x1c\x96\xd5\xc4", + "\xee\x2f\x19\x5b\x66\x7a\xa2\x67", 1, 128, 128 }, + { 96, 192, 64, 372, + "\x4d\x85\x76\xff\x63\x5e\xc7\xd9\x9c\x47\xbe\x74\x12\xa2\x84\x6f\xc6" + "\x38\xc9\xf9\xfb\x0f\x55\x31", + "\x8a\x53\x40\xf4\xa8\x5e\x3a\x9c\xf7\x43\x0f\xeb", "", + "\x0b\x89\x63\x37\xa5\x9a\xf8\xe9\xca\x15\xf3\x3c\xd6\xda\xaa\xe0" + "\xac", + "\x5f\xdf\x4a\x0f\xce\x8b\xe9\xcf\x74\x0b\x61\xd1\x20\x88\x3b\xcc" + "\x1e", + "\x56\x63\x21\xb1\x2e\xce\xc6\x87", 1, 0, 136 }, + { 96, 192, 64, 373, + "\x9c\xa4\x67\xaf\x0c\x27\x29\xf4\x3f\xbd\x86\x63\x73\xef\x4b\x8f\x2b" + "\xca\xbe\x43\xf5\xa1\x0f\x97", + "\xe3\xec\x43\x9d\x33\x4b\x9f\xc0\x7d\x65\xdf\xf5", + 
"\x60\xd9\xbe\x32\xc5\x62\x66\x6a\x19\x01\x42\x84\x74\x04\xe8\x04", + "\xf0\x61\x11\x0e\x43\x63\x6e\xb5\x25\xcd\x2f\x94\xf6\x31\xf1\x28" + "\x2d", + "\x5f\x8d\xd3\x0c\xb7\x0f\x49\x5e\xb5\x77\x77\x30\xb0\xc7\xda\xde" + "\x30", + "\xb0\x1f\xee\xdd\x3e\xd3\x64\x0b", 1, 128, 136 }, + { 96, 192, 64, 374, + "\xe9\x23\xbb\xfb\xbd\xb8\x1c\xec\x86\x32\x63\x49\x40\xc9\x24\xbc\x9a" + "\x23\x0f\x15\x87\xf0\xed\x63", + "\x41\x90\x00\x4b\xf9\x66\xaf\x35\xe0\x49\x44\x5d", "", + "\xa3\x8f\x8e\x64\xa3\x91\xa0\x9b\x8a\x29\x8d\x4f\xeb\x01\x13\xe3\x08" + "\xcb\xfc\x6e\xdb\xc3\xcd\x59\xa2\x5a\x31\xa3\xf0\xd5\x34", + "\x01\xc7\x76\x5b\x13\x96\xfc\x6d\x36\x2c\x00\x77\xa3\xa1\xef\x9c\x3f" + "\xe5\x4b\x87\x68\x8b\x7a\x64\x12\x0d\x8a\x20\x2d\xe3\x9c", + "\x89\x90\xa6\xb1\xf3\x86\xcc\x7c", 1, 0, 248 }, + { 96, 192, 64, 375, + "\x7b\xcc\xcd\x49\x44\x60\xa7\x55\xcf\x82\xee\xdc\xca\xe6\xb1\x41\xb3" + "\xc5\xb8\x36\x0f\x09\xdc\xfd", + "\x3a\xb4\x79\x8a\xd9\xc0\x5c\xc7\x93\xf5\xc3\x3d", + "\x90\x55\x30\x0f\x9a\xf4\x4b\x8c\x4a\x7f\xdd\xbd\xd8\xe2\x49\x72", + "\xe1\xeb\x07\xd7\x97\xf5\xfe\x2a\x31\xc2\x8c\x03\x82\xb5\x21\x61\x2c" + "\xbb\x0a\x6f\xdc\x6e\x53\xe2\x7b\x29\x14\x08\x78\x82\xd0", + "\x6a\x7c\xb6\x7d\x39\x58\x97\xd5\xce\x8c\x59\x73\x09\xd5\x10\x20\x14" + "\x9b\x1f\xeb\x13\x13\x61\xdc\x1a\x23\x6e\xe9\x2b\x40\xa8", + "\x1f\xfc\x13\xf7\x4d\x3c\x07\x76", 1, 128, 248 }, + { 96, 192, 80, 376, + "\x89\xc4\xe7\x97\x06\x2e\x49\xad\x02\xd2\xbc\xf2\xeb\x0f\xf6\x5f\xc1" + "\x7c\xd2\x9c\xd5\x5c\x8b\xbf", + "\x68\xde\x74\x04\xe6\xd1\x37\xa5\x83\x89\x0b\x0f", "", "", "", + "\xba\x50\xe0\x4e\xf8\x87\x14\x55\x62\xf1", 1, 0, 0 }, + { 96, 192, 80, 377, + "\xc9\xb9\x28\x03\x80\x27\x6a\x99\xc7\xe8\xb0\x0b\x03\xa0\xac\x35\x93" + "\x66\xa9\x25\x53\x2c\x4b\x08", + "\x3d\x69\x75\x92\xa7\x80\x07\xee\x3f\xc9\xf8\x71", + "\x56\x6f\xce\xd9\xa2\x4e\x20\xae\x05\x5f\x03\x4d\xe8\x9f\x76\x2a", + "", "", "\x7e\x43\xc9\x45\xa8\x82\x6a\x9f\x71\x64", 1, 128, 0 }, + { 96, 192, 80, 378, + "\x19\x90\x4b\xd0\xb5\x44\xa2\x9e\x2c\x0a\x30\x5a\x12\x43\x23\xa1\xde" + "\x6f\xae\xb7\x1b\xdd\x0f\x87", + "\x1d\x15\xb8\x7d\xfe\x88\xc8\x31\xb1\x05\x45\xaa", "", + "\xa6\x89\x6e\x25\x78\x68\x9e\x31\xd3\x05\xf3\xce\x21\x41\x5f\xfd", + "\xbc\xbd\x19\x43\x82\xf5\x21\x49\x8c\x93\x0f\x05\x2f\x81\xf5\xc5", + "\xca\x50\xc7\xa2\xd0\xe3\x9a\x64\x2e\x92", 1, 0, 128 }, + { 96, 192, 80, 379, + "\xfd\x10\x95\x28\x5c\x2d\x1d\x6a\x65\x45\x00\x45\x3e\x12\x41\xf1\x3a" + "\xda\x13\x64\x23\x4d\x16\x6c", + "\x95\x48\x7c\xb5\x3c\x6f\xae\x13\x29\x00\x52\xdc", + "\x7d\x97\xf4\xb8\x61\xa8\x33\x6c\xe9\xb4\xc7\x25\x0c\xbd\x82\x5b", + "\xfe\x05\xd5\xfd\x3a\x3d\x4a\x70\x7b\x4a\x63\x09\x7c\x48\x3c\x9c", + "\x49\x85\x19\x00\x2f\xf3\x26\x6d\x85\x84\xe5\x64\x17\xa8\x55\x11", + "\x82\x4e\xfa\xaf\x7b\x11\x98\xbe\x4d\x3e", 1, 128, 128 }, + { 96, 192, 80, 380, + "\xb2\xfb\x99\x1a\x1a\xbc\xfd\xba\xfa\x87\xb4\x15\xc8\xfa\x0f\x03\x95" + "\xe3\x2f\x23\xd7\x8a\x1a\x88", + "\x0b\xbb\x61\x0c\x4f\xb7\x55\x18\x0e\xfe\xe9\xcb", "", + "\x17\x3a\xbd\xf0\xe8\x4a\x4d\x6b\xce\x7f\x84\x9c\x50\xee\x54\x80" + "\xc5", + "\x19\x5c\xe4\xaa\x74\xd9\x9f\xd8\xe5\x44\x4e\x29\x6e\x6a\x5d\x13" + "\x9e", + "\x29\x08\xe7\xb3\xa0\x07\x2a\xb8\x64\x6d", 1, 0, 136 }, + { 96, 192, 80, 381, + "\x00\x1c\xca\xa8\x5e\xd7\xda\x56\xfa\x3f\x1e\x9b\x47\xe3\xa2\x04\x2c" + "\x18\xf2\x1c\x19\xe6\xe9\x64", + "\x8c\x16\xa9\x44\xb8\x0e\xef\x9d\x32\x5e\x1b\x71", + "\x21\x8a\xd9\xdb\x9c\x23\x92\x14\x87\x58\xec\x3c\xc4\x8f\x9c\x12", + "\xed\x07\x99\xeb\xa5\x04\x59\x5e\x80\xa7\x32\x5d\x13\x4c\x5d\xe3" + "\x9a", + 
"\x2e\x3e\x78\xff\x00\xe5\x83\xa6\x39\x45\xdc\x2c\xb7\x28\xa2\x84" + "\xfb", + "\x3c\x89\x31\x64\xf1\x45\x6d\x54\xb9\xdd", 1, 128, 136 }, + { 96, 192, 80, 382, + "\x1d\x75\x7f\x84\xc6\x47\xa4\x2c\xe3\x95\xb5\x4d\xb5\xd9\x21\x79\x86" + "\x27\xba\x1b\xcf\xcc\x7f\x64", + "\xc0\xa2\xa0\xfc\xf5\xc2\x00\xe1\x7c\x32\xc3\x94", "", + "\x51\xa2\xa8\xd9\x95\x84\x4e\x4e\x78\xf9\xb2\x0b\x1a\xf6\x73\x20\xb1" + "\x80\x90\x3c\xbb\xf4\xef\xbc\x60\x1b\x99\xb4\x1f\x07\xf8", + "\x6b\x8b\xc9\x3c\xb3\x48\xd8\x42\xf3\x23\x6b\x66\x58\xde\x7e\xe3\xf5" + "\x57\xe9\x34\x69\x25\x3e\x8a\xfc\x7f\xee\xa8\x7f\x78\xf4", + "\x81\xd5\x96\xe3\x77\xa3\xa3\x01\x64\x0e", 1, 0, 248 }, + { 96, 192, 80, 383, + "\x54\xf1\x0b\xae\xb7\x56\x4e\x94\x7b\xb6\xe1\xe2\xa8\x37\xc1\x6d\xda" + "\xe0\x64\x6a\x8b\x7e\xb3\x38", + "\x8e\xc4\xd8\x54\x4f\xd2\x1e\x6a\x51\x32\xab\xc9", + "\x87\x96\xea\x33\x62\x18\xd2\xa0\x99\x1b\x4c\xb4\x23\x01\xf6\x5d", + "\x1d\x8a\xab\x01\x08\xd7\x29\x90\x92\x8b\x9d\x1a\x8a\x48\x0b\x93\xaf" + "\x27\x63\x4b\x16\x60\x77\xe3\x13\x4e\x2e\x87\x91\xca\x13", + "\x00\xf6\x49\xa1\xfb\x32\x1a\x48\xfc\x1d\xac\xd5\xb9\xfc\x19\x77\x9d" + "\x7f\xb4\x94\xca\xd6\x0e\x2c\x2d\x72\x77\x13\xcd\xe9\x3f", + "\x7f\xc9\x19\xa9\x2b\xf3\x57\x7b\xd0\xb0", 1, 128, 248 }, + { 96, 192, 96, 384, + "\x08\x71\x13\x72\xdb\xac\xac\xbb\x68\xef\x12\xe5\xef\x59\xb6\x9f\xd4" + "\x6c\x9b\xe4\xc2\xfb\x83\x24", + "\x83\x21\x74\x84\x12\x38\x0b\x0e\x7b\x14\xa7\xef", "", "", "", + "\x6c\x1e\xe8\xd9\x71\x8f\x72\x4c\xe8\xd9\x6b\xeb", 1, 0, 0 }, + { 96, 192, 96, 385, + "\x0a\x12\x32\x6b\x7e\xfa\xc1\x79\x42\x1f\xdb\xce\xd8\x0d\x52\xf2\x40" + "\x7e\x99\x3e\xf5\x04\x77\xf9", + "\x34\x47\x21\x31\x54\x78\x40\x26\x3d\x9e\x9f\xd3", + "\x17\xbe\x4b\xb1\xe4\xa4\x0f\xac\x70\x68\x79\x38\x1a\x2d\x6f\x47", + "", "", "\x6f\xb7\x2e\x37\x68\x83\x2a\x7b\xab\x90\x7a\x75", 1, 128, + 0 }, + { 96, 192, 96, 386, + "\x56\xd5\xf5\xeb\x69\x7d\x96\xfa\x9c\x11\x52\x8b\x19\x1e\xaf\xc4\x15" + "\x9c\x2c\xb2\x90\x2f\x06\x95", + "\x8e\x04\xdf\x79\x13\x42\x92\x99\xcf\x2f\x23\x37", "", + "\x9f\x6e\xda\xfc\x71\xef\x15\x61\xd7\x00\x5a\x53\x3a\x5c\xde\xb5", + "\x53\x61\x42\xd2\x7a\x03\x12\xb8\x0e\x53\xff\x32\xbe\x18\x9e\x29", + "\xf1\x6a\x36\x4c\x64\x22\x9f\xab\x13\x90\x87\x61", 1, 0, 128 }, + { 96, 192, 96, 387, + "\x25\x94\x30\x07\xa4\x49\x78\x39\xbc\x13\x38\x69\x45\xb4\xbc\x46\xbf" + "\x10\x5e\xaf\x6e\x6b\xec\x2c", + "\x56\xad\xa6\xe5\x59\xc2\x68\x75\x50\x92\xbd\x6f", + "\x25\xfe\x12\xa5\x28\xd1\x26\xb4\xcc\xbf\x68\x10\x17\x0d\xc2\x8c", + "\x20\x21\x1e\x52\xeb\xbd\x1b\xba\x78\x38\xd4\x02\xe8\xeb\xba\x93", + "\x4c\x41\x2a\xc4\x1e\xc5\x22\x82\x5a\x88\x44\xf7\xd5\xf8\xf6\x07", + "\x81\x77\x42\xf2\xa0\x1b\x9d\xbe\x9a\x7f\x39\x02", 1, 128, 128 }, + { 96, 192, 96, 388, + "\xeb\x3c\xa2\x96\x88\x8a\x76\x28\x98\xe5\x10\x3f\x0d\x54\xd5\x38\x74" + "\xfc\x7f\x4e\x4b\x9d\x21\x5d", + "\xf6\xa1\x64\xa5\x5c\xbe\x06\x44\x72\x39\x71\xb2", "", + "\x96\xda\x54\x41\xe8\x83\x12\x53\x6d\x28\x92\xb1\xe2\x7b\x41\xe6" + "\x41", + "\x35\x8c\x85\xd8\x3d\xce\x34\x5c\xc5\xa1\x56\x60\xad\xb0\x16\xa9" + "\xf8", + "\xf9\x3d\x30\x5c\x3c\xf6\x92\x93\x28\x9e\x09\xd6", 1, 0, 136 }, + { 96, 192, 96, 389, + "\xaf\xe1\x2b\xcd\x5e\xf3\x54\x90\x71\x3d\x20\xfe\xd4\x8f\x6b\x94\x2b" + "\x08\x1b\x9f\x24\x44\x41\x83", + "\x95\x3e\x94\x4b\xea\xa7\x6f\xd2\x46\x3c\x27\x8b", + "\xc9\x02\x81\xef\xb0\xb9\x48\x9b\x61\x72\x2f\x1f\xc7\xde\x5b\xa6", + "\x94\x9f\x81\xce\x36\xdb\xe8\x5e\xac\x14\xa7\x2b\x8b\x77\x58\xea" + "\x47", + "\x69\x7c\x99\xee\xe5\x05\x6d\xec\x98\x51\x60\xab\x2a\xfc\xbf\x0c" + "\x3f", + 
"\x03\xbb\x58\xfa\xcd\xe9\xaf\x29\x08\xb5\x2e\x55", 1, 128, 136 }, + { 96, 192, 96, 390, + "\x73\x8e\x95\xe8\x89\xdc\x79\x3e\x29\xf3\x3b\x9e\x35\xee\x3c\x10\x30" + "\xd7\x53\xe5\xe9\x9b\xdd\xbd", + "\xc3\x29\x71\x81\x6c\x7d\x84\x34\x2f\xf7\x64\x88", "", + "\x7f\x64\x2c\x9e\x9d\x91\x57\x1b\x87\x45\x0d\x59\xa9\xbe\x2b\xe6\xb4" + "\x5c\x5b\x8a\x0e\xeb\x32\x69\x32\xc3\xe8\x75\x11\x84\x85", + "\x4a\xef\xf5\xa7\xca\x46\xa8\x80\x4e\xab\x6f\x23\xcb\xeb\x24\x02\x72" + "\x40\x8a\xf0\x64\x47\xb7\xa6\x73\x8f\x91\xc4\xa9\x0f\x20", + "\xc7\xee\xdf\x4f\x03\x33\x29\xa5\x12\x50\x44\x55", 1, 0, 248 }, + { 96, 192, 96, 391, + "\x70\xbc\x9c\x8a\x60\xda\xc5\xf2\x53\xec\xf3\x2c\x7d\x1e\x6d\xe1\x31" + "\xea\xb7\x9f\xaa\x83\x1e\x76", + "\x45\xeb\xab\xe2\xb6\xa0\x3f\xbb\x15\x97\x85\x31", + "\xab\xd0\x9a\xe3\x17\x84\x91\xea\x28\x98\x2b\xc8\x39\xe3\x97\x21", + "\xee\x8c\x1a\x65\xe1\xc2\x49\x1e\xe7\x25\xa2\x85\xad\x1f\x3a\x22\x75" + "\xc2\xec\x4a\xf8\x2b\xa3\x2a\x66\xcd\x7e\x87\xdb\xff\xea", + "\xf4\x2c\x5b\xeb\x2a\xf7\xa9\x71\x5d\x53\x5c\xb7\x21\xba\xdd\x42\x1d" + "\x47\x2f\xab\xee\x43\x4c\x77\xf4\x2d\x0e\x4b\x16\x3e\x4c", + "\xb5\x0f\x8c\x07\x0e\x11\x70\xb1\x0b\x9a\x99\x32", 1, 128, 248 }, + { 96, 192, 112, 392, + "\xcd\x2a\xdc\x91\xb1\x9d\x56\x4b\xab\xc9\x7e\x12\x03\x7c\x8b\xc9\x1a" + "\xf6\x87\xf9\x59\xda\xe1\xd4", + "\x67\x8b\x89\xfd\xf4\xe1\x35\xdd\x67\xe3\xf2\x8f", "", "", "", + "\xa7\x54\x07\xb6\x29\x89\xf1\xbe\x04\x13\x1a\x43\xce\x16", 1, 0, 0 }, + { 96, 192, 112, 393, + "\x59\xb4\xb1\x81\x6f\x2a\x9f\xb6\x26\x6a\x39\xaf\x3e\x49\x7e\x2b\x89" + "\xb1\xfa\x51\xfc\xb9\x65\xef", + "\x99\x05\x97\x9f\x5b\x03\xa5\x0d\x54\x40\xaa\x08", + "\xe8\x2c\xb1\x13\x39\x72\x77\xe2\x20\x05\x2a\xc5\x53\x04\xd7\x93", + "", "", "\x67\xf6\xe8\xc4\x46\x5b\xb6\x47\xc0\x3e\xf4\xfc\x5f\x1d", 1, + 128, 0 }, + { 96, 192, 112, 394, + "\x2d\xe7\xb9\x83\x7d\x63\xf5\x31\xdb\x27\x05\xc5\xe2\xc8\x00\xaf\xbf" + "\x5c\xce\xf7\x3b\x80\xf7\x9d", + "\x1a\x0c\xe3\xa2\xe9\x28\x3d\x06\x92\x85\x41\x6c", "", + "\xc1\x26\x51\x83\xd4\x09\x5f\xbe\xa0\xbf\xa3\x5b\x27\x81\xd5\x2a", + "\x34\x0b\x2f\xd7\xc3\x9b\xba\x1a\x1a\x93\x91\xb6\x01\x0e\xf8\xb4", + "\x99\xb1\x9c\x91\xeb\x23\x12\xff\x5c\x42\xbd\x88\x90\x68", 1, 0, + 128 }, + { 96, 192, 112, 395, + "\x7f\xda\x00\x39\x31\xc5\xea\x09\xaf\xa3\xc9\x3b\xca\xa9\xcd\x33\xaf" + "\xfa\x55\x06\x1d\xf3\xc4\xda", + "\xc7\xa7\xaf\xe9\xd8\xd9\xda\x3a\xc8\x1c\x7b\x58", + "\x32\x09\x2c\x8d\xc6\x2f\xf2\x57\x0f\xae\x6e\xcc\xcf\xd9\x2b\xe2", + "\xc5\xbd\xa3\xc8\x6f\x31\x16\x0c\x96\x23\x98\x4d\xf8\x85\xf9\x2a", + "\x50\x22\x4b\xff\x26\xb6\xb9\x66\x69\x32\x50\x37\xf1\x29\x4a\x2a", + "\xf1\xfe\x37\x1a\x39\x74\xcd\x23\x80\xda\xb2\xc7\xdb\x62", 1, 128, + 128 }, + { 96, 192, 112, 396, + "\x0b\xb8\x9e\xe6\x66\xcc\x14\x3c\x89\x58\x3c\xe3\x05\x5d\x02\x80\xa3" + "\xff\x65\xdd\x5b\x0a\xc9\xa8", + "\xca\x9e\x52\xc9\xf7\x5a\xe6\x26\x25\x6d\x21\x0d", "", + "\x4c\x8c\xdd\x0f\x6c\x9e\x8a\x00\x91\xb7\x30\x70\x42\x98\xb9\x0e" + "\xaa", + "\x93\x18\x5d\xe9\x8b\x9b\x95\xa1\x18\x55\x09\x64\x40\x02\x7f\xf5" + "\xd5", + "\x8e\xdf\x2a\x34\x08\x3c\xd4\xfc\x82\xee\x34\x90\x4d\x55", 1, 0, + 136 }, + { 96, 192, 112, 397, + "\x44\xde\xf0\x2b\x19\xd8\xb7\x4b\x25\x80\x1e\xc2\x52\x73\xb6\x8f\x50" + "\xde\xa1\x26\xec\x4a\x36\x66", + "\x21\x5d\xbc\x07\x2f\x69\x8b\xa9\x6f\x85\x50\x48", + "\x20\x28\x29\x92\x7e\x08\xe4\x0a\xed\x36\x96\xff\xde\xdd\x10\x7d", + "\x49\x25\xd7\xf7\x0c\x12\xa6\xb8\x48\x7d\x0c\x9f\x16\xf4\x8e\x8e" + "\x8d", + "\xc5\x4f\xb9\xe5\x55\xfb\xcb\x5e\x1e\x70\xaa\xaa\xef\xbc\x12\x25" + "\x00", + 
"\x1b\x59\x84\x53\x8b\xea\xfa\x71\x14\x2f\x0c\x0e\xc4\x2a", 1, 128, + 136 }, + { 96, 192, 112, 398, + "\xa2\x9a\x0c\x3f\x1e\x2e\x3e\xe8\x8d\xfd\x3f\x01\x9b\x42\x5a\x5f\x64" + "\x75\x26\xd3\xf3\x68\xa1\xbe", + "\xf2\xba\x8a\x66\x20\xd4\xe5\x34\x87\xb8\xd6\x6d", "", + "\x23\x59\x87\x89\xd7\xa7\xa2\x48\xd1\x7e\xc0\xc6\xaa\x31\x32\xb4\x10" + "\x2c\x0d\xf2\xfd\xab\xa4\x3e\x4e\x45\x81\x43\x9b\xdf\xdb", + "\xb0\x80\x13\x0f\x95\xe4\x6d\x79\xed\x5f\x67\xf0\xdc\x88\xa2\x3c\x34" + "\xdc\xf7\x4f\x6e\x61\xb6\x56\x21\xb6\xef\x53\x75\x82\x79", + "\x67\x85\x6e\x34\x44\x53\xca\xb3\x35\xa8\xf0\x7b\x1f\x63", 1, 0, + 248 }, + { 96, 192, 112, 399, + "\x90\x95\x4f\xb2\x2a\xa8\x48\x60\xdd\x7c\x3f\xad\xb3\x19\xdd\x1d\x16" + "\x85\x77\x23\xbd\x1b\xad\xb1", + "\x7c\xa5\x17\x8d\x42\x79\xad\xb8\xd2\x2b\x48\x70", + "\xfc\x77\xe3\xc5\x01\x03\xc5\x86\x08\x82\xe8\xce\xd3\x40\x29\x33", + "\xea\x6e\x7a\xa0\x10\xe1\x9a\x7c\x76\xa4\xd7\xd3\x44\x0d\xc6\x1e\xed" + "\xa4\x4a\x5a\x6f\xb7\xfa\x82\x44\x17\x18\x5d\x4a\x55\xce", + "\x87\xb7\x53\xfa\x7c\xe3\xe7\xd1\x62\x92\x59\x46\xe8\x57\x19\x93\x3a" + "\x0d\xd1\x0e\xaf\x72\xb5\x25\x9c\xba\xce\x8c\x41\xc5\x3b", + "\x2b\x55\x64\x55\xa6\x57\xee\x61\x71\xf4\xed\xe6\x43\xb5", 1, 128, + 248 }, + { 96, 256, 32, 400, + "\x99\x48\xed\xf5\xcf\xb2\xf5\x33\x63\xed\x83\xbf\xb1\x5e\x7c\xb5\x02" + "\xf0\x62\x8d\xc9\xf2\xb8\x72\x23\xf2\x23\x34\xc4\x0b\x89\x23", + "\x44\xe1\x54\xe9\xb3\xf7\xfd\x47\xa9\x7f\xc7\xbe", "", "", "", + "\x92\xa5\x99\x22", 1, 0, 0 }, + { 96, 256, 32, 401, + "\x93\xb7\x5a\xc1\x29\xec\x19\x5f\x8c\x18\x0e\x9b\x91\xdf\xed\xaa\xe2" + "\xb2\xfa\xcd\xc1\x55\x93\xb3\xe4\x25\x8c\x78\xd2\xff\x94\xd7", + "\xdb\x53\x5f\xe7\x23\xba\x65\x0b\x66\xd2\x30\xfb", + "\x7b\x3d\xd4\x20\x60\x71\x39\xc1\x9c\x6d\xb7\xa4\xef\xe0\x9a\x0b", + "", "", "\x50\xe4\x2c\x1f", 1, 128, 0 }, + { 96, 256, 32, 402, + "\xc7\x67\x91\x45\xa1\x5e\x53\xdb\x5c\xd6\x16\x61\x43\xa9\xfe\xfe\x67" + "\x46\x71\x5f\x5d\x84\xd9\xdf\xa6\x04\xf1\xd3\xdc\x33\x7e\x6c", + "\x6a\xc0\xd6\xaa\x44\x6e\x86\xff\x32\xf8\xfc\x76", "", + "\x83\xb0\x83\x05\x52\x6f\xbc\xbc\xde\xeb\xb3\xd7\xa8\xac\x44\xf5", + "\xdc\xb5\x25\x05\x59\xa0\x3c\x8e\x70\xe5\xc0\x10\x71\x21\xcf\x58", + "\x00\x81\xc1\xfd", 1, 0, 128 }, + { 96, 256, 32, 403, + "\xbc\x5c\xaa\x30\x6c\x42\x3e\x6f\x85\x0c\xd5\x64\x4b\x09\xdd\x4b\xa7" + "\x76\xb3\x01\x71\xc7\x2e\x00\x50\xe5\xa6\x0a\xfe\x9c\xb7\x7c", + "\xf9\x56\xd4\x14\x22\xd8\xeb\x63\x23\x1b\x38\x26", + "\xfa\x6e\x34\x46\x33\x18\x23\x7e\x98\x5d\xd2\xf7\x2b\x0d\xd0\x14", + "\x46\xc9\xda\x60\x2a\x54\xfe\x80\x37\xcf\x0b\xee\x72\xaf\xfc\x72", + "\xdb\xb6\xb4\xec\x70\xf9\x32\x4f\x4b\xc2\x2b\x59\x24\x09\xd4\xac", + "\x80\x3a\x69\x48", 1, 128, 128 }, + { 96, 256, 32, 404, + "\xa9\x59\xc6\x10\xa8\xef\x46\x8b\xb8\xe8\x66\xa0\x9b\x26\x27\xa6\xc3" + "\x9e\xe2\xed\x51\x0d\x22\xe8\x72\xaf\xa6\x3e\xba\xb7\xcf\xb0", + "\xf6\x48\x00\x2f\xfd\x7c\xff\x0b\xd2\x6d\x1c\x45", "", + "\x34\x65\xe9\xb8\x35\xc2\x16\x95\xbf\xd9\xa5\x20\xa9\xe0\xf0\x79" + "\xd1", + "\xbc\xa3\x38\xdc\x06\xcc\xf0\x3c\xbf\x30\x25\x1c\xce\xac\x64\x8a" + "\xaa", + "\x97\x6e\xd7\x31", 1, 0, 136 }, + { 96, 256, 32, 405, + "\xe2\x61\x31\x4c\x54\x0a\xef\x81\x14\x5a\xc2\x23\xff\xc7\xbd\xe0\x11" + "\x65\x67\x93\x57\xe7\x86\xcd\x2f\x88\x15\xe2\x3f\x1d\x69\xdf", + "\xfd\x69\xf4\xb9\x39\xe3\xbb\x09\x00\x6f\x2d\x2b", + "\xae\x20\x5a\x7a\xcc\x94\x57\x16\xf7\x52\xf0\x95\x42\xb7\x8c\x5a", + "\x90\x64\x8b\x56\xd3\x5b\xf1\xca\x99\x0e\xa2\x59\x50\x35\x4f\xf1" + "\xa3", + "\x66\x6d\xe4\x14\xb3\x38\x90\x81\xd0\x70\x28\xd5\xa6\xa3\xf8\x5d" + "\x5b", + "\x49\x5a\x49\x8b", 1, 128, 136 }, + { 
96, 256, 32, 406, + "\x04\x79\x81\x7a\xfb\x26\xc2\xce\x77\xb7\x15\xbb\xb0\xd6\x43\x02\xfb" + "\x09\xff\x92\x5d\x34\x98\x35\xcd\x1d\xd3\x27\x9f\xbb\x72\x38", + "\x4f\xa9\x0e\x2d\x99\xc7\xa6\xd2\x5d\x38\xbc\xb4", "", + "\xdd\xeb\xd4\xe2\xaf\x2e\xfe\x97\x20\xc9\xe2\x72\xe4\x01\xb9\x3a\xc1" + "\x1b\x0b\x8f\xf9\x76\xad\x2d\xea\x0c\xbb\x3e\x8c\x5a\x7f", + "\x6e\x20\x73\xfa\xa6\x80\xe0\x58\x85\xa5\x9b\x7a\x75\xdd\xe2\xd3\x0f" + "\xd6\x33\x32\x33\xfc\x9d\x03\xe9\x9c\x49\x0f\x8c\x94\xef", + "\xbe\x02\x37\xf8", 1, 0, 248 }, + { 96, 256, 32, 407, + "\x22\x8e\xdf\xfb\x10\x35\x24\x83\x59\x07\x72\x3f\x7a\xf9\xec\x18\x02" + "\x3c\xb8\x2c\x71\x96\x97\xb3\xa1\xc5\xdf\x0f\x2c\x30\xab\x18", + "\x44\xf7\xcb\xcb\xf2\x5c\x4c\x0f\xaf\xea\x93\xf2", + "\x2d\x70\x18\x20\x3f\x67\x83\x38\xef\xb6\xb3\x41\x14\x97\x94\x1f", + "\x10\x64\x7f\xb1\xe5\x04\x0f\xa0\x09\x09\xd3\xfe\x51\x71\xf0\x4c\x1c" + "\xe9\x45\x40\x83\x5e\x19\xe6\x25\x35\x5b\x81\x3d\x81\xe7", + "\x2e\x15\x81\xea\x47\x4d\x67\x07\xa6\x94\xbb\xab\xb2\x6e\xfb\xad\xe1" + "\xeb\x8d\x8e\x8c\x06\x3f\x7c\x05\x82\x09\xeb\x1b\x33\xb5", + "\xb0\x6b\x64\xb5", 1, 128, 248 }, + { 96, 256, 48, 408, + "\xf1\x62\xc3\x19\xc6\xcf\xf0\x93\xd5\x95\x6a\xee\xde\x37\x01\x18\x19" + "\x42\x88\x82\x11\x08\x78\x24\x81\x78\x27\xa4\x32\xf8\x6d\x9f", + "\x69\xd7\x42\xd9\x4f\xee\x25\x11\x40\xe6\xd7\x79", "", "", "", + "\xfc\xdc\x5a\xa3\x39\x14", 1, 0, 0 }, + { 96, 256, 48, 409, + "\xed\xdc\xcd\x5e\xda\x6e\xaf\x42\x1b\xbf\x87\xd9\x19\x54\x9c\x1f\x3a" + "\xc2\x04\x5c\x0a\xc2\xbc\xc2\xef\xa5\x0e\xc8\x40\x50\xb3\x6e", + "\x30\xf3\xdb\x31\x2e\x76\xd2\x93\x45\xed\xde\x59", + "\x07\x8e\x76\xef\x2d\xee\xbd\xd8\xf2\xd5\x49\x08\x9f\x4a\x93\xe3", + "", "", "\x0d\xf6\xd2\xdc\x83\xdf", 1, 128, 0 }, + { 96, 256, 48, 410, + "\x1c\x9b\x20\xe6\x4a\xd7\x83\xbf\x04\xf8\x01\xbe\x53\x2f\x6b\x08\x8e" + "\x00\x4d\x3a\xa2\xd7\x2d\x77\xf3\x9e\xc8\xfe\x9d\xdc\x51\x89", + "\x49\x7f\xa4\x1d\xf3\x08\x58\xe3\xfb\xb3\x6a\x68", "", + "\x03\xd3\x3c\x0a\x11\xa6\xcd\xa9\x9d\x76\xe9\x8f\x75\x05\x9f\xbf", + "\xde\xea\x99\x43\x7d\x38\x5b\x21\x1f\x3d\xeb\xda\x65\x86\x9d\xaa", + "\x81\x8e\xa9\x63\x04\x2c", 1, 0, 128 }, + { 96, 256, 48, 411, + "\x5c\x8b\x72\x87\x01\x76\x56\xc3\x10\x8d\x7e\xb6\x14\x37\x10\x4b\x41" + "\x1f\xd2\xd6\x15\x24\x5b\xf2\x3c\x82\x7d\x3d\xab\xe4\x30\xa5", + "\x99\x6a\x93\xe4\x7c\x2d\xab\x38\xc9\x35\x29\xae", + "\x23\x33\xe2\xc9\x58\x02\x88\x3f\xb3\xcf\x98\x73\x4d\xcf\x9c\x64", + "\xd1\xa2\x2a\x8d\xa2\x20\x07\x2c\x49\xd8\xaa\x1e\x28\x33\x34\xa6", + "\xd6\xc3\xe9\x29\x18\x13\xd3\x9a\xd9\x19\x48\x79\x03\xc6\xa7\xa5", + "\xe6\x39\x54\x04\x16\xfd", 1, 128, 128 }, + { 96, 256, 48, 412, + "\x96\x4c\x2d\x69\xf7\xb5\x3c\x40\x62\x88\x41\x01\xd5\x62\xf5\x23\x16" + "\xcc\xbc\x81\x4a\x29\xb0\xfe\x6e\xfe\x7f\x1e\xc7\xf7\xdd\xfe", + "\x63\xe4\xd4\x4f\x41\xf3\xce\x45\x14\xb7\x37\x00", "", + "\x52\x36\xea\x08\x20\xe8\x37\x45\x21\x2c\xdc\xd7\xc1\x0a\x5f\x35" + "\x29", + "\x05\x67\x5f\xa4\x2a\x07\xd4\x3f\xe9\x1b\x53\x39\x7f\x74\x60\x9c" + "\xff", + "\x54\x53\x41\x14\x41\x5d", 1, 0, 136 }, + { 96, 256, 48, 413, + "\xc0\xa9\xd3\x35\xf3\x29\x96\x6a\x5b\xc8\xcd\xef\x38\x60\x90\x80\xb8" + "\x5a\x2e\x6e\x96\xf6\xac\x82\x03\x67\x94\x96\x6e\x7c\x82\xc1", + "\x58\x29\xfb\x77\xbb\xdf\xb3\x87\x21\xa5\x91\x00", + "\x49\xa3\xed\x9c\xd3\x09\x68\xfd\xb7\xff\x73\xd1\x2d\x30\xe1\x55", + "\xff\x00\xf8\xc6\xcc\xbc\x90\xa8\x4f\x94\xfc\x98\x8c\xba\xb8\x2c" + "\xa1", + "\x29\x9b\x30\xe3\xd3\xc6\x06\x0b\xf5\xd2\x1f\x7f\xc0\x13\x89\x69" + "\x68", + "\xa5\x96\x5c\x20\xfd\xc0", 1, 128, 136 }, + { 96, 256, 48, 414, + 
"\x48\x71\xb9\x1e\x5e\x7f\x3c\xf9\xcc\x1b\x01\xd5\x0b\xc6\x20\x36\x10" + "\x75\xad\xa3\xed\xc4\x23\x39\x8d\x47\x40\xde\x72\x1f\x8c\xe1", + "\x7b\xd8\xcf\x2f\xc2\x4a\x3a\x83\x5c\xf9\x1b\xf7", "", + "\xe4\x39\x36\xc2\xd0\x5a\x3a\x35\xf7\xc2\xdd\xd1\x65\xd3\x97\xe5\xd3" + "\xc2\xec\x2b\x48\x23\x60\xd3\xf2\xe6\x21\x7c\xe0\x00\x37", + "\x43\xb6\x65\x99\xdd\x97\x82\xbe\xcf\x88\x4f\x04\x4f\x0c\x85\xb4\xae" + "\x6f\x7f\xd0\xf6\xce\x2a\xfb\xba\x84\x2e\x6b\x59\x4b\x3a", + "\x55\xd8\xf7\x6e\xa7\xe6", 1, 0, 248 }, + { 96, 256, 48, 415, + "\x71\x70\xed\x6d\xbf\x43\x4b\xfd\x0b\xcb\x6b\xd6\x92\xa3\x69\x36\x52" + "\x51\xfa\x31\x90\x9b\x4a\x2e\x3b\xee\x10\x66\x3a\x01\xe0\x0f", + "\xd1\x3e\xe3\x9b\x84\x2f\x86\x0a\x5f\x4d\x78\xe3", + "\xf8\x7d\x88\x71\xa8\x95\x1c\x39\x85\x73\x21\xe3\x20\xb8\xb8\x36", + "\x14\x5b\xe0\xa7\x8b\xdb\x38\x01\x4e\xe6\x19\x31\x45\x13\x1e\xc8\xa3" + "\xfd\x7c\x89\x79\x3a\x30\x05\x36\x4f\xf1\xe7\x93\xf6\x7b", + "\x6b\x91\xe6\x79\x92\xf8\x70\x30\x6f\x24\x24\x70\xc5\x11\x31\x87\x3e" + "\x2a\x6c\x07\x6c\xda\x25\x9c\x33\x49\xc9\x49\x4c\x39\x04", + "\xc8\xcb\x36\x0f\x80\xc5", 1, 128, 248 }, + { 96, 256, 64, 416, + "\xd2\xa4\x1c\xd9\xce\x5e\x91\x7d\x16\xb9\xab\x55\x81\x9e\xf8\x50\x1e" + "\x06\xaa\x78\xef\x13\x2f\xd3\xeb\xe6\xfe\xcd\x91\xbe\xb3\x9b", + "\xf7\x1b\xf6\xbc\x21\xc6\xd6\x35\x4e\x4b\x4c\xdf", "", "", "", + "\x23\x92\x8a\x00\x9d\x21\xa1\x0f", 1, 0, 0 }, + { 96, 256, 64, 417, + "\x88\x21\x07\xab\x29\x05\x3d\x4b\x44\xc8\x7b\x5b\xb9\x49\x37\x21\x1c" + "\x20\x52\x8d\xa9\xac\x49\x0f\x6c\x57\x4c\xae\xcd\xcd\x2f\x17", + "\xe6\xa1\x35\x37\xbb\x7f\x2a\xf7\x49\xb3\x18\x23", + "\xe9\xee\x32\xe6\xf1\x97\xe4\x02\x04\x68\x2d\xac\x42\xdd\x4c\x75", + "", "", "\x57\x73\xc7\x25\xf2\xf9\x46\x17", 1, 128, 0 }, + { 96, 256, 64, 418, + "\xb9\x67\x09\x1c\x98\xbb\x64\x92\x24\x30\x83\x3d\x1b\x55\x33\x26\xb8" + "\xe9\x1b\x6e\xf7\x14\x19\x71\xcc\x8e\x8c\xc5\xf6\xef\x61\x70", + "\xa5\xdd\x07\x6d\x8a\x9d\xc3\xd7\xec\x43\xd0\x4f", "", + "\xc8\xa3\x31\xb5\x54\xe6\xc7\xb0\x78\x3c\x53\xfe\xe6\xf1\x61\x8e", + "\x99\xb5\xc2\x22\x25\xe5\x32\x5f\x9a\xa9\x59\x9a\x34\xde\xec\x59", + "\xe9\xc9\x36\x19\xd3\x3d\x26\x8d", 1, 0, 128 }, + { 96, 256, 64, 419, + "\x71\x60\x43\x47\x20\x50\x4d\xce\x28\x84\x56\x25\xa3\x42\x31\x66\xd9" + "\xb5\x02\x5d\x97\x5c\x6e\xe4\x72\x99\xbb\x5b\xd6\x77\xdb\xeb", + "\x02\x96\xc9\x5b\x44\xc1\x74\x63\x43\x4c\x7e\x19", + "\x19\x08\x2b\xf5\x7b\x6c\x41\x30\xea\xc5\x8c\x05\x26\xa0\x44\xee", + "\xc6\x6a\x48\x61\x5b\x62\xd2\xd8\x5e\xa8\x2e\xe4\xd5\x28\xa0\x3a", + "\x89\x34\xea\x7a\xfb\x44\xfd\xca\x40\x27\xed\x9b\xbb\x24\x73\x58", + "\xb3\x33\xf0\xe1\x38\x3c\xf3\xe8", 1, 128, 128 }, + { 96, 256, 64, 420, + "\x1d\xd5\xa0\x92\x94\x9b\x67\x63\x5d\xb0\xc4\x8a\x03\x74\x0d\xa8\x06" + "\xdb\xe9\x7a\xad\x5b\x84\x12\x30\x0d\x68\x5c\xec\xfe\x84\x07", + "\x3f\x9e\xa3\x93\x62\xc8\xd8\xe4\x92\xea\x8b\x41", "", + "\x84\x40\x8d\x8b\xcc\xb4\x28\x8e\x62\x2b\xf7\xc6\x31\x40\x1d\x99" + "\x08", + "\x4f\x19\xd2\x7e\x0a\x40\xe4\x83\x5e\xbc\x3b\x89\xce\x8b\x51\x94" + "\x1b", + "\x44\xd9\xe5\x0d\xca\x91\x5c\x2e", 1, 0, 136 }, + { 96, 256, 64, 421, + "\x47\xf6\x64\xe6\x79\x0f\x3e\x25\xbc\x41\x0d\x84\x7f\x38\x66\x2f\x04" + "\x5f\x0a\xa3\x64\x14\x29\xed\xf8\x09\x9f\x4b\x4d\xf3\x2f\x06", + "\xf0\x92\xa3\x57\xb5\xef\x0c\x97\x5e\xe1\x69\xc4", + "\x33\x8b\x4c\xc6\x0e\xc1\x51\xfa\x28\x3c\x1c\xb1\x0e\x72\x2d\x9d", + "\xb0\x1d\xfe\x72\x41\x66\xa2\xbc\x98\xcb\xb9\x6c\xf5\x40\x02\x8a" + "\x0e", + "\xd7\x74\x6f\x18\x6a\xab\xfa\x36\x68\x54\x81\xec\x8a\x7f\x00\x22" + "\xe8", + "\x41\x15\x82\x92\xa1\xd8\x7c\xfd", 1, 128, 136 }, + { 96, 256, 
64, 422, + "\xa4\x7a\xbe\x6e\x86\x7f\xbc\x16\xc4\x6a\x6f\xd7\xf1\x0b\x77\x92\x9b" + "\xaa\x12\x93\x69\xc8\x98\xd2\x52\x65\xb0\x17\x00\x56\xf9\xd0", + "\x65\x0b\x12\x68\x7c\xa8\x5a\x50\xe6\x50\x98\x84", "", + "\x20\x09\x14\x57\x1d\xd0\x38\x27\xf0\x7c\x2b\xd9\x38\x2e\x7d\x19\xd6" + "\x2f\x1e\xa4\xa7\xc7\x26\x9d\x86\x73\x3e\x43\xe4\x5a\x4d", + "\xbc\x23\x14\xa5\x89\xdb\xdd\x95\xb3\x58\xcd\xad\x30\xb1\x5e\x86\x7d" + "\xcd\x8d\xbd\xe4\x28\xb4\x7e\x39\x0a\xc4\x37\x62\xf6\x34", + "\x88\x1f\xa5\xfe\xcb\x51\x4c\xcf", 1, 0, 248 }, + { 96, 256, 64, 423, + "\x11\x04\x80\xea\x9c\x9f\x4c\x5e\x6b\x5b\xe0\x1a\x2a\xaf\xc8\x61\xd1" + "\x37\x0c\x24\x3a\xff\x9f\xaa\xfd\x0a\x92\xa9\xd1\x8e\x58\x45", + "\x0e\x5c\xf6\x83\xe1\x32\x04\xcf\x91\xa2\xd4\xb6", + "\xc4\x90\xa5\xfa\x19\xb9\x7c\x3e\x3a\xdf\x20\xbc\x4d\xf5\x11\x40", + "\xc9\x2e\xc3\xd6\xa2\xc2\xfa\x19\xc4\x5b\xe7\x10\x7a\x48\xa9\xea\x0f" + "\xe4\x6a\x92\x97\x8b\x5d\xab\xb3\xf9\x4b\x45\x7b\x5f\xbd", + "\xbb\x51\x10\xdd\x12\xbd\x3d\x12\x14\x4c\x8d\xe5\x5b\x3b\x26\x77\xfc" + "\x70\x84\xd5\x6a\xfc\xc6\xa7\x6a\x52\x28\xff\xf8\xdb\xd3", + "\xe3\x9b\x0d\x11\x74\xf7\x60\x9b", 1, 128, 248 }, + { 96, 256, 80, 424, + "\xa0\x91\x7e\xbe\x15\x17\x78\xcb\x88\xbb\x2e\x35\x61\x69\xad\x1a\x4b" + "\x9e\xbe\x2b\xcc\x2a\x35\x2b\xc7\x89\xa5\x0b\x4f\x31\x2d\x3e", + "\x32\x81\x13\x54\x38\x26\x08\xbd\x07\x6d\x8a\x87", "", "", "", + "\xb4\x1d\x1d\xaf\xd0\xd2\x59\x31\xd2\x85", 1, 0, 0 }, + { 96, 256, 80, 425, + "\xaa\x8a\x48\xf8\xb6\xd1\x86\x34\xec\x96\x33\x8e\x82\x0f\x7e\xb9\xf0" + "\xfe\xa8\x86\x4b\xb9\x27\xa5\x7c\x65\xf8\x34\x49\x90\x19\x9b", + "\xa0\xf7\x30\x46\x48\xf9\x7a\x30\x34\x91\x6d\x35", + "\xf4\xab\xe3\x08\x15\xce\x6a\xe9\xcf\x2f\x4e\xaa\x8b\xd0\x04\xcb", + "", "", "\xf7\x60\x5f\x52\x01\x93\x6d\xa1\x6d\x39", 1, 128, 0 }, + { 96, 256, 80, 426, + "\x8a\xb5\x05\x16\xb0\x53\xa3\xed\x51\xb9\xf8\x4f\x76\xdb\xf9\x30\xbd" + "\xe2\xb5\x5a\xa4\x99\xa0\x16\x19\x43\x50\x46\x1f\xf0\xc7\x08", + "\x14\x0a\x72\xad\x89\xb2\xfa\x23\xc3\x85\xe8\x04", "", + "\x53\x6b\x90\x06\xa4\x1f\xeb\xbe\x7a\x10\xd1\x6a\xe2\xb6\x44\x88", + "\xf1\xed\x66\x67\xa2\x18\x87\xa3\x94\xd8\x16\xa4\x5a\xe0\x6a\x5d", + "\x45\x55\xc7\x16\x14\xa7\x65\xc6\xa8\xfc", 1, 0, 128 }, + { 96, 256, 80, 427, + "\x14\x50\xb9\xd4\x36\x61\xc2\x7d\xbd\x08\x00\xd6\x61\x6c\xac\xf4\xe2" + "\x83\x10\x99\x0e\x74\x4f\x8a\x89\x66\x54\xae\x43\x87\x2b\xcb", + "\x91\xb8\xa7\x08\xbe\x02\xcb\x63\x35\xc2\x85\x83", + "\xa4\xa3\xe0\xca\x16\x5b\xff\xcc\x30\x52\x05\x66\x7c\x38\x68\x6b", + "\x6c\x03\x01\x32\x6a\x61\x33\xf5\xd5\xfa\x87\x17\xda\xe4\xe1\x90", + "\x37\xd8\xf3\x8e\x20\x4c\x36\xc0\x29\xcf\x15\xf7\xff\x3a\xc5\xda", + "\xfd\x94\x29\x18\xf7\xaa\xf3\x08\xe5\x6e", 1, 128, 128 }, + { 96, 256, 80, 428, + "\x1b\x05\xcb\xa5\x87\x24\x72\x13\xa0\xd9\x59\xd6\x4a\x29\xa5\x9e\xe2" + "\xd0\xee\xa2\xd9\x7e\xfa\x29\x68\x61\x43\x4d\xb8\xe5\x27\x54", + "\xb8\x3b\x0e\x7a\x52\xdd\x50\x7a\x8d\x67\x36\x61", "", + "\xd0\x76\x3c\x30\x60\xb7\xf9\xeb\x2d\x42\x75\x8c\xcb\x3e\xbb\x03" + "\x11", + "\x27\xb4\x4a\x64\xbc\xd0\xa0\x73\xd7\x70\xa7\x1d\x38\x2b\xd4\xec" + "\x99", + "\x9a\x1c\xff\x81\x02\x66\x69\xdb\x70\xd1", 1, 0, 136 }, + { 96, 256, 80, 429, + "\xe2\x54\x11\x83\x8a\x5a\x8d\xc7\xfa\x86\x6e\xa7\x43\x47\xab\x00\x3f" + "\x2a\x86\x62\x27\x5b\x69\x3a\xe8\xa6\xdd\xba\x97\x9e\x18\x87", + "\xbd\xad\x47\x51\x7b\xb9\xb6\x6b\x5e\x64\xc2\x19", + "\x9a\x1b\x3c\xda\xc0\x76\x7c\xb2\x34\xe5\xe4\x68\x78\x6c\xe3\x27", + "\x88\x98\x29\xa2\xb4\xa8\x86\xd3\x9f\x1f\x7f\x68\xc2\xcd\xb4\xf3" + "\x65", + "\x80\x66\xe4\xa8\xcd\xf7\xac\x2f\x70\xe2\xf5\xba\x51\x26\xd5\xe3" + "\x47", 
+ "\x39\x29\x24\x8c\x35\x8b\xdc\xf3\x62\xf1", 1, 128, 136 }, + { 96, 256, 80, 430, + "\xde\x1c\x7d\x37\x84\xb9\x82\x50\x42\x2e\x6f\xff\xed\x88\x57\x71\x54" + "\xc1\x93\xf7\x2d\x4a\x97\x96\xd4\xff\x4d\xfc\x88\x23\x5a\x17", + "\x9d\x14\xa6\xb7\x93\x32\xee\x97\xc4\x8f\x07\xe1", "", + "\x55\xdc\x11\x79\xcd\xad\x38\xd4\x5e\xd4\x39\x39\x5c\x67\xa8\x72\x4d" + "\x75\x13\xa9\xa4\xc6\x2f\xb5\x9a\x78\x8b\x0a\xc6\x7b\x7d", + "\x94\x82\xb6\x00\x66\xc9\x99\xcc\x89\x5c\xf9\x80\xe8\x1a\x29\x23\x7f" + "\x80\x9e\x9b\x80\xb3\x24\x90\xe6\x0a\xc8\x57\x30\xca\xfc", + "\x67\x5e\xb8\x19\x7e\x60\x5b\xdd\xf2\xe5", 1, 0, 248 }, + { 96, 256, 80, 431, + "\xbd\x02\x55\x52\xc3\x4a\x55\x2f\x07\xde\x3a\x34\x8b\xf7\xdf\xb3\x08" + "\xbe\xc3\x6c\x47\x89\x3a\xd2\x9f\x3f\xe4\x41\xe2\x4f\xb2\x55", + "\x18\x51\x40\xaa\xc8\x3f\x26\x1a\x8c\x0d\xce\xa4", + "\xa7\x21\xa6\x9f\x3a\x24\xdd\xbc\x2e\x16\x01\x52\x28\xc8\x48\x3a", + "\x05\xba\xbe\x1d\x63\xf8\x12\x06\x9d\xfc\xd0\xf5\x92\x62\xfe\x05\xbd" + "\x45\xb3\xc1\x1a\x3d\x6b\xdf\xea\x5a\x0c\x80\xd1\x32\x20", + "\x74\xc9\x70\x0f\xb3\xc7\xbd\x4d\x65\xbd\xcd\x0d\xf8\xcc\x73\xa4\x14" + "\xad\x9c\xd7\x87\xb0\x5c\xc9\xff\xbf\xb6\x3c\x84\x8d\x1a", + "\x4d\xd4\x3d\xc3\x2b\x30\x16\x73\xf4\x04", 1, 128, 248 }, + { 96, 256, 96, 432, + "\x07\x37\x42\x4e\x0c\x2f\x40\x48\x63\x81\x33\xa1\x8d\x67\x6d\xc1\xd8" + "\x3a\x23\x38\x77\x61\x3a\xcc\x0e\xb5\xa6\x81\x30\x53\x66\xc0", + "\xf0\x28\xd0\xec\xf2\x6c\x31\x2b\x9f\x62\x33\x95", "", "", "", + "\xd3\x7f\x07\xc4\xec\xef\x1f\xca\xf0\xfe\x44\x4a", 1, 0, 0 }, + { 96, 256, 96, 433, + "\x8e\x4c\x13\xc9\x82\xa0\x6f\x3a\x98\x29\x59\xeb\x7c\x2e\x9f\x0e\x41" + "\xa8\xe0\x54\x36\x0e\x5b\x93\x11\x1b\xc6\xd9\x39\x70\xee\x8d", + "\x8c\x08\x1e\xb6\x0f\xa0\x90\x35\x95\x71\x3a\x73", + "\x24\xf1\xed\x7c\xad\x53\x54\x68\x02\xe2\xe5\xf5\xed\x51\x62\x47", + "", "", "\x97\xfe\xd4\x10\xc9\xfd\xb0\x6b\xcd\xb3\x85\x85", 1, 128, + 0 }, + { 96, 256, 96, 434, + "\x62\x7f\xfd\x55\x17\x6d\x65\x55\xda\x82\xb4\xeb\x87\xe6\x51\x90\x44" + "\xb8\x81\x33\x4c\x95\x78\x9d\x67\x07\x29\xaf\x05\x84\x12\x8b", + "\xc1\x5c\xb8\xab\xce\x00\x8f\x01\x5e\x27\x15\xae", "", + "\x00\x02\x24\xe6\x3d\x99\xe8\xb1\xa0\xa2\xab\xb4\xb4\x5b\xca\x15", + "\x59\xa1\xb9\x55\x22\xe9\x6a\x5f\xea\x0a\xe7\x7d\x17\x92\x23\xec", + "\xaa\xb2\xf3\x4d\xe6\xe5\xba\xc7\xcc\xf9\x36\x18", 1, 0, 128 }, + { 96, 256, 96, 435, + "\xbb\x65\xd8\x0b\x7a\x47\x82\xe0\x5f\xfc\xb7\x77\xe5\x95\x28\xba\xb8" + "\x7e\x20\xaa\x84\xdb\xe4\x58\x8e\x2a\x17\x03\xf8\x8c\x68\xca", + "\x56\x41\x0b\xb8\x2b\xb0\x54\x23\x4b\x5e\x62\xc1", + "\xff\xe0\x9f\xb3\x4f\x17\xb5\x17\x95\x6f\xbb\xb5\x8a\x62\x62\x3a", + "\x3a\xf8\xc0\x49\xa1\x93\xb1\xca\x39\x52\xee\xd0\xf5\x8f\x09\xdd", + "\x54\xee\x65\x4f\x5c\x44\xb8\x58\x76\x43\xd4\xc5\x8d\xe4\x02\x67", + "\x32\x37\xb2\xfa\x6a\xd7\x85\xa8\x82\xa3\x8e\x72", 1, 128, 128 }, + { 96, 256, 96, 436, + "\xde\x59\xc6\xda\xa2\x10\xca\x6b\xed\xd9\xdb\x7b\x30\xe8\x86\x03\x04" + "\x9b\x18\x0f\x6e\x31\x96\xb4\xc3\x3d\x8c\x51\x89\xb5\xc4\x50", + "\x1f\xc9\x68\x4e\x4d\x96\x8b\xfe\x27\x77\x50\x00", "", + "\xcd\x42\xfb\x94\xb1\x07\xa8\x89\x1b\x15\x9b\xf3\xbd\xb3\xed\xa8" + "\x44", + "\xfb\x48\xf5\x71\x63\x3d\x67\xd5\x34\xcd\x20\xb6\xc8\x81\x7e\x96" + "\x33", + "\x55\x1d\x3b\xb6\x86\xee\xdf\xdf\xf7\x76\xef\x19", 1, 0, 136 }, + { 96, 256, 96, 437, + "\x22\x23\x4e\x83\x14\x09\xb5\xfb\xec\x25\x2c\x78\x5d\x69\x4b\x00\x4a" + "\x59\xff\xda\x15\x6c\xff\x62\xf5\x70\x2b\x72\xfb\xf1\x00\xad", + "\x5b\x38\xb9\x53\x11\x5e\x80\x88\x43\x0e\xbb\xd8", + "\x0d\xfb\xea\x34\xbe\xbb\x2c\xcd\xeb\x12\x77\xe0\xb4\x4a\xcc\xfb", + 
"\x60\x04\x18\xcb\xef\x85\x64\x39\xe4\x0d\x83\x9f\x7b\x57\xc5\xe3" + "\x2e", + "\xda\xaa\xb3\xcc\xa5\xab\x11\xf9\xe1\xf4\x4c\xdb\xfe\x82\xb6\x0c" + "\x8f", + "\xae\xf2\xf1\xa9\x0f\xfa\x6e\x96\x89\x2f\x37\x28", 1, 128, 136 }, + { 96, 256, 96, 438, + "\xb8\x74\xb8\x69\xd0\x04\x50\x51\x4f\xa1\xf8\xfb\x94\x7c\xc0\x87\xe8" + "\x73\x2e\xd0\x76\x0b\x41\xb2\x21\xc6\x9c\xda\x04\x9c\xba\x02", + "\x41\x01\x2a\x5f\x5c\x6b\x70\xac\xee\x93\xbb\xa1", "", + "\x1b\xe4\x61\x3b\xb9\xa8\xa1\x24\x60\x66\x50\xde\x32\x62\xf2\x57\xfd" + "\x6b\xae\x4b\x7c\x27\xb4\xf0\xff\x36\xba\xee\x97\xbc\xb8", + "\x83\xcc\x85\xa0\x13\xc8\x2f\xe0\x7d\x24\xb3\x84\x80\xf3\x0d\x6e\x09" + "\x27\x4a\xf8\x80\xf1\x14\xe0\x8b\x56\x28\x54\x7a\x04\x2b", + "\xb7\x8d\x7f\x57\xe5\xa6\xad\x5d\x77\x08\x38\x76", 1, 0, 248 }, + { 96, 256, 96, 439, + "\x10\xf9\xd3\x90\xd9\xe8\x9f\xdd\x3b\xde\xd9\xcb\xcb\x6c\x98\x5f\x9c" + "\xfa\xe0\x07\x49\xfe\x7c\xd4\x0c\x83\xa6\xeb\x95\xb4\xdc\xeb", + "\x21\xa2\x28\x6f\xee\xe9\x73\x86\xec\x1d\x2a\x49", + "\x2d\xee\x72\xe8\x9b\x03\x97\x93\xf6\xa2\x8c\x92\x02\xd6\x26\x59", + "\x2c\xa3\x70\xd1\x4c\x09\xa5\xab\xa5\x32\x7b\x4d\xe3\x0a\x98\x3f\x6e" + "\x50\x21\xea\xa7\xb5\x74\x50\x89\x1e\xaf\x38\x6b\x7a\xe9", + "\xa7\x51\xee\x90\x93\x08\x18\x07\xb5\x24\x07\x59\x19\xfc\x64\xca\x80" + "\x6b\x3f\x5a\x29\xca\xb2\x6b\x06\x57\xe1\x63\x04\x2f\x96", + "\x74\x3d\xf3\xe0\x1f\x34\x49\x63\x45\x73\x57\x15", 1, 128, 248 }, + { 96, 256, 112, 440, + "\x08\xf5\xfe\x4c\x8f\x63\x93\xac\xcd\xcb\x56\x0a\x3c\x27\x10\x96\xff" + "\x0d\x9d\x67\x43\x8f\xff\xd3\x4d\xf7\x18\x65\x2c\x6b\x8e\xfe", + "\x25\xc5\xf8\x4f\xe6\xec\x3c\x2f\x7c\x1b\x7c\xc5", "", "", "", + "\x66\xb2\x30\x23\xe6\x08\xcd\x93\x91\x56\x7a\xa8\x5f\x5a", 1, 0, 0 }, + { 96, 256, 112, 441, + "\x84\x95\x29\x8b\x9c\x20\x8e\x6c\x5b\x23\x4e\x85\x6e\xec\xff\x6a\x11" + "\x4c\xd8\xb3\xae\xee\xb7\x45\xa1\x60\xff\xa3\x30\x5c\xf5\xef", + "\xcc\xe0\x26\x35\xc3\x77\x1f\xb5\xb6\x73\xf8\x8c", + "\xcc\x9a\xf3\xcc\xa9\xf3\xc2\xc1\x21\x1b\x23\x58\x1e\xc5\xfd\xd1", + "", "", "\x0d\x34\x89\x6c\x64\xb6\x78\x7d\xa0\xac\x7c\x03\xfa\x93", 1, + 128, 0 }, + { 96, 256, 112, 442, + "\x4f\x49\x65\x5c\x76\xa6\x29\xe5\x8c\xfb\x94\xc8\x51\xa9\x15\x10\xc2" + "\xf1\x28\xdc\x4b\xce\x1f\x1f\x11\xc3\xdc\x99\x43\x6d\x26\x8c", + "\x96\x7f\xde\x29\x67\x1d\x46\x54\xf9\xf6\x70\xc0", "", + "\xb4\xd1\x2c\x3e\xdf\x38\x02\xe2\x1f\x62\x4b\x71\x8b\x63\xfd\x6c", + "\x3b\xa4\xe2\xa4\x50\x7c\x0b\x6f\x5a\xe1\xbe\x29\xc3\x0b\x25\xe9", + "\x8c\xef\xa2\x49\x54\x73\xee\xe1\xb2\x2c\x3f\xa6\xef\x12", 1, 0, + 128 }, + { 96, 256, 112, 443, + "\x4d\xac\x9a\x0c\xbf\xc3\xdd\x29\x1d\x40\x6e\x68\x38\x89\xfc\x10\xe2" + "\xa0\xdd\x25\xd4\xd0\xb4\x3b\x11\x11\x1a\xa8\x28\x27\x39\xe9", + "\x86\x4a\xa8\xc8\x65\x58\x8c\x9a\x21\xae\xa7\xfd", + "\x5d\xb8\xe0\x96\x97\xd1\xff\x79\xa8\x86\x39\x5e\x40\xfb\x1a\x1d", + "\x5d\x2c\x63\x29\x60\xf8\x23\xcf\x72\x42\xbf\x61\xf9\x39\x13\x17", + "\xfd\x80\xa3\x82\x7d\xb1\x72\x42\xb2\xdf\x0c\xd8\xca\x96\xd9\x97", + "\xdf\xb5\x8a\x6b\xe4\xe7\xe0\x01\x0f\x7c\x74\x04\xb4\x67", 1, 128, + 128 }, + { 96, 256, 112, 444, + "\x37\x76\xa8\x4b\x86\x9e\xc4\xa7\x1e\xd8\x4a\x74\xe6\xa9\x8c\x42\xc0" + "\xff\xa2\x3f\x6e\xb2\xe2\x97\x0f\x13\x11\x21\xc5\xba\x69\xfe", + "\x72\xcd\xa6\xef\xb0\x82\x5c\x74\x0d\x19\xf4\x85", "", + "\x05\x05\xd2\x88\x90\x8d\x5c\x28\xe4\x72\x3d\x9d\x4b\x8b\x0f\xc0" + "\xba", + "\xdc\xd6\x2b\xdc\x23\xee\x8b\xdb\x7a\xfd\xec\xd4\x49\xcd\xb4\x99" + "\x4a", + "\x9a\xa0\xf8\xf0\x32\xbe\x13\x42\xad\x5d\x40\x99\xc3\xae", 1, 0, + 136 }, + { 96, 256, 112, 445, + 
"\x01\xfc\xbc\x4a\x3b\x2e\xe3\x21\x09\xcd\x0f\x27\xd8\x29\xe2\x0d\x1d" + "\x92\x03\xd6\xff\x81\x2e\xd9\x84\x1e\xf9\x08\x90\x4d\x74\xa8", + "\xae\x9e\xe7\xf9\xf5\x28\x78\x21\x58\x38\xf5\xcb", + "\x1b\xe1\x77\xd6\xe8\x86\x51\xc4\x0f\x6a\x1b\x53\x38\x17\xc2\x79", + "\x26\x8a\x75\xfb\x89\x0e\x8a\xf7\xc2\x4b\x63\xcf\xb8\x70\x80\xe0" + "\x28", + "\xe1\x44\x0c\x49\x55\x62\xf6\xc8\x56\x28\x81\x83\x23\x4e\x0a\xd2" + "\x2e", + "\xd9\x3e\x1c\x12\xbc\xc7\x66\x60\x12\x7d\xfd\x8c\x28\xa0", 1, 128, + 136 }, + { 96, 256, 112, 446, + "\xa0\x17\x45\xf5\x2f\x1f\x35\x64\xda\x0a\xdf\x84\x5f\xdb\xd4\x7a\x5b" + "\xd1\x86\x50\x92\x57\x95\x58\xf6\x7f\x67\xba\x07\xf2\x38\xa0", + "\x87\xd7\xce\xc6\x30\x1b\x81\xe3\xe0\x66\x6e\x27", "", + "\xd5\xc0\x12\x80\xac\xf0\xaf\xe7\x7d\xf7\x67\xff\x3c\x02\x8f\x52\xe3" + "\xd3\x78\x6a\x84\xcc\x7c\xc0\x07\x06\x61\xa8\x1c\x1f\xbd", + "\xcb\x07\xfc\x59\x62\xf7\xd3\x26\x86\x06\xf1\xd2\x24\xfd\x92\xb3\xc2" + "\x30\x26\x20\xf0\x33\x20\x78\x4a\x71\x18\x0d\x72\x65\x01", + "\x7f\x64\xeb\xeb\x84\xbc\xad\x46\x34\x7f\xf1\xf2\x74\x47", 1, 0, + 248 }, + { 96, 256, 112, 447, + "\x24\x0e\xf4\xec\x0a\x7b\x24\x01\x7c\x13\xe4\x61\x22\x7d\x11\xf6\x08" + "\xc4\x16\x98\x45\x7e\x94\x8f\x65\x7d\x82\xa1\x9d\x97\x05\x44", + "\x52\xec\x46\xf5\x27\x81\xbb\x7c\xd0\x0f\xbf\xd3", + "\x89\x9c\x64\xab\xbe\xc1\x46\x8e\xc5\xb8\x42\x7e\x61\xb9\x90\xab", + "\x2b\x25\x88\x2f\x82\x4b\x41\xea\xf4\xb2\x15\x0e\xb1\xfe\x8d\xc0\xf9" + "\xc7\x15\x6a\x41\x88\x1b\x39\xd1\x3d\xae\xc1\xf9\xb0\xb1", + "\xe9\x4e\x44\xb5\xe7\xbb\x26\xb2\x49\xb4\x8c\xaa\xf2\xa9\xab\x5a\x75" + "\x06\xff\x39\x66\x8f\xfe\xa6\xf6\x2b\xb0\x30\xfe\x5c\x87", + "\x1a\x85\x9a\xa8\x06\x26\x04\x72\xa5\x39\x79\xcc\x4e\xaa", 1, 128, + 248 }, + { 0, 128, 96, 448, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xbe\x17\x22\xa5\x81\x71\x95\xc5\x03\x81\x4b\xe1\xbd\x09\x31\x10", + "\x6f\x79\xa8\xcf\x92\xc8\x56\xb8\xf1\x6d\xee\x92", 0, 0, 128 }, + { 8, 128, 96, 449, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x40", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x21\x03\x89\x22\x69\x58\xde\xf4\xb4\x4f\x1e\x16\x86\x32\x11\x3c", + "\xd4\xe9\x7a\x10\x08\x00\xa5\xc1\x6b\xea\x4f\xdf", 0, 0, 128 }, + { 16, 128, 96, 450, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x40\x41", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x95\xbf\x20\x8e\x86\x73\xb9\xf9\xa3\x8f\x96\x09\xb5\xe7\x8f\x2a", + "\xdf\x81\x41\x91\x69\x6c\xf3\x12\x9f\xb4\x0d\xc0", 0, 0, 128 }, + { 32, 128, 96, 451, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x40\x41\x42\x43", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xa7\xda\xb4\xbf\xcf\x3e\x2a\xfa\x4b\x31\x9c\xfd\xc1\x7f\x15\xf1", + "\xc4\xe8\xb1\xfa\x0d\x79\x17\x77\x41\x7c\xe5\x2c", 0, 0, 128 }, + { 48, 128, 96, 452, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x40\x41\x42\x43\x44\x45", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xa8\x9a\x23\xf4\xf3\x2b\xa0\x09\xc3\xaa\x8f\xa0\x19\x1f\x84\xc5", + "\x65\x96\x21\xc2\xad\x5b\xc6\x1d\xe2\xce\x80\x46", 0, 0, 128 }, + { 112, 128, 96, 453, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + 
"\xf9\xd0\x07\x8b\xcd\x56\x8b\xf9\x79\x24\xe6\xd7\x1f\x40\x60\x87", + "\x09\x5c\x0d\xa2\xea\x6d\xda\x5a\x87\x12\x1c\x2a", 0, 0, 128 }, + { 120, 128, 96, 454, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x67\x0c\xd0\x68\xe6\x5b\x08\xfc\xef\xb9\xc0\xaf\xa7\xbb\x5c\x33", + "\xda\xa0\x28\x10\xe9\x97\xe7\x97\xdd\xa9\x75\x5d", 0, 0, 128 }, + { 128, 128, 96, 455, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x5e\xdb\xeb\xec\x6c\x53\x3d\xac\x8c\xa8\x9f\xaf\x60\xd8\xc1\x37", + "\x94\x53\xd6\xa8\xff\x91\xb7\x14\xf3\x2c\x0b\x71", 0, 0, 128 }, + { 160, 128, 96, 456, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x5e\xdb\xeb\xec\x6c\x53\x3d\xac\x8c\xa8\x9f\xaf\x60\xd8\xc1\x37", + "\x94\x53\xd6\xa8\xff\x91\xb7\x14\xf3\x2c\x0b\x71", 0, 0, 128 }, + { 256, 128, 96, 457, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x5e\xdb\xeb\xec\x6c\x53\x3d\xac\x8c\xa8\x9f\xaf\x60\xd8\xc1\x37", + "\x94\x53\xd6\xa8\xff\x91\xb7\x14\xf3\x2c\x0b\x71", 0, 0, 128 }, + { 512, 128, 96, 458, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xc3\x2a\x76\x43\xab\x0f\x6e\xa3\x45\x8d\x7e\x63\xb0\xed\x64\x99", + "\x8f\xc9\xb6\xf4\x0b\x3a\x81\xfd\x5f\xd6\xc5\x3c", 0, 0, 128 }, + { 1024, 128, 96, 459, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40\x41\x42\x43" + "\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51\x52\x53\x54" + "\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60\x61\x62\x63\x64\x65" + "\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70\x71\x72\x73\x74\x75\x76" + "\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xc3\x2a\x76\x43\xab\x0f\x6e\xa3\x45\x8d\x7e\x63\xb0\xed\x64\x99", + "\x8f\xc9\xb6\xf4\x0b\x3a\x81\xfd\x5f\xd6\xc5\x3c", 0, 0, 128 }, + { 2144, 128, 96, 460, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40\x41\x42\x43" + 
"\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51\x52\x53\x54" + "\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60\x61\x62\x63\x64\x65" + "\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70\x71\x72\x73\x74\x75\x76" + "\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98" + "\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9" + "\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba" + "\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb" + "\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc" + "\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed" + "\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe" + "\xff\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xc3\x2a\x76\x43\xab\x0f\x6e\xa3\x45\x8d\x7e\x63\xb0\xed\x64\x99", + "\x8f\xc9\xb6\xf4\x0b\x3a\x81\xfd\x5f\xd6\xc5\x3c", 0, 0, 128 }, + { 0, 192, 96, 461, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x15\x69\x9c\x20\x19\x86\x88\xb9\xe4\x88\x2a\x65\x42\x81\x1a\xda", + "\xc6\x93\x17\xb9\x9b\x43\x08\x6b\x62\x1e\xb1\x4a", 0, 0, 128 }, + { 8, 192, 96, 462, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x40", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x7e\x84\xb5\xaa\x41\xd2\x21\x2b\x3a\x5d\x73\x0d\xf5\xb2\x0e\xaa", + "\x59\x24\xd8\xbd\x85\x31\x8b\x03\x3b\xf4\xf2\xfd", 0, 0, 128 }, + { 16, 192, 96, 463, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x40\x41", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x81\x33\x67\xe3\xd1\xfa\x4e\xe4\xc4\x02\x45\x0f\x29\x46\xd1", + "\xcf\x6e\xee\x49\x5f\x94\xc0\x8f\xef\x7c\xe5\xb5", 0, 0, 128 }, + { 32, 192, 96, 464, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x40\x41\x42\x43", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3b\xe9\xc6\x21\xdb\xe0\x57\xb8\x6a\xca\xef\x8a\xd0\xe3\x8e\xe0", + "\x6d\xd3\x3d\x42\x27\x8b\xb2\xf2\x7e\xab\x7a\x0b", 0, 0, 128 }, + { 48, 192, 96, 465, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x40\x41\x42\x43\x44\x45", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x41\xbd\x6e\x62\x6e\xf1\xd4\xfa\x33\xe3\xe6\x2b\x6b\x71\xb2\x47", + "\xb0\x35\x06\xdf\x38\x08\x5e\x4f\x93\xee\x9e\xa4", 0, 0, 128 }, + { 112, 192, 96, 466, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x39\x0f\x24\xac\xc1\x13\xb4\x33\xe4\xb7\x85\xe9\x18\x3d\x48\x38", + "\x9b\xc0\x39\x7f\xee\x59\xe6\x99\x0c\x3b\xbc\x81", 0, 0, 128 }, + { 120, 192, 96, 467, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + 
"\xb2\xd1\x08\x59\x06\x25\xee\x4a\xf6\x6b\x7c\xb6\x63\xdf\x50\xc1", + "\xcf\xea\xbe\x25\x26\x50\xd1\x25\x84\x47\x8b\xd0", 0, 0, 128 }, + { 128, 192, 96, 468, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x17\xef\x53\xd9\x25\xbb\xa4\xe1\xf3\x35\xd0\x01\x86\xb7\xc3\x8e", + "\x8f\x64\x77\xda\x94\xef\x6f\x63\xc4\xd1\xa0\x71", 0, 0, 128 }, + { 160, 192, 96, 469, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x17\xef\x53\xd9\x25\xbb\xa4\xe1\xf3\x35\xd0\x01\x86\xb7\xc3\x8e", + "\x8f\x64\x77\xda\x94\xef\x6f\x63\xc4\xd1\xa0\x71", 0, 0, 128 }, + { 256, 192, 96, 470, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x17\xef\x53\xd9\x25\xbb\xa4\xe1\xf3\x35\xd0\x01\x86\xb7\xc3\x8e", + "\x8f\x64\x77\xda\x94\xef\x6f\x63\xc4\xd1\xa0\x71", 0, 0, 128 }, + { 512, 192, 96, 471, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x81\xa8\xa0\x87\xcf\x96\xd5\x8f\x64\x86\x8b\x18\x9e\xdd\x0b\xb4", + "\xc5\xcc\x4f\x2b\x5a\x46\x0c\x1d\x22\x9a\x6b\xa8", 0, 0, 128 }, + { 1024, 192, 96, 472, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40\x41\x42\x43" + "\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51\x52\x53\x54" + "\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60\x61\x62\x63\x64\x65" + "\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70\x71\x72\x73\x74\x75\x76" + "\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x81\xa8\xa0\x87\xcf\x96\xd5\x8f\x64\x86\x8b\x18\x9e\xdd\x0b\xb4", + "\xc5\xcc\x4f\x2b\x5a\x46\x0c\x1d\x22\x9a\x6b\xa8", 0, 0, 128 }, + { 2144, 192, 96, 473, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40\x41\x42\x43" + "\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51\x52\x53\x54" + "\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60\x61\x62\x63\x64\x65" + 
"\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70\x71\x72\x73\x74\x75\x76" + "\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98" + "\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9" + "\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba" + "\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb" + "\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc" + "\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed" + "\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe" + "\xff\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x81\xa8\xa0\x87\xcf\x96\xd5\x8f\x64\x86\x8b\x18\x9e\xdd\x0b\xb4", + "\xc5\xcc\x4f\x2b\x5a\x46\x0c\x1d\x22\x9a\x6b\xa8", 0, 0, 128 }, + { 0, 256, 96, 474, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3c\xd9\xba\xce\x5f\x5d\xc7\x7c\x89\xc2\xbc\x13\x90\x65\xe7\x97", + "\x99\xf6\x05\x8e\xeb\x8e\x3a\x80\x36\xaa\xda\xb8", 0, 0, 128 }, + { 8, 256, 96, 475, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x40", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb5\xf0\x44\x21\x1e\x18\xb1\x35\x72\xe2\xea\x70\xed\x17\x83\x53", + "\x24\xcf\x3f\x3f\x36\x9b\x69\x2f\xe7\x30\x97\x0c", 0, 0, 128 }, + { 16, 256, 96, 476, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x40\x41", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xc0\x00\x74\x6e\xd8\x24\x6d\x20\xed\xda\x90\xc0\x4f\x38\x0b\xa8", + "\xaf\x82\x09\x34\x87\xd3\xa5\xd4\x87\x2f\xf9\xe2", 0, 0, 128 }, + { 32, 256, 96, 477, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x40\x41\x42\x43", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb0\x0b\xd7\xb8\xcd\x03\x1c\x16\x8e\x37\x40\x7e\xb0\x9f\x06\x2e", + "\xbf\xca\xe1\x10\xc7\x37\xbb\xe7\x57\x96\x7f\x4e", 0, 0, 128 }, + { 48, 256, 96, 478, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x40\x41\x42\x43\x44\x45", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xae\x28\x65\xf8\x6b\xa6\x32\x8c\xe1\x31\xa4\x9c\xd4\x99\xf9\x36", + "\x43\x90\xb5\x4f\x3b\x7c\xbf\x9e\x54\xa2\x20\x48", 0, 0, 128 }, + { 112, 256, 96, 479, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x56\x80\xeb\xd1\x6a\xe4\x46\xe9\xe2\xd0\x7f\xba\xac\x7a\xbd\x0b", + "\xd7\x2d\x01\x57\x82\xfd\x94\xd7\x6d\x2f\x68\x2f", 0, 0, 128 }, + { 120, 256, 96, 480, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e", "", + 
"\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x6a\x61\x1e\x6f\xb6\x7d\x83\x1b\x4b\x09\x61\x69\xf2\xe8\x66\x47", + "\x0c\x39\xbb\x24\x62\xbb\xaf\xf7\x19\x39\xee\x1f", 0, 0, 128 }, + { 128, 256, 96, 481, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x68\x18\x41\xa0\xa0\x13\x92\x39\x07\xc6\x69\xef\xd3\xff\xd0\x69", + "\x05\x4c\xd6\xf1\xcd\xa1\xb1\xbf\x91\xe0\x10\x0f", 0, 0, 128 }, + { 160, 256, 96, 482, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x68\x18\x41\xa0\xa0\x13\x92\x39\x07\xc6\x69\xef\xd3\xff\xd0\x69", + "\x05\x4c\xd6\xf1\xcd\xa1\xb1\xbf\x91\xe0\x10\x0f", 0, 0, 128 }, + { 256, 256, 96, 483, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x68\x18\x41\xa0\xa0\x13\x92\x39\x07\xc6\x69\xef\xd3\xff\xd0\x69", + "\x05\x4c\xd6\xf1\xcd\xa1\xb1\xbf\x91\xe0\x10\x0f", 0, 0, 128 }, + { 512, 256, 96, 484, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xe7\x3c\xc2\xee\x05\x0a\x0e\x7b\x34\x5a\xeb\x10\x00\xc4\x81\xc3", + "\x18\x2d\xab\xaf\xf0\x40\x0d\xe7\x08\x74\x6b\x6a", 0, 0, 128 }, + { 1024, 256, 96, 485, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40\x41\x42\x43" + "\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51\x52\x53\x54" + "\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60\x61\x62\x63\x64\x65" + "\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70\x71\x72\x73\x74\x75\x76" + "\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xe7\x3c\xc2\xee\x05\x0a\x0e\x7b\x34\x5a\xeb\x10\x00\xc4\x81\xc3", + "\x18\x2d\xab\xaf\xf0\x40\x0d\xe7\x08\x74\x6b\x6a", 0, 0, 128 }, + { 2144, 256, 96, 486, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + 
"\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f\x40\x41\x42\x43" + "\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51\x52\x53\x54" + "\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f\x60\x61\x62\x63\x64\x65" + "\x66\x67\x68\x69\x6a\x6b\x6c\x6d\x6e\x6f\x70\x71\x72\x73\x74\x75\x76" + "\x77\x78\x79\x7a\x7b\x7c\x7d\x7e\x7f\x80\x81\x82\x83\x84\x85\x86\x87" + "\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98" + "\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9" + "\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba" + "\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb" + "\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc" + "\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed" + "\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe" + "\xff\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xe7\x3c\xc2\xee\x05\x0a\x0e\x7b\x34\x5a\xeb\x10\x00\xc4\x81\xc3", + "\x18\x2d\xab\xaf\xf0\x40\x0d\xe7\x08\x74\x6b\x6a", 0, 0, 128 }, + { 96, 128, 16, 487, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xd3\xda\xb1\xee\x49\x4c\xc2\x29\x09\x9d\x6c\xac\x7d\xf1\x4a\xdd", + "\x19\x8c", 0, 0, 128 }, + { 96, 128, 24, 488, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xd3\xda\xb1\xee\x49\x4c\xc2\x29\x09\x9d\x6c\xac\x7d\xf1\x4a\xdd", + "\x19\x8c\x08", 0, 0, 128 }, + { 96, 128, 40, 489, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xd3\xda\xb1\xee\x49\x4c\xc2\x29\x09\x9d\x6c\xac\x7d\xf1\x4a\xdd", + "\x23\x1a\x2d\x8f\x6a", 0, 0, 128 }, + { 96, 128, 56, 490, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xd3\xda\xb1\xee\x49\x4c\xc2\x29\x09\x9d\x6c\xac\x7d\xf1\x4a\xdd", + "\xb1\xbb\xf3\x88\x35\x07\xcd", 0, 0, 128 }, + { 96, 128, 72, 491, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xd3\xda\xb1\xee\x49\x4c\xc2\x29\x09\x9d\x6c\xac\x7d\xf1\x4a\xdd", + "\xcd\x0a\xe6\x3f\x3a\x30\xf7\xfb\x5b", 0, 0, 128 }, + { 96, 128, 88, 492, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xd3\xda\xb1\xee\x49\x4c\xc2\x29\x09\x9d\x6c\xac\x7d\xf1\x4a\xdd", + "\x53\x5e\x32\xac\x41\x68\x16\x61\x5e\x5a\x20", 0, 0, 128 }, + { 96, 128, 104, 493, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xd3\xda\xb1\xee\x49\x4c\xc2\x29\x09\x9d\x6c\xac\x7d\xf1\x4a\xdd", + 
"\xa6\xc5\x84\x58\xd3\x96\x9d\xa9\xcb\x08\x49\xf9\x5e", 0, 0, 128 }, + { 96, 128, 120, 494, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xd3\xda\xb1\xee\x49\x4c\xc2\x29\x09\x9d\x6c\xac\x7d\xf1\x4a\xdd", + "\xf7\x87\x9f\xb7\xfe\x88\xdd\x74\xcb\x8e\x96\xfd\xa1\xd2\xeb", 0, 0, + 128 }, + { 96, 192, 16, 495, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x1c\x49\x32\x45\x15\xa3\x46\xd4\x24\xee\xd6\xfe\xd9\xbd\xdc\x17", + "\x95\xeb", 0, 0, 128 }, + { 96, 192, 24, 496, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x1c\x49\x32\x45\x15\xa3\x46\xd4\x24\xee\xd6\xfe\xd9\xbd\xdc\x17", + "\x95\xeb\x98", 0, 0, 128 }, + { 96, 192, 40, 497, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x1c\x49\x32\x45\x15\xa3\x46\xd4\x24\xee\xd6\xfe\xd9\xbd\xdc\x17", + "\x3f\xd4\x0d\xd8\xe0", 0, 0, 128 }, + { 96, 192, 56, 498, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x1c\x49\x32\x45\x15\xa3\x46\xd4\x24\xee\xd6\xfe\xd9\xbd\xdc\x17", + "\x79\x3c\xa5\xd3\x51\xe6\x8c", 0, 0, 128 }, + { 96, 192, 72, 499, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x1c\x49\x32\x45\x15\xa3\x46\xd4\x24\xee\xd6\xfe\xd9\xbd\xdc\x17", + "\x63\xa0\x98\x7f\xff\xf1\x31\x3c\xaa", 0, 0, 128 }, + { 96, 192, 88, 500, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x1c\x49\x32\x45\x15\xa3\x46\xd4\x24\xee\xd6\xfe\xd9\xbd\xdc\x17", + "\x0e\xe4\x0f\x14\x47\x5b\x7e\x28\x75\x29\x83", 0, 0, 128 }, + { 96, 192, 104, 501, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x1c\x49\x32\x45\x15\xa3\x46\xd4\x24\xee\xd6\xfe\xd9\xbd\xdc\x17", + "\x40\xa4\xfc\x82\xd4\x29\xa0\x09\x1c\x96\x2d\x71\x52", 0, 0, 128 }, + { 96, 192, 120, 502, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x1c\x49\x32\x45\x15\xa3\x46\xd4\x24\xee\xd6\xfe\xd9\xbd\xdc\x17", + "\xf8\xf9\xbd\xc6\xb8\x50\x6a\xfd\x3a\xe5\x4a\x0a\x67\xe1\x85", 0, 0, + 128 }, + { 96, 256, 16, 503, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3b\x68\x29\xd5\xde\xb4\x7c\xa9\xf1\x0a\xbf\x48\x15\x64\xae\xe1", + "\x8f\x8b", 0, 0, 128 }, + { 96, 256, 24, 504, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3b\x68\x29\xd5\xde\xb4\x7c\xa9\xf1\x0a\xbf\x48\x15\x64\xae\xe1", + "\x8f\x8b\x32", 0, 0, 128 }, + { 96, 256, 40, 505, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3b\x68\x29\xd5\xde\xb4\x7c\xa9\xf1\x0a\xbf\x48\x15\x64\xae\xe1", + "\xa9\x4e\x19\xf3\x4c", 0, 0, 128 }, + { 96, 256, 56, 506, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3b\x68\x29\xd5\xde\xb4\x7c\xa9\xf1\x0a\xbf\x48\x15\x64\xae\xe1", + "\x5e\x90\x21\x8c\xac\xa4\x70", 0, 0, 128 }, + { 96, 256, 72, 507, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3b\x68\x29\xd5\xde\xb4\x7c\xa9\xf1\x0a\xbf\x48\x15\x64\xae\xe1", + "\x38\x5c\xef\x2c\x25\x99\xfa\xa9\x60", 0, 0, 128 }, + { 96, 256, 88, 508, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3b\x68\x29\xd5\xde\xb4\x7c\xa9\xf1\x0a\xbf\x48\x15\x64\xae\xe1", + "\xe8\x1c\x27\x00\x20\xed\xd9\x3b\xa7\xe5\x64", 0, 0, 128 }, + { 96, 256, 104, 509, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3b\x68\x29\xd5\xde\xb4\x7c\xa9\xf1\x0a\xbf\x48\x15\x64\xae\xe1", + "\xc5\xd5\xf2\x9a\xf5\xc0\xdb\x44\x4a\xc2\x61\x8b\x9d", 0, 0, 128 }, + { 96, 256, 120, 510, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50\x51", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x3b\x68\x29\xd5\xde\xb4\x7c\xa9\xf1\x0a\xbf\x48\x15\x64\xae\xe1", + "\xf2\xf6\xd8\xb6\xab\x69\xc8\xe1\x00\x39\xb5\x75\x4f\x55\x37", 0, 0, + 128 }, + { 0, 0, 0, 0, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0 } +}; diff --git a/test/wycheproof/aes_cmac_test.json.c b/test/wycheproof/aes_cmac_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..911556f0a324cc2d7bfb360055c207c0f3b39723 --- /dev/null +++ 
b/test/wycheproof/aes_cmac_test.json.c @@ -0,0 +1,1774 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* AES-CMAC, 0.8r12 */ +#include "mac_test.h" +const struct mac_test aes_cmac_test_json[] = { + { 128, 128, 1, + "\xe3\x4f\x15\xc7\xbd\x81\x99\x30\xfe\x9d\x66\xe0\xc1\x66\xe6\x1c", + "", + "\xd4\x7a\xfc\xa1\xd8\x57\xa5\x93\x34\x05\xb1\xeb\x7a\x5c\xb7\xaf", 1, + 0, NULL, 0 }, + { 128, 128, 2, + "\xe1\xe7\x26\x67\x7f\x48\x93\x89\x0f\x8c\x02\x7f\x9d\x8e\xf8\x0d", + "\x3f", + "\x15\xf8\x56\xbb\xed\x3b\x32\x19\x52\xa5\x84\xb3\xc4\x43\x7a\x63", 1, + 8, NULL, 0 }, + { 128, 128, 3, + "\xb1\x51\xf4\x91\xc4\xc0\x06\xd1\xf2\x82\x14\xaa\x3d\xa9\xa9\x85", + "\x27\xd9", + "\xbd\xbb\xeb\xac\x98\x2d\xd6\x2b\x9f\x68\x26\x18\xa6\xa6\x04\xe9", 1, + 16, NULL, 0 }, + { 128, 128, 4, + "\xc3\x6f\xf1\x5f\x72\x77\x7e\xe2\x1d\xee\xc0\x7b\x63\xc1\xa0\xcd", + "\x50\xb4\x28", + "\xbe\x0c\x3e\xde\x15\x75\x68\xaf\x39\x40\x23\xeb\x9a\x7c\xc9\x83", 1, + 24, NULL, 0 }, + { 128, 128, 5, + "\x32\xb9\xc5\xc7\x8c\x3a\x06\x89\xa8\x60\x52\x42\x0f\xa1\xe8\xfc", + "\x0b\x92\x62\xec", + "\x57\xe1\x50\x68\x56\xc5\x5d\xd3\x2c\xd9\xca\x82\x1a\xdb\x6c\x81", 1, + 32, NULL, 0 }, + { 128, 128, 6, + "\x43\x15\x1b\xba\xef\x36\x72\x77\xeb\xfc\x97\x50\x9d\x0a\xa4\x9c", + "\xea\xa9\x12\x73\xe7", + "\xe0\x1a\xdc\x3b\xe6\xa7\x62\x18\x24\x23\x2c\x42\x85\xdd\x35\xb9", 1, + 40, NULL, 0 }, + { 128, 128, 7, + "\x48\x14\x40\x29\x85\x25\xcc\x26\x1f\x81\x59\x15\x9a\xed\xf6\x2d", + "\x61\x23\xc5\x56\xc5\xcc", + "\xa2\x81\xe0\xd2\xd5\x37\x8d\xfd\xcc\x13\x10\xfd\x97\x82\xca\x56", 1, + 48, NULL, 0 }, + { 128, 128, 8, + "\x9c\xa2\x6e\xb8\x87\x31\xef\xbf\x7f\x81\x0d\x5d\x95\xe1\x96\xac", + "\x7e\x48\xf0\x61\x83\xaa\x40", + "\xfc\x81\x76\x1f\x2f\x7b\x4c\xe1\x3b\x53\xd3\x6e\x32\x67\x73\x32", 1, + 56, NULL, 0 }, + { 128, 128, 9, + "\x48\xf0\xd0\x3e\x41\xcc\x55\xc4\xb5\x8f\x73\x7b\x5a\xcd\xea\x32", + 
"\xf4\xa1\x33\xaa\x6d\x59\x85\xa0", + "\x1f\x1c\xd0\x32\x7c\x02\xe6\xd0\x00\x86\x91\x59\x37\xdd\x61\xd9", 1, + 64, NULL, 0 }, + { 128, 128, 10, + "\x1c\x95\x88\x49\xf3\x19\x96\xb2\x89\x39\xce\x51\x30\x87\xd1\xbe", + "\xb0\xd2\xfe\xe1\x1b\x8e\x2f\x86\xb7", + "\x55\x5f\x46\x21\x51\xf7\xdd\x16\xde\x69\x8d\x63\x9f\xb2\x67\x60", 1, + 72, NULL, 0 }, + { 128, 128, 11, + "\x39\xde\x0e\xbe\xa9\x7c\x09\xb2\x30\x1a\x90\x00\x9a\x42\x32\x53", + "\x81\xe5\xc3\x3b\x4c\x62\x08\x52\xf0\x44", + "\x9b\x00\x4f\x15\xb7\xf6\xf3\x66\x37\x49\x54\xe6\x4b\xc5\x8f\x5f", 1, + 80, NULL, 0 }, + { 128, 128, 12, + "\x91\x65\x6d\x8f\xc0\xac\xed\x60\xdd\xb1\xc4\x00\x6d\x0d\xde\x53", + "\x7b\x3e\x44\x0f\xe5\x66\x79\x00\x64\xb2\xec", + "\x76\x67\x2e\xd1\x6c\x29\xbe\x44\x9e\x0c\x80\x78\x5c\xc3\x8e\x89", 1, + 88, NULL, 0 }, + { 128, 128, 13, + "\xaf\x7d\x51\x34\x72\x0b\x53\x86\x15\x8d\x51\xea\x12\x6e\x7c\xf9", + "\x7c\xc6\xfc\xc9\x25\xc2\x0f\x3c\x83\xb5\x56\x7c", + "\x2d\xc5\xc8\x8c\xf3\xb8\x0a\xb6\xc0\x19\x9f\x40\xbe\x90\x4a\xbc", 1, + 96, NULL, 0 }, + { 128, 128, 14, + "\x4e\xd5\x67\x53\xde\x6f\x75\xa0\x32\xeb\xab\xca\x3c\xe2\x79\x71", + "\x0c\x8c\x0f\x56\x19\xd9\xf8\xda\x53\x39\x28\x12\x85", + "\xea\xb4\x36\x6d\x97\xe9\x9a\x08\x50\xf0\x77\x32\x9a\xd0\x58\xc0", 1, + 104, NULL, 0 }, + { 128, 128, 15, + "\xbe\xba\x50\xc9\x36\xb6\x96\xc1\x5e\x25\x04\x6d\xff\xb2\x3a\x64", + "\x82\x1e\xa8\x53\x2f\xba\xbf\xfb\x6e\x3d\x21\x2e\x9b\x46", + "\x22\xf3\x3c\xab\x09\xc1\x73\xf7\x5d\x34\x01\xfe\x44\xef\xee\xad", 1, + 112, NULL, 0 }, + { 128, 128, 16, + "\x50\x1d\x81\xeb\xf9\x12\xdd\xb8\x7f\xbe\x3b\x7a\xac\x14\x37\xbc", + "\x23\x68\xe3\xc3\x63\x6b\x5e\x8e\x94\xd2\x08\x1a\xdb\xf7\x98", + "\xae\xb7\x84\xa3\x82\x51\x68\xdd\xd6\x1f\x72\xd0\x20\x21\x25\xe6", 1, + 120, NULL, 0 }, + { 128, 128, 17, + "\xe0\x9e\xaa\x5a\x3f\x5e\x56\xd2\x79\xd5\xe7\xa0\x33\x73\xf6\xea", + "\xef\x4e\xab\x37\x18\x1f\x98\x42\x3e\x53\xe9\x47\xe7\x05\x0f\xd0", + "\x40\xfa\xcf\x0e\x2f\xb5\x1b\x73\xa7\x47\x26\x81\xb0\x33\xd6\xdc", 1, + 128, NULL, 0 }, + { 128, 128, 18, + "\x83\x1e\x66\x4c\x9e\x3f\x0c\x30\x94\xc0\xb2\x7b\x9d\x90\x8e\xb2", + "\x26\x60\x3b\xb7\x6d\xd0\xa0\x18\x07\x91\xc4\xed\x4d\x3b\x05\x88" + "\x07", + "\xa8\x14\x4c\x8b\x24\xf2\xaa\x47\xd9\xc1\x60\xcf\xf4\xab\x17\x16", 1, + 136, NULL, 0 }, + { 128, 128, 19, + "\x54\x9b\xd2\x82\xee\x21\xb4\xd7\xc3\xb1\xd0\x2e\x3e\xe2\x0e\xf7", + "\xd8\x4b\xf7\x3c\x5e\xec\xbd\x38\x44\x4f\x1a\x73\x55\x6e\x2f\xa3\x25" + "\x3f\x4c\x54\xd6\x91\x65\x45", + "\x7e\xd4\x58\xaf\xe0\x2f\x4a\x51\x3f\x59\x71\x5b\x66\x4b\x1b\xbe", 1, + 192, NULL, 0 }, + { 128, 128, 20, + "\x9b\xd3\x90\x2e\xd0\x99\x6c\x86\x9b\x57\x22\x72\xe7\x6f\x38\x89", + "\xa7\xba\x19\xd4\x9e\xe1\xea\x02\xf0\x98\xaa\x8e\x30\xc7\x40\xd8\x93" + "\xa4\x45\x6c\xcc\x29\x40\x40\x48\x4e\xd8\xa0\x0a\x55\xf9\x3e", + "\x45\x08\x22\x18\xc2\xd0\x5e\xef\x32\x24\x7f\xeb\x11\x33\xd0\xa3", 1, + 256, NULL, 0 }, + { 128, 128, 21, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x96\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 22, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x43\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 23, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7a\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 24, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x95\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 25, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x40\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 26, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x79\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 27, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x17\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 28, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc2\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 29, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xfb\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 30, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdc\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 31, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x81\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 32, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xce\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 33, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\xda\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 34, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\x31\x93\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 35, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\x4a\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 36, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x89\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 37, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x92\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 38, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7b\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 39, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x8a\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 40, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + 
"\x42\x80\x2e\xb1\x91\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 41, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x78\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 42, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\xd6\x4c\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 43, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\xb2\xaf\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 44, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x0b\x96\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 45, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4d\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 46, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xae\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 47, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x97\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 48, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\xcc\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 49, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\x2f\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 50, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x16\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 51, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x19\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 52, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xc9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 53, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x4f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 54, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xaf\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 55, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x85\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 56, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc4\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 57, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1d\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 58, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x36\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 59, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9e\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 60, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1e\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 61, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x35\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 62, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9d\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 63, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x9c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\xb7\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 65, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x1f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 66, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xab", 0, + 0, NULL, 0 }, + { 128, 128, 67, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x30", 0, + 64, NULL, 0 }, + { 128, 128, 68, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x9f", 0, + 128, NULL, 0 }, + { 128, 128, 69, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xa8", 0, + 0, NULL, 0 }, + { 128, 128, 70, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x33", 0, + 64, NULL, 0 }, + { 128, 128, 71, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x9c", 0, + 128, NULL, 0 }, + { 128, 128, 72, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\xea", 0, + 0, NULL, 0 }, + { 128, 128, 73, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\x71", 0, + 64, NULL, 0 }, + { 128, 128, 74, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\xde", 0, + 128, NULL, 0 }, + { 128, 128, 75, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\x56\x4c\x39\xae\x7d\x1c\x5a\x31\x2a", 0, + 0, NULL, 0 }, + { 128, 128, 76, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\x32\xaf\xe9\x84\x44\x37\x38\xcd\xb1", 0, + 64, NULL, 0 }, + { 128, 128, 77, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x8b\x96\x6f\xc5\x39\x9f\x74\x80\x1e", 0, + 128, NULL, 0 }, + { 128, 128, 78, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x96\xdd\x6e\x5a\x88\x2c\xbd\x56\x4d\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 79, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x43\x80\x2e\xb1\x93\x1f\x00\x32\xae\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 80, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7a\xcf\xbb\xca\x7a\x2e\xa6\x8b\x97\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 81, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\xda\x88\x2c\xbd\xd6\x4c\x39\xae\x7d\x1c\x5a\x31\xaa", 0, + 0, NULL, 0 }, + { 128, 128, 82, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\x31\x93\x1f\x00\xb2\xaf\xe9\x84\x44\x37\x38\xcd\x31", 0, + 64, NULL, 0 }, + { 128, 128, 83, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\x4a\x7a\x2e\xa6\x0b\x96\x6f\xc5\x39\x9f\x74\x80\x9e", 0, + 128, NULL, 0 }, + { 128, 128, 84, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x97\xdd\x6e\x5a\x88\x2c\xbd\xd6\x4c\x39\xae\x7d\x1c\x5a\x31\x2a", 0, + 0, NULL, 0 }, + { 128, 128, 85, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x42\x80\x2e\xb1\x93\x1f\x00\xb2\xaf\xe9\x84\x44\x37\x38\xcd\xb1", 0, + 64, NULL, 0 }, + { 128, 128, 86, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7b\xcf\xbb\xca\x7a\x2e\xa6\x0b\x96\x6f\xc5\x39\x9f\x74\x80\x1e", 0, + 128, NULL, 0 }, + { 128, 128, 87, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x68\x22\x91\xa5\x77\xd3\x42\xa9\xb3\xc6\x51\x82\xe3\xa5\xce\x55", 0, + 0, NULL, 0 }, + { 128, 128, 88, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xbd\x7f\xd1\x4e\x6c\xe0\xff\xcd\x50\x16\x7b\xbb\xc8\xc7\x32\xce", 0, + 64, NULL, 0 }, + { 128, 128, 89, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x84\x30\x44\x35\x85\xd1\x59\x74\x69\x90\x3a\xc6\x60\x8b\x7f\x61", 0, + 128, NULL, 0 }, + { 128, 128, 90, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, NULL, 0 }, + { 128, 128, 91, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 64, NULL, 0 }, + { 128, 128, 92, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, NULL, 0 }, + { 128, 128, 93, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, NULL, 0 }, + { 128, 128, 94, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 64, NULL, 0 }, + { 128, 128, 95, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, NULL, 0 }, + { 128, 128, 96, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x17\x5d\xee\xda\x08\xac\x3d\xd6\xcc\xb9\x2e\xfd\x9c\xda\xb1\x2a", 0, + 0, NULL, 0 }, + { 128, 128, 97, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc2\x00\xae\x31\x13\x9f\x80\xb2\x2f\x69\x04\xc4\xb7\xb8\x4d\xb1", 0, + 64, NULL, 0 }, + { 128, 128, 98, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xfb\x4f\x3b\x4a\xfa\xae\x26\x0b\x16\xef\x45\xb9\x1f\xf4\x00\x1e", 0, + 128, NULL, 0 }, + { 128, 128, 99, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", + "\x96\xdc\x6f\x5b\x89\x2d\xbc\x57\x4d\x38\xaf\x7c\x1d\x5b\x30\xab", 0, + 0, NULL, 0 }, + { 128, 128, 100, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x43\x81\x2f\xb0\x92\x1e\x01\x33\xae\xe8\x85\x45\x36\x39\xcc\x30", 0, + 64, NULL, 0 }, + { 128, 128, 101, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7a\xce\xba\xcb\x7b\x2f\xa7\x8a\x97\x6e\xc4\x38\x9e\x75\x81\x9f", 0, + 128, NULL, 0 }, + { 192, 128, 102, + "\x3d\x6b\xf9\xed\xae\x6d\x88\x1e\xad\xe0\xff\x8c\x70\x76\xa4\x83\x5b" + "\x71\x32\x0c\x1f\x36\xb6\x31", + "", + "\xa8\xdd\x15\xfe\x2c\xe3\x49\x5e\xc5\xb6\x66\x74\x4e\xc2\x92\x20", 1, + 0, NULL, 0 }, + { 192, 128, 103, + "\x91\x54\x29\x74\x34\x35\xc2\x89\x97\xa3\x3b\x33\xb6\x57\x4a\x95\x3d" + "\x81\xda\xe0\xe7\x03\x2e\x6a", + "\x58", + "\xe1\x3b\x3f\x7f\x7f\x51\x0c\x3a\x05\x9d\xf7\xa6\x8c\x7e\x2a\xd5", 1, + 8, NULL, 0 }, + { 192, 128, 104, + "\xf0\xc2\x88\xba\x26\xb2\x84\xf9\xfb\x32\x1b\x44\x4a\x65\x17\xb3\xcd" + "\xda\x1a\x79\x9d\x55\xfd\xff", + "\x0f\x7e", + "\x06\xef\x84\x7f\x5f\x9d\xbf\x03\xa4\xf2\x83\xda\x8c\x40\x02\x20", 1, + 16, NULL, 0 }, + { 192, 128, 105, + "\x6b\x55\xe4\xd4\xfd\x68\x47\xa8\x0a\x6b\xfb\x0d\xcc\x0a\xa9\x3f\x9f" + "\xd7\x97\xfc\x5c\x50\x29\x2e", 
+ "\x33\xf5\x30", + "\xdd\x13\x50\x53\xa4\x7c\xa8\xf2\x82\xc2\x99\xe8\x3b\x8c\x57\xc4", 1, + 24, NULL, 0 }, + { 192, 128, 106, + "\x1e\xb2\x1a\x9e\x99\x5a\x8e\x45\xc9\xe7\x1e\xcb\xd6\xfe\x61\x5b\x3e" + "\x03\x18\x00\x7c\x64\xb6\x44", + "\x3a\xa7\x3c\x48", + "\x1e\x93\xff\xf8\x46\x93\x4a\x6e\xea\x05\x75\xee\xcb\x0f\x0e\x1f", 1, + 32, NULL, 0 }, + { 192, 128, 107, + "\x71\x0e\x2d\x5d\x4a\x9f\x0b\xc7\xe5\x07\x96\x65\x5e\x04\x6a\x18\xcc" + "\x57\x69\xd7\x76\x43\x55\xda", + "\x7e\x4c\x69\x0a\x88", + "\x01\x6d\x4d\xf0\x6c\x68\xa6\xa7\x88\xa9\xea\x05\x2e\x1b\x55\x0d", 1, + 40, NULL, 0 }, + { 192, 128, 108, + "\xd8\xc0\x9e\xa4\x00\x77\x9b\x63\xe7\x74\xbd\xac\xd0\xcb\x7b\x5d\xd6" + "\xf7\x36\xca\x23\xd5\x2a\xcf", + "\xe9\x52\x02\x80\x97\x3b", + "\x80\x30\xae\x9f\x98\xf5\xd2\x0c\x60\x89\xf6\xb1\xbd\x87\xc2\x9e", 1, + 48, NULL, 0 }, + { 192, 128, 109, + "\x8e\x67\xe9\xa0\x86\x3b\x55\xbe\xd4\x08\x86\x6f\x1c\xbc\x05\x35\x7a" + "\xbe\x3f\x9d\x79\xf4\x06\xf2", + "\x48\x80\xb4\x12\x28\x7a\x0b", + "\xbc\xaf\x50\x78\x5f\x06\x2a\x8f\xb8\xdd\x3c\x2c\x4c\xea\xd2\xe1", 1, + 56, NULL, 0 }, + { 192, 128, 110, + "\x28\xd8\xda\x67\x80\x64\x10\xe5\x56\x5b\xcc\x5a\x9d\x7a\xb9\xfb\x35" + "\x74\x13\xfa\x01\x58\x37\x8c", + "\x00\x4e\x3f\x4a\x4e\x6d\xb9\x55", + "\xc4\xc2\xc0\x87\x6b\xe9\xea\xbe\xb5\xa9\x56\xda\x53\x84\x6b\x08", 1, + 64, NULL, 0 }, + { 192, 128, 111, + "\xdc\x96\x8d\xd8\x9f\xd6\x02\xbb\x7e\xca\x6f\x3a\x8a\x13\xe4\xf5\x9c" + "\x08\xd0\x2a\x51\x4b\x19\x34", + "\x41\xa2\x53\x54\xef\xeb\x1b\xc3\xb8", + "\xf3\x3a\x62\xca\xf3\x97\xf9\xaf\xf7\x1f\xe4\x29\x41\xba\x41\xd8", 1, + 72, NULL, 0 }, + { 192, 128, 112, + "\x76\x58\x95\x1c\x0f\x62\x0d\x82\xaf\xd9\x27\x56\xcc\x2d\x79\x83\xb7" + "\x9d\xa3\xe5\x6f\xdd\x1b\x78", + "\xf0\xe8\x2f\xb5\xc5\x66\x6f\x4a\xf4\x9f", + "\x4d\x72\x4d\x05\xf3\x40\x29\x67\xeb\x65\xae\x1e\x32\xd5\x46\x9e", 1, + 80, NULL, 0 }, + { 192, 128, 113, + "\xd9\x57\x4c\x3a\x22\x1b\x98\x66\x90\x93\x1f\xaa\xc5\x25\x8d\x9d\x3c" + "\x52\x36\x2b\x2c\xb9\xb0\x54", + "\x17\x8e\xa8\x40\x4b\xa5\x4e\xe4\xe4\x52\x2c", + "\x64\xa0\xe0\xb6\x75\x73\x09\xab\x58\xd7\x4f\x72\xc3\x10\xe4\x73", 1, + 88, NULL, 0 }, + { 192, 128, 114, + "\x70\x44\x09\xba\xb2\x80\x85\xc4\x49\x81\xf2\x8f\x75\xdd\x14\x3a\x4f" + "\x74\x71\x06\xf6\x3f\x26\x2e", + "\xcd\xa5\x70\x9e\x7f\x11\x56\x24\xe7\x4a\xb0\x31", + "\x6a\xb2\x07\x43\x34\xbe\x14\xa9\x5b\x6a\x24\x1f\x89\x7a\x43\xde", 1, + 96, NULL, 0 }, + { 192, 128, 115, + "\xd8\xd0\x6e\xf6\xa5\x3b\xbf\xf5\xc8\xf1\x2d\x79\x1b\x8f\x4c\x67\xe5" + "\x74\xbf\x44\x07\x36\xd1\xcc", + "\xa1\x17\x1e\xae\x19\x79\xf4\x83\x45\xdd\x94\x85\xa0", + "\x7a\xa5\x7c\xf9\x8b\x24\x89\x7c\xc9\x23\x0e\x33\x16\x75\x8e\x61", 1, + 104, NULL, 0 }, + { 192, 128, 116, + "\x71\x12\x9e\x78\x16\x13\xf3\x9d\x9a\xc3\x9f\xbd\xe2\x62\x8b\x44\xc2" + "\x50\xc1\x4d\xeb\x5e\xf9\xe2", + "\x96\x75\x93\xcc\x64\xbc\xbf\x7f\x3c\x58\xd0\x4c\xb8\x2b", + "\x6c\xc4\x88\xb0\xa4\x0e\xad\xbe\x4b\xce\xe2\x62\x32\x39\xd1\x26", 1, + 112, NULL, 0 }, + { 192, 128, 117, + "\x85\x0f\xc8\x59\xe9\xf7\xb8\x9a\x36\x76\x11\xde\xe6\x69\x8f\x33\x96" + "\x2d\x82\x45\xca\x8d\xc3\x31", + "\x58\x6f\x4f\x17\x1a\xf1\x16\x51\x90\x61\xa8\xe0\xe7\x79\x40", + "\xfb\x11\xa3\x60\xc9\x77\x69\x91\xd7\x3d\x6e\x41\xd0\x77\x10\xa2", 1, + 120, NULL, 0 }, + { 192, 128, 118, + "\xf4\xbf\xa5\xaa\x4f\x0f\x4d\x62\xcf\x73\x6c\xd2\x96\x9c\x43\xd5\x80" + "\xfd\xb9\x2f\x27\x53\xbe\xdb", + "\x0e\x23\x9f\x23\x97\x05\xb2\x82\xce\x22\x00\xfe\x20\xde\x11\x65", + "\xab\x20\xa6\xcf\x60\x87\x36\x65\xb1\xd6\x99\x9b\x05\xc7\xf9\xc6", 1, + 128, NULL, 0 }, + { 192, 128, 119, + 
"\xcf\xd3\xf6\x88\x73\xd8\x1a\x27\xd2\xbf\xce\x87\x6c\x79\xf6\xe6\x09" + "\x07\x4d\xec\x39\xe3\x46\x14", + "\xb1\x97\x3c\xb2\x5a\xa8\x7e\xf9\xd1\xa8\x88\x8b\x0a\x0f\x5c\x04" + "\xc6", + "\xb9\x5a\x01\x6b\x83\xa0\xae\x41\x94\x02\x33\x33\xc8\xa7\x34\x5a", 1, + 136, NULL, 0 }, + { 192, 128, 120, + "\x64\x8a\x44\x46\x8d\x67\xbb\x67\x44\xb2\x35\xee\x7a\x3f\xcd\x6e\xd4" + "\xbd\xc2\x9e\xc5\xb5\xfa\x1a", + "\xc5\x9d\x0d\x69\x81\xcc\xa1\xbe\x1d\x55\x19\xfc\x78\x81\xe6\xd2\x30" + "\xf3\x9f\x6c\x12\xa9\xe8\x27", + "\xa1\xb9\x62\x72\xae\x7f\x9a\xef\x56\x72\x71\x79\x5f\x21\xd1\xd3", 1, + 192, NULL, 0 }, + { 192, 128, 121, + "\x9d\x11\xab\xc1\xfc\xb2\x48\xa4\x36\x59\x8e\x69\x5b\xe1\x2c\x3c\x2e" + "\xd9\x0a\x18\xba\x09\xd6\x2c", + "\xaa\x51\x82\xca\xe2\xa8\xfb\x06\x8c\x0b\x3f\xb2\xbe\x3e\x57\xae\x52" + "\x3d\x13\xdf\xfd\x1a\x94\x45\x87\x70\x7c\x2b\x67\x44\x7f\x3f", + "\x85\x97\xd9\xa0\x4d\x1c\x27\x1d\x61\xd4\x2f\x00\x7b\x43\x51\x75", 1, + 256, NULL, 0 }, + { 192, 128, 122, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xed\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 123, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc8\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 124, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf9\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 125, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xee\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 126, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xcb\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 127, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xfa\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\x6c\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 129, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x49\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 130, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x78\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 131, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x13\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 132, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" 
+ "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x12\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 133, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1a\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 134, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x8e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 135, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\x5f\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 136, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x80\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 137, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa1\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 138, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x61\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 139, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x68\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 140, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa2\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 141, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x62\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 142, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x6b\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 143, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x95\xd9\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 144, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\x4b\x91\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 145, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x09\x57\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 146, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd8\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 147, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x90\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 148, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x56\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 149, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\x59\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 150, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x11\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 151, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\xd7\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 152, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xf3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 153, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x91\x3c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 154, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x57\x1b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 155, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7b\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 156, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7a\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 157, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf1\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 158, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xcb\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 159, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1a\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 160, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7d\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 161, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xc8\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 162, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x19\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 163, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7e\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 164, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\x4a\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 165, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x9b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 166, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\xfc\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 167, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x91", 0, + 0, NULL, 0 }, + { 192, 128, 168, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x91", 0, + 64, NULL, 0 }, + { 192, 128, 169, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x8d", 0, + 128, NULL, 0 }, + { 192, 128, 170, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x92", 0, + 0, NULL, 0 }, + { 192, 128, 171, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x92", 0, + 64, NULL, 0 }, + { 192, 128, 172, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x8e", 0, + 128, NULL, 0 }, + { 192, 128, 173, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\xd0", 0, + 0, NULL, 0 }, + { 192, 128, 174, 
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\xd0", 0, + 64, NULL, 0 }, + { 192, 128, 175, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\xcc", 0, + 128, NULL, 0 }, + { 192, 128, 176, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x15\xd9\xd3\x7a\x6e\xca\x1f\xc9\x10", 0, + 0, NULL, 0 }, + { 192, 128, 177, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\xcb\x91\x1c\x7b\xe6\x1b\xe7\xca\x10", 0, + 64, NULL, 0 }, + { 192, 128, 178, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x89\x57\x3b\xf0\x4e\x7c\xde\x68\x0c", 0, + 128, NULL, 0 }, + { 192, 128, 179, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xed\x12\x39\x0e\xa0\xa7\xed\x15\xd8\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 180, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc8\x13\x07\xdf\x60\x85\x9a\xcb\x90\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 181, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf9\x1b\xde\x00\x69\xa6\xe3\x89\x56\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 182, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x8e\xa0\xa7\xed\x95\xd9\xd3\x7a\x6e\xca\x1f\xc9\x90", 0, + 0, NULL, 0 }, + { 192, 128, 183, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\x5f\x60\x85\x9a\x4b\x91\x1c\x7b\xe6\x1b\xe7\xca\x90", 0, + 64, NULL, 0 }, + { 192, 128, 184, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x80\x69\xa6\xe3\x09\x57\x3b\xf0\x4e\x7c\xde\x68\x8c", 0, + 128, NULL, 0 }, + { 192, 128, 185, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xec\x12\x39\x0e\xa0\xa7\xed\x95\xd9\xd3\x7a\x6e\xca\x1f\xc9\x10", 0, + 0, NULL, 0 }, + { 192, 128, 186, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc9\x13\x07\xdf\x60\x85\x9a\x4b\x91\x1c\x7b\xe6\x1b\xe7\xca\x10", 0, + 64, NULL, 0 }, + { 192, 128, 187, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf8\x1b\xde\x00\x69\xa6\xe3\x09\x57\x3b\xf0\x4e\x7c\xde\x68\x0c", 0, + 
128, NULL, 0 }, + { 192, 128, 188, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\x13\xed\xc6\xf1\x5f\x58\x12\xea\x26\x2c\x85\x91\x35\xe0\x36\x6f", 0, + 0, NULL, 0 }, + { 192, 128, 189, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x36\xec\xf8\x20\x9f\x7a\x65\x34\x6e\xe3\x84\x19\xe4\x18\x35\x6f", 0, + 64, NULL, 0 }, + { 192, 128, 190, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x07\xe4\x21\xff\x96\x59\x1c\x76\xa8\xc4\x0f\xb1\x83\x21\x97\x73", 0, + 128, NULL, 0 }, + { 192, 128, 191, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, NULL, 0 }, + { 192, 128, 192, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 64, NULL, 0 }, + { 192, 128, 193, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, NULL, 0 }, + { 192, 128, 194, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, NULL, 0 }, + { 192, 128, 195, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 64, NULL, 0 }, + { 192, 128, 196, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, NULL, 0 }, + { 192, 128, 197, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\x6c\x92\xb9\x8e\x20\x27\x6d\x95\x59\x53\xfa\xee\x4a\x9f\x49\x10", 0, + 0, NULL, 0 }, + { 192, 128, 198, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x49\x93\x87\x5f\xe0\x05\x1a\x4b\x11\x9c\xfb\x66\x9b\x67\x4a\x10", 0, + 64, NULL, 0 }, + { 192, 128, 199, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x78\x9b\x5e\x80\xe9\x26\x63\x09\xd7\xbb\x70\xce\xfc\x5e\xe8\x0c", 0, + 128, NULL, 0 }, + { 192, 128, 200, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "", + "\xed\x13\x38\x0f\xa1\xa6\xec\x14\xd8\xd2\x7b\x6f\xcb\x1e\xc8\x91", 0, + 0, NULL, 0 }, + { 192, 128, 201, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xc8\x12\x06\xde\x61\x84\x9b\xca\x90\x1d\x7a\xe7\x1a\xe6\xcb\x91", 0, + 64, NULL, 0 }, + { 192, 128, 
202, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf9\x1a\xdf\x01\x68\xa7\xe2\x88\x56\x3a\xf1\x4f\x7d\xdf\x69\x8d", 0, + 128, NULL, 0 }, + { 256, 128, 203, + "\x7b\xf9\xe5\x36\xb6\x6a\x21\x5c\x22\x23\x3f\xe2\xda\xaa\x74\x3a\x89" + "\x8b\x9a\xcb\x9f\x78\x02\xde\x70\xb4\x0e\x3d\x6e\x43\xef\x97", + "", + "\x73\x6c\x7b\x56\x95\x7d\xb7\x74\xc5\xdd\xf7\xc7\xa7\x0b\xa8\xa8", 1, + 0, NULL, 0 }, + { 256, 128, 204, + "\xe7\x54\x07\x6c\xea\xb3\xfd\xaf\x4f\x9b\xca\xb7\xd4\xf0\xdf\x0c\xbb" + "\xaf\xbc\x87\x73\x1b\x8f\x9b\x7c\xd2\x16\x64\x72\xe8\xee\xbc", + "\x40", + "\x9d\x47\x48\x2c\x2d\x92\x52\xba\xce\x43\xa7\x5a\x83\x35\xb8\xb8", 1, + 8, NULL, 0 }, + { 256, 128, 205, + "\xea\x3b\x01\x6b\xdd\x38\x7d\xd6\x4d\x83\x7c\x71\x68\x38\x08\xf3\x35" + "\xdb\xdc\x53\x59\x8a\x4e\xa8\xc5\xf9\x52\x47\x3f\xaf\xaf\x5f", + "\x66\x01", + "\xc7\xc4\x4e\x31\xc4\x66\x33\x49\x92\xd6\xf9\xde\x3c\x77\x16\x34", 1, + 16, NULL, 0 }, + { 256, 128, 206, + "\x73\xd4\x70\x96\x37\x85\x7d\xaf\xab\x6a\xd8\xb2\xb0\xa5\x1b\x06\x52" + "\x47\x17\xfe\xdf\x10\x02\x96\x64\x4f\x7c\xfd\xaa\xe1\x80\x5b", + "\xf1\xd3\x00", + "\xb7\x08\x66\x03\xa8\x5e\x11\xfc\xeb\x8c\xad\xea\x9b\xd3\x09\x39", 1, + 24, NULL, 0 }, + { 256, 128, 207, + "\xd5\xc8\x1b\x39\x9d\x4c\x0d\x15\x83\xa1\x3d\xa5\x6d\xe6\xd2\xdc\x45" + "\xa6\x6e\x7b\x47\xc2\x4a\xb1\x19\x2e\x24\x6d\xc9\x61\xdd\x77", + "\x2a\xe6\x3c\xbf", + "\xba\x38\x3a\x3a\x15\xc9\xdf\x64\xbb\xa5\x0d\x61\x11\x13\xa0\x24", 1, + 32, NULL, 0 }, + { 256, 128, 208, + "\x25\x21\x20\x3f\xa0\xdd\xdf\x59\xd8\x37\xb2\x83\x0f\x87\xb1\xaa\x61" + "\xf9\x58\x15\x5d\xf3\xca\x4d\x1d\xf2\x45\x7c\xb4\x28\x4d\xc8", + "\xaf\x3a\x01\x5e\xa1", + "\xb4\x57\x13\x7c\x54\x89\x08\xc6\x29\xf7\x14\xfe\x83\xb1\xed\x90", 1, + 40, NULL, 0 }, + { 256, 128, 209, + "\x66\x5a\x02\xbc\x26\x5a\x66\xd0\x17\x75\x09\x1d\xa5\x67\x26\xb6\x66" + "\x8b\xfd\x90\x3c\xb7\xaf\x66\xfb\x1b\x78\xa8\xa0\x62\xe4\x3c", + "\x3f\x56\x93\x5d\xef\x3f", + "\xb6\xd6\xfd\xe9\x3f\xc8\x5d\xe2\x89\xb3\x6b\x44\x6d\x77\xb4\x23", 1, + 48, NULL, 0 }, + { 256, 128, 210, + "\xfa\xcd\x75\xb2\x22\x21\x38\x00\x47\x30\x5b\xc9\x81\xf5\x70\xe2\xa1" + "\xaf\x38\x92\x8e\xa7\xe2\x05\x9e\x3a\xf5\xfc\x6b\x82\xb4\x93", + "\x57\xbb\x86\xbe\xed\x15\x6f", + "\x8b\x1e\xf7\x2d\x0a\x61\x27\x35\xb0\x8e\xfe\xf9\x81\xf2\x13\xc2", 1, + 56, NULL, 0 }, + { 256, 128, 211, + "\x50\x5a\xa9\x88\x19\x80\x9e\xf6\x3b\x9a\x36\x8a\x1e\x8b\xc2\xe9\x22" + "\xda\x45\xb0\x3c\xe0\x2d\x9a\x79\x66\xb1\x50\x06\xdb\xa2\xd5", + "\x2e\x4e\x7e\xf7\x28\xfe\x11\xaf", + "\xf7\x96\x06\xb8\x3a\x77\x06\xa2\xa1\x9e\x06\x8b\xce\x81\x88\x98", 1, + 64, NULL, 0 }, + { 256, 128, 212, + "\xf9\x42\x09\x38\x42\x80\x8b\xa4\x7f\x64\xe4\x27\xf7\x35\x1d\xde\x6b" + "\x95\x46\xe6\x6d\xe4\xe7\xd6\x0a\xa6\xf3\x28\x18\x27\x12\xcf", + "\x85\x2a\x21\xd9\x28\x48\xe6\x27\xc7", + "\xa5\xa8\x77\xf2\x2a\xc7\x43\xb7\xfb\x9e\x05\x0d\x2e\x3d\xdb\x02", 1, + 72, NULL, 0 }, + { 256, 128, 213, + "\x64\xbe\x16\x2b\x39\xc6\xe5\xf1\xfe\xd9\xc3\x2d\x9f\x67\x4d\x9a\x8c" + "\xde\x6e\xaa\x24\x43\x21\x4d\x86\xbd\x4a\x1f\xb5\x3b\x81\xb4", + "\x19\x5a\x3b\x29\x2f\x93\xba\xff\x0a\x2c", + "\x6e\xa1\x72\xe5\xc4\xd2\xfa\xc0\x75\xca\x60\x2d\xe5\x75\x7a\x62", 1, + 80, NULL, 0 }, + { 256, 128, 214, + "\xb2\x59\xa5\x55\xd4\x4b\x8a\x20\xc5\x48\x9e\x2f\x38\x39\x2d\xda\xa6" + "\xbe\x9e\x35\xb9\x83\x3b\x67\xe1\xb5\xfd\xf6\xcb\x3e\x4c\x6c", + "\xaf\xd7\x31\x17\x33\x0c\x6e\x85\x28\xa6\xe4", + 
"\x68\x02\x0b\xfc\x9b\xd7\x3f\xd8\x0d\x3c\xe5\x81\xba\x3b\x12\x08", 1, + 88, NULL, 0 }, + { 256, 128, 215, + "\x2c\x6f\xc6\x2d\xaa\x77\xba\x8c\x68\x81\xb3\xdd\x69\x89\x89\x8f\xef" + "\x64\x66\x63\xcc\x7b\x0a\x3d\xb8\x22\x8a\x70\x7b\x85\xf2\xdc", + "\x0f\xf5\x4d\x6b\x67\x59\x12\x0c\x2e\x8a\x51\xe3", + "\x11\x0e\xdd\x72\x7a\x9b\xf7\xfa\x11\xa6\x35\x8a\xfe\x61\x7d\x9d", 1, + 96, NULL, 0 }, + { 256, 128, 216, + "\xab\xab\x81\x5d\x51\xdf\x29\xf7\x40\xe4\xe2\x07\x9f\xb7\x98\xe0\x15" + "\x28\x36\xe6\xab\x57\xd1\x53\x6a\xe8\x92\x9e\x52\xc0\x6e\xb8", + "\xf0\x05\x8d\x41\x2a\x10\x4e\x53\xd8\x20\xb9\x5a\x7f", + "\x1f\xa2\x4c\x66\x25\xa0\xf8\xe1\xfc\x37\x82\x7a\xc8\x4d\x3c\xc4", 1, + 104, NULL, 0 }, + { 256, 128, 217, + "\x3d\x5d\xa1\xaf\x83\xf7\x28\x74\x58\xbf\xf7\xa7\x65\x1e\xa5\xd8\xdb" + "\x72\x25\x94\x01\x33\x3f\x6b\x82\x09\x69\x96\xdd\x7e\xaf\x19", + "\xaa\xcc\x36\x97\x2f\x18\x30\x57\x91\x9f\xf5\x7b\x49\xe1", + "\x86\x87\x65\xa8\xfa\x6a\xa8\x98\xdd\xec\x0f\x41\x23\xe9\x96\xbe", 1, + 112, NULL, 0 }, + { 256, 128, 218, + "\xc1\x9b\xdf\x31\x4c\x6c\xf6\x43\x81\x42\x54\x67\xf4\x2a\xef\xa1\x7c" + "\x1c\xc9\x35\x8b\xe1\x6c\xe3\x1b\x1d\x21\x48\x59\xce\x86\xaa", + "\x5d\x06\x6a\x92\xc3\x00\xe9\xb6\xdd\xd6\x3a\x7c\x13\xae\x33", + "\xb9\x68\x18\xb7\xac\xaf\x87\x9c\x7a\x7f\x82\x71\x37\x5a\x69\x14", 1, + 120, NULL, 0 }, + { 256, 128, 219, + "\x61\x2e\x83\x78\x43\xce\xae\x7f\x61\xd4\x96\x25\xfa\xa7\xe7\x49\x4f" + "\x92\x53\xe2\x0c\xb3\xad\xce\xa6\x86\x51\x2b\x04\x39\x36\xcd", + "\xcc\x37\xfa\xe1\x5f\x74\x5a\x2f\x40\xe2\xc8\xb1\x92\xf2\xb3\x8d", + "\x4b\x88\xe1\x93\x00\x0c\x5a\x4b\x23\xe9\x5c\x7f\x2b\x26\x53\x0b", 1, + 128, NULL, 0 }, + { 256, 128, 220, + "\x73\x21\x6f\xaf\xd0\x02\x2d\x0d\x6e\xe2\x71\x98\xb2\x27\x25\x78\xfa" + "\x8f\x04\xdd\x9f\x44\x46\x7f\xbb\x64\x37\xaa\x45\x64\x1b\xf7", + "\xd5\x24\x7b\x8f\x6c\x3e\xdc\xbf\xb1\xd5\x91\xd1\x3e\xce\x23\xd2" + "\xf5", + "\x86\x91\x1c\x7d\xa5\x1d\xc0\x82\x3d\x6e\x93\xd4\x29\x0d\x1a\xd4", 1, + 136, NULL, 0 }, + { 256, 128, 221, + "\x04\x27\xa7\x0e\x25\x75\x28\xf3\xab\x70\x64\x0b\xba\x1a\x5d\xe1\x2c" + "\xf3\x88\x5d\xd4\xc8\xe2\x84\xfb\xbb\x55\xfe\xb3\x52\x94\xa5", + "\x13\x93\x7f\x85\x44\xf4\x42\x70\xd0\x11\x75\xa0\x11\xf7\x67\x0e\x93" + "\xfa\x6b\xa7\xef\x02\x33\x6e", + "\xcc\xb2\xc5\x1b\xfb\xe2\x59\x8f\x91\x09\xfc\x70\xed\x07\xf0\xeb", 1, + 192, NULL, 0 }, + { 256, 128, 222, + "\x96\xe1\xe4\x89\x6f\xb2\xcd\x05\xf1\x33\xa6\xa1\x00\xbc\x56\x09\xa7" + "\xac\x3c\xa6\xd8\x17\x21\xe9\x22\xda\xdd\x69\xad\x07\xa8\x92", + "\x91\xa1\x7e\x4d\xfc\xc3\x16\x6a\x1a\xdd\x26\xff\x0e\x7c\x12\x05\x6e" + "\x8a\x65\x4f\x28\xa6\xde\x24\xf4\xba\x73\x9c\xeb\x5b\x5b\x18", + "\x92\x5f\x17\x7d\x85\xea\x29\x7e\xf1\x4b\x20\x3f\xe4\x09\xf9\xab", 1, + 256, NULL, 0 }, + { 256, 128, 223, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6a\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 224, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd7\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 225, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x58\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, 
+ 128, NULL, 0 }, + { 256, 128, 226, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x69\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 227, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd4\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 228, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x5b\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 229, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xeb\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 230, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x56\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 231, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd9\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 232, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf1\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 233, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x08\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 234, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xef\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 235, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x13\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 236, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\xfc\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 237, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\xbb\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 238, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd9\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 239, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3b\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 240, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5e\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 241, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xda\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 242, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x38\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 243, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5d\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 244, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x90\x1f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 245, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\x22\xea\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 246, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x10\xca\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 247, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1e\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 248, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xeb\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 249, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xcb\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 250, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + 
"\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x9f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 251, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\x6a\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 252, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\x4a\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 253, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x20\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 254, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x00\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 255, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xc2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 256, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x88\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 257, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0a\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 258, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6c\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 259, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x77\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 260, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7c\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 261, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x28\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 262, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x74\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 263, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7f\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 264, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x2b\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 265, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\xf6\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 266, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\xfd\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 267, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\xa9\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 268, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xfa", 0, + 0, NULL, 0 }, + { 256, 128, 269, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xed", 0, + 64, NULL, 0 }, + { 256, 128, 270, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x2c", 0, + 128, NULL, 0 }, + { 256, 128, 271, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xf9", 0, + 0, NULL, 0 }, + { 256, 128, 272, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xee", 0, + 64, NULL, 0 }, + { 256, 128, 273, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x2f", 0, + 128, NULL, 0 }, + { 256, 128, 274, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\xbb", 0, + 0, NULL, 0 }, + { 256, 128, 275, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\xac", 0, + 64, NULL, 0 }, + { 256, 128, 276, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\x6d", 0, + 128, NULL, 0 }, + { 256, 128, 277, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x10\x1f\x00\x89\x72\x76\x91\xb7\x7b", 0, + 0, NULL, 0 }, + { 256, 128, 278, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\x7c\x3a\x4e\xf8\xa2\xea\x20\x0b\x29\x7d\x2a\xcc\x6c", 0, + 64, NULL, 0 }, + { 256, 128, 279, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x90\xca\xe2\x6d\xad\x29\xbb\xa3\xad", 0, + 128, NULL, 0 }, + { 256, 128, 280, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6a\xf0\xa2\x93\xd8\xcb\xa0\x10\x1e\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 281, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd7\x09\x71\x7c\x3a\x4e\xf8\xa2\xeb\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 282, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x58\xee\x3f\x3b\x5f\x83\xe2\x90\xcb\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 283, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x13\xd8\xcb\xa0\x90\x1f\x00\x89\x72\x76\x91\xb7\xfb", 0, + 0, NULL, 0 }, + { 256, 128, 284, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd6\x09\x71\xfc\x3a\x4e\xf8\x22\xea\x20\x0b\x29\x7d\x2a\xcc\xec", 0, + 64, NULL, 0 }, + { 256, 128, 285, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\xbb\x5f\x83\xe2\x10\xca\xe2\x6d\xad\x29\xbb\xa3\x2d", 0, + 128, NULL, 0 }, + { 256, 128, 286, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6b\xf0\xa2\x93\xd8\xcb\xa0\x90\x1f\x00\x89\x72\x76\x91\xb7\x7b", 0, + 0, NULL, 0 }, + { 256, 128, 287, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + 
"\xd6\x09\x71\x7c\x3a\x4e\xf8\x22\xea\x20\x0b\x29\x7d\x2a\xcc\x6c", 0, + 64, NULL, 0 }, + { 256, 128, 288, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x59\xee\x3f\x3b\x5f\x83\xe2\x10\xca\xe2\x6d\xad\x29\xbb\xa3\xad", 0, + 128, NULL, 0 }, + { 256, 128, 289, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x94\x0f\x5d\x6c\x27\x34\x5f\xef\xe0\xff\x76\x8d\x89\x6e\x48\x04", 0, + 0, NULL, 0 }, + { 256, 128, 290, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x29\xf6\x8e\x83\xc5\xb1\x07\x5d\x15\xdf\xf4\xd6\x82\xd5\x33\x13", 0, + 64, NULL, 0 }, + { 256, 128, 291, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa6\x11\xc0\xc4\xa0\x7c\x1d\x6f\x35\x1d\x92\x52\xd6\x44\x5c\xd2", 0, + 128, NULL, 0 }, + { 256, 128, 292, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, NULL, 0 }, + { 256, 128, 293, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 64, NULL, 0 }, + { 256, 128, 294, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, NULL, 0 }, + { 256, 128, 295, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, NULL, 0 }, + { 256, 128, 296, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 64, NULL, 0 }, + { 256, 128, 297, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, NULL, 0 }, + { 256, 128, 298, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xeb\x70\x22\x13\x58\x4b\x20\x90\x9f\x80\x09\xf2\xf6\x11\x37\x7b", 0, + 0, NULL, 0 }, + { 256, 128, 299, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x56\x89\xf1\xfc\xba\xce\x78\x22\x6a\xa0\x8b\xa9\xfd\xaa\x4c\x6c", 0, + 64, NULL, 0 }, + { 256, 128, 300, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd9\x6e\xbf\xbb\xdf\x03\x62\x10\x4a\x62\xed\x2d\xa9\x3b\x23\xad", 0, + 128, NULL, 0 }, + { 256, 128, 301, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x6a\xf1\xa3\x92\xd9\xca\xa1\x11\x1e\x01\x88\x73\x77\x90\xb6\xfa", 0, + 0, NULL, 0 }, + { 256, 128, 302, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xd7\x08\x70\x7d\x3b\x4f\xf9\xa3\xeb\x21\x0a\x28\x7c\x2b\xcd\xed", 0, + 64, NULL, 0 }, + { 256, 128, 303, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x58\xef\x3e\x3a\x5e\x82\xe3\x91\xcb\xe3\x6c\xac\x28\xba\xa2\x2c", 0, + 128, NULL, 0 }, + { 0, 128, 304, "", "\x00\xb9\x44\x93\x26\xd3\x94\x16", "", 0, 64, NULL, + 0 }, + { 8, 128, 305, "\x0f", "\x45\x38\xb7\x9a\x13\x97\xe2\xaa", "", 0, 64, + NULL, 0 }, + { 64, 128, 306, "\xa8\x8e\x38\x5a\xf7\x18\x51\x48", + "\xdc\x63\xb7\xef\x08\x09\x6e\x4f", "", 0, 64, NULL, 0 }, + { 160, 128, 307, + "\x00\x3a\x22\x80\x08\xd3\x90\xb6\x45\x92\x9d\xf7\x3a\x2b\x2b\xdd\x82" + "\x98\x91\x8d", + "\xad\x1d\x3c\x31\x22\xab\x7a\xc6", "", 0, 64, NULL, 0 }, + { 320, 128, 308, + "\x94\xba\xaa\xc1\x50\xe2\x64\x5a\xe1\xec\x19\x39\xc7\xbc\xef\xb7\x3f" + "\x6e\xdb\x14\x6f\xae\x02\x28\x9b\x6c\x63\x26\xff\x39\xbc\x26\x5d\x61" + "\x2b\xef\x27\x27\xfa\x72", + "\xe3\xf7\x5a\x88\x6c\x4a\x55\x91", "", 0, 64, NULL, 0 }, + { 0, 0, 0, NULL, NULL, NULL, 0, 0, NULL, 0 } +}; diff --git a/test/wycheproof/aes_gcm_test.json.c b/test/wycheproof/aes_gcm_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..bfa3a3d15ee95e90a47163f4354bb4b7bdafdbc2 --- /dev/null +++ b/test/wycheproof/aes_gcm_test.json.c @@ -0,0 +1,2528 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* AES-GCM, 0.8r12 */ +#include "aead_test.h" +const struct aead_test aes_gcm_test_json[] = { + { 96, 128, 128, 1, + "\x5b\x96\x04\xfe\x14\xea\xdb\xa9\x31\xb0\xcc\xf3\x48\x43\xda\xb9", + "\x02\x83\x18\xab\xc1\x82\x40\x29\x13\x81\x41\xa2", "", + "\x00\x1d\x0c\x23\x12\x87\xc1\x18\x27\x84\x55\x4c\xa3\xa2\x19\x08", + "\x26\x07\x3c\xc1\xd8\x51\xbe\xff\x17\x63\x84\xdc\x98\x96\xd5\xff", + "\x0a\x3e\xa7\xa5\x48\x7c\xb5\xf7\xd7\x0f\xb6\xc5\x8d\x03\x85\x54", 1, + 0, 128 }, + { 96, 128, 128, 2, + "\x5b\x96\x04\xfe\x14\xea\xdb\xa9\x31\xb0\xcc\xf3\x48\x43\xda\xb9", + "\x92\x1d\x25\x07\xfa\x80\x07\xb7\xbd\x06\x7d\x34", + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\x00\x1d\x0c\x23\x12\x87\xc1\x18\x27\x84\x55\x4c\xa3\xa2\x19\x08", + "\x49\xd8\xb9\x78\x3e\x91\x19\x13\xd8\x70\x94\xd1\xf6\x3c\xc7\x65", + "\x1e\x34\x8b\xa0\x7c\xca\x2c\xf0\x4c\x61\x8c\xb4\xd4\x3a\x5b\x92", 1, + 128, 128 }, + { 96, 128, 128, 3, + "\xaa\x02\x3d\x04\x78\xdc\xb2\xb2\x31\x24\x98\x29\x3d\x9a\x91\x29", + "\x04\x32\xbc\x49\xac\x34\x41\x20\x81\x28\x81\x27", + "\xaa\xc3\x92\x31\x12\x98\x72\xa2", + "\x20\x35\xaf\x31\x3d\x13\x46\xab\x00\x15\x4f\xea\x78\x32\x21\x05", + "\xee\xa9\x45\xf3\xd0\xf9\x8c\xc0\xfb\xab\x47\x2a\x0c\xf2\x4e\x87", + "\x4b\xb9\xb4\x81\x25\x19\xda\xdf\x9e\x12\x32\x01\x6d\x06\x81\x33", 1, + 64, 128 }, + { 96, 128, 128, 4, + "\xbe\xdc\xfb\x5a\x01\x1e\xbc\x84\x60\x0f\xcb\x29\x6c\x15\xaf\x0d", + "\x43\x8a\x54\x7a\x94\xea\x88\xdc\xe4\x6c\x6c\x85", "", "", "", + "\x96\x02\x47\xba\x5c\xde\x02\xe4\x1a\x31\x3c\x4c\x01\x36\xed\xc3", 1, + 0, 0 }, + { 96, 128, 128, 5, + "\x38\x4e\xa4\x16\xac\x3c\x2f\x51\xa7\x6e\x7d\x82\x26\x34\x6d\x4e", + "\xb3\x0c\x08\x47\x27\xad\x1c\x59\x2a\xc2\x1d\x12", "", "\x35", + "\x54", + "\x7c\x1e\x4a\xe8\x8b\xb2\x7e\x56\x38\x34\x3c\xb9\xfd\x3f\x63\x37", 1, + 0, 8 }, + { 96, 128, 128, 6, + "\xca\xe3\x1c\xd9\xf5\x55\x26\xeb\x03\x82\x41\xfc\x44\xca\xc1\xe5", + "\xb5\xe0\x06\xde\xd5\x53\x11\x0e\x6d\xc5\x65\x29", "", + "\xd1\x09\x89\xf2\xc5\x2e\x94\xad", + "\xa0\x36\xea\xd0\x31\x93\x90\x3f", + "\x3b\x62\x69\x40\xe0\xe9\xf0\xcb\xea\x8e\x18\xc4\x37\xfd\x60\x11", 1, + 0, 64 }, + { 96, 128, 128, 7, + "\xdd\x61\x97\xcd\x63\xc9\x63\x91\x9c\xf0\xc2\x73\xef\x6b\x28\xbf", + "\xec\xb0\xc4\x2f\x70\x00\xef\x0e\x6f\x95\xf2\x4d", "", + "\x4d\xcc\x14\x85\x36\x58\x66\xe2\x5a\xc3\xf2\xca\x6a\xba\x97", + "\x8a\x99\x92\x38\x8e\x73\x5f\x80\xee\x18\xf4\xa6\x3c\x10\xad", + "\x14\x86\xa9\x1c\xcc\xf9\x2c\x9a\x5b\x00\xf7\xb0\xe0\x34\x89\x1c", 1, + 0, 120 }, + { 96, 128, 128, 8, + "\xff\xdf\x42\x28\x36\x1e\xa1\xf8\x16\x58\x52\x13\x6b\x34\x80\xf7", + "\x0e\x16\x66\xf2\xdc\x65\x2f\x77\x08\xfb\x8f\x0d", "", + "\x25\xb1\x2e\x28\xac\x0e\xf6\xea\xd0\x22\x6a\x3b\x22\x88\xc8\x00", + "\xf7\xbd\x37\x9d\x13\x04\x77\x17\x6b\x8b\xb3\xcb\x23\xdb\xbb\xaa", + "\x1e\xe6\x51\x3c\xe3\x0c\x78\x73\xf5\x9d\xd4\x35\x0a\x58\x8f\x42", 1, + 0, 128 }, + { 96, 128, 128, 9, + 
"\xc1\x5e\xd2\x27\xdd\x2e\x23\x7e\xcd\x08\x7e\xaa\xaa\xd1\x9e\xa4", + "\x96\x5f\xf6\x64\x31\x16\xac\x14\x43\xa2\xde\xc7", "", + "\xfe\xe6\x2f\xde\x97\x3f\xe0\x25\xad\x6b\x32\x2d\xcd\xf3\xc6\x3f" + "\xc7", + "\x0d\xe5\x1f\xe4\xf7\xf2\xd1\xf0\xf9\x17\x56\x9f\x5c\x6d\x1b\x00" + "\x9c", + "\x6c\xd8\x52\x14\x22\xc0\x17\x7e\x83\xef\x1b\x7a\x84\x5d\x97\xdb", 1, + 0, 136 }, + { 96, 128, 128, 10, + "\xa8\xee\x11\xb2\x6d\x7c\xeb\x7f\x17\xea\xa1\xe4\xb8\x3a\x2c\xf6", + "\xfb\xbc\x04\xfd\x6e\x02\x5b\x71\x93\xeb\x57\xf6", "", + "\xc0\x8f\x08\x5e\x6a\x9e\x0e\xf3\x63\x62\x80\xc1\x1e\xcf\xad\xf0\xc1" + "\xe7\x29\x19\xff\xc1\x7e\xaf", + "\x7c\xd9\xf4\xe4\xf3\x65\x70\x4f\xff\x3b\x99\x00\xaa\x93\xba\x54\xb6" + "\x72\xba\xc5\x54\x27\x56\x50", + "\xf4\xeb\x19\x32\x41\x22\x6d\xb0\x17\xb3\x2e\xc3\x8c\xa4\x72\x17", 1, + 0, 192 }, + { 96, 128, 128, 11, + "\x28\xff\x3d\xef\x08\x17\x93\x11\xe2\x73\x4c\x6d\x1c\x4e\x28\x71", + "\x32\xbc\xb9\xb5\x69\xe3\xb8\x52\xd3\x7c\x76\x6a", "\xc3", + "\xdf\xc6\x1a\x20\xdf\x85\x05\xb5\x3e\x3c\xd5\x9f\x25\x77\x0d\x50\x18" + "\xad\xd3\xd6", + "\xf5\x8d\x45\x32\x12\xc2\xc8\xa4\x36\xe9\x28\x36\x72\xf5\x79\xf1\x19" + "\x12\x29\x78", + "\x59\x01\x13\x1d\x07\x60\xc8\x71\x59\x01\xd8\x81\xfd\xfd\x3b\xc0", 1, + 8, 160 }, + { 96, 128, 128, 12, + "\xe6\x3a\x43\x21\x6c\x08\x86\x72\x10\xe2\x48\x85\x9e\xb5\xe9\x9c", + "\x9c\x3a\x42\x63\xd9\x83\x45\x66\x58\xaa\xd4\xb1", + "\x83\x4a\xfd\xc5\xc7\x37\x18\x6b", + "\xb1\x4d\xa5\x6b\x04\x62\xdc\x05\xb8\x71\xfc\x81\x52\x73\xff\x48\x10" + "\xf9\x2f\x4b", + "\xbf\x86\x46\x16\xc2\x34\x75\x09\xca\x9b\x10\x44\x63\x79\xb9\xbd\xbb" + "\x3b\x8f\x64", + "\xa9\x7d\x25\xb4\x90\x39\x0b\x53\xc5\xdb\x91\xf6\xee\x2a\x15\xb8", 1, + 64, 160 }, + { 96, 128, 128, 13, + "\x38\x44\x98\x90\x23\x4e\xb8\xaf\xab\x0b\xbf\x82\xe2\x38\x54\x54", + "\x33\xe9\x06\x58\x41\x6e\x7c\x1a\x7c\x00\x5f\x11", + "\x40\x20\x85\x5c\x66\xac\x45\x95\x05\x83\x95\xf3\x67\x20\x1c\x4c", + "\xf7\x62\x77\x6b\xf8\x31\x63\xb3\x23\xca\x63\xa6\xb3\xad\xea\xc1\xe1" + "\x35\x72\x62", + "\xa6\xf2\xef\x3c\x7e\xf7\x4a\x12\x6d\xd2\xd5\xf6\x67\x39\x64\xe2\x7d" + "\x5b\x34\xb6", + "\xb8\xbb\xdc\x4f\x50\x14\xbc\x75\x2c\x8b\x4e\x9b\x87\xf6\x50\xa3", 1, + 128, 160 }, + { 96, 128, 128, 14, + "\x6a\x68\x67\x1d\xfe\x32\x3d\x41\x98\x94\x38\x1f\x85\xeb\x63\xfd", + "\x9f\x0d\x85\xb6\x05\x71\x1f\x34\xcd\x2a\x35\xba", + "\x76\xeb\x5f\x14\x72\x50\xfa\x3c\x12\xbf\xf0\xa6\xe3\x93\x4a\x0b\x16" + "\x86\x0c\xf1\x16\x46\x77\x3b", + "\x0f\xc6\x78\x99\xc3\xf1\xbb\xe1\x96\xd9\x0f\x1e\xca\x37\x97\x38\x92" + "\x30\xaa\x37", + "\xbd\x64\x80\x2c\xfe\xba\xeb\x48\x7d\x3a\x8f\x76\xce\x94\x3a\x37\xb3" + "\x47\x2d\xd5", + "\xfc\xe9\xa5\xb5\x30\xc7\xd7\xaf\x71\x8b\xe1\xec\x0a\xe9\xed\x4d", 1, + 192, 160 }, + { 96, 128, 128, 15, + "\xe1\x22\x60\xfc\xd3\x55\xa5\x1a\x0d\x01\xbb\x1f\x6f\xa5\x38\xc2", + "\x5d\xfc\x37\x36\x6f\x56\x88\x27\x51\x47\xd3\xf9", "", + "\xd9\x02\xde\xea\xb1\x75\xc0\x08\x32\x9a\x33\xbf\xac\xcd\x5c\x0e\xb3" + "\xa6\xa1\x52\xa1\x51\x0e\x7d\xb0\x4f\xa0\xaf\xf7\xce\x42\x88\x53\x0d" + "\xb6\xa8\x0f\xa7\xfe\xa5\x82\xaa\x7d\x46\xd7\xd5\x6e\x70\x8d\x2b\xb0" + "\xc5\xed\xd3\xd2\x66\x48\xd3\x36\xc3\x62\x0e\xa5\x5e", + "\xd3\x3b\xf6\x72\x2f\xc2\x93\x84\xfa\xd7\x5f\x99\x02\x48\xb9\x52\x8e" + "\x09\x59\xaa\x67\xec\x66\x86\x9d\xc3\x99\x6c\x67\xa2\xd5\x59\xe7\xd7" + "\x7c\xe5\x95\x5f\x8c\xad\x2a\x4d\xf5\xfd\xc3\xac\xcc\xaf\xa7\xbc\x0d" + "\xef\x53\xd8\x48\x11\x12\x56\x90\x3e\x5a\xdd\x04\x20", + "\x8b\xc8\x33\xde\x51\x08\x63\xb4\xb4\x32\xc3\xcb\xf4\x5a\xa7\xcc", 1, + 0, 512 }, + { 96, 128, 128, 16, + 
"\x3c\x55\xf8\x8e\x9f\xaa\x0d\x68\xab\x50\xd0\x2b\x47\x16\x12\x76", + "\xd7\x67\xc4\x8d\x20\x37\xb4\xbd\x2c\x23\x1b\xbd", "", + "\x5d\x6a\xdd\x48\xe7\xa5\x70\x4e\x54\xf9\xc2\x82\x9a\x9b\x42\x83\xdc" + "\xe0\xd3\xa6\x5b\x13\x3e\xba\x37\x93\xc4\xfb\xfa\x1d\x8e\x3a\x25\x39" + "\xd0\xd4\xf3\xde\x38\x15\x98\xce\x5b\x23\x60\x17\x3f\xbd\x14\x94\x76" + "\xc3\x16\x92\xc5\xd6\xe8\x72\xfc\xe4\x02\x19\x37\x89\x49\xc2\xe7\x0b" + "\x5f\x1b\x9f\x0a\x1d\x5f\x38\x35\x2a\xd8\x14\xb2\xa0\x35\xbb\x3f\x3f" + "\x26\x42\x5d\x83\x1a\x2f\x7a\x5e\x65\xc5\xdf\xcd\x91\xa3\x15\xc2\xb2" + "\x4f\x53\xa6\x62\x60\x5e\xa4\x08\x57\xdd\x98\x0e\x9b\xe5\xcd\xad\x00" + "\x0c\x56\x9f\x2d\x20\x4d\x4b\xd3\xb0", + "\x17\xd7\x2d\x90\xbd\x23\xe0\x76\xd8\x36\x4a\x87\xec\xb9\xac\x58\xac" + "\xc5\xde\x46\x29\xbf\xd5\x90\x40\x9b\x8b\xf1\xfc\xd3\xa2\xf6\x02\x73" + "\x1b\x46\x14\xce\xc1\x5e\x77\x3e\xa6\x5a\x65\xe7\x21\x09\x94\x25\x6b" + "\xf5\x45\x0a\x25\xac\xb5\x27\x26\x9c\x06\x5f\x2e\x2f\x22\x79\xd1\xfe" + "\x8b\x3e\xda\x98\xdc\xf8\x7b\x34\x8f\x15\x28\x37\x7b\xbd\xd2\x58\x35" + "\x5d\x46\xe0\x35\x33\x04\x83\xd8\x09\x7e\x80\xc7\xde\x9b\xbb\x60\x6d" + "\xdf\x72\x3f\x29\x09\x21\x7f\xfd\xd1\x8e\x8b\xdb\xd7\xb0\x80\x62\xf1" + "\xdc\xba\x96\x0e\x5c\x0d\x29\x0f\x5f", + "\x09\x0b\x8c\x2e\xc9\x8e\x41\x16\x18\x6d\x0e\x5f\xbe\xfe\xb9\xc2", 1, + 0, 1024 }, + { 96, 128, 128, 17, + "\xa2\x94\xe7\x0f\xa2\xac\x10\xa1\xfb\x00\xc5\x88\xb8\x88\xb6\x73", + "\xdf\xe2\x0d\x1c\x43\x50\xe6\x23\x5d\x98\x7a\xf1", "", + "\x6e\xd1\xd7\xd6\x18\xd1\x58\x74\x1f\x52\x07\x80\x06\xf2\x84\x94\xba" + "\x72\xa2\x45\x4f\x27\x16\x0a\xe8\x72\x27\x93\xfc\xeb\xc5\x38\xeb\xc2" + "\xf6\x7c\x3a\xce\x3e\x0f\xe7\xc4\x7b\x9e\x74\xe0\x81\x18\x2b\x47\xc9" + "\x30\x14\x4e\x3f\xc8\x0d\x0a\xd5\x06\x11\xc3\xaf\xcf\xe2\xdb\xc5\x27" + "\x9e\xdb\xbb\xa0\x87\xc0\xe3\x90\x35\x5f\x3d\xaf\xfc\xd2\x5a\xd4\xde" + "\xa0\x07\xc2\x84\xad\x92\xe7\xfc\xbe\xcb\x43\x8f\xb6\x06\x23\xff\x89" + "\xa5\x99\xdc\xa2\xaa\xc1\x41\xb2\x66\x51\x38\x6c\xa5\x5b\x73\x9b\x94" + "\x90\x1e\xf6\xdb\x60\x9c\x34\x4d\x8a\xcf\x45\x44\x56\x8e\x31\xbb\x09" + "\x36\x11\x12\x75\x4b\x1c\x0c\x6a\x3c\x87\x5b\xd9\x45\x3b\x0e\xe0\x08" + "\x14\x12\x15\x13\x98\xa2\x94\xec\xad\x75\xad\xd5\x21\x61\x1d\xb5\x28" + "\x8b\x60\xac\x3c\x01\x28\xf6\xe9\x43\x66\xb6\x9e\x65\x9e\x6a\xa6\x6f" + "\x05\x8a\x3a\x35\x71\x06\x4e\xdb\xb0\xf0\x5c\x11\xe5\xdd\xe9\x38\xfb" + "\x46\xc3\x93\x5d\xd5\x19\x3a\x4e\x56\x64\x68\x8f\x0a\xe6\x7c\x29\xb7" + "\xcc\x49\xa7\x96\x31\x40\xf8\x2e\x31\x1a\x20\xc9\x8c\xd3\x4f\xbc\xab" + "\x7b\x4b\x51\x5a\xe8\x65\x57\xe6\x20\x99\xe3\xfc\x37\xb9\x59\x5c\x85" + "\xa7\x5c", + "\x5b\xc6\xdb\xaf\xc4\x01\x10\x1c\x7a\x08\xc8\x1d\x6c\x27\x91\xaa\x14" + "\x7c\xe0\x93\xaa\xd1\x72\xbe\x18\x37\x9c\x74\x73\x84\xa5\x4a\x41\xa7" + "\x47\xba\x95\x5c\xad\xe8\xfd\xfb\x89\x67\xaa\x80\x8b\x43\xfe\xe3\xd7" + "\x57\xcc\x80\xf1\x11\x63\xb8\x00\xe5\xe5\x9d\xf9\x32\x75\x7f\x76\xc4" + "\x0b\x3d\x9c\xba\x44\x9a\xaf\x11\xe4\xf8\x0e\x00\x3b\x1f\x38\x4e\xaf" + "\xa4\xf7\x6e\x81\xb1\x3c\x09\xec\x1a\xd8\x8e\x76\x50\xc7\x50\xd4\x42" + "\xfe\x46\xd2\x25\xa3\x73\xe8\xa1\xb5\x64\xb4\x91\x5a\x5c\x6c\x51\x3c" + "\xfd\xfa\x22\xd9\x29\xd5\x74\x1c\xa5\xeb\xef\xae\xdc\xba\x63\x6c\x7c" + "\x3b\xbe\xf1\x88\x63\xfd\xc1\x26\xb4\xb4\x51\x61\x10\x49\xc3\x5d\x81" + "\x4f\xc2\xeb\x7e\x4b\x8f\x1a\x89\x95\xec\xb4\xa3\xc8\x66\x52\xa0\x68" + "\xc0\xb2\xa3\xe1\xc5\x94\x1d\x59\xc2\x10\xb4\x58\xd5\xd5\xd3\xb0\x64" + "\x20\xec\x20\x53\x46\x5c\xcc\xec\xa7\xc2\x0f\x67\x40\x49\x85\x46\x03" + "\x79\xe2\xee\x80\x6a\x46\xe8\x40\x9d\xfa\xb2\xe0\xdd\x67\xea\x3c\xf4" + 
"\x6d\x5a\xd4\xeb\x78\x75\x68\x27\x35\x8c\x3e\xf1\xfd\xbd\x07\xc3\x38" + "\x34\xf3\xd9\xec\xa3\xff\x13\xb7\x44\xa0\x10\x59\xa6\xc1\x7a\x31\x5a" + "\x8f\xd4", + "\xc7\x58\x7e\x7d\xa4\x1b\xed\x68\x2c\x37\x37\x7e\xa4\x32\x40\x29", 1, + 0, 2056 }, + { 96, 128, 128, 18, + "\xc4\xb0\x34\x35\xb9\x1f\xc5\x2e\x09\xef\xf2\x7e\x4d\xc3\xfb\x42", + "\x50\x46\xe7\xe0\x8f\x07\x47\xe1\xef\xcc\xb0\x9e", + "\x75\xfc\x90\x78\xb4\x88\xe9\x50\x3d\xcb\x56\x8c\x88\x2c\x9e\xec\x24" + "\xd8\x0b\x04\xf0\x95\x8c\x82\xaa\xc8\x48\x4f\x02\x5c\x90\x43\x41\x48" + "\xdb\x8e\x9b\xfe\x29\xc7\xe0\x71\xb7\x97\x45\x7c\xb1\x69\x5a\x5e\x5a" + "\x63\x17\xb8\x36\x90\xba\x05\x38\xfb\x11\xe3\x25\xca", + "\x8e\x88\x7b\x22\x4e\x8b\x89\xc8\x2e\x9a\x64\x1c\xf5\x79\xe6\x87\x9e" + "\x11\x11\xc7", + "\xb6\x78\x68\x12\x57\x4a\x25\x4e\xb4\x3b\x1c\xb1\xd1\x75\x35\x64\xc6" + "\xb5\x20\xe9", + "\xad\x8c\x09\x61\x0d\x50\x8f\x3d\x0f\x03\xcc\x52\x3c\x0d\x5f\xcc", 1, + 512, 160 }, + { 96, 128, 128, 19, + "\x7e\x37\xd5\x6e\x6b\x1d\x01\x72\xd4\x0d\x64\xd6\x11\x1d\xd4\x24", + "\x51\x7c\x55\xc2\xec\x9b\xfe\xa9\x0a\xdd\xc2\xbd", + "\x8e\xd8\xa9\xbe\x4c\x3d\x32\xa5\x09\x84\x34\xee\x5c\x0c\x4f\xc2\x0f" + "\x78\xef\x5e\x25\xed\x8b\x72\xa8\x40\xa4\x63\xe3\x6b\x67\xb8\x81\xe0" + "\x48\xb5\xe4\x9f\x51\x5b\x25\x41\xad\x5c\xe4\xeb\xb3\xa9\x17\xc1\x6b" + "\xcd\xc0\xdc\x3c\xb5\x2b\xb4\xed\x5a\x1d\xff\xcf\x1e\x18\x66\x54\x4e" + "\x8d\xb1\x03\xb2\xad\x99\xc6\xfa\x6e\x7d\xe1\xd8\xb4\x5b\xff\x57\xec" + "\x87\x2f\x1c\xfc\x78\xb0\xe4\x87\x0f\x6f\x20\x0f\xf1\x29\x1c\xae\x03" + "\x3d\xef\xc3\x32\x7b\xa8\x27\x92\xba\x43\x8e\x35\xc4\xbf\xbb\x68\x4f" + "\xec\x5c\xe5\xe3\xae\x16\x7d\x01\xd7", + "\x6a\x7d\xea\x03\xc1\xbb\xa7\x0b\xe8\xc7\x3d\xa4\x7d\x5e\xe0\x6d\x72" + "\xa2\x74\x30", + "\xcf\xb6\x31\x79\x07\x67\xd0\x64\x5d\x8e\xc6\xf2\x3b\xf7\xfa\x8b\x19" + "\xce\x79\xee", + "\xc5\x76\x7d\xda\xa7\x47\x15\x84\x46\x23\x17\x66\xbd\x20\x49\x0c", 1, + 1024, 160 }, + { 96, 128, 128, 20, + "\x30\x76\x74\x14\x08\xf7\x34\xce\x25\xd4\x8f\x98\x2e\x8b\x84\x4b", + "\xa2\x71\x2e\xac\x5e\x06\xd3\xcc\x28\x64\xaa\x8b", + "\x18\x52\x6e\x4e\xfd\x99\x5a\x0b\xf6\x40\x5d\x9f\x90\x67\x25\xc2\x90" + "\x27\x89\x58\xd4\x95\x54\x97\x4d\x8f\xe0\x25\xe7\x86\x0d\xaa\x22\x5c" + "\x12\x85\xb0\x57\x39\x16\xa4\xb6\x74\x1f\x7c\xc2\xe2\x9c\xe4\xe5\x25" + "\xe1\x2f\x43\x6c\xb7\xce\x0a\xd4\x7d\xf3\xd0\xf5\xbd\x80\xfb\x27\xe4" + "\x76\x35\xa4\x98\x5f\xda\xed\xf0\xe8\x21\xf1\xc8\x95\x99\x85\xca\xc4" + "\x9c\x97\xa4\xa0\x24\x38\xd9\x2b\x4a\xfd\x4c\x85\x5d\xcc\x7e\xf4\x1e" + "\xcf\xc3\x68\x66\x33\x4f\xcc\x05\xb2\xbb\x93\xef\x13\xf0\x0c\x5e\xa9" + "\xb9\x21\xe8\xa5\x19\xd7\x7f\x64\x8e\x0e\xfe\x9b\x5a\x62\x30\x5a\x2e" + "\xcf\x7d\x49\x99\x66\x3a\x6d\xdf\xca\x51\x7f\x1f\x36\xf0\x89\x9b\x0b" + "\xde\xf9\xf4\x33\xc4\xbb\x26\x63\xc0\xcc\x1b\xb6\x16\xe7\xd1\x94\x9e" + "\x52\x2b\xec\x85\x48\x5d\x37\x1d\x11\x34\xc9\x0e\xed\xe7\x5e\x86\x5d" + "\xc7\xbe\x40\x5b\x54\xc3\x3f\x0a\xcb\xac\xe6\xcf\x78\x0c\x78\x03\x5b" + "\x80\x35\xb6\xea\x3f\x56\x2a\x8d\x30\xa1\x56\xc1\x99\xfd\xaf\xd2\x5b" + "\xe0\x6e\xe8\x95\x58\x11\x95\xef\x12\x5c\xb4\xe6\x29\xe4\xf1\x8e\x0b" + "\xee\x97\x9d\x31\x51\x38\x96\xdb\x84\x66\xe4\x48\xe6\xb4\x60\x0a\x31" + "\x67\x57", + "\x41\x4e\xc6\xb1\x49\xe5\x47\x35\x30\x2d\xad\xa8\x88\xb9\x8b\x7f\xdb" + "\x4c\x12\x7c", + "\xe4\xd3\xf4\x89\x8c\xb3\xd9\x73\x26\x41\xd1\xf8\xd9\xd8\x89\xb2\xc9" + "\x8a\xf9\x30", + "\x76\xd4\xfb\xb6\x9d\x52\x9b\x64\x17\x5b\x32\x8b\xe0\x0b\x10\x68", 1, + 2056, 160 }, + { 96, 128, 128, 21, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", "", + "\xeb\xd4\xa3\xe1\x0c\xf6\xd4\x1c\x50\xae\xae\x00\x75\x63\xb0\x72", + "\xf6\x2d\x84\xd6\x49\xe5\x6b\xc8\xcf\xed\xc5\xd7\x4a\x51\xe2\xf7", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 0, 128 }, + { 96, 128, 128, 22, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", "", + "\xd5\x93\xc4\xd8\x22\x4f\x1b\x10\x0c\x35\xe4\xf6\xc4\x00\x65\x43", + "\x43\x1f\x31\xe6\x84\x09\x31\xfd\x95\xf9\x4b\xf8\x82\x96\xff\x69", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 0, 128 }, + { 96, 128, 128, 23, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd9\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 24, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xda\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 25, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\x58\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 26, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x85\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 27, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\x3c\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 28, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x33\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 29, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x30\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 30, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + 
"\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\x69\x88\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 31, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x89\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 32, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x08\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 33, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xe7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 34, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7b\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 35, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x87\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 36, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x84\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 37, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x06\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 38, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x82", 0, + 0, 128 }, + { 96, 128, 128, 39, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x81", 0, + 0, 128 }, + { 96, 128, 128, 40, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\xc3", 0, + 0, 128 }, + { 96, 128, 128, 41, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\xe9\x88\xc7\x7a\xd3\x86\x3e\x60\x03", 0, + 0, 128 }, + { 96, 128, 128, 42, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd9\x84\x7d\xbc\x32\x6a\x06\xe9\x89\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 43, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\x3c\x32\x6a\x06\x69\x88\xc7\x7a\xd3\x86\x3e\x60\x83", 0, + 0, 128 }, + { 96, 128, 128, 44, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd8\x84\x7d\xbc\x32\x6a\x06\x69\x88\xc7\x7a\xd3\x86\x3e\x60\x03", 0, + 0, 128 }, + { 96, 128, 128, 45, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\x27\x7b\x82\x43\xcd\x95\xf9\x16\x77\x38\x85\x2c\x79\xc1\x9f\x7c", 0, + 0, 128 }, + { 96, 128, 128, 46, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, 128 }, + { 96, 128, 128, 47, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, 128 }, + { 96, 128, 128, 48, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\x58\x04\xfd\x3c\xb2\xea\x86\x69\x08\x47\xfa\x53\x06\xbe\xe0\x03", 0, + 0, 128 }, + { 96, 128, 128, 49, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + 
"\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xeb\x15\x6d\x08\x1e\xd6\xb6\xb5\x5f\x46\x12\xf0\x21\xd8\x7b\x39", + "\xd9\x85\x7c\xbd\x33\x6b\x07\xe8\x89\xc6\x7b\xd2\x87\x3f\x61\x82", 0, + 0, 128 }, + { 64, 128, 128, 50, + "\xaa\x02\x3d\x04\x78\xdc\xb2\xb2\x31\x24\x98\x29\x3d\x9a\x91\x29", + "\x04\x32\xbc\x49\xac\x34\x41\x20", + "\xaa\xc3\x92\x31\x12\x98\x72\xa2", + "\x20\x35\xaf\x31\x3d\x13\x46\xab\x00\x15\x4f\xea\x78\x32\x21\x05", + "\x64\xc3\x6b\xb3\xb7\x32\x03\x4e\x3a\x7d\x04\xef\xc5\x19\x77\x85", + "\xb7\xd0\xdd\x70\xb0\x0d\x65\xb9\x7c\xfd\x08\x0f\xf4\xb8\x19\xd1", 1, + 64, 128 }, + { 64, 128, 128, 51, + "\xf3\x43\x47\x25\xc8\x2a\x7f\x8b\xb0\x7d\xf1\xf8\x12\x2f\xb6\xc9", + "\x28\xe9\xb7\x85\x17\x24\xba\xe3", "", "", "", + "\x44\xac\xa0\x0f\x42\xe4\x19\x9b\x82\x9a\x55\xe6\x9b\x07\x3d\x9e", 1, + 0, 0 }, + { 64, 128, 128, 52, + "\xde\xb6\x22\x33\x55\x9b\x57\x47\x66\x02\xb5\xad\xac\x57\xc7\x7f", + "\xd0\x84\x54\x7d\xe5\x5b\xbc\x15", "", + "\xd8\x98\x6d\xf0\x24\x1e\xd3\x29\x75\x82\xc0\xc2\x39\xc7\x24\xcb", + "\x03\xe1\xa1\x68\xa7\xe3\x77\xa9\x13\x87\x9b\x29\x6a\x1b\x5f\x9c", + "\x32\x90\xaa\x95\xaf\x50\x5a\x74\x2f\x51\x7f\xab\xcc\x9b\x20\x94", 1, + 0, 128 }, + { 128, 128, 128, 53, + "\x20\x34\xa8\x25\x47\x27\x6c\x83\xdd\x32\x12\xa8\x13\x57\x2b\xce", + "\x32\x54\x20\x2d\x85\x47\x34\x81\x23\x98\x12\x7a\x3d\x13\x44\x21", + "\x1a\x02\x93\xd8\xf9\x02\x19\x05\x89\x02\x13\x90\x13\x90\x81\x90\xbc" + "\x49\x08\x90\xd3\xff\x12\xa3", + "\x02\xef\xd2\xe5\x78\x23\x12\x82\x7e\xd5\xd2\x30\x18\x9a\x2a\x34\x2b" + "\x27\x7c\xe0\x48\x46\x21\x93", + "\x64\x06\x9c\x2d\x58\x69\x05\x61\xf2\x7e\xe1\x99\xe6\xb4\x79\xb6\x36" + "\x9e\xec\x68\x86\x72\xbd\xe9", + "\x9b\x7a\xba\xdd\x6e\x69\xc1\xd9\xec\x92\x57\x86\x53\x4f\x50\x75", 1, + 192, 192 }, + { 128, 128, 128, 54, + "\xb6\x7b\x1a\x6e\xfd\xd4\x0d\x37\x08\x0f\xbe\x8f\x80\x47\xae\xb9", + "\xfa\x29\x4b\x12\x99\x72\xf7\xfc\x5b\xbd\x5b\x96\xbb\xa8\x37\xc9", + "", "", "", + "\xa2\xcf\x26\x48\x15\x17\xec\x25\x08\x5c\x5b\x17\xd0\x78\x61\x83", 1, + 0, 0 }, + { 128, 128, 128, 55, + "\x20\x9e\x6d\xbf\x2a\xd2\x6a\x10\x54\x45\xfc\x02\x07\xcd\x9e\x9a", + "\x94\x77\x84\x9d\x6c\xcd\xfc\xa1\x12\xd9\x2e\x53\xfa\xe4\xa7\xca", + "", "\x01", "\xfd", + "\x03\x2d\xf7\xbb\xa5\xd8\xea\x1a\x14\xf1\x6f\x70\xbd\x0e\x14\xec", 1, + 0, 8 }, + { 128, 128, 128, 56, + "\xa5\x49\x44\x2e\x35\x15\x40\x32\xd0\x7c\x86\x66\x00\x6a\xa6\xa2", + "\x51\x71\x52\x45\x68\xe8\x1d\x97\xe8\xc4\xde\x4b\xa5\x6c\x10\xa0", + "", "\x11\x82\xe9\x35\x96\xca\xc5\x60\x89\x46\x40\x0b\xc7\x3f\x3a", + "\x2f\x33\x30\x87\xbd\xca\x58\x21\x9f\x9b\xfc\x27\x3e\x45\xcc", + "\xe0\x6d\x1e\xf4\x73\x13\x29\x57\xad\x37\xea\xef\x29\x73\x3c\xa0", 1, + 0, 120 }, + { 128, 128, 128, 57, + "\xcf\xb4\xc2\x6f\x12\x6f\x6a\x0a\xcb\x8e\x4e\x22\x0f\x6c\x56\xcd", + "\x12\x75\x11\x54\x99\xae\x72\x22\x68\x51\x5b\xf0\xc1\x64\xb4\x9c", + "", + "\x09\xdf\xd7\xf0\x80\x27\x52\x57\xcf\x97\xe7\x6f\x96\x6b\x1a\xd9", + "\xa7\x80\xbd\x01\xc8\x08\x85\x15\x6c\x88\xa9\x73\x26\x4c\x8e\xe5", + "\x2a\xde\xff\xa6\x82\xc8\xd8\xa8\x1f\xad\xa7\xd9\xfc\xdd\x2e\xe2", 1, + 0, 128 }, + { 128, 128, 128, 58, + "\x0b\x11\xef\x3a\x08\xc0\x29\x70\xf7\x42\x81\xc8\x60\x69\x1c\x75", + "\x95\xc1\xdd\x8c\x0f\x17\x05\xec\xe6\x89\x37\x90\x1f\x7a\xdd\x7b", + "", + "\xf6\x93\xd4\xed\xd8\x25\xdb\xb0\x61\x8d\x91\x11\x31\x28\x88\x0d\xbe" + "\xbb\x23\xe2\x5d\x00\xed\x1f\x07\x7d\x87\x0b\xe9\xcc\x75\x36", + "\x7e\x47\xe1\x0f\xe3\xc6\xfb\xfa\x38\x17\x70\xea\xf5\xd4\x8d\x14\x82" + "\xe7\x1e\x0c\x44\xdf\xf1\xe3\x0c\xa6\xf9\x5d\x92\x05\x20\x84", + 
"\xd0\x14\x44\xfa\x5d\x9c\x49\x96\x29\xd1\x74\xff\x39\x27\xa1\xac", 1, + 0, 256 }, + { 128, 128, 128, 59, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\xf9\x5f\xde\x4a\x75\x19\x13\x20\x2a\xee\xee\x32\xa0\xb5\x57\x53", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x00\x07\x8d\x10\x9d\x92\x14\x3f\xcd\x5d\xf5\x67\x21\xb8\x84\xfa\xc6" + "\x4a\xc7\x76\x2c\xc0\x9e\xea\x2a\x3c\x68\xe9\x2a\x17\xbd\xb5\x75\xf8" + "\x7b\xda\x18\xbe\x56\x4e", + "\x15\x2a\x65\x04\x5f\xe6\x74\xf9\x76\x27\x42\x7a\xf5\xbe\x22\xda", 1, + 0, 320 }, + { 128, 128, 128, 60, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\x7b\x95\xb8\xc3\x56\x81\x0a\x84\x71\x1d\x68\x15\x0a\x1b\x77\x50", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x84\xd4\xc9\xc0\x8b\x4f\x48\x28\x61\xe3\xa9\xc6\xc3\x5b\xc4\xd9\x1d" + "\xf9\x27\x37\x45\x13\xbf\xd4\x9f\x43\x6b\xd7\x3f\x32\x52\x85\xda\xef" + "\x4f\xf7\xe1\x3d\x46\xa6", + "\x21\x3a\x3c\xb9\x38\x55\xd1\x8e\x69\x33\x7e\xee\x66\xae\xec\x07", 1, + 0, 320 }, + { 128, 128, 128, 61, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\x1a\x55\x2e\x67\xcd\xc4\xdc\x1a\x33\xb8\x24\x87\x4e\xbf\x0b\xed", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x94\x8c\xa3\x7a\x8e\x66\x49\xe8\x8a\xef\xfb\x1c\x59\x8f\x36\x07\x00" + "\x77\x02\x41\x7e\xa0\xe0\xbc\x3c\x60\xad\x5a\x94\x98\x86\xde\x96\x8c" + "\xf5\x3e\xa6\x46\x2a\xed", + "\x99\xb3\x81\xbf\xa2\xaf\x97\x51\xc3\x9d\x1b\x6e\x86\xd1\xbe\x6a", 1, + 0, 320 }, + { 128, 128, 128, 62, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\xdd\x9d\x0b\x4a\x0c\x3d\x68\x15\x24\xbf\xfc\xa3\x1d\x90\x76\x61", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x64\xb1\x93\x14\xc3\x1a\xf4\x5a\xcc\xdf\x7e\x3c\x4d\xb7\x9f\x0d\x94" + "\x8c\xa3\x7a\x8e\x66\x49\xe8\x8a\xef\xfb\x1c\x59\x8f\x36\x07\x00\x77" + "\x02\x41\x7e\xa0\xe0\xbc", + "\x52\x81\xef\xc7\xf1\x3a\xc8\xe1\x4c\xcf\x5d\xca\x7b\xfb\xfd\xd1", 1, + 0, 320 }, + { 128, 128, 128, 63, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\x57\xc5\x64\x3c\x4e\x37\xb4\x04\x1d\xb7\x94\xcf\xe8\xe1\xf0\xf4", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x2b\xb6\x9c\x3e\x5d\x1f\x91\x81\x5c\x6b\x87\xa0\xd5\xbb\xea\x71\x64" + "\xb1\x93\x14\xc3\x1a\xf4\x5a\xcc\xdf\x7e\x3c\x4d\xb7\x9f\x0d\x94\x8c" + "\xa3\x7a\x8e\x66\x49\xe8", + "\xa3\xea\x2c\x09\xee\x4f\x8c\x8a\x12\xf4\x5c\xdd\xf9\xae\xff\x81", 1, + 0, 320 }, + { 128, 128, 128, 64, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\x99\x82\x1c\x2d\xd5\xda\xec\xde\xd0\x73\x00\xf5\x77\xf7\xaf\xf1", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x12\x7a\xf9\xb3\x9e\xcd\xfc\x57\xbb\x11\xa2\x84\x7c\x7c\x2d\x3d\x8f" + 
"\x93\x8f\x40\xf8\x77\xe0\xc4\xaf\x37\xd0\xfe\x9a\xf0\x33\x05\x2b\xd5" + "\x37\xc4\xae\x97\x8f\x60", + "\x07\xeb\x2f\xe4\xa9\x58\xf8\x43\x4d\x40\x68\x48\x99\x50\x7c\x7c", 1, + 0, 320 }, + { 128, 128, 128, 65, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\x5e\x4a\x39\x00\x14\x23\x58\xd1\xc7\x74\xd8\xd1\x24\xd8\xd2\x7d", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x0c\xf6\xae\x47\x15\x6b\x14\xdc\xe0\x3c\x8a\x07\xa2\xe1\x72\xb1\x12" + "\x7a\xf9\xb3\x9e\xcd\xfc\x57\xbb\x11\xa2\x84\x7c\x7c\x2d\x3d\x8f\x93" + "\x8f\x40\xf8\x77\xe0\xc4", + "\xf1\x45\xc2\xdc\xaf\x33\x9e\xed\xe4\x27\xbe\x93\x43\x57\xea\xc0", 1, + 0, 320 }, + { 128, 128, 128, 66, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\xd4\x12\x56\x76\x56\x29\x84\xc0\xfe\x7c\xb0\xbd\xd1\xa9\x54\xe8", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xf0\xc6\xff\xc1\x8b\xd4\x6d\xf5\x56\x91\x85\xa9\xaf\xd1\x69\xeb\x0c" + "\xf6\xae\x47\x15\x6b\x14\xdc\xe0\x3c\x8a\x07\xa2\xe1\x72\xb1\x12\x7a" + "\xf9\xb3\x9e\xcd\xfc\x57", + "\xfa\xcd\x0b\xfe\x87\x01\xb7\xb4\xa2\xba\x96\xd9\x8a\xf5\x2b\xd9", 1, + 0, 320 }, + { 128, 128, 128, 67, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\xb9\x7e\xc6\x2a\x5e\x59\x00\xcc\xf9\xe4\xbe\x33\x2e\x33\x60\x91", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xd6\x92\x8e\x09\x4c\x06\xe0\xa7\xc4\xdb\x42\x18\x4c\xf7\x52\x9e\x95" + "\xde\x88\xb7\x67\xed\xeb\xe9\xb3\x43\x00\x0b\xe3\xda\xb4\x7e\xa0\x8b" + "\x74\x42\x93\xee\xd6\x98", + "\xa0\x3e\x72\x9d\xcf\xd7\xa0\x31\x55\x65\x5f\xec\xe8\xaf\xfd\x7e", 1, + 0, 320 }, + { 128, 128, 128, 68, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\x7e\xb6\xe3\x07\x9f\xa0\xb4\xc3\xee\xe3\x66\x17\x7d\x1c\x1d\x1d", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xd8\x2c\xe5\x87\x71\xbf\x64\x87\x11\x6b\xf8\xe9\x64\x21\x87\x7e\xd6" + "\x92\x8e\x09\x4c\x06\xe0\xa7\xc4\xdb\x42\x18\x4c\xf7\x52\x9e\x95\xde" + "\x88\xb7\x67\xed\xeb\xe9", + "\x1e\x43\x92\x68\x28\xbc\x9a\x16\x14\xc7\xb1\x63\x90\x96\xc1\x95", 1, + 0, 320 }, + { 128, 128, 128, 69, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\x03\x14\xfc\xd1\x0f\xdd\x67\x5d\x3c\x61\x29\x62\xc9\x31\xf6\x35", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xa1\x97\xa3\x7a\x5d\x79\x69\x70\x78\x53\x6b\xc2\x7f\xe4\x6c\xd8\xd4" + "\x75\x52\x6d\x90\x44\xaa\x94\xf0\x88\xa0\x54\xf8\xe3\x80\xc6\x4f\x79" + "\x41\x47\x95\xc6\x14\x80", + "\xf0\x8b\xad\xdf\x0b\x52\x85\xc9\x1f\xc0\x6a\x67\xfe\x47\x08\xca", 1, + 0, 320 }, + { 128, 128, 128, 70, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\xc4\xdc\xd9\xfc\xce\x24\xd3\x52\x2b\x66\xf1\x46\x9a\x1e\x8b\xb9", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + 
"\x00\x00\x00\x00\x00\x00", + "\x14\x9f\xde\x9a\xbb\xd3\xa4\x3c\x25\x48\x57\x5e\x0d\xb9\xfb\x84\xa1" + "\x97\xa3\x7a\x5d\x79\x69\x70\x78\x53\x6b\xc2\x7f\xe4\x6c\xd8\xd4\x75" + "\x52\x6d\x90\x44\xaa\x94", + "\x62\xa4\xb6\x87\x5c\x28\x83\x45\xd6\xa4\x54\x39\x9e\xac\x1a\xfa", 1, + 0, 320 }, + { 128, 128, 128, 71, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + "\xbe\xc6\xfa\x05\xc1\x71\x8b\x9b\x84\xc4\x73\x45\xbb\xed\x7d\xcb", + "\x45\xa3\xf8\x9d\x02\x91\x8b\xfd\x0c\x81\x61\x65\x8c\xcc\x97\x95", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 0, 128 }, + { 128, 128, 128, 72, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "", + "\x4d\x82\x63\x9c\x39\xd3\xf3\x49\x0e\xe9\x03\xdd\x0b\xe7\xaf\xcf", + "\x1c\xd5\xa0\x62\x14\x23\x5c\xeb\x04\x4d\x4b\xad\x7b\x04\x73\x12", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 0, 128 }, + { 96, 256, 128, 73, + "\x92\xac\xe3\xe3\x48\xcd\x82\x10\x92\xcd\x92\x1a\xa3\x54\x63\x74\x29" + "\x9a\xb4\x62\x09\x69\x1b\xc2\x8b\x87\x52\xd1\x7f\x12\x3c\x20", + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb", + "\x00\x00\x00\x00\xff\xff\xff\xff", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09", + "\xe2\x7a\xbd\xd2\xd2\xa5\x3d\x2f\x13\x6b", + "\x9a\x4a\x25\x79\x52\x93\x01\xbc\xfb\x71\xc7\x8d\x40\x60\xf5\x2c", 1, + 64, 80 }, + { 96, 256, 128, 74, + "\x29\xd3\xa4\x4f\x87\x23\xdc\x64\x02\x39\x10\x0c\x36\x54\x23\xa3\x12" + "\x93\x4a\xc8\x02\x39\x21\x2a\xc3\xdf\x34\x21\xa2\x09\x81\x23", + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb", + "\xaa\xbb\xcc\xdd\xee\xff", "", "", + "\x2a\x7d\x77\xfa\x52\x6b\x82\x50\xcb\x29\x60\x78\x92\x6b\x50\x20", 1, + 48, 0 }, + { 96, 256, 128, 75, + "\x80\xba\x31\x92\xc8\x03\xce\x96\x5e\xa3\x71\xd5\xff\x07\x3c\xf0\xf4" + "\x3b\x6a\x2a\xb5\x76\xb2\x08\x42\x6e\x11\x40\x9c\x09\xb9\xb0", + "\x4d\xa5\xbf\x8d\xfd\x58\x52\xc1\xea\x12\x37\x9d", "", "", "", + "\x47\x71\xa7\xc4\x04\xa4\x72\x96\x6c\xea\x8f\x73\xc8\xbf\xe1\x7a", 1, + 0, 0 }, + { 96, 256, 128, 76, + "\xcc\x56\xb6\x80\x55\x2e\xb7\x50\x08\xf5\x48\x4b\x4c\xb8\x03\xfa\x50" + "\x63\xeb\xd6\xea\xb9\x1f\x6a\xb6\xae\xf4\x91\x6a\x76\x62\x73", + "\x99\xe2\x3e\xc4\x89\x85\xbc\xcd\xee\xab\x60\xf1", "", "\x2a", + "\x06", + "\x63\x3c\x1e\x97\x03\xef\x74\x4f\xff\xfb\x40\xed\xf9\xd1\x43\x55", 1, + 0, 8 }, + { 96, 256, 128, 77, + "\x51\xe4\xbf\x2b\xad\x92\xb7\xaf\xf1\xa4\xbc\x05\x55\x0b\xa8\x1d\xf4" + "\xb9\x6f\xab\xf4\x1c\x12\xc7\xb0\x0e\x60\xe4\x8d\xb7\xe1\x52", + "\x4f\x07\xaf\xed\xfd\xc3\xb6\xc2\x36\x18\x23\xd3", "", + "\xbe\x33\x08\xf7\x2a\x2c\x6a\xed", + "\xcf\x33\x2a\x12\xfd\xee\x80\x0b", + "\x60\x2e\x8d\x7c\x47\x99\xd6\x2c\x14\x0c\x9b\xb8\x34\x87\x6b\x09", 1, + 0, 64 }, + { 96, 256, 128, 78, + "\x67\x11\x96\x27\xbd\x98\x8e\xda\x90\x62\x19\xe0\x8c\x0d\x0d\x77\x9a" + "\x07\xd2\x08\xce\x8a\x4f\xe0\x70\x9a\xf7\x55\xee\xec\x6d\xcb", + "\x68\xab\x7f\xdb\xf6\x19\x01\xda\xd4\x61\xd2\x3c", "", + "\x51\xf8\xc1\xf7\x31\xea\x14\xac\xdb\x21\x0a\x6d\x97\x3e\x07", + "\x43\xfc\x10\x1b\xff\x4b\x32\xbf\xad\xd3\xda\xf5\x7a\x59\x0e", + "\xec\x04\xaa\xcb\x71\x48\xa8\xb8\xbe\x44\xcb\x7e\xaf\x4e\xfa\x69", 1, + 0, 120 }, + { 96, 256, 128, 79, + "\x59\xd4\xea\xfb\x4d\xe0\xcf\xc7\xd3\xdb\x99\xa8\xf5\x4b\x15\xd7\xb3" + "\x9f\x0a\xcc\x8d\xa6\x97\x63\xb0\x19\xc1\x69\x9f\x87\x67\x4a", + "\x2f\xcb\x1b\x38\xa9\x9e\x71\xb8\x47\x40\xad\x9b", "", + 
"\x54\x9b\x36\x5a\xf9\x13\xf3\xb0\x81\x13\x1c\xcb\x6b\x82\x55\x88", + "\xf5\x8c\x16\x69\x01\x22\xd7\x53\x56\x90\x7f\xd9\x6b\x57\x0f\xca", + "\x28\x75\x2c\x20\x15\x30\x92\x81\x8f\xab\xa2\xa3\x34\x64\x0d\x6e", 1, + 0, 128 }, + { 96, 256, 128, 80, + "\x3b\x24\x58\xd8\x17\x6e\x16\x21\xc0\xcc\x24\xc0\xc0\xe2\x4c\x1e\x80" + "\xd7\x2f\x7e\xe9\x14\x9a\x4b\x16\x61\x76\x62\x96\x16\xd0\x11", + "\x45\xaa\xa3\xe5\xd1\x6d\x2d\x42\xdc\x03\x44\x5d", "", + "\x3f\xf1\x51\x4b\x1c\x50\x39\x15\x91\x8f\x0c\x0c\x31\x09\x4a\x6e" + "\x1f", + "\x73\xa6\xb6\xf4\x5f\x6c\xcc\x51\x31\xe0\x7f\x2c\xaa\x1f\x2e\x2f" + "\x56", + "\x2d\x73\x79\xec\x1d\xb5\x95\x2d\x4e\x95\xd3\x0c\x34\x0b\x1b\x1d", 1, + 0, 136 }, + { 96, 256, 128, 81, + "\x02\x12\xa8\xde\x50\x07\xed\x87\xb3\x3f\x1a\x70\x90\xb6\x11\x4f\x9e" + "\x08\xce\xfd\x96\x07\xf2\xc2\x76\xbd\xcf\xdb\xc5\xce\x9c\xd7", + "\xe6\xb1\xad\xf2\xfd\x58\xa8\x76\x2c\x65\xf3\x1b", "", + "\x10\xf1\xec\xf9\xc6\x05\x84\x66\x5d\x9a\xe5\xef\xe2\x79\xe7\xf7\x37" + "\x7e\xea\x69\x16\xd2\xb1\x11", + "\x08\x43\xff\xf5\x2d\x93\x4f\xc7\xa0\x71\xea\x62\xc0\xbd\x35\x1c\xe8" + "\x56\x78\xcd\xe3\xea\x2c\x9e", + "\x73\x55\xfd\xe5\x99\x00\x67\x15\x05\x38\x13\xce\x69\x62\x37\xa8", 1, + 0, 192 }, + { 96, 256, 128, 82, + "\xb2\x79\xf5\x7e\x19\xc8\xf5\x3f\x2f\x96\x3f\x5f\x25\x19\xfd\xb7\xc1" + "\x77\x9b\xe2\xca\x2b\x3a\xe8\xe1\x12\x8b\x7d\x6c\x62\x7f\xc4", + "\x98\xbc\x2c\x74\x38\xd5\xcd\x76\x65\xd7\x6f\x6e", "\xc0", + "\xfc\xc5\x15\xb2\x94\x40\x8c\x86\x45\xc9\x18\x3e\x3f\x4e\xce\xe5\x12" + "\x78\x46\xd1", + "\xeb\x55\x00\xe3\x82\x59\x52\x86\x6d\x91\x12\x53\xf8\xde\x86\x0c\x00" + "\x83\x1c\x81", + "\xec\xb6\x60\xe1\xfb\x05\x41\xec\x41\xe8\xd6\x8a\x64\x14\x1b\x3a", 1, + 8, 160 }, + { 96, 256, 128, 83, + "\xcd\xcc\xfe\x3f\x46\xd7\x82\xef\x47\xdf\x4e\x72\xf0\xc0\x2d\x9c\x7f" + "\x77\x4d\xef\x97\x0d\x23\x48\x6f\x11\xa5\x7f\x54\x24\x7f\x17", + "\x37\x61\x87\x89\x46\x05\xa8\xd4\x5e\x30\xde\x51", + "\x95\x68\x46\xa2\x09\xe0\x87\xed", + "\xe2\x8e\x0e\x9f\x9d\x22\x46\x3a\xc0\xe4\x26\x39\xb5\x30\xf4\x21\x02" + "\xfd\xed\x75", + "\xfe\xca\x44\x95\x24\x47\x01\x5b\x5d\xf1\xf4\x56\xdf\x8c\xa4\xbb\x4e" + "\xee\x2c\xe2", + "\x08\x2e\x91\x92\x4d\xee\xb7\x78\x80\xe1\xb1\xc8\x4f\x9b\x8d\x30", 1, + 64, 160 }, + { 96, 256, 128, 84, + "\xf3\x23\x64\xb1\xd3\x39\xd8\x2e\x4f\x13\x2d\x8f\x4a\x0e\xc1\xff\x7e" + "\x74\x65\x17\xfa\x07\xef\x1a\x7f\x42\x2f\x4e\x25\xa4\x81\x94", + "\x5a\x86\xa5\x0a\x0e\x8a\x17\x9c\x73\x4b\x99\x6d", + "\xab\x2a\xc7\xc4\x4c\x60\xbd\xf8\x22\x8c\x78\x84\xad\xb2\x01\x84", + "\x43\x89\x1b\xcc\xb5\x22\xb1\xe7\x2a\x6b\x53\xcf\x31\xc0\x74\xe9\xd6" + "\xc2\xdf\x8e", + "\x43\xdd\xa8\x32\xe9\x42\xe2\x86\xda\x31\x4d\xaa\x99\xbe\xf5\x07\x1d" + "\x9d\x2c\x78", + "\xc3\x92\x25\x83\x47\x6c\xed\x57\x54\x04\xdd\xb8\x5d\xd8\xcd\x44", 1, + 128, 160 }, + { 96, 256, 128, 85, + "\xff\x00\x89\xee\x87\x0a\x4a\x39\xf6\x45\xb0\xa5\xda\x77\x4f\x7a\x59" + "\x11\xe9\x69\x6f\xc9\xca\xd6\x46\x45\x2c\x2a\xa8\x59\x5a\x12", + "\xbc\x2a\x77\x57\xd0\xce\x2d\x8b\x1f\x14\xcc\xd9", + "\x97\x2a\xb4\xe0\x63\x90\xca\xae\x8f\x99\xdd\x6e\x21\x87\xbe\x6c\x7f" + "\xf2\xc0\x8a\x24\xbe\x16\xef", + "\x74\x8b\x28\x03\x16\x21\xd9\x5e\xe6\x18\x12\xb4\xb4\xf4\x7d\x04\xc6" + "\xfc\x2f\xf3", + "\xa9\x29\xee\x7e\x67\xc7\xa2\xf9\x1b\xbc\xec\x63\x89\xa3\xca\xf4\x3a" + "\xb4\x93\x05", + "\xeb\xec\x67\x74\xb9\x55\xe7\x89\x59\x1c\x82\x2d\xab\x73\x9e\x12", 1, + 192, 160 }, + { 96, 256, 128, 86, + "\x5b\x1d\x10\x35\xc0\xb1\x7e\xe0\xb0\x44\x47\x67\xf8\x0a\x25\xb8\xc1" + "\xb7\x41\xf4\xb5\x0a\x4d\x30\x52\x22\x6b\xaa\x1c\x6f\xb7\x01", + 
"\xd6\x10\x40\xa3\x13\xed\x49\x28\x23\xcc\x06\x5b", "", + "\xd0\x96\x80\x31\x81\xbe\xef\x9e\x00\x8f\xf8\x5d\x5d\xdc\x38\xdd\xac" + "\xf0\xf0\x9e\xe5\xf7\xe0\x7f\x1e\x40\x79\xcb\x64\xd0\xdc\x8f\x5e\x67" + "\x11\xcd\x49\x21\xa7\x88\x7d\xe7\x6e\x26\x78\xfd\xc6\x76\x18\xf1\x18" + "\x55\x86\xbf\xea\x9d\x4c\x68\x5d\x50\xe4\xbb\x9a\x82", + "\xc7\xd1\x91\xb6\x01\xf8\x6c\x28\xb6\xa1\xbd\xef\x6a\x57\xb4\xf6\xee" + "\x3a\xe4\x17\xbc\x12\x5c\x38\x1c\xdf\x1c\x4d\xac\x18\x4e\xd1\xd8\x4f" + "\x11\x96\x20\x6d\x62\xca\xd1\x12\xb0\x38\x84\x57\x20\xe0\x2c\x06\x11" + "\x79\xa8\x83\x6f\x02\xb9\x3f\xa7\x00\x83\x79\xa6\xbf", + "\xf1\x56\x12\xf6\xc4\x0f\x2e\x0d\xb6\xdc\x76\xfc\x48\x22\xfc\xfe", 1, + 0, 512 }, + { 96, 256, 128, 87, + "\xd7\xad\xdd\x38\x89\xfa\xdf\x8c\x89\x3e\xee\x14\xba\x2b\x7e\xa5\xbf" + "\x56\xb4\x49\x90\x48\x69\x61\x5b\xd0\x5d\x5f\x11\x4c\xf3\x77", + "\x8a\x3a\xd2\x6b\x28\xcd\x13\xba\x65\x04\xe2\x60", "", + "\xc8\x77\xa7\x6b\xf5\x95\x56\x07\x72\x16\x7c\x6e\x3b\xcc\x70\x53\x05" + "\xdb\x9c\x6f\xcb\xeb\x90\xf4\xfe\xa8\x51\x16\x03\x8b\xc5\x3c\x3f\xa5" + "\xb4\xb4\xea\x0d\xe5\xcc\x53\x4f\xbe\x1c\xf9\xae\x44\x82\x4c\x6c\x2c" + "\x0a\x5c\x88\x5b\xd8\xc3\xcd\xc9\x06\xf1\x26\x75\x73\x7e\x43\x4b\x98" + "\x3e\x1e\x23\x1a\x52\xa2\x75\xdb\x5f\xb1\xa0\xca\xc6\xa0\x7b\x3b\x7d" + "\xcb\x19\x48\x2a\x5d\x3b\x06\xa9\x31\x7a\x54\x82\x6c\xea\x6b\x36\xfc" + "\xe4\x52\xfa\x9b\x54\x75\xe2\xaa\xf2\x54\x99\x49\x9d\x8a\x89\x32\xa1" + "\x9e\xb9\x87\xc9\x03\xbd\x85\x02\xfe", + "\x53\xcc\x8c\x92\x0a\x85\xd1\xac\xcb\x88\x63\x6d\x08\xbb\xe4\x86\x9b" + "\xfd\xd9\x6f\x43\x7b\x2e\xc9\x44\x51\x21\x73\xa9\xc0\xfe\x7a\x47\xf8" + "\x43\x41\x33\x98\x9b\xa7\x7d\xda\x56\x1b\x7e\x37\x01\xb9\xa8\x3c\x3b" + "\xa7\x66\x0c\x66\x6b\xa5\x9f\xef\x96\x59\x8e\xb6\x21\x54\x4c\x63\x80" + "\x6d\x50\x9a\xc4\x76\x97\x41\x2f\x95\x64\xeb\x0a\x2e\x1f\x72\xf6\x59" + "\x9f\x56\x66\xaf\x34\xcf\xfc\xa0\x65\x73\xff\xb4\xf4\x7b\x02\xf5\x9f" + "\x21\xc6\x43\x63\xda\xec\xb9\x77\xb4\x41\x5f\x19\xfd\xda\x3c\x9a\xae" + "\x50\x66\xa5\x7b\x66\x9f\xfa\xa2\x57", + "\x5e\x63\x37\x4b\x51\x9e\x6c\x36\x08\x32\x19\x43\xd7\x90\xcf\x9a", 1, + 0, 1024 }, + { 96, 256, 128, 88, + "\x31\x7b\xa3\x31\x30\x7f\x3a\x3d\x3d\x82\xee\x1f\xda\xb7\x0f\x62\xa1" + "\x55\xaf\x14\xda\xf6\x31\x30\x7a\x61\xb1\x87\xd4\x13\xe5\x33", + "\xa6\x68\x7c\xf5\x08\x35\x6b\x17\x46\x25\xde\xaa", "", + "\x32\xc1\xd0\x91\x07\xc5\x99\xd3\xcc\xe4\xe7\x82\x17\x9c\x96\x6c\x6e" + "\xf9\x63\x68\x9d\x45\x35\x1d\xbe\x0f\x6f\x88\x1d\xb2\x73\xe5\x4d\xb7" + "\x6f\xc4\x8f\xdc\x5d\x30\xf0\x89\xda\x83\x83\x01\xa5\xf9\x24\xbb\xa3" + "\xc0\x44\xe1\x9b\x3e\xd5\xaa\x6b\xe8\x71\x18\x55\x40\x04\xca\x30\xe0" + "\x32\x43\x37\xd9\x87\x83\x94\x12\xbf\x8f\x8b\xbd\xd5\x37\x20\x5d\x4b" + "\x0e\x21\x20\xe9\x65\x37\x32\x35\xd6\xcb\xd2\xfb\x37\x76\xba\x0a\x38" + "\x4e\xc1\xd9\xb7\xc6\x31\xa0\x37\x9f\xf9\x97\xc3\xf9\x74\xa6\xf7\xbb" + "\xf4\xfd\x23\x01\x62\x11\xf5\xfc\x10\xac\xad\xb5\xe4\x00\xd2\xff\x0f" + "\xdf\xd1\x93\xf5\xc6\xfc\x6d\x4f\x72\x71\xdf\xd1\x34\x9e\xd8\x0f\xbe" + "\xda\xeb\xb1\x55\xb9\xb0\x2f\xb3\x07\x44\x95\xd5\x5f\x9a\x24\x55\xf5" + "\x9b\xf6\xf1\x13\x19\x1a\x02\x9c\x6b\x0b\xa7\x5d\x97\xcd\xc0\xc8\x4f" + "\x13\x18\x36\x33\x7f\x29\xf9\xd9\x6c\xa4\x48\xee\xc0\xcc\x46\xd1\xca" + "\x8b\x37\x35\x66\x19\x79\xd8\x33\x02\xfe\xc0\x8f\xff\xcf\x5e\x58\xf1" + "\x2b\x1e\x70\x50\x65\x7b\x1b\x97\xc6\x4a\x4e\x07\xe3\x17\xf5\x54\xf8" + "\x31\x0b\x6c\xcb\x49\xf3\x6d\x48\xc5\x78\x16\xd2\x49\x52\xaa\xda\x71" + "\x1d\x4f", + "\xd7\xee\xbc\x95\x87\xaa\x21\x13\x6f\xa3\x8b\x41\xcf\x0e\x2d\xb0\x3a" + 
"\x7e\xa2\xba\x9e\xad\xdf\x83\xd3\x3f\x78\x10\x93\x61\x7b\xf5\x0f\x49" + "\xb2\xbf\xe2\xf7\x17\x3b\x11\x39\x12\xe2\xe1\x77\x5f\x40\xed\xfe\xd8" + "\xb3\xb0\x09\x9b\x9e\x1c\x22\x0d\xd1\x03\xbe\x61\x66\x21\x0b\x01\x02" + "\x9f\xeb\x24\xed\x9e\x20\x61\x4e\xdd\xc3\xce\xbe\x41\xb0\x07\x9a\x9a" + "\x8c\x11\x7b\x59\x6c\x90\x28\x8e\xff\xd3\x79\x6f\xbd\x0c\x7e\x8e\xab" + "\x00\x60\x9a\x64\xbe\x3a\xd9\x59\x7c\xdb\xf3\xa8\x18\xc2\x60\xcd\x93" + "\x8b\xdf\x23\x2e\x40\x59\xae\x35\xa2\x57\x1a\x83\x88\x87\xfc\x19\x69" + "\x12\x17\x94\x86\xe0\x46\xa6\x22\x27\xa4\xca\xdd\xce\x38\xcb\xbc\x37" + "\x58\x7b\xb9\x43\x9e\xc6\x37\x60\x2b\x68\x18\xc5\xcb\xe3\xc7\x1a\x7c" + "\x41\x43\x96\x05\x33\xdc\x74\x17\x4b\xd3\x15\xc8\xdb\x22\x7b\x69\xb5" + "\x5b\xb7\xfc\x30\xba\x1d\x52\x13\xa7\x52\xec\x33\x92\x50\x43\xce\xfb" + "\xc1\xa6\x29\x43\xee\x5f\x34\xd5\xda\x01\x79\x9e\x69\x09\x4d\x73\x2a" + "\xef\x52\xf8\xe0\x36\x98\x0d\x00\x70\xe2\x2e\x17\x3c\x67\xc4\xbb\xcc" + "\xa6\x1c\xc1\xee\xdb\xd6\x01\x65\x16\xc5\x92\x14\x48\x19\xdf\x13\x20" + "\x4d\xee", + "\xbf\x05\x40\xd3\x4b\x20\xf7\x61\x10\x1b\xc6\x08\xb0\x24\x58\xf2", 1, + 0, 2056 }, + { 96, 256, 128, 89, + "\x2c\xe6\xb4\xc1\x5f\x85\xfb\x2d\xa5\xcc\x6c\x26\x94\x91\xee\xf2\x81" + "\x98\x03\x09\x18\x12\x49\xeb\xf2\x83\x2b\xd6\xd0\x73\x2d\x0b", + "\xc0\x64\xfa\xe9\x17\x3b\x17\x3f\xd6\xf1\x1f\x34", + "\x49\x8d\x30\x75\xb0\x9f\xed\x99\x82\x80\x58\x3d\x61\xbb\x36\xb6\xce" + "\x41\xf1\x30\x06\x3b\x80\x82\x4d\x15\x86\xe1\x43\xd3\x49\xb1\x26\xb1" + "\x6a\xa1\x0f\xe5\x73\x43\xed\x22\x3d\x63\x64\xee\x60\x22\x57\xfe\x31" + "\x3a\x7f\xc9\xbf\x90\x88\xf0\x27\x79\x5b\x8d\xc1\xd3", + "\xf8\xa2\x7a\x4b\xaf\x00\xdc\x05\x55\xd2\x22\xf2\xfa\x4f\xb4\x2d\xc6" + "\x66\xea\x3c", + "\xae\xd5\x8d\x8a\x25\x2f\x74\x0d\xba\x4b\xf6\xd3\x67\x73\xbd\x5b\x41" + "\x23\x4b\xba", + "\x01\xf9\x3d\x74\x56\xaa\x18\x4e\xbb\x49\xbe\xa4\x72\xb6\xd6\x5d", 1, + 512, 160 }, + { 96, 256, 128, 90, + "\x44\xc8\xd0\xcd\xb8\xf7\xe7\x36\xcf\xd9\x97\xc8\x72\xa5\xd9\xc5\xef" + "\x30\xaf\xbe\x44\xb6\x56\x66\x06\xb9\x0a\xa5\xe3\xe8\xb7\x97", + "\x6f\x39\xaf\xba\x02\x1e\x4c\x36\xeb\x92\x96\x2e", + "\x98\xd1\xca\x17\x88\xcb\xeb\x30\x0e\xa5\xc6\xb1\xee\xc9\x5e\xb2\x34" + "\x71\x77\x20\x14\x00\x91\x3d\x45\x22\x56\x22\xb6\x27\x3e\xec\x8a\x74" + "\xc3\xf1\x2c\x8d\x52\x48\xda\xbe\xe5\x86\x22\x97\x86\xff\x19\x2c\x4d" + "\xf0\xc7\x95\x47\xf7\xad\x6a\x92\xd7\x8d\x9f\x89\x52\x75\x86\x35\x78" + "\x3a\xdd\x2a\x59\x77\xd3\x86\xe0\xae\xf7\x64\x82\x21\x1d\x2c\x3a\xe9" + "\x8d\xe4\xba\xad\xb3\xf8\xb3\x5b\x51\x04\x64\x75\x5d\xc7\x5c\xeb\x2b" + "\xf2\x5b\x23\x33\x17\x52\x3f\x39\x9a\x6c\x50\x7d\xb2\x14\xf0\x85\xfa" + "\x28\x18\xf0\xd3\x70\x2b\x10\x95\x2b", + "\x2e\x6f\x40\xf9\xd3\x72\x58\x36\xac\x0c\x85\x81\x77\x93\x8f\xd6\x7b" + "\xe1\x94\x32", + "\xb4\x24\x28\xf8\x09\x4e\xf7\xe6\x5c\x9e\x8c\x45\xef\x3e\x95\xc2\x8c" + "\xe0\x7d\x72", + "\x32\xb2\x5d\xfb\xb8\x96\xd0\xf9\xd7\x9c\x82\x3b\xdd\x8e\x5d\x06", 1, + 1024, 160 }, + { 96, 256, 128, 91, + "\xe4\x00\x03\xd6\xe0\x8a\xb8\x0b\x4b\xfc\x84\x00\xef\x11\x29\x45\xa9" + "\x01\xec\x64\xa1\xb6\x53\x6c\xa9\x26\x65\x09\x0d\x60\x8b\xc4", + "\x9f\x09\x5d\xaf\xe6\xf6\xe0\xfb\xaf\xbb\xe0\x2e", + "\x42\x2d\x5e\xfc\xff\xe3\x64\x90\x59\x84\x53\x3f\x0a\x57\x9d\x80\xb1" + "\x8b\xda\x7b\x29\xe6\xe4\x64\x98\xef\xfb\xa5\x3c\x35\x01\x12\xc0\xbb" + "\xb8\xdc\x4c\xe0\x3b\xb0\xc6\x9e\x1d\x0b\xaa\x19\xf0\x63\x71\x08\xaa" + "\x4a\x16\xb0\x9a\x28\x1f\x23\x28\x39\xd8\x7b\x6d\x0e\x42\xbe\x1b\xaa" + "\x7c\x67\xf1\xbe\x97\x0e\xa1\x69\xd3\x96\x0b\x9f\xe0\xa6\x1f\x11\xcd" + 
"\x2e\xb7\x39\x8c\x19\xe6\x41\xfe\xb4\x3f\x77\x8e\x25\x7a\x39\x70\x63" + "\xdb\x5b\x3a\x67\x07\xe9\xdb\x62\x38\x70\x54\xf9\xf9\xd4\x4f\x14\x35" + "\x83\xe6\x3e\xda\xd4\x5a\x00\x25\x1e\x51\x73\xd7\x50\x5f\x22\xa8\xbc" + "\xe2\x32\xe5\x6c\x2c\x27\x6a\x58\x03\x3a\xe3\x0d\x5d\xbf\x4e\x35\xa8" + "\x62\xe4\x2a\xf5\x73\xbe\x38\xc6\x40\x6d\x9b\x4c\x7a\xcb\xf2\x75\xfe" + "\x36\xc0\xec\xf2\xc4\x64\x28\x98\xa3\x0e\x61\x46\xfa\xc9\x92\xa1\x64" + "\x05\xf9\x83\x12\x12\x6b\x7a\x37\x22\xf5\xdf\xb7\xdd\x4e\x49\x11\xc1" + "\x42\x6b\x2e\x01\xd0\x4e\x9b\xe6\xdb\x37\x71\x10\x0f\x7d\x7d\x42\x82" + "\xe4\xea\x58\x5f\x36\x46\x24\x1e\x80\x7c\xa6\x4f\x06\xa7\xfa\x9b\x70" + "\x03\xd7\x10\xb8\x01\xd6\x6f\x51\x7d\x2d\x5e\xbd\x74\x08\x72\xde\xba" + "\x13\xd0", + "\x38\xc3\xf4\x4b\xc5\x76\x5d\xe1\xf3\xd1\xc3\x68\x4c\xd0\x9c\xdd\xef" + "\xaf\x29\x8d", + "\xd4\xa7\x9f\x72\x94\x87\x93\x59\x50\xec\x03\x2e\x69\x0a\xb8\xfe\x25" + "\xc4\x15\x8e", + "\x87\x6d\x2f\x33\x4f\x47\x96\x8b\x10\xc1\x03\x85\x9d\x43\x6d\xb8", 1, + 2056, 160 }, + { 96, 256, 128, 92, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", "", + "\x56\x10\x08\xfa\x07\xa6\x8f\x5c\x61\x28\x5c\xd0\x13\x46\x4e\xaf", + "\x23\x29\x3e\x9b\x07\xca\x7d\x1b\x0c\xae\x7c\xc4\x89\xa9\x73\xb3", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 0, 128 }, + { 96, 256, 128, 93, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", "", + "\xc6\x15\x22\x44\xce\xa1\x97\x8d\x3e\x0b\xc2\x74\xcf\x8c\x0b\x3b", + "\x7c\xb6\xfc\x7c\x6a\xbc\x00\x9e\xfe\x95\x51\xa9\x9f\x36\xa4\x21", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 0, 128 }, + { 96, 256, 128, 94, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9d\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 95, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9e\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 96, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x1c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 97, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + 
"\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe9\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 98, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\x76\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 99, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd9\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 100, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xda\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 101, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\x71\xbf\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 102, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbe\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 103, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\x3f\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 104, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\xa8\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 105, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + 
"\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x73\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 106, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xeb\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 107, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xe8\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 108, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\x6a\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 109, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\xdc", 0, + 0, 128 }, + { 96, 256, 128, 110, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\xdf", 0, + 0, 128 }, + { 96, 256, 128, 111, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\x9d", 0, + 0, 128 }, + { 96, 256, 128, 112, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbf\x88\x72\x32\xea\xb5\x90\x5d", 0, + 0, 128 }, + { 96, 256, 128, 113, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", 
"", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9d\xe8\xfe\xf6\xd8\xab\x1b\xf1\xbe\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 114, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\x76\xd8\xab\x1b\x71\xbf\x88\x72\x32\xea\xb5\x90\xdd", 0, + 0, 128 }, + { 96, 256, 128, 115, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9c\xe8\xfe\xf6\xd8\xab\x1b\x71\xbf\x88\x72\x32\xea\xb5\x90\x5d", 0, + 0, 128 }, + { 96, 256, 128, 116, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x63\x17\x01\x09\x27\x54\xe4\x0e\x40\x77\x8d\xcd\x15\x4a\x6f\x22", 0, + 0, 128 }, + { 96, 256, 128, 117, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, 128 }, + { 96, 256, 128, 118, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, 128 }, + { 96, 256, 128, 119, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x1c\x68\x7e\x76\x58\x2b\x9b\x71\x3f\x08\xf2\xb2\x6a\x35\x10\x5d", 0, + 0, 128 }, + { 96, 256, 128, 120, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\xb2\x06\x14\x57\xc0\x75\x9f\xc1\x74\x9f\x17\x4e\xe1\xcc\xad\xfa", + "\x9d\xe9\xff\xf7\xd9\xaa\x1a\xf0\xbe\x89\x73\x33\xeb\xb4\x91\xdc", 0, + 0, 128 }, + { 128, 192, 128, 121, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\x02\x9e\x0e\x77\x7d\xb0\x92\xb1\x25\x35\xd0\x43\x01\x2f\x09\xba", + "", + 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xf8\x3c\xee\x46\x73\x36\xe1\xa0\x9b\x75\xf2\x4e\x9b\x43\x85\xc9\x9c" + "\x13\xe6\xaf\x72\x22\x56\xa6\x61\x29\xec\xe9\x61\xfe\x80\x3b\x16\x7b" + "\xad\x20\x6f\x50\x17\xfb", + "\x09\x33\x8a\x42\xf0\xac\xc1\x4f\x97\xc0\x64\xf5\x2f\x5f\x16\x88", 1, + 0, 320 }, + { 128, 192, 128, 122, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\xf1\xbe\x3b\x06\xb7\xfe\xac\x07\xe7\xea\xb6\x29\xf5\x56\x04\x7b", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x0b\x32\xb6\x48\xa2\xc2\x8e\x9e\xdd\x7c\xee\x08\xee\xeb\x90\x00\x34" + "\xca\xe7\x21\x5e\x5a\xb1\xe2\x01\xbd\x2e\xed\x10\x32\xc5\xa9\x78\x66" + "\xba\x58\x2a\x34\x58\xa4", + "\x90\xbe\x36\x06\xde\x58\xbd\x77\x8f\xa5\xbe\xff\x4a\x41\x02\xbd", 1, + 0, 320 }, + { 128, 192, 128, 123, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\xde\x9e\xb6\x3b\x1d\xae\xd3\x21\xa1\x1b\x75\x47\xcc\x9e\x22\x3c", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x57\x5e\x2e\xce\xc2\xb3\xc7\x2d\x4e\x80\x83\x0d\x0d\x85\x9a\xd9\xe4" + "\x2c\x29\xc4\xa6\x8d\x8d\x9d\x8d\x23\x43\x4d\xe2\xcd\x07\x73\x3b\xe4" + "\x9d\x62\xac\x1a\xe0\x85", + "\x6e\x4d\x63\x96\x12\x5a\x10\xdf\x54\x43\xbd\x0c\xbc\x85\x66\xd1", 1, + 0, 320 }, + { 128, 192, 128, 124, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\x40\xbb\x0a\xbe\xbc\x48\x3f\xf6\xd5\x67\x12\x41\xff\x5d\x66\xc6", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x2a\x81\x88\x88\xd1\xf0\x9f\x32\xaa\x7b\xee\xdd\x28\x69\xb4\x46\x57" + "\x5e\x2e\xce\xc2\xb3\xc7\x2d\x4e\x80\x83\x0d\x0d\x85\x9a\xd9\xe4\x2c" + "\x29\xc4\xa6\x8d\x8d\x9d", + "\xdc\x48\x1f\x17\x25\x45\x26\x8e\xff\x63\xab\x04\x90\x40\x3d\xc3", 1, + 0, 320 }, + { 128, 192, 128, 125, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\x20\xd5\xcf\x30\x5e\x63\x0a\x8f\x49\xe3\xbb\x4b\xab\x18\xab\xc9", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x96\xd3\x6b\x79\x5f\x8e\x7e\xdf\x6a\x8e\x0d\xbc\xd2\x0d\x6c\x07\x2a" + "\x81\x88\x88\xd1\xf0\x9f\x32\xaa\x7b\xee\xdd\x28\x69\xb4\x46\x57\x5e" + "\x2e\xce\xc2\xb3\xc7\x2d", + "\x8a\x3a\x22\xbf\x25\x92\x95\x8b\x93\x02\x92\xaa\x47\xf5\x90\xe8", 1, + 0, 320 }, + { 128, 192, 128, 126, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\x25\x53\x58\xa7\x1a\x0e\x57\x31\xf6\xdd\x6c\xe2\x8e\x15\x8a\xe6", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xcf\xce\x3d\x92\x0f\x0e\x01\xf0\xbb\x49\xa7\x51\x95\x5b\x23\x6d\x1b" + "\x88\x7b\xae\xfd\x25\xc4\x7f\x41\x30\x3c\x46\xd5\xc7\xbf\x9c\xa4\xc2" + 
"\xc4\x5a\x8f\x1e\x66\x56", + "\x2d\xb9\xdc\x1b\x7f\xd3\x15\xdf\x1c\x95\x43\x24\x32\xfc\xf4\x74", 1, + 0, 320 }, + { 128, 192, 128, 127, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\xbb\x76\xe4\x22\xbb\xe8\xbb\xe6\x82\xa1\x0b\xe4\xbd\xd6\xce\x1c", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x69\xa2\x41\x69\x79\x2e\x9a\x07\xf6\xe6\xf4\x73\x6f\xa9\x72\xdc\xcf" + "\xce\x3d\x92\x0f\x0e\x01\xf0\xbb\x49\xa7\x51\x95\x5b\x23\x6d\x1b\x88" + "\x7b\xae\xfd\x25\xc4\x7f", + "\x82\xad\x96\x7f\x7a\xc1\x90\x84\x35\x4f\x69\xa7\x51\x44\x3f\xb2", 1, + 0, 320 }, + { 128, 192, 128, 128, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\xdb\x18\x21\xac\x59\xc3\x8e\x9f\x1e\x25\xa2\xee\xe9\x93\x03\x13", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x4e\x44\x17\xa8\x3b\xea\xc1\xeb\x7e\x24\x45\x6a\x05\xf6\xba\x55\x69" + "\xa2\x41\x69\x79\x2e\x9a\x07\xf6\xe6\xf4\x73\x6f\xa9\x72\xdc\xcf\xce" + "\x3d\x92\x0f\x0e\x01\xf0", + "\x47\x2d\x5d\xd5\x82\xdc\x05\xef\x5f\xc4\x96\xb6\x12\x02\x3c\xb2", 1, + 0, 320 }, + { 128, 192, 128, 129, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\xf7\xa0\x2e\xcc\xa0\x30\x64\xb2\xef\x3c\xce\x9f\xea\xb7\x9f\x07", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x6f\x8e\x17\x4e\xfc\xa3\x09\x72\x99\xf7\x84\xef\xd4\xca\xff\x0b\xf1" + "\x68\xc3\xe5\x16\x5b\x9a\xd3\xd2\x00\x62\x00\x98\x48\x04\x4e\xef\x8f" + "\x31\xf7\xd2\xfe\xad\x05", + "\xca\xff\x72\x38\x26\xdf\x15\x09\x34\xae\xe3\x20\x1b\xa1\x75\xe7", 1, + 0, 320 }, + { 128, 192, 128, 130, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\x69\x85\x92\x49\x01\xd6\x88\x65\x9b\x40\xa9\x99\xd9\x74\xdb\xfd", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xaf\x19\x30\x90\xce\x3d\x43\xa3\x88\xa1\xd2\x94\xa0\x96\x16\x90\x6f" + "\x8e\x17\x4e\xfc\xa3\x09\x72\x99\xf7\x84\xef\xd4\xca\xff\x0b\xf1\x68" + "\xc3\xe5\x16\x5b\x9a\xd3", + "\x3b\x08\x95\x8b\xe1\x28\x6c\x2b\x4a\xcb\xa0\x2b\x36\x74\xad\xb2", 1, + 0, 320 }, + { 128, 192, 128, 131, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\x3f\x11\x88\x54\x6c\x65\xed\x0f\xc5\x5e\x75\x03\x2c\x68\xee\x44", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x5d\xec\xcf\x83\x8b\x2c\xf5\xf8\x69\xc9\x0d\x2a\x61\x11\x60\xb1\xe5" + "\x78\xab\x81\x21\xb9\x37\x35\xcb\xa4\xa1\x93\x06\x47\xb8\xc4\xc8\x4b" + "\xf7\x76\x33\x3e\xe4\x5a", + "\xc1\x4d\x52\x20\x8f\x0f\x51\xb8\x16\xa4\x89\x71\xea\xf8\xff\x7e", 1, + 0, 320 }, + { 128, 192, 128, 132, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\xa1\x34\x34\xd1\xcd\x83\x01\xd8\xb1\x22\x12\x05\x1f\xab\xaa\xbe", + "", + 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xd2\xca\xe1\x68\x4a\xa4\x07\xa1\x3a\x2e\x2d\xa5\x35\x7e\x29\xf5\x5d" + "\xec\xcf\x83\x8b\x2c\xf5\xf8\x69\xc9\x0d\x2a\x61\x11\x60\xb1\xe5\x78" + "\xab\x81\x21\xb9\x37\x35", + "\xea\x2d\x01\x80\x99\xcd\x79\x25\xc5\x07\xce\xf0\xce\xdd\xb0\xae", 1, + 0, 320 }, + { 128, 192, 128, 133, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + "\x5c\x7d\x3f\x81\xd4\xb5\x05\x5e\xd6\xf8\xdb\x53\x61\x45\x87\xa4", + "\x54\x1b\x83\x5d\xc8\x28\xd5\x41\x07\x3f\x7d\x7d\x75\x04\xeb\xf5", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 0, 128 }, + { 128, 192, 128, 134, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "", + "\x6a\x34\x7a\xd1\x19\x0e\x72\xed\xe6\x11\x04\x4e\x74\x75\xf0\xeb", + "\xa3\xf3\x61\x54\x33\x1c\x19\x66\x24\x56\x4b\xc3\x95\xe4\x9c\x3b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 0, 128 }, + { 128, 192, 128, 135, + "\xfa\xe2\xa1\x41\x97\xc7\xd1\x14\x00\x61\xfe\x7c\x3d\x11\xd9\xf7\x7c" + "\x79\x56\x2e\x35\x93\xa9\x9b", + "\xbc\x28\x43\x39\x53\x77\x2d\x57\xbb\xd9\x33\x10\x0c\xd4\x7a\x56", + "", "", "", + "\x1b\xb9\x43\x31\xf2\x6c\xad\x24\x03\x6c\xfe\xff\x34\xb8\x9a\xaf", 1, + 0, 0 }, + { 128, 192, 128, 136, + "\xce\xe9\xab\xbc\x26\xb6\x3e\x16\x9f\x0c\xed\x62\x1f\xe2\x1d\x95\x90" + "\x4e\x75\xb8\x81\xd9\x3e\x6b", + "\x1e\x82\x59\xe0\xa4\x3e\x57\x10\x68\xf7\x01\xcd\x20\x64\xfc\x0c", + "", "\x46", "\xdc", + "\xaf\x1f\x55\x35\xb1\x25\xb3\x4f\xc4\x66\x90\x2e\xa4\x0c\xb3\xa2", 1, + 0, 8 }, + { 128, 192, 128, 137, + "\x18\x9f\x0b\xd3\x90\xba\x40\x63\x25\x86\xa4\x5c\x39\x73\x5c\x2b\x87" + "\x11\x33\x29\xc8\x00\xf3\x94", + "\xc8\x44\x42\xd6\x97\x5f\x03\x59\x73\x7d\xe0\xfa\x82\x8f\x95\x8e", + "", "\xb4\xbc\xd7\xb8\xee\xca\x30\x50\xdd\x17\x68\x2c\x6a\x91\x4e", + "\x2a\xab\x5c\x87\xdc\xb4\xa4\xda\xe4\xe9\x75\xdd\xb6\x5a\xab", + "\x6b\x03\xb7\x55\x7c\x71\x31\xe2\x35\x2e\x49\x5d\x54\xe6\x1a\xef", 1, + 0, 120 }, + { 128, 192, 128, 138, + "\xb0\x72\x4f\x15\xdf\x5b\x79\x2c\x2f\x49\xbc\x51\xdf\x0a\xc5\xaa\xd6" + "\x9b\xe0\x03\x09\x81\x61\x3c", + "\x13\xcd\x52\x6e\xc7\x7b\x58\xf6\x2d\x48\xd0\x3f\x8b\x88\xf2\xb8", + "", + "\x8d\xa3\xab\x9c\x3d\x19\x5b\x04\xdf\x45\x2a\xd2\x39\x53\xda\x4d", + "\xd1\x27\xfd\x2e\x67\xc0\x88\x7d\x90\xeb\x92\xb9\x1f\x35\x7d\x97", + "\xeb\x05\xbd\xa9\x37\xfa\xee\xd2\x7f\x88\x33\x29\x5d\x4b\xa5\x59", 1, + 0, 128 }, + { 128, 192, 128, 139, + "\x99\x87\x50\xba\x78\x48\x41\xe4\x0a\x7c\x5b\x03\x98\x57\x32\xb6\x39" + "\x7e\x54\x59\xa3\x84\x39\x54", + "\x1d\x3d\x62\xec\xcd\x8a\xc5\xe8\x96\xf2\x65\x4a\x7f\x60\x6f\xc9", + "", + "\x2f\x60\xca\x34\x94\xa9\x58\xdc\x3e\x6e\xbe\xb5\xd0\xb4\xe6\xdd\xa0" + "\xd0\xc4\x33\x1a\xb9\xc9\x57\xf6\x42\x2a\x51\x00\x87\x8e\xbf", + "\x34\x4c\x2c\xea\x17\xb0\x6c\xb3\xda\x27\x2e\x22\xa2\x2a\x3a\x71\xee" + "\x0e\xaa\x19\x59\xa7\xfa\xcf\xff\x46\x46\x60\xdd\xcc\xed\xd1", + "\xba\xb7\xfb\xf4\x99\xff\x06\xaa\xd5\xf7\x57\xb1\xc1\xa4\xfc\xc0", 1, + 0, 256 }, + { 96, 192, 128, 140, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", "", + 
"\x0b\x4d\xbb\xba\x89\x82\xe0\xf6\x49\xf8\xba\x85\xf3\xaa\x06\x1b", + "\x3f\x87\x5c\x9b\xd7\xd8\x51\x14\x48\x45\x94\x68\xe3\x98\xc3\xb2", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 0, 128 }, + { 96, 192, 128, 141, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", "", + "\x1a\xe9\x36\x88\xef\x7e\x26\x50\xa9\x34\x2a\xd4\x71\x8b\x27\x80", + "\x21\x0d\xab\xea\x43\x64\xc6\xd5\xb3\x42\x9e\x77\x43\x32\x29\x36", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 0, 128 }, + { 96, 192, 128, 142, + "\x50\x19\xeb\x9f\xef\x82\xe5\x75\x0b\x63\x17\x58\xf0\x21\x3e\x3e\x5f" + "\xcc\xa1\x27\x48\xb4\x0e\xb4", + "\xff\x0d\xdb\x0a\x0d\x7b\x36\xd2\x19\xda\x12\xb5", "", "", "", + "\x79\x71\x28\x4e\x6c\x9e\x6a\xac\x34\x6f\xe2\xb7\xa0\xa0\x64\xc2", 1, + 0, 0 }, + { 96, 192, 128, 143, + "\x21\x21\x8a\xf7\x90\x42\x8f\x80\x24\xd3\xe7\xe1\x42\x8c\x9f\xcf\x57" + "\x8c\x21\x66\x36\xd6\x0e\x73", + "\x34\x04\x7b\xc3\x9b\x9c\x60\x83\x84\xdf\xf5\xb8", "", "\xe3", + "\xfe", + "\x2e\x98\x2e\x24\xb8\x1c\xd1\x20\xd3\x5a\x70\xfe\x69\x35\xe6\x65", 1, + 0, 8 }, + { 96, 192, 128, 144, + "\x3a\x8b\xf5\x43\xc4\x80\x92\x56\x32\x11\x82\x45\xbc\xbf\x5d\x01\x52" + "\x2b\x98\x7a\x31\xa3\x3d\xa3", + "\x4e\xbc\x13\xcf\x46\x36\xcc\x7c\x45\xe5\x60\xa7", "", + "\x53\xfc\x72\xe7\x1b\x59\xee\xb3", + "\x99\xf2\xff\x1c\x8a\x44\xe5\xf2", + "\x68\x70\xf1\x04\xdd\xc5\x14\x47\x7b\x40\x03\x36\xfb\x01\x86\x0e", 1, + 0, 64 }, + { 96, 192, 128, 145, + "\x92\xf4\xd2\x67\x2f\xce\xec\x43\x96\x3c\xcf\xfb\x17\xe6\xea\x75\x78" + "\xb1\x14\x18\xb0\x6a\x3b\x82", + "\x6e\x7f\xf7\xf0\x79\x76\x85\xcf\xc4\x4b\x05\xff", "", + "\xc3\xec\x16\xad\xb1\x84\xaf\xfa\x8a\xe9\x73\x8b\xff\xb9\x16", + "\xaf\xe8\xef\x41\x59\x1b\xfc\xc0\x0d\xb3\xc8\x80\xce\xb1\x86", + "\x29\xff\xf7\xf2\x85\x76\x86\x45\xc9\xc8\xbf\x7a\x47\x1c\x93\x93", 1, + 0, 120 }, + { 96, 192, 128, 146, + "\xbc\xb6\xbc\x5e\xe6\x74\x3d\xf1\x39\x6a\x34\x63\x93\x27\xb2\x58\x09" + "\xec\x9c\x81\xdd\x6a\x0c\x0e", + "\xbe\x03\x26\xd2\x3b\xdc\x2c\x64\x64\x8d\x13\xf4", "", + "\x80\x47\x4a\x3a\x3b\x80\x95\x60\xee\xe2\xce\x7a\x7a\x33\xea\x07", + "\x90\x33\x9d\xca\x02\xef\x71\x7f\x16\x03\x99\x4a\xee\x6c\xf6\xd2", + "\xe3\xd3\x3e\x01\xce\x64\xf2\x71\x78\x31\x47\xde\x22\x62\x28\xbc", 1, + 0, 128 }, + { 96, 192, 128, 147, + "\x5e\x1d\x28\x21\x3e\x09\x25\x36\x52\x5b\xba\xe0\x9e\x21\x4a\xf4\xc8" + "\x91\xe2\x02\xb2\xb4\xfa\x4f", + "\xb6\xbe\x6c\xd0\x68\x12\x35\xd8\x26\xaa\x28\xea", "", + "\x53\xd5\x94\x33\xa7\xdb\x7f\x41\xb3\x1c\xcb\x6d\x4a\x2d\x78\x99" + "\x65", + "\xb9\x8e\xd6\x32\x16\x79\x94\x1a\x3e\x52\x18\x34\x29\x66\x86\xad" + "\x98", + "\x9f\x50\xc0\x3e\x05\x5e\x51\x97\x12\xc5\x82\xec\x9d\xb3\x23\x5b", 1, + 0, 136 }, + { 96, 192, 128, 148, + "\x7f\x67\x2d\x85\xe1\x51\xaa\x49\x0b\xc0\xee\xc8\xf6\x6b\x5e\x5b\xee" + "\x74\xaf\x11\x64\x2b\xe3\xff", + "\xb0\x22\x06\x70\x48\x50\x5b\x20\x94\x62\x16\xef", "", + "\xef\x64\x12\xc7\x2b\x03\xc6\x43\xfa\x02\x56\x5a\x0a\xe2\x37\x8a\x93" + "\x11\xc1\x1a\x84\x06\x5f\x80", + "\xad\xdd\x30\x36\x51\x11\x9e\x52\xf6\x17\x0d\xfc\x7a\x91\x50\x64\x25" + "\x3d\x57\x53\x29\x87\xb9\xab", + "\xfa\x04\x84\xf8\xba\xa9\x5f\x5b\x7a\x31\xc5\x6d\x1b\x34\xc5\x8b", 1, + 0, 192 }, + { 96, 192, 128, 149, + "\x96\x9f\xed\x50\x68\x54\x1d\x65\x41\x8c\x2c\x1d\xe8\xfe\x1f\x84\x5e" + "\x03\x60\x30\x49\x6e\x12\x72", + "\x81\x7f\xe5\x1c\x31\xf2\x87\x91\x41\xa3\x43\x35", "\xcb", + "\x3d\x82\x33\x19\x1a\x28\x23\xbf\x76\x7e\x99\x16\x7b\x1d\x4a\xf4\xf4" + 
"\x84\x84\x58", + "\x0d\x2c\x3a\x3c\x0c\xc4\xb4\x0e\x70\xed\x45\xe1\x88\xe3\x56\xa0\xe1" + "\x53\x3b\x31", + "\x92\x90\x9a\x80\xe9\x05\x40\xe1\x87\x8a\xb5\x9e\xf3\x00\x07\x2b", 1, + 8, 160 }, + { 96, 192, 128, 150, + "\xfa\x5b\x9b\x41\xf9\x3f\x8b\x68\x2c\x04\xba\x81\x6c\x3f\xec\xc2\x4e" + "\xec\x09\x5b\x04\xdd\x74\x97", + "\x62\xb9\xcf\x1e\x92\x3b\xc1\x13\x8d\x05\xd2\x05", + "\x2e\xd8\x48\x71\x53\xe2\x1b\x12", + "\x18\x15\x98\x41\x81\x3a\x69\xfc\x0f\x8f\x42\x29\xe1\x67\x8d\xa7\xc9" + "\x01\x67\x11", + "\xc7\xc1\xcb\xb8\x5c\xe2\xa0\xa3\xf3\x2c\xb9\xef\x01\xad\x45\xec\x11" + "\x18\xb6\x6d", + "\x25\x33\x17\xf9\x8b\xda\xb8\x75\x31\xec\xe2\x04\x75\xcd\x9e\xbb", 1, + 64, 160 }, + { 96, 192, 128, 151, + "\xfb\xfb\x39\x56\x62\x78\x7e\x2d\x25\xa2\xe7\x51\x0f\x81\x8e\x82\x59" + "\x36\xa3\x51\x14\xe2\x37\xc9", + "\x3f\x1a\x1e\x02\xe9\x0a\x4b\xa7\xa1\xdb\x9d\xf2", + "\x74\x31\x8d\x88\x76\x52\x82\x43\xf1\x94\x4b\x73\xeb\x77\xe9\x6e", + "\x29\x52\xa3\xd6\x41\x07\xd5\xcb\xb9\x60\x22\x39\xd0\x5a\x5c\x5c\x22" + "\x2c\xf7\x2b", + "\xec\xf5\xe4\x03\xf1\x9c\x00\x7c\x8d\xa7\xa4\x56\xca\xf0\xa6\xd7\x57" + "\x62\x82\x9b", + "\xe0\x87\x7a\x10\x0f\x9d\xd9\xd6\x79\x5f\x0e\x74\xc5\x6a\x9f\xab", 1, + 128, 160 }, + { 96, 192, 128, 152, + "\x5d\x8e\x9c\x22\x22\x31\x6c\x9e\xd5\xff\x94\x51\x3c\xc9\x57\x43\x6a" + "\xe4\x47\xa6\xe1\xa7\x3a\x29", + "\x08\x02\xae\x86\xc7\x5a\x73\xbf\x79\x56\x15\x21", + "\x5c\xa3\x54\xa4\xcb\x8e\x4f\xc9\x79\x8a\xa2\x09\xad\x4f\x73\x9d\xc7" + "\xc2\x32\xfd\xd1\xf2\x25\x84", + "\x42\xb4\x43\x9e\x1d\x21\x16\xf8\x34\xb9\x1c\x51\x6a\x26\x29\x9d\xf2" + "\x79\x95\x6b", + "\x94\xd8\x44\xd9\x8b\x94\x67\xda\xa7\xe8\xdd\xe7\xf4\x29\x00\x37\x35" + "\x4d\x7f\xb2", + "\x62\x19\x66\x38\x59\x0c\xef\x42\x9d\x6b\x1d\x1a\x59\x83\x9c\x02", 1, + 192, 160 }, + { 96, 192, 128, 153, + "\xcc\xbd\x0f\x50\x98\x25\xa5\xf3\x58\xa1\x4a\xac\x04\x4a\xe2\x82\x6b" + "\xb2\xc9\xea\xaa\xaa\x07\x7f", + "\x91\x89\xa7\x1a\xc3\x59\xb7\x3c\x8c\x08\xdf\x22", "", + "\xa1\xed\x10\x07\xb5\x2e\x36\xec\x0f\x70\x10\x9c\x68\xda\x72\xee\x7b" + "\x67\x5c\x85\x5e\x3e\x49\x56\xd2\xdc\xf9\xd1\x2f\x67\x5d\x69\x33\xf6" + "\x77\xdd\xcc\x58\xfa\xce\x85\x76\x99\xd2\xe3\xd9\x0a\xdc\xb8\xc6\xc5" + "\x7c\x9d\x88\xb5\xdf\xcf\x35\x6d\xe4\xc0\xb6\x3f\x0e", + "\xe9\x91\x5b\xc5\xae\xa6\x3c\x8b\xc0\x14\xf2\xae\x6a\x49\x86\xb0\x31" + "\x15\xff\x1f\x34\xad\x6c\x0a\xcd\x74\xff\xca\x07\xc4\x53\xec\x3f\x3c" + "\xe6\x90\x2d\x5f\xf3\x38\xc5\x88\xa3\x4a\x1c\x3b\x30\xef\x75\x3e\xc7" + "\x00\x15\x72\xcb\xfe\xaf\xe6\x90\xfd\x00\xf5\x9b\x02", + "\xfb\xf1\x9b\x6b\x90\xe2\xd9\xdf\x7e\xad\x0c\x3b\xc6\xe3\x75\xa2", 1, + 0, 512 }, + { 96, 192, 128, 154, + "\xd0\x45\xc6\xeb\x17\x3f\x44\x08\x43\xfa\xec\x3e\x93\x74\x60\x2a\x94" + "\xee\x3f\x71\x76\x31\x22\x08", + "\x98\xe9\x15\x3d\xac\xa2\x52\x2e\x31\x62\xcb\x15", "", + "\x3f\x0b\x30\xdc\x96\x3a\x82\xd1\x82\xc0\x35\xb5\xa8\x23\x06\x0f\x07" + "\xc4\x12\x37\x92\xe6\xce\xe6\xbf\x91\xfe\xa3\xc5\x2f\xa6\x6b\xb6\xa9" + "\x3e\xa6\xcc\xe9\xf4\x81\x3e\xb9\x5b\xf1\x8f\x81\x6c\x00\xad\x4f\xb5" + "\x69\x32\x82\x7a\x39\xef\xb2\xfe\x56\x80\x4e\x60\x4a\x60\x67\x74\xee" + "\x92\xad\x46\xcd\x8c\x17\x2a\x0d\x2b\xde\xa2\xfc\x99\xf6\x7c\xd8\x2c" + "\x60\x24\xc3\x15\xcf\xee\x6d\xbb\x8d\x27\xf7\x45\xc9\xd0\xce\x9b\xf5" + "\xd0\x97\x24\xf4\xbe\xd0\x03\xcf\x39\x47\x83\x48\xb3\x30\x4b\xaa\x4e" + "\xcc\x99\x74\xfc\x4f\x3f\xf9\x3f\x95", + "\x96\x63\xe6\xf9\x8b\x27\x68\x44\x8e\x6d\xd0\xdd\x78\x0e\x14\x56\x68" + "\xaf\x5b\x00\x22\x57\xe3\x53\x21\x38\x68\xc9\xcd\x9f\xd3\xa1\xe9\x42" + "\x75\x30\x32\x75\x41\x77\x5a\x09\x31\x23\x07\x6d\x34\x98\x5d\xb3\xaa" + 
"\x24\x8c\xd5\x5e\x53\x26\x09\xd1\xa3\x92\x74\xc4\x92\x16\xea\x20\xfb" + "\xab\x71\x9b\x9c\x7e\x31\x0b\x27\x87\x7b\x9a\x33\xd1\xb6\x9a\xb7\x47" + "\xaf\xac\x94\x4d\x1e\x97\xea\x78\x93\x67\x82\x1c\x33\x1f\x00\xb5\xd6" + "\x18\x40\x2b\xfc\x57\x88\x4d\x18\xed\xbd\x60\xc4\xdf\xe2\x18\xc0\x80" + "\x80\xb8\xe3\x47\x9f\xf8\x4b\xdf\xb5", + "\xfc\x2f\xf6\x2a\x41\xbd\xb7\x9a\xfc\x36\x98\x42\xe4\xec\xca\xbf", 1, + 0, 1024 }, + { 96, 192, 128, 155, + "\xe6\x02\x18\x8a\xbf\x6a\x91\xf3\xe2\x58\x83\x8c\xea\x6b\xef\xef\xfc" + "\xf6\x25\x7a\x50\x9c\x3e\x95", + "\x9e\x35\xd3\xef\x18\x97\xc5\xfe\x3f\x64\x72\x04", "", + "\x3b\x9a\x6e\xdc\x44\x84\x8c\x07\x23\x41\xfd\x4a\xf5\x1e\xc1\x16\xac" + "\x32\x8f\x69\xcc\x5a\x33\x54\xe4\x92\x99\xfb\x2e\x5d\x22\xfa\x00\x84" + "\xe3\x0b\x36\xec\xaf\x54\x30\x93\x97\xb2\xb4\x98\xd6\x86\x08\x7f\x34" + "\x57\x69\x8c\x36\x39\xe7\x3c\xa1\x8c\x78\xc3\xe0\x21\xd6\x73\x98\x6c" + "\xfc\x2c\xeb\x4d\x07\xe6\x69\x71\xe9\x76\xf5\x8f\x03\x36\xf8\x2c\x7f" + "\xc0\xd5\x2d\x66\x61\x0f\x26\xca\x3b\xfe\x53\xc0\xb0\x1c\xf7\xc2\x07" + "\x30\x6d\xb9\x04\xc1\xad\x30\x0a\xb9\x5c\x56\xfd\xe8\x20\xa8\xed\xd2" + "\x56\xf2\xb9\x90\x6b\x31\x2b\xf7\xaf\x5e\xf4\xa8\x06\xf6\x18\xdd\xfc" + "\xb6\x71\x79\xb0\x3f\xff\x80\xa2\x45\xc3\x8d\x8f\x4c\xff\x28\x75\xb7" + "\x1a\x0b\xf6\x91\x29\xca\xf9\x71\x21\x46\x2e\x05\x01\xec\x65\x74\xed" + "\xe9\x47\x06\xf4\xa0\x4d\x2f\xb3\x01\xd4\x15\xc2\x2e\xa1\x21\x57\xd2" + "\xe9\x19\xbc\x7a\x01\x69\xa5\xad\x5c\x7b\xb5\x76\x1a\x85\x31\xab\xbe" + "\x77\xd6\x6a\x48\x71\xb3\xf2\x7a\x71\x70\xf0\x99\x04\x4b\x9f\xdc\x50" + "\xa8\xcb\x3b\x89\x42\x52\xa5\x01\xcc\x89\x6a\xc4\x79\x3b\xdb\x47\x8b" + "\xb1\xcb\x99\xc0\x23\x41\xd7\x23\x8d\xd8\xd5\x93\xcf\xda\x02\xf7\xd5" + "\x20\xd7", + "\x16\x71\x83\x66\x16\x75\x67\x76\x25\xbe\xd2\xd5\xf5\x5f\x72\x8d\xab" + "\x80\xd7\xf0\x6f\x62\x9d\x99\xe5\x8b\x45\x06\x9f\xe9\xd7\x42\x8e\x89" + "\x61\x56\x1b\x11\x24\x5c\x70\x9a\xc9\xeb\xb5\xc5\x9a\xc2\xa8\x9d\x83" + "\x75\xd8\xa0\x1d\x84\x9c\x77\x33\xa1\xb4\x82\x52\x99\x27\xe3\xf1\xa1" + "\xa5\x3f\x63\xa4\xbe\x08\xa1\x1c\x94\x1c\x63\x4c\xd4\x03\x73\xc4\x2f" + "\xfb\x24\x49\xc6\x41\xbc\x9e\x39\xea\xfb\xcf\x9c\x0f\xba\x67\x7e\x36" + "\x49\x6f\x73\xfc\x70\xaa\x09\x72\x22\x49\x01\xab\x04\xb0\xa1\x96\xab" + "\x74\x52\x62\x02\x1b\x23\x13\xa8\x46\x41\x87\xfe\xce\xc4\x3a\xdb\x40" + "\x62\x58\xbd\xdc\xd8\xc9\xd0\x4d\xc2\xae\x29\xe6\x5d\x54\xa8\x9d\xd0" + "\xf1\x75\x2d\x6d\x95\x0d\xbf\x7d\xa4\xde\xa0\xa7\xb9\x46\x55\x79\x50" + "\x3f\xc8\xec\x44\x51\xf4\xb3\x98\x78\xac\x47\x54\xa1\xaa\xf7\xb0\xb7" + "\x3f\xee\x11\x21\x3c\xb8\xe6\x01\xfc\x60\x39\x39\x3f\x72\xe0\xe0\x79" + "\xee\x97\xec\xc6\x10\x24\x17\x57\xda\x2d\xb2\xf5\x1d\x5e\xd1\x21\x48" + "\x15\x40\xef\xf4\x72\x87\x74\x4d\xac\x43\x37\x5c\x4f\x48\xa4\x6a\xf7" + "\x01\x90\x45\x3a\x17\xc3\xc7\x8d\x73\x5b\xa1\xd1\xfc\x76\xa3\x30\xe6" + "\xcb\xed", + "\xc7\x20\x35\x31\x4f\x43\xd2\x56\xf8\xd8\x45\xeb\x69\x6b\xd9\x43", 1, + 0, 2056 }, + { 96, 192, 128, 156, + "\x55\xa4\xca\x52\x64\x43\x35\x7a\xc7\xc8\x96\xd9\xa6\x7c\xf7\xd4\x67" + "\xf6\x92\x1d\x69\x00\x2d\x3a", + "\xdb\xa2\x33\xcc\xbc\x79\x92\xe6\x4e\x82\xcf\xa3", + "\xdf\x73\x7c\xd7\x7d\x31\xeb\x90\x97\xa1\x7c\x31\xb4\xc9\x28\x89\xef" + "\x1f\x32\xb7\x46\x4e\x26\x20\xe9\x00\x71\x92\xea\x67\x5b\x9a\xd6\x91" + "\x05\x27\xff\xec\xee\x24\x52\xbe\x02\x48\xfa\xb7\x56\x08\xc7\xfd\xca" + "\x08\xe8\x65\x80\x32\x2a\xac\x1d\x6a\x11\xb9\x6e\xcf", + "\x4e\x56\xd1\xea\x53\x8c\xf4\x9c\xad\x49\x95\x9e\x88\x4e\xb5\x40\xc8" + "\x46\x55\x6c", + "\x3f\x57\xec\x1b\x41\x4f\x74\x81\x8f\xea\xd9\xf3\x5a\xa1\x67\x94\x02" + "\xc3\xe7\x50", + 
"\x97\xb8\x9b\x29\x14\x19\xe3\x2c\xf6\x54\xea\x63\x0a\x3a\xd0\x14", 1, + 512, 160 }, + { 96, 192, 128, 157, + "\xf3\x81\xd0\xff\xd3\x37\x3a\x1a\xa0\x2e\xdd\x1d\x7f\xa7\x48\xe9\x19" + "\x08\xfe\x53\x4b\xef\x73\xd2", + "\x10\xaa\xec\x0d\xe4\xad\x75\x37\x6b\xe9\xfd\x41", + "\x77\x39\xaa\xd7\x39\x9d\x9c\x0f\x0a\x3c\x95\xb4\x03\x88\x8f\x00\x72" + "\xd9\x4a\xcb\x76\xff\x57\x6e\x05\xf4\xa0\x63\x12\x0b\x84\xe7\x22\xb4" + "\xd5\xcd\x43\xa5\x8e\x4a\xba\xb4\x44\xcb\x8c\xed\x11\x2f\x3d\xbd\x89" + "\x93\xb8\x31\xc3\x9b\x4e\xdb\x76\xe9\x2e\xb3\x3e\xe2\x4c\x59\x22\xb5" + "\x65\x52\x68\x5f\x3b\x0f\x4c\xf2\x2e\x0e\x11\x62\x8f\x6a\x3d\x33\xef" + "\xf9\xde\xf7\xec\x52\x71\x12\xdf\xaf\xcf\x12\x28\x14\xe3\xd1\xaa\xf6" + "\x6c\x3f\x97\x05\x26\x51\x10\x88\xbf\xfe\xf8\x10\x1d\x1c\xef\x83\x32" + "\x68\xff\x80\x38\x7d\xf3\x05\x57\xf7", + "\x65\x3a\x3f\x03\x3c\x27\x75\xe0\x8f\xef\x73\xcf\x80\xf5\xe2\x69\x9f" + "\xb3\x60\xcb", + "\x55\x65\xc6\xd0\x9c\x4c\x92\x4d\x61\xc0\xef\x80\x8f\xb0\xea\x14\x4f" + "\xfb\x47\x38", + "\x12\xb7\x2e\xc1\xd9\xc3\x2f\xb2\x2c\x13\xc4\x0b\x33\x79\x6f\xa9", 1, + 1024, 160 }, + { 96, 192, 128, 158, + "\x8f\x27\xb1\xc3\xb3\xd7\x02\x3c\x76\xee\x66\xc7\x68\xa3\xe9\x2d\x49" + "\x71\xe2\x5f\x72\x9d\x87\x88", + "\x12\x44\x40\x40\xca\xed\xe6\x72\x85\xe4\x90\xd7", + "\x58\xfd\x02\xac\x23\xec\x7f\xa5\xb9\x46\x0f\x60\xbf\xc8\x5b\x4b\xeb" + "\xba\x70\x03\x9a\x8f\x83\x26\x1d\x6c\xc4\xf5\x60\x10\x7c\x10\xbc\x69" + "\x54\x8a\x5d\x61\x52\x88\x2f\xb4\x65\xfd\x59\xfb\x81\x64\xd7\xc9\x45" + "\x23\xc3\xdd\x42\x06\xd3\x30\x64\xf5\x19\x1b\xd3\x1f\x0c\x48\xfe\x03" + "\xd7\x46\x0e\x99\x5c\x93\x17\x5b\x57\xcb\x03\xf5\x87\x11\xad\xc9\x46" + "\x32\x03\x1c\x43\x05\x27\x23\x67\xb4\x28\x9c\x72\x5d\x9c\xb7\xae\x9b" + "\xa9\x96\xb3\xa0\x79\x17\x45\x08\xc1\xea\xe8\x16\x2a\x0b\xac\x44\x6c" + "\x1e\x53\xfe\x0c\x40\x2b\x69\x12\xdf\xd6\x70\x2a\xdd\xcc\xad\xa3\x0a" + "\x5c\x01\x0f\xc2\x2c\x2c\x75\xe4\x32\x26\x37\x8e\xc7\xf4\xb3\xb7\x1c" + "\xcc\x71\xf3\x2a\xb1\xad\xc8\x77\xcc\x7b\x0a\x18\x0c\x75\xd3\x85\xc0" + "\xf7\x1a\x0b\x29\x1a\x1c\xcc\xf4\xbe\x47\xe2\x72\x24\x9d\x61\xff\xbf" + "\x05\x9c\x4f\x7b\xe7\x4e\xba\x07\xd5\xe1\xbe\x3a\x74\x38\x45\x8a\x61" + "\x1f\xe5\x8c\xee\x4f\x94\x6e\x25\xde\xe0\x3e\x64\x85\x23\x55\x66\xf2" + "\x0e\xd5\x55\xbe\x32\xcd\x57\xa9\x4e\x52\x2d\x21\x68\xea\xe2\x3c\x45" + "\x87\x37\x1a\x2d\x14\x5f\x41\x8c\x59\xe7\xbb\xc4\x64\xa3\xbd\x88\xb8" + "\x91\x9b", + "\x0d\xf6\xe7\x50\x09\x2b\x9a\xc5\x76\xdd\xe6\x60\x06\xa4\xca\xb2\x11" + "\x6e\xee\x21", + "\xc6\x87\x7b\x03\x55\x2e\x97\xd9\xa1\xe6\x55\x7f\x90\xdc\x7a\xdd\xe1" + "\x5a\x2f\x43", + "\x25\x36\x27\x2b\xee\x74\x46\x82\x00\x41\x85\x4e\x10\xb4\x9a\x03", 1, + 2056, 160 }, + { 96, 192, 128, 159, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb5\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 160, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb6\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 161, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\x34\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 162, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe5\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 163, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\xdb\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 164, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2e\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 165, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2d\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 166, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\xcc\x78\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 167, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x79\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 168, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\xf8\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 169, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + 
"\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xd3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 170, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x59\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 171, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0c\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 172, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0f\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 173, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x8d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 174, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x65", 0, + 0, 128 }, + { 96, 192, 128, 175, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x66", 0, + 0, 128 }, + { 96, 192, 128, 176, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\x24", 0, + 0, 128 }, + { 96, 192, 128, 177, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x78\xf3\x58\xda\x0d\x99\xcb\xe4", 0, + 0, 128 }, + { 96, 192, 128, 178, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb5\xe4\x4c\x5b\x2f\xe9\x0e\x4c\x79\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 179, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\xdb\x2f\xe9\x0e\xcc\x78\xf3\x58\xda\x0d\x99\xcb\x64", 0, + 0, 128 }, + { 96, 192, 128, 180, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb4\xe4\x4c\x5b\x2f\xe9\x0e\xcc\x78\xf3\x58\xda\x0d\x99\xcb\xe4", 0, + 0, 128 }, + { 96, 192, 128, 181, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\x4b\x1b\xb3\xa4\xd0\x16\xf1\xb3\x87\x0c\xa7\x25\xf2\x66\x34\x9b", 0, + 0, 128 }, + { 96, 192, 128, 182, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, 128 }, + { 96, 192, 128, 183, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, 128 }, + { 96, 192, 128, 184, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\x34\x64\xcc\xdb\xaf\x69\x8e\xcc\xf8\x73\xd8\x5a\x8d\x19\x4b\xe4", 0, + 0, 128 }, + { 96, 192, 128, 185, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x50\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b", "", + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x45\x82\x56\x84\x2d\xfd\x29\x7f\x30\xbd\x2f\x8f\x15\xc9\x2d\xb0", + "\xb5\xe5\x4d\x5a\x2e\xe8\x0f\x4d\x79\xf2\x59\xdb\x0c\x98\xca\x65", 0, + 0, 128 }, + { 128, 256, 128, 186, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x5c\x2e\xa9\xb6\x95\xfc\xf6\xe2\x64\xb9\x60\x74\xd6\xbf\xa5\x72", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x28\xe1\xc5\x23\x2f\x4e\xe8\x16\x1d\xbe\x4c\x03\x63\x09\xe0\xb3\x25" + "\x4e\x92\x12\xbe\xf0\xa9\x34\x31\xce\x5e\x56\x04\xc8\xf6\xa7\x3c\x18" + "\xa3\x18\x30\x18\xb7\x70", + "\xd5\x80\x8a\x1b\xd1\x1a\x01\x12\x9b\xf3\xc6\x91\x9a\xff\x23\x39", 1, + 0, 320 }, + { 128, 256, 128, 187, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x57\xb3\xa8\x1f\x2c\x36\xb6\xb0\x65\x77\xca\x0f\xba\xb8\xfa\x8e", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xcc\xee\xbe\xb4\xfe\x4c\xd9\x0c\x51\x4e\x52\xd2\x32\x7a\x2e\xcd\x75" + "\x39\x36\x61\x00\x6c\xf2\x47\x6d\x86\x20\x14\x9a\xef\x3d\x1c\xdc\xe4" + "\x91\xff\xf3\xe7\xa7\xa3", + "\x81\x32\xe8\x65\xb6\x9d\x64\xef\x37\xdb\x26\x1f\x80\xcb\xbe\x24", 1, + 0, 320 }, + { 128, 256, 128, 188, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\xce\x20\xa7\xe8\x70\x69\x6a\x5e\x68\x53\x3c\x46\x5b\xad\x2b\xa1", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x4f\x43\x50\x56\x5d\x91\xd9\xaa\x8c\x5f\x40\x48\x55\x04\x92\xad\x6d" + "\x6f\xda\xbf\x66\xda\x5d\x1e\x2a\xf7\xbf\xe1\xa8\xaa\xda\xa0\xba\xa3" + "\xde\x38\xa4\x1d\x97\x13", + "\x15\x5d\xa6\x44\x1e\xc0\x71\xef\x2d\x8e\x6c\xff\xba\xcc\x1c\x7c", 1, + 0, 320 }, + { 128, 256, 128, 189, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x91\x8e\x3c\x19\xdb\xdf\xee\x2d\xb1\x81\x56\xc5\xb9\x3f\x3d\x75", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x83\x16\xa5\x31\x67\xb6\xde\x1a\x75\x75\x70\x06\x93\xff\xef\x27\x4f" + "\x43\x50\x56\x5d\x91\xd9\xaa\x8c\x5f\x40\x48\x55\x04\x92\xad\x6d\x6f" + "\xda\xbf\x66\xda\x5d\x1e", + "\x6c\x57\x4a\xa6\xa2\x49\x0c\xc3\xb2\xf2\xf8\xf0\xff\xbc\x56\xc4", 1, + 0, 320 }, + { 128, 256, 128, 190, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x71\x7d\x90\x0b\x27\x04\x62\xb9\xdb\xf7\xe9\x41\x9e\x89\x06\x09", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x51\x75\x92\x75\x13\xe7\x51\xeb\x30\x9f\x45\xbc\x2e\xf2\x25\xf2\x83" + "\x16\xa5\x31\x67\xb6\xde\x1a\x75\x75\x70\x06\x93\xff\xef\x27\x4f\x43" + "\x50\x56\x5d\x91\xd9\xaa", + "\x80\x82\xa7\x61\xe1\xd7\x55\x34\x4b\xf2\x96\x22\x14\x4e\x7d\x39", 1, + 0, 320 }, + { 128, 256, 128, 191, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\xec\xd5\x21\x20\xaf\x24\x0e\x9b\x4b\xf3\xb9\xd1\xee\xb4\x94\x34", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x36\xb3\xfb\xec\xd0\x91\x78\xd0\x45\x27\xfb\x37\x54\x4f\x55\x79\xd2" 
+ "\x0d\x60\xa4\x12\x66\xf6\x85\xc4\x80\x98\xe1\xa5\x28\x04\xca\x38\x7d" + "\x90\x70\x9d\x32\x68\xdd", + "\x03\x3e\x0e\xf2\x95\x3e\xbf\xd8\x42\x57\x37\xc7\xd3\x93\xf8\x9a", 1, + 0, 320 }, + { 128, 256, 128, 192, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\xb3\x7b\xba\xd1\x04\x92\x8a\xe8\x92\x21\xd3\x52\x0c\x26\x82\xe0", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x16\x92\x9b\x77\x30\x51\xf1\x2b\x0a\xda\xc9\x5f\x65\xe2\x1a\x7f\x36" + "\xb3\xfb\xec\xd0\x91\x78\xd0\x45\x27\xfb\x37\x54\x4f\x55\x79\xd2\x0d" + "\x60\xa4\x12\x66\xf6\x85", + "\xca\x44\x8b\xb7\xe5\x2e\x89\x7e\xca\x23\x4e\xf3\x43\xd0\x57\xd0", 1, + 0, 320 }, + { 128, 256, 128, 193, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x53\x88\x16\xc3\xf8\x49\x06\x7c\xf8\x57\x6c\xd6\x2b\x90\xb9\x9c", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x6d\x3f\xae\xfa\xf6\x91\xd5\x81\x63\x84\x6f\x8d\x4b\x9f\xfd\x59\x16" + "\x92\x9b\x77\x30\x51\xf1\x2b\x0a\xda\xc9\x5f\x65\xe2\x1a\x7f\x36\xb3" + "\xfb\xec\xd0\x91\x78\xd0", + "\x84\xf4\x97\x40\xe6\x75\x7f\x63\xdd\x0d\xf7\xcb\x76\x56\xd0\xef", 1, + 0, 320 }, + { 128, 256, 128, 194, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\xd1\x0e\x63\x19\x43\xcd\x3b\xda\xba\xba\xb2\xbb\xd1\x39\x51\xc0", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xd6\x01\x96\xc2\xd1\x4f\xcf\x30\xc0\x99\x1d\x27\x21\xdd\xc5\x2d\x38" + "\x5f\x40\x7a\x16\x69\x1d\xad\xe8\x2c\x90\x23\xc8\x55\xfd\x8e\x2e\x8f" + "\xbb\x56\x21\x02\xf0\x18", + "\x87\x7e\x15\xd9\x88\x9e\x69\xa9\x9f\xcc\x6d\x72\x74\x65\xc3\x91", 1, + 0, 320 }, + { 128, 256, 128, 195, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x8e\xa0\xf8\xe8\xe8\x7b\xbf\xa9\x63\x68\xd8\x38\x33\xab\x47\x14", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x94\x8f\xbc\xec\xa1\x2a\x6e\x4f\xab\xb7\x9b\x6d\x96\x5e\x33\x6f\xd6" + "\x01\x96\xc2\xd1\x4f\xcf\x30\xc0\x99\x1d\x27\x21\xdd\xc5\x2d\x38\x5f" + "\x40\x7a\x16\x69\x1d\xad", + "\xcd\x57\x57\x62\x69\x45\x97\x6b\xa9\xf0\x26\x4b\xd6\xbe\xe8\x94", 1, + 0, 320 }, + { 128, 256, 128, 196, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x7b\x2d\xf4\xfb\xed\x1d\xe2\x72\x7e\xb2\x48\x98\xe5\xde\xab\xb9", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\xa1\xa0\x12\x06\x60\xff\x52\xe6\xb1\x70\x0b\x12\xc5\x4d\x2d\x33\xb9" + "\x4b\x00\xcd\x78\x82\xd8\x85\x7d\x84\xe6\xe1\x83\xa1\xde\xa6\xee\x85" + "\xa7\xda\x84\xfb\xc3\x5d", + "\xb0\x15\xd7\x2d\xa6\x2c\x81\xcb\x4d\x26\x72\x53\xb2\x0d\xb9\xe5", 
1, + 0, 320 }, + { 128, 256, 128, 197, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x24\x83\x6f\x0a\x46\xab\x66\x01\xa7\x60\x22\x1b\x07\x4c\xbd\x6d", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00", + "\x5e\x34\x34\xb4\x5e\xdb\xf0\xd1\xf6\xe0\x2d\x11\x44\xdb\xf8\x67\xa1" + "\xa0\x12\x06\x60\xff\x52\xe6\xb1\x70\x0b\x12\xc5\x4d\x2d\x33\xb9\x4b" + "\x00\xcd\x78\x82\xd8\x85", + "\xee\x74\xcc\xb3\x0d\x64\x9e\xbf\x69\x16\xd0\x5a\x7d\xbe\x56\x96", 1, + 0, 320 }, + { 128, 256, 128, 198, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + "\x8d\x74\xf1\xc9\x72\x43\xd3\x62\x57\x7f\xf3\x76\xc3\x93\xd2\xdc", + "\x26\x5c\x42\xe2\xb9\x6e\xa1\xde\x9c\x24\xf7\x18\x2e\x33\x73\x90", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 0, 128 }, + { 128, 256, 128, 199, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff\x10" + "\x21\x32\x43\x54\x65\x76\x87\x98\xa9\xba\xcb\xdc\xed\xfe\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "", + "\x88\x4d\xf0\xe7\x6f\x3c\xe2\x27\xbf\x95\x95\xd1\x03\x82\x5a\x46", + "\x98\x8f\x47\x66\x8e\xa6\x50\xcb\xaa\x67\x14\x71\x1a\xbe\x26\x8d", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 0, 128 }, + { 128, 256, 128, 200, + "\xb4\xcd\x11\xdb\x0b\x3e\x0b\x9b\x34\xea\xfd\x9f\xe0\x27\x74\x69\x76" + "\x37\x91\x55\xe7\x61\x16\xaf\xde\x1b\x96\xd2\x12\x98\xe3\x4f", + "\x00\xc4\x9f\x4e\xbb\x07\x39\x3f\x07\xeb\xc3\x82\x5f\x7b\x08\x30", + "", "", "", + "\x30\x6f\xe8\xc9\x64\x5c\xc8\x49\x82\x3e\x33\x3a\x68\x5b\x90\xb2", 1, + 0, 0 }, + { 128, 256, 128, 201, + "\xb7\x79\x7e\xb0\xc1\xa6\x08\x9a\xd5\x45\x2d\x81\xfd\xb1\x48\x28\xc0" + "\x40\xdd\xc4\x58\x9c\x32\xb5\x65\xaa\xd8\xcb\x4d\xe3\xe4\xa0", + "\x0a\xd5\x70\xd8\x86\x39\x18\xfe\x89\x12\x4e\x09\xd1\x25\xa2\x71", + "", "\xed", "\x3f", + "\xfd\x8f\x59\x3b\x83\x31\x4e\x33\xc5\xa7\x2e\xfb\xeb\x70\x95\xe8", 1, + 0, 8 }, + { 128, 256, 128, 202, + "\x4c\x01\x0d\x95\x61\xc7\x23\x4c\x30\x8c\x01\xce\xa3\x04\x0c\x92\x5a" + "\x9f\x32\x4d\xc9\x58\xff\x90\x4a\xe3\x9b\x37\xe6\x0e\x1e\x03", + "\x2a\x55\xca\xa1\x37\xc5\xb0\xb6\x6c\xf3\x80\x9e\xb8\xf7\x30\xc4", + "", "\x2a\x09\x3c\x9e\xd7\x2b\x8f\xf4\x99\x42\x01\xe9\xf9\xe0\x10", + "\x04\x13\x41\x07\x8f\x04\x39\xe5\x0b\x43\xc9\x91\x63\x51\x17", + "\x5b\x8a\x2f\x2d\xa2\x0e\xf6\x57\xc9\x03\xda\x88\xef\x5f\x57\xbb", 1, + 0, 120 }, + { 128, 256, 128, 203, + "\xe7\xf7\xa4\x8d\xf9\x9e\xdd\x92\xb8\x1f\x50\x86\x18\xaa\x96\x52\x6b" + "\x27\x9d\xeb\xd9\xdd\xb2\x92\xd3\x85\xdd\xba\xe8\x0b\x22\x59", + "\x7e\xe3\x76\x91\x0f\x08\xf4\x97\xaa\x6c\x3a\xa7\x11\x36\x97\xfd", + "", + "\x5e\x51\xdb\xbb\x86\x1b\x5e\xc6\x07\x51\xc0\x99\x6e\x00\x52\x7f", + "\x46\x94\x78\xd4\x48\xf7\xe9\x7d\x75\x55\x41\xaa\x09\xad\x95\xb0", + "\x25\x4a\xda\x5c\xf6\x62\xd9\x0c\x5e\x11\xb2\xbd\x9c\x4d\xb4\xc4", 1, + 0, 128 }, + { 128, 256, 128, 204, + "\x4f\x84\x78\x2b\xfb\xb6\x4a\x97\x3c\x3d\xe3\xdc\xfa\x34\x30\x36\x7f" + "\xd6\x8b\xc0\xb4\xc3\xb3\x1e\x5d\x7c\x81\x41\xba\x3e\x6a\x67", + "\x5d\x1b\xde\x6f\xa0\x99\x4b\x33\xef\xd8\xf2\x3f\x53\x12\x48\xa7", + "", + "\x78\xcb\x66\x50\xa1\x90\x8a\x84\x21\x01\xea\x85\x80\x4f\xed\x00\xcc" + 
"\x56\xfb\xda\xfa\xfb\xa0\xef\x4d\x1c\xa6\x07\xdc\xae\x57\xb6", + "\xcb\x96\x02\x01\xfa\x5a\xd4\x1d\x41\xd1\xc2\xc8\x03\x7c\x71\xd5\x2b" + "\x72\xe7\x6b\x16\xb5\x89\xd7\x1b\x97\x66\x27\xc9\x73\x4c\x9d", + "\x8d\xfc\xe1\x64\x67\xc3\xa6\xeb\xb3\xe7\x24\x2c\x9a\x55\x19\x62", 1, + 0, 256 }, + { 120, 128, 128, 205, + "\x34\xc7\x4e\x28\x18\x29\x48\xe0\x3a\xf0\x2a\x01\xf4\x6e\xb4\xf7", + "\xb0\xa7\x31\x19\xa9\x7d\x62\x38\x06\xb4\x9d\x45\xdd\xf4\xc7", "", + "\xfe\x82\xba\x66\xcf\x2e\x26\x57\x41\xf2\xc8\x6c", + "\x2b\xc3\xef\x8e\x74\x02\xb4\x63\x1f\x48\xe9\xbe", + "\x4b\x6f\x6f\x5b\xe2\x91\xa9\x0b\x9e\x93\xa8\xa8\x2d\xdb\xc8\xd8", 1, + 0, 96 }, + { 160, 128, 128, 206, + "\x55\xcb\x7c\xac\x77\xef\xe1\x8a\x1e\xa3\xb3\x0c\x65\xf3\xf3\x46", + "\xe2\x2b\x6b\x14\x4a\xb2\x6b\x57\x81\x31\x6e\x7a\x42\xa7\x62\x02\xac" + "\x4b\x22\x78", + "", "\x2f\x3d\x11\xea\x32\xbf\x5b\xc7\x2c\xbe\x2b\x8d", + "\x4f\xe1\x3e\xf2\x9f\x11\x8f\x85\xa6\x31\x88\xf8", + "\x05\x97\x5b\x17\x53\x16\xdf\x80\x45\x88\x9f\x43\xe0\xc8\x57\xe0", 1, + 0, 96 }, + { 120, 192, 128, 207, + "\x66\xf7\x5a\xcb\xd8\xd3\xac\xf7\xaf\x47\xd1\x3e\x83\x84\xc2\x80\x9d" + "\x6b\x91\x50\x3a\x7f\x29\x4b", + "\xed\xf9\x3e\x16\x29\x4f\x15\xed\xed\x83\x80\x8f\x09\x32\x0e", "", + "\xa9\x00\xc8\x6b\x6b\x7e\x0e\x55\x63\xf8\xf8\x26", + "\x9a\xf1\xa0\x22\xc6\x1c\x43\x15\xaa\x0e\x92\x3e", + "\x20\x52\x9b\xff\x3c\x59\x22\x2e\xc3\x33\x53\xaf\x33\x7b\x1d\x40", 1, + 0, 96 }, + { 160, 192, 128, 208, + "\xef\x2e\x29\x9d\xd4\xec\xd7\xe3\xb9\xcc\x62\x78\x09\x22\xcc\x2c\x89" + "\xf7\x88\x40\x56\x4d\x12\x76", + "\x13\x0c\x14\xc8\x39\xe3\x5b\x7d\x56\xb3\x35\x0b\x19\x4b\x0d\xa3\x42" + "\xe6\xb6\x5d", + "", "\x03\xf5\x95\x79\xb1\x44\x37\x19\x95\x83\x27\x0e", + "\x07\x3a\x52\x91\xb1\x1d\xf3\x79\xf3\x1b\x4f\x16", + "\x17\x20\x59\x99\x49\x1b\xd4\xc1\xd6\xc7\xec\x3e\x56\x77\x9c\x32", 1, + 0, 96 }, + { 120, 256, 128, 209, + "\xe9\x8b\x06\x69\xa6\x45\xeb\x14\xcd\x06\xdf\x69\x68\xfc\x5f\x10\xed" + "\xc9\xf5\x4f\xee\xd2\x64\xe3\xd4\x10\xcd\xc6\x1b\x72\xef\x51", + "\x17\xca\x25\x0f\xb7\x33\x87\x75\x56\x26\x32\x23\xea\xdd\xe1", "", + "\xf3\x84\xb3\xed\x7b\x27\x46\x41\xf5\xdb\x60\xcf", + "\xfc\x21\x36\x02\xaa\x42\x3b\x87\xd7\xc2\xa8\x74", + "\x36\xb1\x5b\xab\x69\x23\xb1\x72\x18\xfe\x1c\x24\x04\x8e\x23\x91", 1, + 0, 96 }, + { 160, 256, 128, 210, + "\x84\x9b\x3e\x6b\x8c\xdd\x85\xbd\xcf\xb8\xeb\x70\x1a\xa5\x52\x2a\xe2" + "\x34\x0f\xbe\x52\x14\xe3\x89\x62\x2c\xef\x76\x97\x92\x25\xc4", + "\x0f\x9d\x6e\xd7\xee\xf3\x62\xdf\xa4\xa7\xdf\xa5\xc0\xf7\x4c\x5b\x27" + "\xbd\x4e\xbf", + "", "\x8c\x55\x64\xe5\x30\x51\xc0\xde\x27\x31\x99\xb4", + "\xc1\xd7\x62\x33\xe8\xc5\x04\x2e\x92\xbf\x8d\x32", + "\x7c\xf0\x36\xd2\x35\xd3\xb2\xdd\x34\x9a\x8c\x80\x4b\x65\x14\x4a", 1, + 0, 96 }, + { 256, 128, 128, 211, + "\x59\x27\xba\xe7\x48\xbb\x69\xd8\x1b\x5a\x72\x4e\x0a\x16\x56\x52", + "\x36\x5e\x0b\x96\x93\x2b\x13\x30\x6f\x92\xe9\xbb\x23\x84\x71\x65\xbc" + "\xbf\x5d\x35\xe4\x5a\x83\xd7\x5c\x86\xec\xca\x70\x13\x1f\x4c", + "", "\x31\x6b\xf9\x9b\xfa\xfc\x76\xf1\xbf\xc0\xb0\x3c", + "\x53\x48\xaf\x57\xfa\xfe\x24\x85\xb4\x3f\x2b\xc4", + "\x01\x9a\x96\xc5\x37\x3c\x03\x16\x26\xb6\xc0\x30\x0d\x4c\xf7\x8b", 1, + 0, 96 }, + { 512, 128, 128, 212, + "\xdb\xd3\x67\x6f\x29\x34\x09\x27\x3f\x27\xb3\x75\xe0\x37\x93\xa3", + "\x96\x7f\xa7\xc9\x90\xeb\x2b\xec\xbd\x45\x08\x35\xe2\x8e\xa3\xa9\x00" + "\x0c\x72\x16\x28\x5c\xfa\x76\x96\xe8\xc3\xda\xc3\xce\x95\x2a\x1f\xe6" + "\x38\xd7\xc8\xc7\x3e\x1d\x70\x8d\xce\x01\xb5\xa2\x0f\xcc\x9a\xa0\x11" + "\x94\x9d\x2a\x83\x5f\x77\x74\x23\xc1\x72\xfa\x3a\xa0", + "", 
"\x62\x5e\xfe\xdb\x8b\x7f\x1a\xa6\x22\x38\xa8\xf2", + "\xf5\x59\xb7\x0f\xe1\x14\x9c\xb3\x44\x06\xa2\xc7", + "\x94\x18\x0d\xdb\x7b\xb1\x99\x5a\xbe\x02\x19\xea\xb5\xce\x23\x2f", 1, + 0, 96 }, + { 1024, 128, 128, 213, + "\x7e\x5a\x39\xdc\xda\x7e\x06\x69\x88\xf1\x9a\xdf\x4d\xe4\xd5\x01", + "\x49\x43\x56\xc3\x45\x9d\x60\xe3\xa8\x34\x33\xc9\xbc\xf2\xc0\x45\x4a" + "\x76\x3e\x49\x6e\x4e\xc9\x9b\xfb\xe4\xbb\xb8\x3a\x4f\xda\x76\xb5\x42" + "\x21\x38\x99\xdc\xf5\x52\x1c\xd9\xbb\xbe\x5d\x11\x54\x5b\xda\x44\xa3" + "\xf4\xa6\x81\xce\x28\x43\xac\xea\x73\x0d\x83\xd3\x93\x0e\xa3\x09\x91" + "\xee\x1a\x68\xeb\xf6\xd1\xa5\xa4\x0f\x9b\x02\xa1\xaa\xb0\x91\x29\x8d" + "\xf8\xdd\x68\x9d\xc7\x61\x3b\xcb\xff\x94\xd3\x5f\x2c\xa4\x33\x77\xd8" + "\x16\x18\x56\x2b\xcf\x65\x73\x41\x1e\xc9\xbc\x97\xc5\xa6\x27\x6b\x55" + "\x40\x54\xc0\xfa\x78\x70\x73\xd0\x67", + "", "\xb0\x47\x29\xb4\xad\xba\xac\x63\xc2\xaa\xf8\xd8", + "\x52\x91\xdd\x4d\xa9\x1c\xcc\x2e\x77\x30\x6d\x83", + "\xa7\xf7\xb2\x1a\x3b\x7e\xce\x50\x9e\x92\x26\x47\xfd\x90\x5f\x06", 1, + 0, 96 }, + { 2056, 128, 128, 214, + "\xea\xc3\xf2\x8c\xd9\x37\xff\x29\xeb\x61\x58\xa3\x72\x1b\x51\x45", + "\x6f\xd2\x60\xbb\xa8\x73\x39\x53\x9c\x37\xdc\x68\xfd\xc3\x65\x6f\x63" + "\xc8\x30\x28\xcb\x8a\xdc\xb5\x31\x08\x5e\x98\xbd\x57\x0c\x6b\x73\x5d" + "\x0c\xc4\xb4\xb9\x24\x69\x60\x00\xa2\xd8\x93\x62\x1a\xe6\x4d\xcc\xe9" + "\x92\xb5\x62\xb8\x9a\x52\x85\x64\x3a\x08\xfe\xbc\xcb\xc5\x22\x43\xcb" + "\xfc\x8d\x45\x21\x2e\x04\x7b\x00\xc8\x7c\x6b\x6b\xf1\x75\xf8\xbb\x67" + "\x8e\xc5\x5c\x10\x91\x31\x5c\xbe\xcb\x8b\x85\x70\x0f\x4a\x46\x53\x62" + "\x3f\xb7\x8e\x63\xcf\xff\x7d\x62\x35\xe4\x8e\x98\x32\xc9\xf0\x71\x6d" + "\x10\x99\x2f\xc5\xb0\xad\x4e\x69\x72\xbb\xee\xb1\xad\x67\x0c\xd7\xec" + "\x8f\xac\x82\xe0\x7e\xa5\xa6\x4f\x97\x61\xa3\x97\x14\xaa\xa7\x3a\xff" + "\xd2\xcb\x19\x0a\x7a\xc2\xdf\x5e\x5d\xce\xa6\x81\x2a\xe2\xc8\x72\xc7" + "\xac\x70\x45\x3c\x5e\x7e\xc4\xd0\xb5\xb1\x8c\x6f\xf3\xbf\xb9\xae\x15" + "\xfe\xa4\x4c\xf3\x92\x61\x5b\x80\x03\x4e\xda\xe5\x96\xb8\x82\x1f\x97" + "\xfc\xa5\x8d\x16\x7f\xb4\x4a\x09\x3b\x0c\x00\x9a\x0b\xd5\x63\x13\x55" + "\xb0\xcb\x25\xd9\x3b\xa9\xb7\x9b\x00\x63\x01\xd9\x9d\xb6\x57\xe8\x01" + "\x93\x3f\xc2\x76\x4a\x0c\xe6\x50\xea\xf5\xa1\x29\x9e\xfe\x60\xcb\x53" + "\xb6\x34", + "", "\x09\x89\x12\xa3\x02\x77\x33\x77\xb9\xc2\x6a\xc3", + "\xe3\xbe\x94\x71\x53\xa2\x6a\x3a\x54\xe3\x01\x5c", + "\xfd\x04\x2b\xdd\xe2\x2f\x67\xc4\xfd\x29\x8d\x5d\xc0\x86\x76\x06", 1, + 0, 96 }, + { 256, 192, 128, 215, + "\x8f\x9e\xbc\x67\xa9\xa6\x43\x0c\x2b\x0c\xee\xaf\x98\x3e\x13\x56\x96" + "\x4b\xb9\x28\x63\x5b\x9c\xa4", + "\x36\xe4\xb3\x81\x57\x4d\x17\x1c\x77\x69\xa7\x88\xcb\xc1\x47\x22\x4f" + "\xab\xd8\xb7\x73\xf1\x6b\x8a\xe8\x4d\x8f\x26\x03\xaa\xa4\x40", + "", "\xa3\xa9\x6e\xe9\x4f\x94\xca\xa8\x1e\xbc\xd6\x6d", + "\x8c\x2a\x98\x23\xa3\xb3\xd4\x13\xbe\x69\x63\x87", + "\xfa\xaf\x01\xce\xb4\x0a\x7e\x14\x5e\x8f\xe6\x5a\xa9\xaf\x58\xc0", 1, + 0, 96 }, + { 512, 192, 128, 216, + "\xf4\xbb\xdf\xd0\x6f\x7f\xb1\x43\x48\x80\xe4\x16\x6d\x38\xd5\x6e\x02" + "\xa3\xf0\xdf\x0d\x53\x01\xce", + "\x90\x74\x3b\xd5\xd7\x94\xd5\x2a\xc8\x48\xb7\xe2\x38\x45\x45\xa2\x58" + "\x46\xac\xf1\x43\xbe\x84\xc0\xea\xd0\x43\x2f\xcf\x31\x72\x63\x1c\xf5" + "\x8d\x0c\xa7\x85\x71\xc0\x30\x53\xc1\xe1\xb8\x5e\xd7\x9c\xb5\x30\x3d" + "\x0e\x3a\x98\xff\x4f\x56\xc4\xf0\xa5\xeb\x4f\x0e\xac", + "", "\x39\xd2\xab\xe6\x69\x7f\x17\xec\x27\xf2\xa3\x9c", + "\xa6\x60\xea\x5b\xf0\x7a\x78\xfe\xa0\x12\x01\x73", + "\x74\x04\xfc\x7b\x73\x54\x69\x44\x28\x23\x6f\x20\x3c\x13\x02\x44", 1, + 0, 96 }, + { 1024, 192, 128, 217, + 
"\x17\x61\xc7\x77\x98\xef\x9c\xdf\xa4\x05\x53\xf3\x46\x14\xfe\x74\x02" + "\x21\x20\x87\xf0\x50\x94\x11", + "\xfb\xb3\xea\xb3\x79\xc9\xb8\x68\x9d\xc3\x0b\x07\x13\x69\x0e\x55\xd5" + "\x1c\x95\x6c\xa3\x6f\xbc\xc7\x3e\xee\xee\x16\xa4\x6d\x7c\x41\xa7\xa9" + "\x62\x6e\x68\xe2\x5d\x68\x5c\x00\x8c\x19\xd3\xb2\xb1\x79\x2b\xdc\x99" + "\xc3\x54\x41\xa6\xfc\xac\x35\xe0\xd6\x44\x6d\xd9\x14\xf5\x43\xab\xd9" + "\xec\xd6\xb0\xcb\x52\x01\xc2\x43\x02\x6c\x4f\x13\x64\x1d\x67\xc8\xd8" + "\xcd\x51\x14\xb6\xe1\x1e\xbb\xc6\xb1\xde\xe2\xa1\x8d\xb2\x15\x0a\x5a" + "\x57\x5d\xcd\x21\x64\x8e\x03\x37\xda\xdb\xcc\xd3\xde\xff\xd6\xd9\x79" + "\xe0\x3e\x6b\x9d\xdf\xee\x0a\xbd\xc2", + "", "\x35\xca\x4e\xb4\x63\xa2\x00\x01\x38\x21\x0b\x4d", + "\xf4\x00\x13\x2f\xf3\x8c\x04\xed\x74\x7d\xde\x34", + "\xca\x15\x34\xe7\xdd\x03\x36\xbb\xb3\x2a\x79\x83\x0c\x71\xa4\x47", 1, + 0, 96 }, + { 2056, 192, 128, 218, + "\xf7\x95\xec\xe7\xde\x18\x81\xfb\xc6\x84\x3e\xb7\x40\xf8\x12\xe4\x1e" + "\x3f\xc4\x9f\xf6\xc7\xb9\x40", + "\x35\x69\xfc\xa7\xc9\xd0\x6e\x2a\x03\xfe\xd1\xaa\xc2\x48\x4f\xd4\x41" + "\x6c\xa0\x7d\x55\xec\xbb\x33\x3e\xc6\x74\xf0\xea\x5c\x6e\x75\xa1\x0d" + "\xfb\x9c\x73\x8b\x69\xda\xb2\xed\xa1\x0a\xda\x72\x1a\x61\xc7\xf0\x2b" + "\x7e\x7f\x79\xe8\xa9\xe2\xdc\x36\xb3\xfd\xf6\x09\xe4\x36\x05\x4c\x82" + "\xa7\x74\xec\x61\x7d\xce\xec\x84\xa5\x77\x03\x7f\xf1\xa3\xf1\x20\xd9" + "\x81\x8d\x04\x20\x63\xac\xb3\x6c\x95\x84\xe8\x1e\xc9\x4f\x11\xf1\xee" + "\x24\x0f\x2e\x45\xe9\x44\x69\x4a\x9c\x8e\x53\x5a\xcb\xb0\x1d\x93\x95" + "\x84\x11\xcf\xf6\x8e\x3d\x32\xf8\x93\x17\x46\xa4\xa0\xce\xce\x65\xe9" + "\x3c\x51\xc7\x0b\x31\x11\x03\x4b\x68\x67\xb4\x07\xe0\x14\x7f\x97\xc5" + "\x76\xd3\xed\x8c\xec\x7e\x8e\xc2\x6e\x95\x64\x3e\x46\xe9\x7e\xa3\x59" + "\x5c\x9c\x31\x72\xb4\x85\x6f\x2d\x2b\x6d\xc8\x56\x46\x66\xdd\xac\x92" + "\xc7\x94\xff\xb2\xd4\xdc\x7f\x46\x17\x61\xf0\xe3\x26\x65\x0f\x48\xd3" + "\x27\x60\x4e\x09\x5b\xd8\x75\x40\x72\x11\x6c\x96\x36\x0d\x09\xf0\x10" + "\xac\x2f\x39\xeb\x96\xb2\x27\xf3\xd7\x38\xde\xb7\x56\xc8\x69\x94\x60" + "\xd8\x8c\xf7\x16\x17\x0a\xe1\x52\x67\xb1\x4f\x4a\x89\x16\x47\x20\xf1" + "\xc6\x02", + "", "\x22\xdb\xd8\x03\x7a\xa0\x5b\x14\xcf\x81\xdd\x23", + "\x13\xa9\x5a\x06\xc1\xbe\xd4\x84\x5a\xf9\xc7\x01", + "\x03\x37\x98\x36\xb0\xc8\x2f\x64\xa1\xbc\xcd\xcd\x76\x3a\xcb\xbc", 1, + 0, 96 }, + { 256, 256, 128, 219, + "\xee\x41\x71\x91\x7d\x23\x37\x49\x68\x12\xa2\x78\x4d\x6a\x71\x30\x0e" + "\x6b\x8c\x1a\xc3\xb1\xef\x58\xce\xe7\x7c\x22\x9a\xea\xf2\xc5", + "\xe8\x26\xa7\x93\x61\xf9\xd5\x82\xb6\x44\x50\xe3\xed\xc8\x25\x89\x48" + "\x78\x53\xd5\xb2\x2f\xea\xa0\xc8\x89\x87\x5b\xd0\xd8\x7c\xd4", + "", "\x94\xd2\xf8\x69\x7f\xac\xaa\xa1\x91\xba\x61\x7a", + "\xa2\x95\xc2\xcb\x27\xce\x23\xd2\x68\x74\xad\xe1", + "\x04\x65\x0a\x78\xbb\xb6\x1d\xb3\x37\xc9\xc3\x2a\xa3\xe7\xb6\xfa", 1, + 0, 96 }, + { 512, 256, 128, 220, + "\x13\x2c\x59\xb4\xbc\xb8\xaf\xb3\x16\x37\x73\x4a\x81\x10\x5b\xb2\xc9" + "\x87\x8f\x32\x0a\xce\x90\x76\xd5\xfd\x7c\x5d\x21\x6c\x8d\x12", + "\xec\x51\xee\x18\xcf\xb4\x68\x97\xd3\x66\x6c\x7d\xf3\x5c\x29\xca\x5d" + "\x89\x82\x41\xc4\xa3\x4f\x89\x3e\xb1\xdb\x5d\x5c\x6b\x76\xe2\x46\x17" + "\x45\x9d\x11\x53\x86\x81\x54\x43\x7a\x0e\x95\xaa\x3c\x26\xe9\x56\xb4" + "\x94\xa5\x2d\xd5\xac\x3b\x93\x31\x11\x6c\x7c\x77\x5f", + "", "\x12\xc7\xbe\x00\xfa\xcd\xa4\x95\x96\xe1\x91\x34", + "\x9c\xdc\xfc\x3a\xaa\x8d\x46\x6f\x25\x58\x8e\x4b", + "\x7e\x80\xf5\x1e\x71\x80\xf1\xcd\x3b\xa8\x43\x49\x88\x8f\xcd\x5c", 1, + 0, 96 }, + { 1024, 256, 128, 221, + "\x7b\x0b\x12\x49\x19\x01\xd6\x2d\x09\x7f\xa2\x6d\xc7\x1e\x15\xcf\xac" + 
"\xaf\xa3\x22\x67\x19\xe4\x71\x26\xd9\x9c\x79\xd9\x8e\xc2\x22", + "\x7d\x08\xb2\x26\xb4\xa5\xd0\x3f\x6f\x8c\xb3\xa3\xcb\x8d\x1c\xe3\x1b" + "\x05\x9d\xc5\x11\x23\x85\x27\x5e\x38\xa1\x5c\x97\xe0\xf2\x40\x22\xb2" + "\x49\xa5\xf7\x01\x9e\xa5\x77\x19\x8c\xb2\x6a\xc6\x4e\x82\xb2\xb0\x46" + "\x81\x53\x7c\x41\x98\x77\x5a\x52\x3b\x0e\x64\x94\xb8\x4f\xeb\xae\xf3" + "\x39\x9b\x35\xc2\x7b\x09\x69\xfa\x43\x57\x2b\xf5\x82\x7a\x76\x3a\xac" + "\x1a\xf6\x95\x26\xf3\x7e\x38\xac\xb5\xd3\x54\xf2\xb6\x84\x87\xf2\x75" + "\xf4\x36\x1e\xd3\x90\x73\xf7\xdd\x66\x53\xac\x17\xc0\x79\x41\x18\xa0" + "\xcf\x14\x32\x93\xac\x0b\xe6\x62\x29", + "", "\xc8\x03\x12\x59\x07\x00\xc3\xbb\xfa\xcd\x1a\x40", + "\x3f\x3c\x15\x1e\x98\x4d\x05\x94\x62\xf9\xe5\xa0", + "\xe5\x59\xf5\xf7\x55\xaa\x29\x21\x71\xcc\x35\xfb\xf9\x11\xa6\x4f", 1, + 0, 96 }, + { 2056, 256, 128, 222, + "\x3b\xc3\xbf\x39\xd0\xd5\xff\xd9\x4c\xca\x2b\x45\xc6\x78\xa2\xd0\x49" + "\x15\x1e\xd2\xba\xbc\x71\x3b\xe5\x3c\xb6\x6f\x54\xa1\x63\x37", + "\x92\xc2\xce\xe7\xe9\x13\x8b\x18\x6d\xa5\x1f\x14\x6f\xb2\x1f\xd5\xb4" + "\x91\xf1\xa1\x9e\xef\x61\xd4\xed\x14\xce\x6b\x21\xb0\x4f\xdb\x6f\xf8" + "\xeb\xb6\x0f\xdd\xc5\x59\x26\xe7\xbd\xa2\xa8\xf3\x5c\x61\x0b\xb7\x95" + "\x23\x24\x12\x73\x9d\x6c\x2d\x74\x45\x8e\xf5\xa1\xa1\xcd\xe9\xbf\x17" + "\xe4\x7e\x3b\x00\xdb\x0b\x05\x04\xd5\x6d\xc8\xb8\xd3\xde\x23\xf7\xc3" + "\xa5\xd5\x2e\x8d\x0a\xab\x1e\x64\x40\x5a\xaa\x85\x2e\xc2\xdd\x66\x7e" + "\xd9\xc1\xfd\x8d\xc1\xfd\xbb\xc8\x71\x2c\x7a\x38\xf3\x0f\xae\xab\x59" + "\x4f\x33\x89\x7b\x41\xb1\x72\x0f\x3c\x2f\x95\x4e\xd9\x1c\xa4\x50\xd8" + "\x2c\x3d\xcd\x35\x85\x8c\x60\x8a\xd4\x2f\x36\x83\x2e\x56\xb0\x48\x21" + "\xa1\x32\xf7\x2e\x0d\xa7\xb6\x2c\xbd\x39\x25\x25\x0f\x64\xfb\xb3\xf5" + "\xc4\x78\x34\x95\x89\x30\x97\xad\xc0\x9a\x32\xd7\x76\xe0\x4b\xf7\x25" + "\x58\xd3\x78\x30\xb3\x72\x34\x1f\x65\x36\xd8\xee\x9d\xf4\xa8\x2e\x40" + "\x74\xe7\x77\x4a\xb6\x91\x7a\x04\xfa\x8c\x49\x9e\xb4\xb4\x6a\x92\xde" + "\xf3\x65\xda\x8b\x5e\xb1\xe0\xb4\x38\x77\x95\x07\xd1\xf5\x27\x2a\x6e" + "\x86\x29\xa3\xf9\xc7\xbd\x48\x62\xc5\x69\x1e\xe8\xb5\x6b\xfe\x29\x2d" + "\xeb\x4e", + "", "\x81\x25\xee\x76\x37\xd7\xd0\xe0\x3b\xba\xcf\x35", + "\x54\x96\xae\x94\xc3\x32\x2e\xbf\x95\x9e\xa9\xa9", + "\x70\x71\x7c\xc0\x0f\xd1\xff\xa5\x9b\xb0\x43\x29\x22\x6a\x0c\x0a", 1, + 0, 96 }, + { 0, 128, 128, 223, + "\x8f\x3f\x52\xe3\xc7\x5c\x58\xf5\xcb\x26\x1f\x51\x8f\x4a\xd3\x0a", + "", "", "", "", + "\xcf\x71\x97\x8f\xfc\xc7\x78\xf3\xc8\x5a\xc9\xc3\x1b\x6f\xe1\x91", 0, + 0, 0 }, + { 0, 128, 128, 224, + "\x2a\x4b\xf9\x0e\x56\xb7\x0f\xdd\x86\x49\xd7\x75\xc0\x89\xde\x3b", + "", "", + "\x32\x4c\xed\x6c\xd1\x5e\xcc\x5b\x37\x41\x54\x1e\x22\xc1\x8a\xd9", + "\x00\xa2\x9f\x0a\x5e\x2e\x74\x90\x27\x9d\x1f\xaf\x8b\x88\x1c\x7b", + "\xa2\xc7\xe8\xd7\xa1\x9b\x88\x4f\x74\x2d\xfe\xc3\xe7\x6c\x75\xee", 0, + 0, 128 }, + { 0, 192, 128, 225, + "\x0b\x18\xd2\x13\x37\x03\x5c\x7b\xaa\x08\x21\x1b\x70\x2f\xa7\x80\xac" + "\x7c\x09\xbe\x8f\x9e\xd1\x1f", + "", "", "", "", + "\xca\x69\xa2\xeb\x3a\x09\x6e\xa3\x6b\x10\x15\xd5\xdf\xff\xf5\x32", 0, + 0, 0 }, + { 0, 192, 128, 226, + "\xba\x76\xd5\x94\xa6\xdf\x91\x5b\xb7\xab\x7e\x6d\x1a\x8d\x02\x4b\x27" + "\x96\x33\x6c\x1b\x83\x28\xa9", + "", "", + "\xd6\x2f\x30\x27\x42\xd6\x1d\x82\x3e\xa9\x91\xb9\x34\x30\xd5\x89", + "\x50\x9b\x06\x58\xd0\x9f\x7a\x5b\xb9\xdb\x43\xb7\x0c\x83\x87\xf7", + "\x2c\x94\x88\xd5\x3a\x0b\x2b\x53\x08\xc2\x75\x7d\xfa\xc7\x21\x9f", 0, + 0, 128 }, + { 0, 256, 128, 227, + "\x3f\x8c\xa4\x7b\x9a\x94\x05\x82\x64\x4e\x8e\xcf\x9c\x2d\x44\xe8\x13" + 
"\x83\x77\xa8\x37\x9c\x5c\x11\xaa\xfe\x7f\xec\x19\x85\x6c\xf1", + "", "", "", "", + "\x17\x26\xaa\x69\x5f\xba\xa2\x1a\x1d\xb8\x84\x55\xc6\x70\xa4\xb0", 0, + 0, 0 }, + { 0, 256, 128, 228, + "\x76\x60\xd1\x09\x66\xc6\x50\x39\x03\xa5\x52\xdd\xe2\xa8\x09\xed\xe9" + "\xda\x49\x0e\x5e\x5c\xc3\xe3\x49\xda\x99\x96\x71\x80\x98\x83", + "", "", + "\xc3\x14\x23\x53\x41\xde\xbf\xaf\xa1\x52\x6b\xb6\x10\x44\xa7\xf1", + "\x77\x72\xea\x35\x89\x01\xf5\x71\xd3\xd3\x5c\x19\x49\x76\x39\xd9", + "\x8f\xe0\x52\x0a\xd7\x44\xa1\x1f\x0c\xcf\xd2\x28\x45\x43\x63\xfa", 0, + 0, 128 }, + { 8, 128, 128, 229, + "\x59\xa2\x84\xf5\x0a\xed\xd8\xd3\xe2\xa9\x16\x37\xd3\x81\x55\x79", + "\x80", "", "", "", + "\xaf\x49\x8f\x70\x1d\x24\x70\x69\x5f\x6e\x7c\x83\x27\xa2\x39\x8b", 1, + 0, 0 }, + { 8, 128, 128, 230, + "\xfe\xc5\x8a\xa8\xcf\x06\xbf\xe0\x5d\xe8\x29\xf2\x7e\xc7\x76\x93", + "\x9d", "", + "\xf2\xd9\x9a\x9f\x89\x33\x78\xe0\x75\x7d\x27\xc2\xe3\xa3\x10\x1b", + "\x0a\x24\x61\x2a\x9d\x1c\xbe\x96\x7d\xbf\xe8\x04\xbf\x84\x40\xe5", + "\x96\xe6\xfd\x2c\xdc\x70\x7e\x3e\xe0\xa1\xc9\x0d\x34\xc9\xc3\x6c", 1, + 0, 128 }, + { 16, 128, 128, 231, + "\x88\xa9\x72\xcc\xe9\xea\xf5\xa7\x81\x3c\xe8\x14\x9d\x0c\x1d\x0e", + "\x0f\x2f", "", "", "", + "\x4c\xcf\x1e\xfb\x4d\xa0\x5b\x4a\xe4\x45\x2a\xea\x42\xf5\x42\x4b", 1, + 0, 0 }, + { 16, 128, 128, 232, + "\xb4\x39\x67\xee\x93\x3e\x46\x32\xbd\x65\x62\xba\x12\x01\xbf\x83", + "\x87\x60", "", + "\x5a\x6a\xd6\xdb\x70\x59\x1d\x1e\x52\x0b\x01\x22\xf0\x50\x21\xa0", + "\xba\x3e\x7f\x8b\x29\x99\x99\x5c\x7f\xc4\x00\x6c\xa4\xf4\x75\xff", + "\x98\xf4\x7a\x52\x79\xce\xbb\xca\xc2\x14\x51\x57\x10\xf6\xcd\x8a", 1, + 0, 128 }, + { 32, 128, 128, 233, + "\x4e\x9a\x97\xd3\xed\x54\xc7\xb5\x46\x10\x79\x3a\xb0\x50\x52\xe1", + "\xcc\x85\x19\x57", "", "", "", + "\xe5\x74\xb3\x55\xbd\xa2\x98\x0e\x04\x7e\x58\x4f\xeb\x16\x76\xca", 1, + 0, 0 }, + { 32, 128, 128, 234, + "\xd8\x3c\x1d\x7a\x97\xc4\x3f\x18\x24\x09\xa4\xaa\x56\x09\xc1\xb1", + "\x7b\x5f\xae\xb2", "", + "\xc8\xf0\x7b\xa1\xd6\x55\x54\xa9\xbd\x40\x39\x0c\x30\xc5\x52\x9c", + "\x1b\x84\xba\xea\x9d\xf1\xe6\x5b\xee\x7b\x49\xe4\xa8\xcd\xa1\xec", + "\x5c\x0b\xb7\x9d\x82\x40\x04\x1e\xdc\xe0\xf9\x4b\xd4\xbb\x38\x4f", 1, + 0, 128 }, + { 48, 128, 128, 235, + "\xc6\xa7\x05\x67\x7a\xff\xb4\x9e\x27\x6d\x95\x11\xca\xa4\x61\x45", + "\x4a\xd8\x0c\x28\x54\xfb", "", "", "", + "\x1e\x2e\xd7\x2a\xf5\x90\xca\xfb\x86\x47\xd1\x85\x86\x5f\x54\x63", 1, + 0, 0 }, + { 48, 128, 128, 236, + "\xeb\xa7\x69\x9b\x56\xcc\x0a\xa2\xf6\x6a\x2a\x5b\xe9\x94\x44\x13", + "\xd1\xda\xfc\x8d\xe3\xe3", "", + "\xd0\x21\xe5\x3d\x90\x98\xa2\xdf\x3d\x6b\x90\x3c\xda\xd0\xcd\x9c", + "\x18\x29\x1a\xa8\xdc\x7b\x07\x44\x8a\xa8\xf7\x1b\xb8\xe3\x80\xbf", + "\x9c\x0e\x22\xe5\xc4\x1b\x10\x39\xff\x56\x61\xff\xae\xfa\x8e\x0f", 1, + 0, 128 }, + { 8, 192, 128, 237, + "\xc7\x0c\xe3\x8e\x84\xe5\xf5\x3e\xd4\x1c\x3f\x0d\x2c\xa4\x93\x41\x2a" + "\xd3\x2c\xb0\x4c\x6e\x2e\xfa", + "\xcb", "", "", "", + "\x08\xd9\x6e\xdb\x5e\x22\x87\x4c\xd1\x0c\xb2\x25\x6c\xa0\x4b\xc6", 1, + 0, 0 }, + { 8, 192, 128, 238, + "\x74\xc8\x16\xb8\x3d\xfd\x28\x72\x10\xa3\xe2\xc6\xda\x8d\x30\x53\xbb" + "\xfb\xd9\xb1\x56\xd3\xfd\xd8", + "\x0f", "", + "\xf2\xb7\xb2\xc9\xb3\x12\xcf\x2a\xf7\x8f\x00\x3d\xf1\x5c\x8e\x19", + "\x6c\x5e\x79\x6b\xa9\xa3\xdd\xc6\x4f\x40\x1e\x68\xd1\x35\x10\x1d", + "\x96\xa1\x32\xed\x43\x92\x4e\x98\xfe\xb8\x88\xff\x68\x2b\xda\xef", 1, + 0, 128 }, + { 16, 192, 128, 239, + "\xcb\xf4\x5b\xa4\x88\x93\x2a\xea\x1a\x10\xe5\x86\x2f\x92\xe4\xa7\xe2" + "\x77\xbd\xa9\xf3\x4a\xf6\xd0", + "\x75\xe5", "", "", "", + 
"\x1f\x0d\x23\x07\x0f\xcd\x74\x8e\x25\xbf\x64\x54\xf5\xc9\x13\x6e", 1, + 0, 0 }, + { 16, 192, 128, 240, + "\xe1\xc0\x44\x6f\x11\xae\x6a\xa4\xfa\x25\x4f\x9a\x84\x6f\xc6\xe1\x3e" + "\x45\xe5\x37\xe4\x7f\x20\x42", + "\x89\x89", "", + "\x3a\x2f\x5a\xd0\xeb\x21\x6e\x54\x6e\x0b\xca\xa3\x77\xb6\xcb\xc7", + "\x55\x0b\x48\xa4\x3e\x82\x1f\xd7\x6f\x49\xf0\xf1\xa8\x97\xae\xad", + "\xf6\xe0\xa9\x79\x48\x1f\x99\x57\xdd\xad\x0f\x21\xa7\x77\xa7\x3a", 1, + 0, 128 }, + { 32, 192, 128, 241, + "\x56\x75\x63\xbf\x4c\xf1\x54\x90\x22\x75\xa5\x3b\xc5\x7c\xd6\xdd\x7b" + "\x37\x0d\x27\x01\x1b\xda\xc8", + "\x68\xd7\xfc\x38", "", "", "", + "\x14\x75\x56\x3e\x32\x12\xf3\xb5\xe4\x00\x62\x56\x9a\xfd\x71\xe3", 1, + 0, 0 }, + { 32, 192, 128, 242, + "\x83\x4d\x0b\xb6\x01\x17\x08\x65\xa7\x81\x39\x42\x8a\x15\x03\x69\x5a" + "\x6a\x29\x1e\xbd\x74\x7c\xd1", + "\xbb\x9d\x2a\xa3", "", + "\x6f\x79\xe1\x8b\x4a\xcd\x5a\x03\xd3\xa5\xf7\xe1\xa8\xd0\xf1\x83", + "\x30\x91\x33\xe7\x61\x59\xfe\x8a\x41\xb2\x08\x43\x48\x65\x11\xab", + "\x03\xab\x26\x99\x3b\x70\x19\x10\xa2\xe8\xec\xcc\xd2\xba\x9e\x52", 1, + 0, 128 }, + { 48, 192, 128, 243, + "\x99\xfb\x18\xf5\xba\x43\x0b\xb9\xea\x94\x29\x68\xec\xb7\x99\xb4\x34" + "\x06\xe1\xaf\x4b\x64\x25\xa1", + "\xa9\x84\xbd\xcd\xca\xe2", "", "", "", + "\xd7\xb9\xa6\xb5\x8a\x97\x98\x29\x16\xe8\x32\x19\xfb\xf7\x1b\x1e", 1, + 0, 0 }, + { 48, 192, 128, 244, + "\xb7\x7b\x24\x2a\xa0\xd5\x1c\x92\xfd\xa0\x13\xe0\xcb\x0e\xf2\x43\x73" + "\x99\xac\xe5\xd3\xf5\x07\xe4", + "\x52\xaa\x01\xe0\xd0\xd6", "", + "\x4b\xa5\x41\xa9\x91\x47\x29\x21\x61\x53\x80\x13\x40\xab\x17\x79", + "\xe0\x82\x61\xe4\x6e\xaf\x90\xd9\x78\xea\x8f\x78\x89\xbc\xcd\x4f", + "\xc0\x52\xa5\x5d\xf3\x92\x6a\x50\x99\x0a\x53\x2e\xfe\x3d\x80\xec", 1, + 0, 128 }, + { 64, 192, 128, 245, + "\xd7\x45\x99\xb3\xd2\xdb\x81\x65\x3d\xe4\x3b\x52\xfc\x99\x4c\x50\xd0" + "\xbe\x75\x9f\xab\x87\xc3\x3a", + "\xd1\xc6\x1c\xf8\x53\x25\x31\xb5", "", "", "", + "\xf9\x4f\x20\x49\xa6\x56\x0c\x47\x0b\x3a\x7c\xa7\xbb\xc3\x1a\x3d", 1, + 0, 0 }, + { 64, 192, 128, 246, + "\x0b\x17\x71\x98\xc8\xb4\x19\xbf\x74\xac\xc3\xbc\x65\xb5\xfb\x3d\x09" + "\xa9\x15\xff\x71\xad\xd7\x54", + "\x8f\x07\x5c\xbc\xda\x98\x31\xc3", "", + "\xc4\xb1\xe0\x5c\xa3\xd5\x91\xf9\x54\x3e\x64\xde\x3f\xc6\x82\xac", + "\x3c\x6e\xc0\xab\x1b\x82\x7b\xf2\x38\xa5\x38\x4f\xb7\xe2\x12\xce", + "\x7d\xb7\x40\x22\x24\xfd\x58\x3e\x31\x2b\xc0\xe6\x1c\xf1\x13\x66", 1, + 0, 128 }, + { 8, 256, 128, 247, + "\x8f\x9a\x38\xc1\x01\x49\x66\xe4\xd9\xae\x73\x61\x39\xc5\xe7\x9b\x99" + "\x34\x58\x74\xf4\x2d\x4c\x7d\x2c\x81\xaa\x67\x97\xc4\x17\xc0", + "\xa9", "", "", "", + "\x2a\x26\x8b\xf3\xa7\x5f\xd7\xb0\x0b\xa2\x30\xb9\x04\xbb\xb0\x14", 1, + 0, 0 }, + { 8, 256, 128, 248, + "\x14\x4c\xd8\x27\x92\x29\xe8\xbb\x2d\xe9\x9d\x24\xe6\x15\x30\x66\x63" + "\x91\x3f\xe9\x17\x7f\xcd\x27\x0f\xaf\xec\x49\x3d\x43\xbc\xa1", + "\xb3", "", + "\x97\x62\x29\xf5\x53\x8f\x96\x36\x47\x6d\x69\xf0\xc3\x28\xe2\x9d", + "\x7b\xea\x30\xec\xc2\xf7\x3f\x8e\x12\x12\x63\xb3\x79\x66\x95\x4c", + "\x8b\xba\xd4\xad\xc5\x4b\x37\xa2\xb2\xf0\xf6\xe8\x61\x75\x48\xc9", 1, + 0, 128 }, + { 16, 256, 128, 249, + "\x7d\x31\x86\x1f\x9d\x35\x36\xe1\x40\x16\xa3\x21\x6b\x10\x42\xe0\xd2" + "\xf7\xd4\x61\x43\x14\x26\x8b\x6f\x83\x4e\xc7\xf3\x8b\xbb\x65", + "\xc3\x32", "", "", "", + "\x1d\x97\x8a\x69\x31\x20\xc1\x1f\x6d\x51\xa3\xed\x88\xcd\x4a\xce", 1, + 0, 0 }, + { 16, 256, 128, 250, + "\x22\xb3\x5f\xe9\x62\x3e\xe1\x1f\x8b\x60\xb6\xd2\x2d\xb3\x76\x5b\x66" + "\x6e\xd9\x72\xfa\x7c\xcd\x92\xb4\x5f\x22\xde\xee\x02\xca\xb1", + "\xda\x6c", "", + 
"\x53\x41\xc7\x8e\x4c\xe5\xbf\x8f\xbc\x3e\x07\x7d\x19\x90\xdd\x5d", + "\x9c\x39\xf5\xb1\x10\x36\x1e\x9a\x77\x0c\xc5\xe8\xb0\xf4\x44\xbb", + "\xb6\x3f\xf4\x3c\x12\x07\x3e\xc5\x57\x2b\x1b\xe7\x0f\x17\xe2\x31", 1, + 0, 128 }, + { 32, 256, 128, 251, + "\xc2\x24\xe0\xbb\xa3\xd7\xa9\x91\x65\xf7\x99\x6b\x67\xa0\xfc\xe3\xe1" + "\x2f\x2c\x01\x17\x9b\x19\x7b\x69\xb7\xe6\x28\xbc\xa9\x20\x96", + "\x6b\x30\x14\x5e", "", "", "", + "\xae\x6f\x7c\x9a\x29\xf0\xd8\x20\x4c\xa5\x0b\x14\xa1\xe0\xdc\xf2", 1, + 0, 0 }, + { 32, 256, 128, 252, + "\x09\x3e\xb1\x23\x43\x53\x7e\xe8\xe9\x1c\x1f\x71\x5b\x86\x26\x03\xf8" + "\xda\xf9\xd4\xe1\xd7\xd6\x72\x12\xa9\xd6\x8e\x5a\xac\x93\x58", + "\x51\x10\x60\x4c", "", + "\x33\xef\xb5\x8c\x91\xe8\xc7\x02\x71\x87\x0e\xc0\x0f\xe2\xe2\x02", + "\xf7\x3f\x72\xf9\x76\xa2\x96\xba\x3c\xa9\x4b\xc6\xeb\x08\xcd\x46", + "\xb8\x24\xc3\x3c\x13\xf2\x89\x42\x96\x59\xaa\x01\x7c\x63\x2f\x71", 1, + 0, 128 }, + { 48, 256, 128, 253, + "\x98\xe6\xf8\xab\x67\x3e\x80\x4e\x86\x5e\x32\x40\x3a\x65\x51\xbf\x80" + "\x7a\x95\x93\x43\xc6\x0d\x34\x55\x93\x60\xbc\x29\x5e\xcb\x5b", + "\xd4\xd8\x57\x51\x08\x88", "", "", "", + "\x3d\xb1\x67\x25\xfa\xfc\x82\x8d\x41\x4a\xb6\x1c\x16\xa6\xc3\x8f", 1, + 0, 0 }, + { 48, 256, 128, 254, + "\x0b\xd0\xe8\xe7\x78\x11\x66\xe1\xd8\x76\xde\xc8\xfa\xd3\x4b\xa9\x5b" + "\x03\x2a\x27\xca\xc0\x55\x15\x95\x11\x60\x91\x00\x59\x47\xb7", + "\x1b\xdc\xd4\x4b\x66\x3e", "", + "\x91\x22\x22\x63\xb1\x2c\xf5\x61\x6a\x04\x9c\xbe\x29\xab\x9b\x5b", + "\xed\x46\x3f\x4f\x43\x33\x6a\xf3\xf4\xd7\xe0\x87\x70\x20\x11\x45", + "\xc8\xfc\x39\x90\x6a\xca\x0c\x64\xe1\x4a\x43\xff\x75\x0a\xbd\x8a", 1, + 0, 128 }, + { 64, 256, 128, 255, + "\x61\xba\x69\x48\x97\x92\x5d\x1b\x41\x74\xd4\x04\x01\x46\x9c\x3e\xf2" + "\x67\xcd\xb9\xf8\x29\xed\xb1\xa1\x06\x18\xc1\x6d\x66\x60\x59", + "\x0d\x10\xc5\xc8\x4b\x88\xd6\x88", "", "", "", + "\x13\x11\xf9\xf8\x30\xd7\x29\xc1\x89\xb7\x4e\xc4\xf9\x08\x0f\xa1", 1, + 0, 0 }, + { 64, 256, 128, 256, + "\x11\x58\x84\xf6\x93\xb1\x55\x56\x3e\x9b\xfb\x3b\x07\xca\xcb\x2f\x7f" + "\x7c\xaa\x9b\xfe\x51\xf8\x9e\x23\xfe\xb5\xa9\x46\x8b\xfd\xd0", + "\x04\x10\x21\x99\xef\x21\xe1\xdf", "", + "\x82\xe3\xe6\x04\xd2\xbe\x8f\xca\xb7\x4f\x63\x8d\x1e\x70\xf2\x4c", + "\x7e\x0d\xd6\xc7\x2a\xec\x49\xf8\x9c\xc6\xa8\x00\x60\xc0\xb1\x70", + "\xaf\x68\xa3\x7c\xfe\xfe\xcc\x4a\xb9\x9b\xa5\x0a\x53\x53\xed\xca", 1, + 0, 128 }, + { 0, 0, 0, 0, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0 } +}; diff --git a/test/wycheproof/chacha20_poly1305_test.json.c b/test/wycheproof/chacha20_poly1305_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..9a30de045ba3f460e8bf11d1b03075d0968a2753 --- /dev/null +++ b/test/wycheproof/chacha20_poly1305_test.json.c @@ -0,0 +1,4058 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* CHACHA20-POLY1305, 0.8r12 */ +#include "aead_test.h" +const struct aead_test chacha20_poly1305_test_json[] = { + { 96, 256, 128, 1, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x07\x00\x00\x00\x40\x41\x42\x43\x44\x45\x46\x47", + "\x50\x51\x52\x53\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7", + "\x4c\x61\x64\x69\x65\x73\x20\x61\x6e\x64\x20\x47\x65\x6e\x74\x6c\x65" + "\x6d\x65\x6e\x20\x6f\x66\x20\x74\x68\x65\x20\x63\x6c\x61\x73\x73\x20" + "\x6f\x66\x20\x27\x39\x39\x3a\x20\x49\x66\x20\x49\x20\x63\x6f\x75\x6c" + "\x64\x20\x6f\x66\x66\x65\x72\x20\x79\x6f\x75\x20\x6f\x6e\x6c\x79\x20" + "\x6f\x6e\x65\x20\x74\x69\x70\x20\x66\x6f\x72\x20\x74\x68\x65\x20\x66" + "\x75\x74\x75\x72\x65\x2c\x20\x73\x75\x6e\x73\x63\x72\x65\x65\x6e\x20" + "\x77\x6f\x75\x6c\x64\x20\x62\x65\x20\x69\x74\x2e", + "\xd3\x1a\x8d\x34\x64\x8e\x60\xdb\x7b\x86\xaf\xbc\x53\xef\x7e\xc2\xa4" + "\xad\xed\x51\x29\x6e\x08\xfe\xa9\xe2\xb5\xa7\x36\xee\x62\xd6\x3d\xbe" + "\xa4\x5e\x8c\xa9\x67\x12\x82\xfa\xfb\x69\xda\x92\x72\x8b\x1a\x71\xde" + "\x0a\x9e\x06\x0b\x29\x05\xd6\xa5\xb6\x7e\xcd\x3b\x36\x92\xdd\xbd\x7f" + "\x2d\x77\x8b\x8c\x98\x03\xae\xe3\x28\x09\x1b\x58\xfa\xb3\x24\xe4\xfa" + "\xd6\x75\x94\x55\x85\x80\x8b\x48\x31\xd7\xbc\x3f\xf4\xde\xf0\x8e\x4b" + "\x7a\x9d\xe5\x76\xd2\x65\x86\xce\xc6\x4b\x61\x16", + "\x1a\xe1\x0b\x59\x4f\x09\xe2\x6a\x7e\x90\x2e\xcb\xd0\x60\x06\x91", 1, + 96, 912 }, + { 96, 256, 128, 2, + "\x80\xba\x31\x92\xc8\x03\xce\x96\x5e\xa3\x71\xd5\xff\x07\x3c\xf0\xf4" + "\x3b\x6a\x2a\xb5\x76\xb2\x08\x42\x6e\x11\x40\x9c\x09\xb9\xb0", + "\x4d\xa5\xbf\x8d\xfd\x58\x52\xc1\xea\x12\x37\x9d", "", "", "", + "\x76\xac\xb3\x42\xcf\x31\x66\xa5\xb6\x3c\x0c\x0e\xa1\x38\x3c\x8d", 1, + 0, 0 }, + { 96, 256, 128, 3, + "\x7a\x4c\xd7\x59\x17\x2e\x02\xeb\x20\x4d\xb2\xc3\xf5\xc7\x46\x22\x7d" + "\xf5\x84\xfc\x13\x45\x19\x63\x91\xdb\xb9\x57\x7a\x25\x07\x42", + "\xa9\x2e\xf0\xac\x99\x1d\xd5\x16\xa3\xc6\xf6\x89", + "\xbd\x50\x67\x64\xf2\xd2\xc4\x10", "", "", + "\x90\x6f\xa6\x28\x4b\x52\xf8\x7b\x73\x59\xcb\xaa\x75\x63\xc7\x09", 1, + 64, 0 }, + { 96, 256, 128, 4, + "\xcc\x56\xb6\x80\x55\x2e\xb7\x50\x08\xf5\x48\x4b\x4c\xb8\x03\xfa\x50" + "\x63\xeb\xd6\xea\xb9\x1f\x6a\xb6\xae\xf4\x91\x6a\x76\x62\x73", + "\x99\xe2\x3e\xc4\x89\x85\xbc\xcd\xee\xab\x60\xf1", "", "\x2a", + "\x3a", + "\xca\xc2\x7d\xec\x09\x68\x80\x1e\x9f\x6e\xde\xd6\x9d\x80\x75\x22", 1, + 0, 8 }, + { 96, 256, 128, 5, + "\x46\xf0\x25\x49\x65\xf7\x69\xd5\x2b\xdb\x4a\x70\xb4\x43\x19\x9f\x8e" + "\xf2\x07\x52\x0d\x12\x20\xc5\x5e\x4b\x70\xf0\xfd\xa6\x20\xee", + "\xab\x0d\xca\x71\x6e\xe0\x51\xd2\x78\x2f\x44\x03", 
+ "\x91\xca\x6c\x59\x2c\xbc\xca\x53", "\x51", "\xc4", + "\x16\x83\x10\xca\x45\xb1\xf7\xc6\x6c\xad\x4e\x99\xe4\x3f\x72\xb9", 1, + 64, 8 }, + { 96, 256, 128, 6, + "\x2f\x7f\x7e\x4f\x59\x2b\xb3\x89\x19\x49\x89\x74\x35\x07\xbf\x3e\xe9" + "\xcb\xde\x17\x86\xb6\x69\x5f\xe6\xc0\x25\xfd\x9b\xa4\xc1\x00", + "\x46\x1a\xf1\x22\xe9\xf2\xe0\x34\x7e\x03\xf2\xdb", "", "\x5c\x60", + "\x4d\x13", + "\x91\xe8\xb6\x1e\xfb\x39\xc1\x22\x19\x54\x53\x07\x7b\x22\xe5\xe2", 1, + 0, 16 }, + { 96, 256, 128, 7, + "\xc8\x83\x3d\xce\x5e\xa9\xf2\x48\xaa\x20\x30\xea\xcf\xe7\x2b\xff\xe6" + "\x9a\x62\x0c\xaf\x79\x33\x44\xe5\x71\x8f\xe0\xd7\xab\x1a\x58", + "\x61\x54\x6b\xa5\xf1\x72\x05\x90\xb6\x04\x0a\xc6", + "\x88\x36\x4f\xc8\x06\x05\x18\xbf", "\xdd\xf2", "\xb6\x0d", + "\xea\xd0\xfd\x46\x97\xec\x2e\x55\x58\x23\x77\x19\xd0\x24\x37\xa2", 1, + 64, 16 }, + { 96, 256, 128, 8, + "\xbd\x8e\xd7\xfb\x0d\x60\x75\x22\xf0\x4d\x0b\x12\xd4\x2c\x92\x57\x0b" + "\xcc\xc5\xba\x24\x86\x95\x3d\x70\xba\x2e\x81\x93\xf6\x22\x5a", + "\xd2\xab\x0a\xbb\x50\xa8\xe9\xfb\xa2\x54\x29\xe1", "", + "\x20\x12\x21", "\x3c\xf4\x70", + "\xa2\x7a\x69\xc9\xd7\xee\x84\x58\x6f\x11\x38\x8c\x68\x84\xe6\x3a", 1, + 0, 24 }, + { 96, 256, 128, 9, + "\x1c\x8b\x59\xb1\x7a\x5c\xec\xed\x31\xbd\xe9\x7d\x4c\xef\xd9\xaa\xaa" + "\x63\x36\x2e\x09\x6e\x86\x3e\xc1\xc8\x95\x80\xbc\xa7\x9b\x7a", + "\x94\xf3\x2a\x6d\xff\x58\x8f\x2b\x5a\x2e\xad\x45", + "\x6c\x8c\xf2\xab\x38\x20\xb6\x95", "\x45\x3f\x95", "\x61\x09\x25", + "\xa8\xa7\x88\x3e\xb7\xe4\x0b\xc4\x0e\x2e\x59\x22\xae\x95\xdd\xc3", 1, + 64, 24 }, + { 96, 256, 128, 10, + "\xe4\x91\x2c\xb7\x5a\x11\x74\x34\x5f\x1a\x45\x73\x66\xf1\x88\x85\xfe" + "\x84\x60\xb0\x64\x78\xe0\x4b\xe2\xf7\xfb\x4e\xc9\xc1\x13\xe5", + "\x7a\xa5\xad\x8b\xf5\x25\x47\x62\x17\x1e\xc8\x69", "", + "\x9e\x4c\x1d\x03", "\xfe\x68\x49\xaa", + "\x99\xad\x07\x87\x1b\x25\xc2\x7d\xef\xc3\x1a\x54\x1b\xd5\xc4\x18", 1, + 0, 32 }, + { 96, 256, 128, 11, + "\xe0\x57\x77\xef\x3d\x98\x9a\xce\x7d\x2a\xbf\xba\x45\x2b\xfd\xed\x54" + "\x80\x1d\xbd\x5c\x66\xe9\x1c\x0c\x2e\xf0\x04\x79\xd8\x55\x72", + "\xb7\xf5\x26\xe3\xfd\x71\xcf\x57\x20\x96\x1a\xec", + "\x15\xd9\x3a\x96\xd0\xe6\xc5\xa9", "\x17\xbf\xda\x03", + "\xf4\x71\x0e\x51", + "\xb9\x57\xc6\xa3\x7b\x6a\x4c\x94\x99\x6c\x00\x21\x86\xd6\x3b\x2b", 1, + 64, 32 }, + { 96, 256, 128, 12, + "\x1a\x4c\x4f\x39\xab\xe8\x90\xe6\x23\x45\xc9\x47\xbc\xf7\xde\x7c\x2e" + "\x33\xbd\x5c\xee\xda\x0a\x0a\xbf\x0e\x7e\xf9\x35\xdd\xf3\xee", + "\x94\x47\xbf\x85\xd5\xb9\x7d\x8a\xee\x0f\x8e\x51", "", + "\xc1\x5a\x59\x3b\xd0", "\xf7\x11\x64\x7f\xf1", + "\x22\xb1\x2d\xc3\x8c\xb7\x96\x29\xf8\x4c\xdb\xdc\x24\x25\xc0\x9d", 1, + 0, 40 }, + { 96, 256, 128, 13, + "\x80\x0e\x9a\x24\x79\x17\x00\xc9\x60\x97\x36\x69\x5b\xa2\xa8\xb9\x9b" + "\x2d\x57\xf1\xc3\xbf\xb6\x1e\xd4\x9d\xb1\xc6\xc5\x21\x95\x83", + "\x3d\xbe\x87\x6b\xd8\x80\xec\x8e\xa2\x01\x70\x43", + "\x96\x22\x48\x35\x61\x0b\x78\x2b", "\xa7\xbf\xd0\x41\xe3", + "\xd1\x71\xf0\x46\xea", + "\xd1\x79\xb1\xb9\xc4\x18\x43\x78\xdf\x00\x90\x19\xdb\xb8\xc2\x49", 1, + 64, 40 }, + { 96, 256, 128, 14, + "\x20\x8c\x2c\x37\x6c\x94\x30\x43\x3d\xb2\x0e\x1a\x6b\x7b\xa8\x17\xf8" + "\xff\xbf\xa6\x82\x7f\x26\x75\x9c\xce\xde\x42\xe5\x91\xd3\xec", + "\x27\xfb\x58\xec\x6a\x21\xe8\x46\x96\xcb\x88\x30", "", + "\xaf\x10\x4b\x5c\xcd\x0e", "\x93\x51\xb1\xb1\xb0\x82", + "\x56\x07\x85\x50\x9f\x60\xf2\x6b\x68\x19\x33\xd9\xcd\xbf\xd2\x9f", 1, + 0, 48 }, + { 96, 256, 128, 15, + "\x2e\xb1\x68\xe5\x3b\x07\xab\x04\x35\x5e\xa7\x92\xfe\x11\xa6\xbe\x2c" + "\xe9\xc3\x9c\xfe\x15\xa9\x97\x07\x6b\x1e\x38\xc1\x7a\xd6\x20", + 
"\xb5\x96\x54\x70\xc3\x83\xfd\x29\xfe\x7e\xae\xe7", + "\x6d\x52\xfe\xb2\x50\x9f\x7f\xbf", "\x6f\xdf\x29\x27\xe1\x69", + "\x41\xab\xff\x7b\x71\xcc", + "\x9b\x51\x74\x29\x7c\x03\xcf\x89\x02\xd1\xf7\x06\xfd\x00\x89\x02", 1, + 64, 48 }, + { 96, 256, 128, 16, + "\x55\x56\x81\x58\xd3\xa6\x48\x3f\x1f\x70\x21\xea\xb6\x9b\x70\x3f\x61" + "\x42\x51\xca\xdc\x1a\xf5\xd3\x4a\x37\x4f\xdb\xfc\x5a\xda\xc7", + "\x3c\x4e\x65\x4d\x66\x3f\xa4\x59\x6d\xc5\x5b\xb7", "", + "\xab\x85\xe9\xc1\x57\x17\x31", "\x5d\xfe\x34\x40\xdb\xb3\xc3", + "\xed\x7a\x43\x4e\x26\x02\xd3\x94\x28\x1e\x0a\xfa\x9f\xb7\xaa\x42", 1, + 0, 56 }, + { 96, 256, 128, 17, + "\xe3\xc0\x9e\x7f\xab\x1a\xef\xb5\x16\xda\x6a\x33\x02\x2a\x1d\xd4\xeb" + "\x27\x2c\x80\xd5\x40\xc5\xda\x52\xa7\x30\xf3\x4d\x84\x0d\x7f", + "\x58\x38\x93\x75\xc6\x9e\xe3\x98\xde\x94\x83\x96", + "\x84\xe4\x6b\xe8\xc0\x91\x90\x53", "\x4e\xe5\xcd\xa2\x0d\x42\x90", + "\x4b\xd4\x72\x12\x94\x1c\xe3", + "\x18\x5f\x14\x08\xee\x7f\xbf\x18\xf5\xab\xad\x6e\x22\x53\xa1\xba", 1, + 64, 56 }, + { 96, 256, 128, 18, + "\x51\xe4\xbf\x2b\xad\x92\xb7\xaf\xf1\xa4\xbc\x05\x55\x0b\xa8\x1d\xf4" + "\xb9\x6f\xab\xf4\x1c\x12\xc7\xb0\x0e\x60\xe4\x8d\xb7\xe1\x52", + "\x4f\x07\xaf\xed\xfd\xc3\xb6\xc2\x36\x18\x23\xd3", "", + "\xbe\x33\x08\xf7\x2a\x2c\x6a\xed", + "\x8e\x94\x39\xa5\x6e\xee\xc8\x17", + "\xfb\xe8\xa6\xed\x8f\xab\xb1\x93\x75\x39\xdd\x6c\x00\xe9\x00\x21", 1, + 0, 64 }, + { 96, 256, 128, 19, + "\x11\x31\xc1\x41\x85\x77\xa0\x54\xde\x7a\x4a\xc5\x51\x95\x0f\x1a\x05" + "\x3f\x9a\xe4\x6e\x5b\x75\xfe\x4a\xbd\x56\x08\xd7\xcd\xda\xdd", + "\xb4\xea\x66\x6e\xe1\x19\x56\x33\x66\x48\x4a\x78", + "\x66\xc0\xae\x70\x07\x6c\xb1\x4d", + "\xa4\xc9\xc2\x80\x1b\x71\xf7\xdf", + "\xb9\xb9\x10\x43\x3a\xf0\x52\xb0", + "\x45\x30\xf5\x1a\xee\xe0\x24\xe0\xa4\x45\xa6\x32\x8f\xa6\x7a\x18", 1, + 64, 64 }, + { 96, 256, 128, 20, + "\xe1\x09\x49\x67\xf8\x6d\x89\x3c\xdf\xe2\xe2\xe6\xd5\xc7\xee\x4d\xfe" + "\xf6\x7d\xa3\xc9\xc5\xd6\x4e\x6a\xd7\xc1\x57\x7d\xcb\x38\xc5", + "\x80\x92\xfc\x24\x5b\x33\x26\xcd\xdb\xd1\x42\x4c", "", + "\xc3\x7a\xa7\x91\xdd\xd6\xac\xcf\x91", + "\xd9\xd8\x97\xa9\xc1\xc5\xbb\x9f\x01", + "\x08\x5a\x43\x03\x73\x05\x8f\x1a\x12\xa0\xd5\x89\xfd\x5b\xe6\x8b", 1, + 0, 72 }, + { 96, 256, 128, 21, + "\x23\x6f\x9b\xae\xe4\xf9\xda\x15\xbe\xec\xa4\x0f\xf4\xaf\x7c\x76\x0f" + "\x25\x4a\x64\xbc\x3a\x3d\x7f\x4f\xad\x55\x7e\x61\xb6\x85\x86", + "\xf1\xca\x81\x33\x86\x29\x58\x7a\xcf\x93\x72\xbf", + "\x8c\x32\xf4\x7a\x38\x61\x52\xec", + "\xd7\xf2\x6d\x52\x52\xe1\x76\x5f\x5b", + "\x8f\xdb\x42\x9d\x47\x76\x1c\xbf\x8e", + "\x8e\xf6\x47\xed\x33\x4f\xde\xbb\xc2\xbe\xf8\x0b\xe0\x28\x84\xe0", 1, + 64, 72 }, + { 96, 256, 128, 22, + "\x4d\xe2\x07\xa3\xb7\x0c\x51\xe5\xf2\x30\x48\xee\xd5\xa5\xda\x9b\xb6" + "\x5e\x91\x7a\x69\xaa\x93\xe7\xc8\xb4\xa8\x15\xcd\x97\x24\xde", + "\x4c\x15\xa7\x1d\xc6\x79\x1a\x8c\x00\x5a\xd5\x02", "", + "\xf2\xc5\x4b\x6b\x5e\x49\x0d\xa1\x86\x59", + "\x70\x0d\x35\xad\xf5\x10\x0a\x22\xa1\xde", + "\x10\x2d\x99\x2f\xfa\xff\x59\x9b\x5b\xdd\xdd\xeb\x2d\xfb\x39\x9b", 1, + 0, 80 }, + { 96, 256, 128, 23, + "\x6d\x66\x7f\xd7\x9e\x5f\xb7\x25\xf5\x03\x43\xdc\xcc\x48\x63\x22\x7c" + "\x75\xee\x3f\x7a\x57\x84\x76\xe3\xe9\xf3\x25\x98\xd8\x15\x59", + "\x62\x20\x52\x7a\xba\x88\xe2\x7f\x76\x66\x58\xb2", + "\xe1\xe2\x7c\xcd\xdb\x3c\xb4\x07", + "\x0c\x8c\x5a\x25\x26\x81\xf2\xb5\xb4\xc0", + "\x04\xaa\xd6\x6c\x60\xe0\xbf\x8e\xbb\xa9", + "\xc1\x5f\x69\xa4\xd2\xae\xf9\x7d\x77\x48\x75\x6f\xf4\x9d\x89\x4b", 1, + 64, 80 }, + { 96, 256, 128, 24, + "\x8f\x4b\xd9\x4e\xf7\x3e\x75\xd1\xe0\x68\xc3\x0b\x37\xea\xd5\x76\xc5" + 
"\x34\x4e\x09\x3e\xce\x13\x30\xe9\x10\x1c\x82\xf7\x93\xcf\x05", + "\xec\x1e\x29\x67\xf0\xf6\x97\x9e\x5f\x5b\x07\xfb", "", + "\xb8\x98\x12\xb3\x4d\x9b\xce\xd4\xa0\xba\x07", + "\x1c\x3d\x53\xba\xaa\x36\xea\xa1\xd8\xec\x4d", + "\x4d\x94\xeb\xf9\x60\xf1\x24\x33\xbe\xc4\x3a\xa8\x6d\x7e\x6e\x6d", 1, + 0, 88 }, + { 96, 256, 128, 25, + "\x2a\xa3\xbc\x70\x33\x35\x1c\xac\x51\x36\x4c\xda\xf6\xff\xac\x2c\x20" + "\xf6\x40\x46\xe1\x55\x0a\x7b\x1c\x65\xf4\x18\x00\x59\x90\x19", + "\x28\xcc\xe5\x7a\x5d\xb2\xcd\x20\x63\x21\xe3\x40", + "\xa9\xbc\x35\x0e\xaf\x2e\x6e\x3d", + "\x83\x01\x68\x23\x12\x34\x84\xb5\x60\x95\xb0", + "\x1c\x85\x78\xf8\xe7\x52\x03\xd0\x33\x6a\x52", + "\x59\x10\xf7\xa9\xd5\xe4\xdf\x05\xd7\x24\x8b\xd7\xa8\xd6\x5e\x63", 1, + 64, 88 }, + { 96, 256, 128, 26, + "\x99\xb6\x2b\xd5\xaf\xbe\x3f\xb0\x15\xbd\xe9\x3f\x0a\xbf\x48\x39\x57" + "\xa1\xc3\xeb\x3c\xa5\x9c\xb5\x0b\x39\xf7\xf8\xa9\xcc\x51\xbe", + "\x9a\x59\xfc\xe2\x6d\xf0\x00\x5e\x07\x53\x86\x56", "", + "\x42\xba\xae\x59\x78\xfe\xaf\x5c\x36\x8d\x14\xe0", + "\xff\x7d\xc2\x03\xb2\x6c\x46\x7a\x6b\x50\xdb\x33", + "\x57\x8c\x0f\x27\x58\xc2\xe1\x4e\x36\xd4\xfc\x10\x6d\xcb\x29\xb4", 1, + 0, 96 }, + { 96, 256, 128, 27, + "\x85\xf3\x5b\x62\x82\xcf\xf4\x40\xbc\x10\x20\xc8\x13\x6f\xf2\x70\x31" + "\x11\x0f\xa6\x3e\xc1\x6f\x1e\x82\x51\x18\xb0\x06\xb9\x12\x57", + "\x58\xdb\xd4\xad\x2c\x4a\xd3\x5d\xd9\x06\xe9\xce", + "\xa5\x06\xe1\xa5\xc6\x90\x93\xf9", + "\xfd\xc8\x5b\x94\xa4\xb2\xa6\xb7\x59\xb1\xa0\xda", + "\x9f\x88\x16\xde\x09\x94\xe9\x38\xd9\xe5\x3f\x95", + "\xd0\x86\xfc\x6c\x9d\x8f\xa9\x15\xfd\x84\x23\xa7\xcf\x05\x07\x2f", 1, + 64, 96 }, + { 96, 256, 128, 28, + "\xfa\xf4\xbf\xe8\x01\x9a\x89\x1c\x74\x90\x1b\x17\xf4\xf4\x8c\xee\x5c" + "\xd0\x65\xd5\x5f\xde\xa6\x01\x18\xaa\xf6\xc4\x31\x9a\x0e\xa5", + "\xb7\x76\xc3\xfd\xdb\xa7\xc8\x13\x62\xce\x6e\x1b", "", + "\x8d\xad\xff\x8d\x60\xc8\xe8\x8f\x60\x4f\x27\x48\x33", + "\xe6\xb3\x3a\x74\xa4\xac\x44\x3b\xd9\x3f\x9c\x1b\x94", + "\x0c\x11\x51\x72\xbd\xb0\x2b\xba\xd3\x13\x0f\xff\x22\x79\x0d\x60", 1, + 0, 104 }, + { 96, 256, 128, 29, + "\x84\x10\x20\xd1\x60\x6e\xdc\xfc\x53\x6a\xbf\xb1\xa6\x38\xa7\xb9\x58" + "\xe2\x1e\xfc\x10\xc3\x86\xac\x45\xa1\x84\x93\x45\x0a\xfd\x5f", + "\x6d\x62\xf1\x59\x73\x1b\x14\x0e\xb1\x8c\xe0\x74", + "\x5a\x8e\x1c\x7a\xa3\x98\x10\xd5", + "\xd6\xaf\x13\x8f\x70\x1b\x80\x1e\x60\xc8\x5f\xfd\x5c", + "\xb0\xa7\x50\x0a\xca\x45\xbb\x15\xf0\x1e\xce\x43\x89", + "\x01\x60\xe8\x3a\xdb\xec\x7f\x6a\x2e\xe2\xff\x02\x15\xf9\xef\x00", 1, + 64, 104 }, + { 96, 256, 128, 30, + "\x47\x0f\x9c\xe3\xd2\x25\x0b\xd6\x0c\xbb\xef\xdb\x2e\x6a\x11\x78\xc0" + "\x12\x29\x9b\x55\x90\x63\x9c\x77\x97\xb6\x02\x4f\xa7\x03\xd8", + "\xa9\xea\x4d\x61\x9f\xe4\x05\xd0\x4c\xba\x7d\x7a", "", + "\x6c\xa6\x7d\xd0\x23\xfb\xa6\x50\x7b\x9f\x9a\x1f\x66\x7e", + "\xd3\x01\x7e\x0b\xb1\x70\x5b\x38\x0b\x34\xcc\x33\x34\x50", + "\x57\x08\xe7\x2c\xa2\xbd\x35\x4f\x48\x7f\x82\xf6\x7f\xbc\x3a\xcb", 1, + 0, 112 }, + { 96, 256, 128, 31, + "\xe4\xb9\x7e\x91\xe4\xc8\xe8\x5e\xb7\xce\x0a\x7f\x30\xbf\x8a\x0a\xbf" + "\x44\x68\x25\x1e\x4c\x63\x86\xc0\xe7\xaa\xcb\x8e\x87\x9a\xa8", + "\x0e\x23\xc9\x42\xa0\xc9\xfb\x52\x65\x86\xee\xad", + "\xea\xaa\xea\xb2\x69\x57\xf9\xa1", + "\xb8\x4b\x3f\x74\xcd\x23\x06\x4b\xb4\x26\xfe\x2c\xed\x2b", + "\x52\xe9\x67\x2b\x41\x6d\x84\xd9\x70\x33\x79\x60\x72\xd0", + "\xe8\x38\x39\xdc\x1f\xd9\xb8\xb9\xd1\x44\x4c\x40\xe4\x88\xd4\x93", 1, + 64, 112 }, + { 96, 256, 128, 32, + "\x67\x11\x96\x27\xbd\x98\x8e\xda\x90\x62\x19\xe0\x8c\x0d\x0d\x77\x9a" + "\x07\xd2\x08\xce\x8a\x4f\xe0\x70\x9a\xf7\x55\xee\xec\x6d\xcb", + 
"\x68\xab\x7f\xdb\xf6\x19\x01\xda\xd4\x61\xd2\x3c", "", + "\x51\xf8\xc1\xf7\x31\xea\x14\xac\xdb\x21\x0a\x6d\x97\x3e\x07", + "\x0b\x29\x63\x8e\x1f\xbd\xd6\xdf\x53\x97\x0b\xe2\x21\x00\x42", + "\x2a\x91\x34\x08\x7d\x67\xa4\x6e\x79\x17\x8d\x0a\x93\xf5\xe1\xd2", 1, + 0, 120 }, + { 96, 256, 128, 33, + "\xe6\xf1\x11\x8d\x41\xe4\xb4\x3f\xb5\x82\x21\xb7\xed\x79\x67\x38\x34" + "\xe0\xd8\xac\x5c\x4f\xa6\x0b\xbc\x8b\xc4\x89\x3a\x58\x89\x4d", + "\xd9\x5b\x32\x43\xaf\xae\xf7\x14\xc5\x03\x5b\x6a", + "\x64\x53\xa5\x33\x84\x63\x22\x12", + "\x97\x46\x9d\xa6\x67\xd6\x11\x0f\x9c\xbd\xa1\xd1\xa2\x06\x73", + "\x32\xdb\x66\xc4\xa3\x81\x9d\x81\x55\x74\x55\xe5\x98\x0f\xed", + "\xfe\xae\x30\xde\xc9\x4e\x6a\xd3\xa9\xee\xa0\x6a\x0d\x70\x39\x17", 1, + 64, 120 }, + { 96, 256, 128, 34, + "\x59\xd4\xea\xfb\x4d\xe0\xcf\xc7\xd3\xdb\x99\xa8\xf5\x4b\x15\xd7\xb3" + "\x9f\x0a\xcc\x8d\xa6\x97\x63\xb0\x19\xc1\x69\x9f\x87\x67\x4a", + "\x2f\xcb\x1b\x38\xa9\x9e\x71\xb8\x47\x40\xad\x9b", "", + "\x54\x9b\x36\x5a\xf9\x13\xf3\xb0\x81\x13\x1c\xcb\x6b\x82\x55\x88", + "\xe9\x11\x0e\x9f\x56\xab\x3c\xa4\x83\x50\x0c\xea\xba\xb6\x7a\x13", + "\x83\x6c\xca\xbf\x15\xa6\xa2\x2a\x51\xc1\x07\x1c\xfa\x68\xfa\x0c", 1, + 0, 128 }, + { 96, 256, 128, 35, + "\xb9\x07\xa4\x50\x75\x51\x3f\xe8\xa8\x01\x9e\xde\xe3\xf2\x59\x14\x87" + "\xb2\xa0\x30\xb0\x3c\x6e\x1d\x77\x1c\x86\x25\x71\xd2\xea\x1e", + "\x11\x8a\x69\x64\xc2\xd3\xe3\x80\x07\x1f\x52\x66", + "\x03\x45\x85\x62\x1a\xf8\xd7\xff", + "\x55\xa4\x65\x64\x4f\x5b\x65\x09\x28\xcb\xee\x7c\x06\x32\x14\xd6", + "\xe4\xb1\x13\xcb\x77\x59\x45\xf3\xd3\xa8\xae\x9e\xc1\x41\xc0\x0c", + "\x7c\x43\xf1\x6c\xe0\x96\xd0\xdc\x27\xc9\x58\x49\xdc\x38\x3b\x7d", 1, + 64, 128 }, + { 96, 256, 128, 36, + "\x3b\x24\x58\xd8\x17\x6e\x16\x21\xc0\xcc\x24\xc0\xc0\xe2\x4c\x1e\x80" + "\xd7\x2f\x7e\xe9\x14\x9a\x4b\x16\x61\x76\x62\x96\x16\xd0\x11", + "\x45\xaa\xa3\xe5\xd1\x6d\x2d\x42\xdc\x03\x44\x5d", "", + "\x3f\xf1\x51\x4b\x1c\x50\x39\x15\x91\x8f\x0c\x0c\x31\x09\x4a\x6e" + "\x1f", + "\x02\xcc\x3a\xcb\x5e\xe1\xfc\xdd\x12\xa0\x3b\xb8\x57\x97\x64\x74" + "\xd3", + "\xd8\x3b\x74\x63\xa2\xc3\x80\x0f\xe9\x58\xc2\x8e\xaa\x29\x08\x13", 1, + 0, 136 }, + { 96, 256, 128, 37, + "\xf6\x0c\x6a\x1b\x62\x57\x25\xf7\x6c\x70\x37\xb4\x8f\xe3\x57\x7f\xa7" + "\xf7\xb8\x7b\x1b\xd5\xa9\x82\x17\x6d\x18\x23\x06\xff\xb8\x70", + "\xf0\x38\x4f\xb8\x76\x12\x14\x10\x63\x3d\x99\x3d", + "\x9a\xaf\x29\x9e\xee\xa7\x8f\x79", + "\x63\x85\x8c\xa3\xe2\xce\x69\x88\x7b\x57\x8a\x3c\x16\x7b\x42\x1c" + "\x9c", + "\x35\x76\x64\x88\xd2\xbc\x7c\x2b\x8d\x17\xcb\xbb\x9a\xbf\xad\x9e" + "\x6d", + "\x1f\x39\x1e\x65\x7b\x27\x38\xdd\xa0\x84\x48\xcb\xa2\x81\x1c\xeb", 1, + 64, 136 }, + { 96, 256, 128, 38, + "\x37\xce\xb5\x74\xcc\xb0\xb7\x01\xdd\x11\x36\x93\x88\xca\x27\x10\x17" + "\x32\x33\x9f\x49\xd8\xd9\x08\xac\xe4\xb2\x3a\xf0\xb7\xce\x89", + "\x37\x27\x0b\x36\x8f\x6b\x1e\x3e\x2c\xa5\x17\x44", "", + "\xf2\x69\x91\x53\x72\x57\x37\x81\x51\xf4\x77\x6a\xad\x28\xae\x8b\xd1" + "\x6b", + "\xb6\x21\xd7\x6a\x8d\xac\xff\x00\xb3\xf8\x40\xcd\xf2\x6c\x89\x4c\xc5" + "\xd1", + "\xe0\xa2\x17\x16\xed\x94\xc0\x38\x2f\xa9\xb0\x90\x3d\x15\xbb\x68", 1, + 0, 144 }, + { 96, 256, 128, 39, + "\x68\x88\x83\x61\x91\x9b\xc1\x06\x22\xf4\x5d\xf1\x68\xe5\xf6\xa0\x3b" + "\xd8\xe8\x84\xc0\x61\x1b\xea\x2f\x34\xc1\x88\x2e\xd9\x83\x2b", + "\xbf\xd6\xff\x40\xf2\xdf\x8c\xa7\x84\x59\x80\xcc", + "\xb8\x37\x34\x38\xdd\xb2\xd6\xc3", + "\xff\x97\xf2\xee\xfb\x34\x01\xac\x31\xfc\x8d\xc1\x59\x0d\x1a\x92\xcb" + "\xc1", + "\xe0\xa7\x45\x18\x6c\x1a\x7b\x14\x7f\x74\xfa\xff\x2a\x71\x5d\xf5\xc1" + "\x9d", + 
"\x91\x7b\xaf\x70\x3e\x35\x5d\x4d\x95\x0e\x6c\x05\xfe\x8f\x34\x9f", 1, + 64, 144 }, + { 96, 256, 128, 40, + "\x1b\x35\xb8\x56\xb5\xa8\x6d\x34\x03\xd2\x8f\xc2\x10\x3a\x63\x1d\x42" + "\xde\xca\x51\x75\xcd\xb0\x66\x9a\x5e\x5d\x90\xb2\xca\xaf\xc5", + "\x23\x43\xde\x88\xbe\x6c\x71\x96\xd3\x3b\x86\x94", "", + "\x21\xef\x18\x5c\x3a\xe9\xa9\x6f\xa5\xeb\x47\x38\x78\xf4\xd0\xb2\x42" + "\x78\x1d", + "\xd6\xe0\xed\x54\xfc\xce\xf3\x0b\xd6\x05\xd7\x2d\xa3\x32\x0e\x24\x9a" + "\x9c\xb5", + "\xc6\x8b\xc6\x72\x4e\xc8\x03\xc4\x39\x84\xce\x42\xf6\xbd\x09\xff", 1, + 0, 152 }, + { 96, 256, 128, 41, + "\xd6\x48\x4e\x39\x73\xf6\xbe\x8c\x83\xed\x32\x08\xd5\xbe\x5c\xfa\x06" + "\xfd\xa7\x2f\xbf\xdc\x5b\x19\xd0\x9b\xe3\xf4\xe4\xeb\xa2\x9d", + "\x1a\xf1\xd9\x0e\x87\x7e\x11\xa4\x96\xef\xa3\xdf", + "\xcc\x4e\xfd\x83\x64\xfb\x11\x4a", + "\x73\x35\xab\x04\xb0\x3e\x70\x61\x09\xec\x3e\xe8\x35\xdb\x9a\x24\x6e" + "\xa0\xad", + "\x29\xe5\x4d\x60\x82\x37\xc3\xc3\x60\x9d\xba\x16\xe6\xed\xf4\x38\x42" + "\xd7\x2f", + "\xd3\x36\x5f\xdc\xd5\x06\xaa\xaa\x53\x68\x66\x1e\x80\xe9\xd9\x9b", 1, + 64, 152 }, + { 96, 256, 128, 42, + "\x42\x2a\xdd\x37\x84\x9d\x6e\x4c\x3d\xfd\x80\x20\xdc\x6a\x07\xe8\xa2" + "\x49\x78\x8f\x3d\x6a\x83\xb9\xcb\x4d\x80\x23\x62\xc9\x75\x42", + "\x1e\x7e\x67\xbe\x94\x8d\xe7\x35\x2f\xfd\xb7\x27", "", + "\xd7\xf5\xe6\x11\xdd\x3a\x27\x50\xfb\x84\x3f\xc1\xb6\xb9\x30\x87\x31" + "\x0d\xc8\x7d", + "\x7f\xe6\x06\x65\x2d\x85\x8f\x59\x5e\xc2\xe7\x06\x75\x4f\xa3\xd9\x33" + "\xfc\xc8\x34", + "\x78\xd5\x92\x35\xaa\x5d\x03\xa4\xc3\x25\x90\xe5\x90\xc0\x4d\x22", 1, + 0, 160 }, + { 96, 256, 128, 43, + "\xcd\xcc\xfe\x3f\x46\xd7\x82\xef\x47\xdf\x4e\x72\xf0\xc0\x2d\x9c\x7f" + "\x77\x4d\xef\x97\x0d\x23\x48\x6f\x11\xa5\x7f\x54\x24\x7f\x17", + "\x37\x61\x87\x89\x46\x05\xa8\xd4\x5e\x30\xde\x51", + "\x95\x68\x46\xa2\x09\xe0\x87\xed", + "\xe2\x8e\x0e\x9f\x9d\x22\x46\x3a\xc0\xe4\x26\x39\xb5\x30\xf4\x21\x02" + "\xfd\xed\x75", + "\x14\xf7\x07\xc4\x46\x98\x8a\x49\x03\x77\x5e\xc7\xac\xec\x6d\xa1\x14" + "\xd4\x31\x12", + "\x98\x7d\x4b\x14\x7c\x49\x0d\x43\xd3\x76\xa1\x98\xca\xb3\x83\xf0", 1, + 64, 160 }, + { 96, 256, 128, 44, + "\xe7\x9d\xfc\x6d\x2f\xc4\x65\xb8\x43\x9e\x1c\x5b\xac\xcb\x5d\x8e\xf2" + "\x85\x38\x99\xfc\x19\x75\x3b\x39\x7e\x6c\x25\xb3\x5e\x97\x7e", + "\xf9\xd6\x32\x0d\x7c\xe5\x1d\x8e\xd0\x67\x7d\x3a", "", + "\x4f\x54\x3e\x79\x38\xd1\xb8\x78\xda\xca\xee\xc8\x1d\xce\x48\x99\x97" + "\x48\x16\x81\x3b", + "\x10\x03\xf1\x3e\xa1\x32\x9c\xbb\x18\x73\x16\xf6\x4c\x3f\xf3\xa8\x7c" + "\xf5\xb9\x66\x61", + "\xd2\x32\x3a\xd6\x25\x09\x4b\xec\x84\x79\x0d\x79\x58\xd5\x58\x3f", 1, + 0, 168 }, + { 96, 256, 128, 45, + "\x1d\x7b\x8f\x1d\x96\xa1\x42\x49\x23\xae\xf8\xa9\x84\x86\x9d\x4a\x77" + "\x7a\x11\x09\x90\xba\x46\x56\x27\xac\xf8\x03\x96\xc7\xf3\x76", + "\x50\xba\x19\x62\xcd\xc3\x2a\x5a\x2d\x36\xe6\x40", + "\x09\x30\x53\xe2\x02\x61\xda\xab", + "\x5d\x3e\xfd\x57\x67\xf3\xc1\x2e\xfd\x08\xaf\x9a\x44\xe0\x28\xae\x68" + "\xc9\xef\xf8\x43", + "\x2d\x48\xb0\x83\x4e\x9f\xfe\x30\x46\x10\x3e\xf7\xa2\x14\xf0\x2e\x8e" + "\x4d\x33\x36\x0e", + "\xd5\x33\xad\x08\x9b\xe2\x29\xea\x60\x6e\xc0\xf3\xfa\x22\xeb\x33", 1, + 64, 168 }, + { 96, 256, 128, 46, + "\xdd\x43\x3e\x28\xcf\xbc\xb5\xde\x4a\xb3\x6a\x02\xbf\x38\x68\x6d\x83" + "\x20\x87\x71\xa0\xe6\x3d\xcd\x08\xb4\xdf\x1a\x07\xac\x47\xa1", + "\xc9\xcc\x0a\x1a\xfc\x38\xec\x6c\x30\xc3\x8c\x68", "", + "\x8a\x3e\x17\xab\xa9\x60\x6d\xd4\x9e\x3b\x1a\x4d\x9e\x5e\x42\xf1\x74" + "\x23\x73\x63\x24\x89", + "\xe9\x91\x7f\xf3\xe6\x4b\xbe\x17\x83\x57\x93\x75\xe7\x5e\xa8\x23\x97" + "\x6b\x35\x53\x99\x49", + 
"\x07\x4a\x89\x06\x69\xb2\x51\x05\x43\x4c\x75\xbe\xed\x32\x48\xdb", 1, + 0, 176 }, + { 96, 256, 128, 47, + "\xa6\x09\x24\x10\x1b\x42\xac\x24\x15\x4a\x88\xde\x42\x14\x2b\x23\x34" + "\xcf\x59\x91\x76\xca\xf4\xd1\x22\x6f\x71\x2d\xd9\x17\x29\x30", + "\x8b\xa7\x76\x44\xb0\x8d\x65\xd5\xe9\xf3\x19\x42", + "\xb2\xa4\xe1\x2a\x19\xa6\x1c\x75", + "\xc9\x49\x95\x7e\x66\x43\x9d\xee\xe4\xb2\xac\x1d\x4a\x6c\x98\xa6\xc5" + "\x27\xb9\x0f\x52\xab", + "\xdb\x4c\x70\x05\x13\x81\x89\x72\xb0\xdc\x0e\x53\x1b\x1c\x28\x1c\xa0" + "\x3e\x40\xc6\x0d\xea", + "\x63\xf4\x47\x8b\xba\x2a\xf4\x69\xa7\xa4\xdc\x3b\x4f\x14\x13\x60", 1, + 64, 176 }, + { 96, 256, 128, 48, + "\x1a\xa4\x20\x27\x83\x69\x65\xb1\xe6\x08\x6f\xa1\x37\xf9\xcf\x7f\x1f" + "\xf4\x86\x76\x69\x68\x29\xbd\x28\x1f\xf8\x1c\x8e\xa0\xa4\xa9", + "\x4b\x3d\xca\x84\xec\xc4\x07\xf4\x24\xf2\x81\xa9", "", + "\x37\x25\x2a\x3e\xb5\xc8\x96\x0f\x05\x67\xe5\x03\xa9\x03\x57\x83\xb3" + "\xd0\xa1\x9a\x4b\x9a\x47", + "\xb5\xf1\x46\x17\x49\x1f\xc9\x23\xb6\x83\xe2\xcc\x95\x62\xd0\x43\xdd" + "\x59\x86\xb9\x7d\xbd\xbd", + "\x97\x2c\xe5\x47\x13\xc0\x5c\x4b\xb4\xd0\x88\xc0\xa3\x0c\xac\xd3", 1, + 0, 184 }, + { 96, 256, 128, 49, + "\x5d\x40\xdb\x0c\xc1\x8e\xf2\xe4\x28\x15\xd3\xb6\x24\x5a\x46\x6a\x0b" + "\x30\xa0\xf9\x3e\x31\x8a\xc1\x0e\xdd\xe3\xbf\x8a\xd9\x81\x60", + "\xac\xad\x61\x80\x39\xb3\x17\x47\x0d\x21\x62\x1b", + "\x41\x30\x36\x41\x1a\xf7\x57\x45", + "\x95\x9d\xde\x1e\xf3\x12\x9b\x27\x70\x2c\x55\x88\x49\xe4\x66\xf2\xba" + "\xca\x1a\x45\xbd\xf4\xb2", + "\xb7\xca\x38\x79\xf9\x51\x40\xbf\x6a\x97\xb3\x21\x22\x18\xb7\xbf\x86" + "\x4a\x51\xe5\xbb\x0b\x3e", + "\xfe\x55\x8f\xb5\x70\x14\x54\x70\xea\x69\x3e\xb7\x6e\xb7\x31\x71", 1, + 64, 184 }, + { 96, 256, 128, 50, + "\x02\x12\xa8\xde\x50\x07\xed\x87\xb3\x3f\x1a\x70\x90\xb6\x11\x4f\x9e" + "\x08\xce\xfd\x96\x07\xf2\xc2\x76\xbd\xcf\xdb\xc5\xce\x9c\xd7", + "\xe6\xb1\xad\xf2\xfd\x58\xa8\x76\x2c\x65\xf3\x1b", "", + "\x10\xf1\xec\xf9\xc6\x05\x84\x66\x5d\x9a\xe5\xef\xe2\x79\xe7\xf7\x37" + "\x7e\xea\x69\x16\xd2\xb1\x11", + "\x42\xf2\x6c\x56\xcb\x4b\xe2\x1d\x9d\x8d\x0c\x80\xfc\x99\xdd\xe0\x0d" + "\x75\xf3\x80\x74\xbf\xe7\x64", + "\x54\xaa\x7e\x13\xd4\x8f\xff\x7d\x75\x57\x03\x94\x57\x04\x0a\x3a", 1, + 0, 192 }, + { 96, 256, 128, 51, + "\xc5\xbc\x09\x56\x56\x46\xe7\xed\xda\x95\x4f\x1f\x73\x92\x23\xda\xda" + "\x20\xb9\x5c\x44\xab\x03\x3d\x0f\xae\x4b\x02\x83\xd1\x8b\xe3", + "\x6b\x28\x2e\xbe\xcc\x54\x1b\xcd\x78\x34\xed\x55", + "\x3e\x8b\xc5\xad\xe1\x82\xff\x08", + "\x92\x22\xf9\x01\x8e\x54\xfd\x6d\xe1\x20\x08\x06\xa9\xee\x8e\x4c\xc9" + "\x04\xd2\x9f\x25\xcb\xa1\x93", + "\x12\x30\x32\x43\x7b\x4b\xfd\x69\x20\xe8\xf7\xe7\xe0\x08\x7a\xe4\x88" + "\x9e\xbe\x7a\x0a\xd0\xe9\x00", + "\x3c\xf6\x8f\x17\x95\x50\xda\x63\xd3\xb9\x6c\x2d\x55\x41\x18\x65", 1, + 64, 192 }, + { 96, 256, 128, 52, + "\x94\x60\xb3\xc4\x4e\xd8\x6e\x70\xf3\xbd\xa6\x63\x85\xe1\xca\x10\xb0" + "\xc1\x67\x7e\xf4\xf1\x36\x05\x32\x83\x0d\x17\x53\x5f\x99\x6f", + "\xab\xfa\xf4\x2e\x0d\xba\x88\x4e\xfc\xf0\x78\x23", "", + "\x5c\x5c\xce\x88\x1b\x93\xfb\x7a\x1b\x79\x39\xaf\x1f\xfc\x5f\x84\xd3" + "\x28\x0a\xda\x77\x8c\xca\x09\x53", + "\x1d\x21\x8c\x9f\x1f\x9f\x02\xf2\x48\xa6\xf9\x76\xa7\x55\x70\x57\xf3" + "\x7d\x93\x93\xd9\xf2\x13\xc1\xf3", + "\xbc\x88\x34\x4c\x6f\xdc\x89\x8f\xee\xd3\x94\xfb\x28\x51\x13\x16", 1, + 0, 200 }, + { 96, 256, 128, 53, + "\xc1\x11\xd6\xd5\xd7\x8a\x07\x1b\x15\xab\x37\xcc\x8c\x38\x19\x19\x93" + "\x87\xab\x7c\x19\x33\xaa\x97\xb1\x48\x9f\x65\x84\xba\x8e\x2a", + "\x85\xf1\x8a\xd8\xff\x72\xca\xfe\xe2\x45\x2a\xb8", + "\x84\xcd\xff\x93\x93\x91\xc0\x22", + 
"\x69\x89\xc6\x46\xa1\x0b\x7c\x76\xf4\xd9\xf7\xd5\x74\xda\x40\xe1\x52" + "\x01\x3c\xf0\xdd\x78\xf5\xaa\x8a", + "\x97\x15\xd3\x44\xe8\xd3\xf3\xa3\xea\xa9\x8a\x9c\xea\x57\xc0\xcd\x71" + "\x7c\x6e\xf5\x07\x60\x27\xc9\xec", + "\x30\x56\xff\x5e\xe0\xaa\x86\x36\xbb\x63\x99\x84\xed\xb5\x23\x6b", 1, + 64, 200 }, + { 96, 256, 128, 54, + "\x8a\x1b\x1e\x69\x9a\x0c\x4a\x3e\x61\x0b\x10\x90\x2d\xae\xda\xb1\xbf" + "\x1e\xa0\xd5\x05\xc4\x7d\x78\x42\xcb\xce\xe0\xd3\xb1\xb6\xe6", + "\xa6\xf9\xa8\xd3\x35\xfa\x84\xc3\xb2\x7d\xcd\x2a", "", + "\xee\x6a\x15\xfc\x18\x31\x08\xf0\x87\x7e\x7f\x2b\x8a\x96\x15\xf4\xb3" + "\xfc\x36\xe1\xc8\x34\x40\xf6\x6a\xad", + "\x90\x89\xbb\xdb\x8b\xcf\xd1\x24\xe2\x27\xbf\x75\xc4\xbf\xe1\xcb\xa2" + "\x00\x4a\x27\x4f\xc3\x1a\xa3\x23\x58", + "\xfd\x2e\x21\xc6\x4a\x01\x96\x21\xc6\x85\x94\x82\x6c\xd7\xb1\xcd", 1, + 0, 208 }, + { 96, 256, 128, 55, + "\x74\xb3\x84\xe6\xe0\x13\xec\x41\x72\xed\x7a\x28\xa1\x0f\xb9\xbb\x79" + "\xb4\xbe\x2a\x24\xf6\x99\x9e\x3d\x3c\xaa\x28\xe6\x4a\x86\x56", + "\xeb\xc1\x9f\xc9\xec\xb2\x33\x99\x08\xea\x38\x36", + "\x85\x07\x3f\x2e\xdc\x13\xd3\xa1", + "\x3a\xa9\xf7\x37\x2f\x05\x6e\x5a\x07\x29\x75\x2d\x9a\x37\x13\x2d\x6d" + "\xd0\x7c\x56\x79\x2e\x1c\x75\x82\xa9", + "\x79\x6f\xfb\x70\xab\x43\xe7\xfa\x79\xf9\x55\x83\xe3\x84\x52\x47\x27" + "\xbb\x3e\x47\xfc\x45\xb9\x69\xf7\x14", + "\xc3\x32\x2b\x44\x45\xde\x5f\x3c\x9f\x18\xdc\xc8\x47\xcc\x94\xc3", 1, + 64, 208 }, + { 96, 256, 128, 56, + "\x77\xd8\x24\x79\x5d\x20\x29\xf0\xeb\x0e\x0b\xaa\xb5\xcf\xeb\x32\xf7" + "\xe9\x34\x74\x91\x3a\x7f\x95\xc7\x37\xa6\x67\xa3\xc3\x33\x14", + "\xf3\x30\x74\x30\xf4\x92\xd2\xb8\xa7\x2d\x3a\x81", "", + "\x0c\x41\x79\xa4\x97\xd8\xfd\xd7\x27\x96\xfb\x72\x56\x92\xb8\x05\xd6" + "\x3b\x7c\x71\x83\x59\xcf\x10\x51\x8a\xee", + "\x49\xc8\x1d\x17\xd6\x7d\x7b\xa9\x95\x4f\x49\x7d\x0b\x0d\xdc\x21\xf3" + "\xf8\x39\xc9\xd2\xcc\x19\x8d\x30\xbc\x2c", + "\x50\x00\x98\x99\xe5\xb2\xa9\x72\x6c\x8f\x35\x56\xca\xdf\xbe\x84", 1, + 0, 216 }, + { 96, 256, 128, 57, + "\xbe\xc5\xea\xc6\x8f\x89\x39\x51\xcb\xd7\xd1\xec\xd3\xee\x66\x11\x13" + "\x0d\xd9\xc3\xf8\x0c\xdd\xf9\x51\x11\xd0\x7d\x5e\xdd\x76\xd1", + "\x34\x2a\xda\x4f\x0c\x11\x51\x24\xb2\x22\xdf\x80", + "\x73\x36\x5f\x6d\x80\xed\xb1\xd8", + "\x48\x14\x33\xd8\xb1\xcd\x38\xaf\x4a\x75\x0e\x13\xa6\x4b\x7a\x4e\x85" + "\x07\x68\x2b\x35\x17\x59\x59\x38\xa2\x0e", + "\x4c\x12\x9f\xc1\x3c\xbd\xd9\xd3\xfe\x81\xac\x75\x5b\xf4\xfb\xea\x2f" + "\xdd\x7e\x0a\xca\x05\x05\xa6\xee\x96\x37", + "\x9c\xed\xe1\xd3\x0a\x03\xdb\x5d\x55\x26\x5d\x36\x48\xbc\x40\xd4", 1, + 64, 216 }, + { 96, 256, 128, 58, + "\xa5\x9c\x1e\x13\x06\x4d\xf8\xf2\xb8\xdf\x77\xa4\x92\xb0\xca\x2e\xae" + "\x92\x1b\x52\xa8\x4b\x30\x5a\x3a\x9a\x51\x40\x8a\x9e\xcb\x69", + "\x95\x44\xd4\x1e\xce\x0c\x92\xef\x01\xcf\xac\x2d", "", + "\x1c\x35\xb8\x98\x82\x1b\xa5\x5c\x26\x17\xc2\x5d\xf9\xe6\xdf\x2a\x80" + "\x02\xb3\x84\x90\x21\x86\xcd\x69\xdf\xd2\x0e", + "\xa6\xfa\x8f\x57\xdd\xc8\x1d\x60\x99\xf6\x67\xdd\x62\x40\x2b\x6a\x5d" + "\x5b\x7d\x05\xa3\x29\x29\x80\x29\x11\x31\x69", + "\xbb\x24\xe3\x8b\x31\xdb\xbc\x3e\x57\x5b\x9e\x3e\xe0\x76\xaf\x2a", 1, + 0, 224 }, + { 96, 256, 128, 59, + "\x08\x4b\x5d\x73\x65\xf1\xa8\xfe\xc6\x36\x59\x39\xed\x74\x1e\x6e\xa5" + "\x89\x3e\x03\x18\xd8\x2a\xb4\x75\x00\xa9\x7d\x77\xaa\xa0\x41", + "\x82\x9f\x00\x5e\x98\x0f\x0a\x6e\x2f\x98\x3e\xaa", + "\x77\x0f\x6e\x6e\x89\xa3\xfe\x8e", + "\x75\x10\x01\x6e\xfa\xdc\x38\x5a\x71\xed\x68\x9c\xeb\x59\x0c\x8e\xa9" + "\xcc\x1e\x81\xb7\x93\x33\x8b\xdd\xf5\xf1\x0c", + "\xfd\x42\xcb\x5c\xf8\x94\xf8\x79\xe3\xcf\x75\x16\x62\xaa\xa5\x8a\x22" + 
"\x88\xcc\x53\x54\x88\x02\xbe\xca\xf4\x23\x59", + "\x18\x83\x29\x43\x8a\xfe\x1c\xd7\x22\x5d\x04\x78\xaa\x90\xc7\x73", 1, + 64, 224 }, + { 96, 256, 128, 60, + "\x5a\x7f\x85\x0a\x1d\x9a\xaf\xa7\x7d\x59\xae\x1b\x73\x19\x65\xe8\xaa" + "\xec\x63\x52\x28\x0f\xc7\x6a\x7b\x5e\x23\xef\x36\x10\xcf\xe4", + "\x49\x46\xa0\xd6\xad\xea\x93\xb8\x2d\x43\x32\xe5", "", + "\x3c\x16\x1d\x79\x1f\x62\x4f\xb0\x38\x8e\x80\x8f\x0f\x69\xed\x79\x0d" + "\xbe\x4c\xbd\x08\x9e\xba\xc4\x66\x27\xbc\xf0\x1d", + "\x40\x23\x02\xb5\x61\x40\xc4\xdc\xc3\x97\x74\x73\x2c\x55\x88\x3d\xe1" + "\x24\xce\x4b\xf0\xa0\x26\x1c\xfa\x15\x69\xe2\xcf", + "\xe8\x30\xbf\xe9\x33\xa9\x67\x86\xcf\xf2\xdd\x72\xb8\x2c\x4b\xd5", 1, + 0, 232 }, + { 96, 256, 128, 61, + "\xe6\xd5\xa4\x24\x6f\x6f\x05\x61\x8b\x59\xc8\xf9\xec\x3a\xc8\x06\x8c" + "\xc0\xd3\xf3\x51\xc5\x71\xaa\x52\xb0\x9c\xb2\x51\xf9\xc2\xf6", + "\x2f\x90\xa6\x5e\x9e\x48\x72\x5d\xe6\xff\xc7\x27", + "\xf2\x41\x53\x77\xad\x28\x3f\xd8", + "\x96\x4f\xc9\xe0\xe8\x35\x59\x47\xaa\x1c\x2c\xaa\xdd\x7b\x3d\xbe\xf8" + "\x2a\x10\x24\xe6\x23\x60\x6f\xac\x43\x6e\xf5\x73", + "\xd0\x52\x93\x2b\xad\x6e\x6c\x4f\x83\x5f\x02\x01\x9e\x52\xd7\xff\x80" + "\x7d\xc2\xa5\xaa\xc2\x04\x08\x83\xc7\x9d\xd3\xd5", + "\x65\x5f\x93\x39\x6b\x4d\x75\x5d\xc4\x47\x57\x21\x66\x5f\xed\x91", 1, + 64, 232 }, + { 96, 256, 128, 62, + "\x09\xe8\x22\x12\x3a\xdb\xb1\xed\x89\xb7\x9a\x58\x61\x9c\x64\x85\x39" + "\x92\xf8\x37\x1d\x46\x33\x87\x12\xf6\xc9\x1a\xb1\x1a\x68\xbb", + "\xa7\x97\x20\x5a\x6c\xac\xdd\x7e\x47\xa4\x78\x9d", "", + "\x80\xb7\x1b\xbe\x83\x36\x29\x84\x1b\xd3\xae\xae\xb9\xdb\x61\x23\xe5" + "\x1d\x36\x7b\x43\x6f\xe9\xd2\xd3\x45\x4b\x62\xcf\xad", + "\x83\xf5\xc7\x73\x96\xca\xbd\x28\xdf\xcc\x00\x2c\xba\x07\x56\xd4\xea" + "\x54\x55\xe0\x26\x1d\x84\x7d\x57\x08\xaa\xc2\x1e\x8d", + "\x70\x5a\x05\x82\x0a\x21\xf3\x81\xd2\x44\xd4\x0e\x58\xd2\xf1\x6b", 1, + 0, 240 }, + { 96, 256, 128, 63, + "\x62\x57\x35\xfe\x7f\x8f\xc8\x1b\x0c\x1e\xdc\x3d\x08\xa7\x8b\x41\x26" + "\x8f\x87\xa3\xc6\x84\x88\xb6\x74\x22\x26\x30\xc1\xd5\x87\xa5", + "\x9d\x8c\xdf\x28\x9d\xdd\xd0\x9a\xfd\xc1\xb0\x2f", + "\x20\x0a\x9c\x95\x94\x6f\xf0\x5c", + "\x67\xae\x18\x82\xd0\xb1\xc1\xb2\x48\x5b\xec\x98\x11\x5e\xcf\x53\xb9" + "\xb4\x38\xde\xb1\xd0\x40\x05\x31\x70\x50\x38\x87\x3a", + "\x20\x9b\x75\x39\x38\x5c\x8b\x19\xec\xd0\xfd\x8b\x50\x11\xb2\x99\x6e" + "\x31\x6f\x19\x42\x06\x4e\x68\xed\xfa\x36\x3a\xcb\xcd", + "\xfa\x2f\x45\x4b\x9f\xa2\x60\x8f\x78\x0f\x7c\x6f\x9b\x78\x0f\xe1", 1, + 64, 240 }, + { 96, 256, 128, 64, + "\x2e\xb5\x1c\x46\x9a\xa8\xeb\x9e\x6c\x54\xa8\x34\x9b\xae\x50\xa2\x0f" + "\x0e\x38\x27\x11\xbb\xa1\x15\x2c\x42\x4f\x03\xb6\x67\x1d\x71", + "\x04\xa9\xbe\x03\x50\x8a\x5f\x31\x37\x1a\x6f\xd2", "", + "\xb0\x53\x99\x92\x86\xa2\x82\x4f\x42\xcc\x8c\x20\x3a\xb2\x4e\x2c\x97" + "\xa6\x85\xad\xcc\x2a\xd3\x26\x62\x55\x8e\x55\xa5\xc7\x29", + "\x45\xc7\xd6\xb5\x3a\xca\xd4\xab\xb6\x88\x76\xa6\xe9\x6a\x48\xfb\x59" + "\x52\x4d\x2c\x92\xc9\xd8\xa1\x89\xc9\xfd\x2d\xb9\x17\x46", + "\x56\x6d\x3c\xa1\x0e\x31\x1b\x69\x5f\x3e\xae\x15\x51\x65\x24\x93", 1, + 0, 248 }, + { 96, 256, 128, 65, + "\x7f\x5b\x74\xc0\x7e\xd1\xb4\x0f\xd1\x43\x58\xfe\x2f\xf2\xa7\x40\xc1" + "\x16\xc7\x70\x65\x10\xe6\xa4\x37\xf1\x9e\xa4\x99\x11\xce\xc4", + "\x47\x0a\x33\x9e\xcb\x32\x19\xb8\xb8\x1a\x1f\x8b", + "\x37\x46\x18\xa0\x6e\xa9\x8a\x48", + "\xf4\x52\x06\xab\xc2\x55\x52\xb2\xab\xc9\xab\x7f\xa2\x43\x03\x5f\xed" + "\xaa\xdd\xc3\xb2\x29\x39\x56\xf1\xea\x6e\x71\x56\xe7\xeb", + "\x46\xa8\x0c\x41\x87\x02\x47\x20\x08\x46\x27\x58\x00\x80\xdd\xe5\xa3" + "\xf4\xa1\x10\x93\xa7\x07\x6e\xd6\xf3\xd3\x26\xbc\x7b\x70", + 
"\x53\x4d\x4a\xa2\x83\x5a\x52\xe7\x2d\x14\xdf\x0e\x4f\x47\xf2\x5f", 1, + 64, 248 }, + { 96, 256, 128, 66, + "\xe1\x73\x1d\x58\x54\xe1\xb7\x0c\xb3\xff\xe8\xb7\x86\xa2\xb3\xeb\xf0" + "\x99\x43\x70\x95\x47\x57\xb9\xdc\x8c\x7b\xc5\x35\x46\x34\xa3", + "\x72\xcf\xd9\x0e\xf3\x02\x6c\xa2\x2b\x7e\x6e\x6a", "", + "\xb9\xc5\x54\xcb\xc3\x6a\xc1\x8a\xe8\x97\xdf\x7b\xee\xca\xc1\xdb\xeb" + "\x4e\xaf\xa1\x56\xbb\x60\xce\x2e\x5d\x48\xf0\x57\x15\xe6\x78", + "\xea\x29\xaf\xa4\x9d\x36\xe8\x76\x0f\x5f\xe1\x97\x23\xb9\x81\x1e\xd5" + "\xd5\x19\x93\x4a\x44\x0f\x50\x81\xac\x43\x0b\x95\x3b\x0e\x21", + "\x22\x25\x41\xaf\x46\xb8\x65\x33\xc6\xb6\x8d\x2f\xf1\x08\xa7\xea", 1, + 0, 256 }, + { 96, 256, 128, 67, + "\x27\xd8\x60\x63\x1b\x04\x85\xa4\x10\x70\x2f\xea\x61\xbc\x87\x3f\x34" + "\x42\x26\x0c\xad\xed\x4a\xbd\xe2\x5b\x78\x6a\x2d\x97\xf1\x45", + "\x26\x28\x80\xd4\x75\xf3\xda\xc5\x34\x0d\xd1\xb8", + "\x23\x33\xe5\xce\x0f\x93\xb0\x59", + "\x6b\x26\x04\x99\x6c\xd3\x0c\x14\xa1\x3a\x52\x57\xed\x6c\xff\xd3\xbc" + "\x5e\x29\xd6\xb9\x7e\xb1\x79\x9e\xb3\x35\xe2\x81\xea\x45\x1e", + "\x6d\xad\x63\x78\x97\x54\x4d\x8b\xf6\xbe\x95\x07\xed\x4d\x1b\xb2\xe9" + "\x54\xbc\x42\x7e\x5d\xe7\x29\xda\xf5\x07\x62\x84\x6f\xf2\xf4", + "\x7b\x99\x7d\x93\xc9\x82\x18\x9d\x70\x95\xdc\x79\x4c\x74\x62\x32", 1, + 64, 256 }, + { 96, 256, 128, 68, + "\x51\x55\xde\xe9\xaa\xde\x1c\xc6\x1e\xe7\xe3\xf9\x26\x60\xf7\x59\x0f" + "\x5e\x5b\xa8\x2f\x1b\x59\xb8\x50\xe3\xfa\x45\x3d\x2f\xa6\xb3", + "\xc2\x6c\x4b\x3b\xfd\xb9\x7e\xe6\xb0\xf6\x3c\xa1", "", + "\x27\x34\xe0\x8e\xff\x8f\x5c\x4f\x84\xfa\x0c\x20\x7f\x49\xc7\xfd\x78" + "\xaf\x1a\xd5\x12\x3f\xf8\x1f\x83\xf5\x00\xed\xf4\xed\xa0\x9e\xdf", + "\xf5\x98\x2b\x60\x1c\x7a\x18\xfc\x72\xa6\x5b\x21\x8c\x44\x97\x4d\xc5" + "\x64\xd8\x31\x4c\xbe\x6f\x87\xfc\xf6\xc6\xcf\xbe\x61\x8b\x34\xb1", + "\xc4\x36\x32\xf5\x57\x60\xb5\xd1\xed\x37\x55\x6a\x94\xd0\x49\xb5", 1, + 0, 264 }, + { 96, 256, 128, 69, + "\x57\x3f\x08\xeb\xbe\x0c\xce\x4a\xc9\x61\x8e\x8c\x3b\x22\x4b\xea\x0a" + "\x32\xf0\x55\xc6\x99\x68\x38\xa3\x2f\x52\x7c\xa3\xc3\xb6\x95", + "\xad\x80\x50\xdc\x6d\x12\x2d\xce\x3e\x56\x39\xed", + "\xe9\x96\x98\x24\x1c\x59\x9b\x5f", + "\x66\x8d\x5e\x3f\x95\xfe\x03\x0d\xaf\x43\x2a\x5f\xc5\x83\x7a\xf3\xa7" + "\x9c\x81\xe9\x4b\x28\xd8\x20\x4c\x5e\xe2\x62\xab\x3c\x99\x08\xa7", + "\xea\xf6\x81\x0e\x6e\xc1\xcb\x7a\x29\x18\x85\x62\x57\xd1\xaa\x3d\x51" + "\xa8\x27\x87\x91\x46\xc6\x33\x7e\xcf\x53\x5e\x9c\x89\xb1\x49\xc5", + "\xa2\x95\x0c\x2f\x39\x4a\x34\x66\xc3\x45\xf7\x96\x32\x3c\x1a\xa7", 1, + 64, 264 }, + { 96, 256, 128, 70, + "\xcf\x0d\x40\xa4\x64\x4e\x5f\x51\x81\x51\x65\xd5\x30\x1b\x22\x63\x1f" + "\x45\x44\xc4\x9a\x18\x78\xe3\xa0\xa5\xe8\xe1\xaa\xe0\xf2\x64", + "\xe7\x4a\x51\x5e\x7e\x21\x02\xb9\x0b\xef\x55\xd2", "", + "\x97\x3d\x0c\x75\x38\x26\xba\xe4\x66\xcf\x9a\xbb\x34\x93\x15\x2e\x9d" + "\xe7\x81\x9e\x2b\xd0\xc7\x11\x71\x34\x6b\x4d\x2c\xeb\xf8\x04\x1a\xa3" + "\xce\xdc\x0d\xfd\x7b\x46\x7e\x26\x22\x8b\xc8\x6c\x9a", + "\xfb\xa7\x8a\xe4\xf9\xd8\x08\xa6\x2e\x3d\xa4\x0b\xe2\xcb\x77\x00\xc3" + "\x61\x3d\x9e\xb2\xc5\x29\xc6\x52\xe7\x6a\x43\x2c\x65\x8d\x27\x09\x5f" + "\x0e\xb8\xf9\x40\xc3\x24\x98\x1e\xa9\x35\xe5\x07\xf9", + "\x8f\x04\x69\x56\xdb\x3a\x51\x29\x08\xbd\x7a\xfc\x8f\x2a\xb0\xa9", 1, + 0, 376 }, + { 96, 256, 128, 71, + "\x6c\xbf\xd7\x1c\x64\x5d\x18\x4c\xf5\xd2\x3c\x40\x2b\xdb\x0d\x25\xec" + "\x54\x89\x8c\x8a\x02\x73\xd4\x2e\xb5\xbe\x10\x9f\xdc\xb2\xac", + "\xd4\xd8\x07\x34\x16\x83\x82\x5b\x31\xcd\x4d\x95", + "\xb3\xe4\x06\x46\x83\xb0\x2d\x84", + "\xa9\x89\x95\x50\x4d\xf1\x6f\x74\x8b\xfb\x77\x85\xff\x91\xee\xb3\xb6" + 
"\x60\xea\x9e\xd3\x45\x0c\x3d\x5e\x7b\x0e\x79\xef\x65\x36\x59\xa9\x97" + "\x8d\x75\x54\x2e\xf9\x1c\x45\x67\x62\x21\x56\x40\xb9", + "\xa1\xff\xed\x80\x76\x18\x29\xec\xce\x24\x2e\x0e\x88\xb1\x38\x04\x90" + "\x16\xbc\xa0\x18\xda\x2b\x6e\x19\x98\x6b\x3e\x31\x8c\xae\x8d\x80\x61" + "\x98\xfb\x4c\x52\x7c\xc3\x93\x50\xeb\xdd\xea\xc5\x73", + "\xc4\xcb\xf0\xbe\xfd\xa0\xb7\x02\x42\xc6\x40\xd7\xcd\x02\xd7\xa3", 1, + 64, 376 }, + { 96, 256, 128, 72, + "\x5b\x1d\x10\x35\xc0\xb1\x7e\xe0\xb0\x44\x47\x67\xf8\x0a\x25\xb8\xc1" + "\xb7\x41\xf4\xb5\x0a\x4d\x30\x52\x22\x6b\xaa\x1c\x6f\xb7\x01", + "\xd6\x10\x40\xa3\x13\xed\x49\x28\x23\xcc\x06\x5b", "", + "\xd0\x96\x80\x31\x81\xbe\xef\x9e\x00\x8f\xf8\x5d\x5d\xdc\x38\xdd\xac" + "\xf0\xf0\x9e\xe5\xf7\xe0\x7f\x1e\x40\x79\xcb\x64\xd0\xdc\x8f\x5e\x67" + "\x11\xcd\x49\x21\xa7\x88\x7d\xe7\x6e\x26\x78\xfd\xc6\x76\x18\xf1\x18" + "\x55\x86\xbf\xea\x9d\x4c\x68\x5d\x50\xe4\xbb\x9a\x82", + "\x9a\x4e\xf2\x2b\x18\x16\x77\xb5\x75\x5c\x08\xf7\x47\xc0\xf8\xd8\xe8" + "\xd4\xc1\x8a\x9c\xc2\x40\x5c\x12\xbb\x51\xbb\x18\x72\xc8\xe8\xb8\x77" + "\x67\x8b\xec\x44\x2c\xfc\xbb\x0f\xf4\x64\xa6\x4b\x74\x33\x2c\xf0\x72" + "\x89\x8c\x7e\x0e\xdd\xf6\x23\x2e\xa6\xe2\x7e\xfe\x50", + "\x9f\xf3\x42\x7a\x0f\x32\xfa\x56\x6d\x9c\xa0\xa7\x8a\xef\xc0\x13", 1, + 0, 512 }, + { 96, 256, 128, 73, + "\x97\xd6\x35\xc4\xf4\x75\x74\xd9\x99\x8a\x90\x87\x5d\xa1\xd3\xa2\x84" + "\xb7\x55\xb2\xd3\x92\x97\xa5\x72\x52\x35\x19\x0e\x10\xa9\x7e", + "\xd3\x1c\x21\xab\xa1\x75\xb7\x0d\xe4\xeb\xb1\x9c", + "\x71\x93\xf6\x23\x66\x33\x21\xa2", + "\x94\xee\x16\x6d\x6d\x6e\xcf\x88\x32\x43\x71\x36\xb4\xae\x80\x5d\x42" + "\x88\x64\x35\x95\x86\xd9\x19\x3a\x25\x01\x62\x93\xed\xba\x44\x3c\x58" + "\xe0\x7e\x7b\x71\x95\xec\x5b\xd8\x45\x82\xa9\xd5\x6c\x8d\x4a\x10\x8c" + "\x7d\x7c\xe3\x4e\x6c\x6f\x8e\xa1\xbe\xc0\x56\x73\x17", + "\x5f\xbb\xde\xcc\x34\xbe\x20\x16\x14\xf6\x36\x03\x1e\xeb\x42\xf1\xca" + "\xce\x3c\x79\xa1\x2c\xff\xd8\x71\xee\x8e\x73\x82\x0c\x82\x97\x49\xf1" + "\xab\xb4\x29\x43\x67\x84\x9f\xb6\xc2\xaa\x56\xbd\xa8\xa3\x07\x8f\x72" + "\x3d\x7c\x1c\x85\x20\x24\xb0\x17\xb5\x89\x73\xfb\x1e", + "\x09\x26\x3d\xa7\xb4\xcb\x92\x14\x52\xf9\x7d\xca\x40\xf5\x80\xec", 1, + 64, 512 }, + { 96, 256, 128, 74, + "\xfe\x6e\x55\xbd\xae\xd1\xf7\x28\x4c\xa5\xfc\x0f\x8c\x5f\x2b\x8d\xf5" + "\x6d\xc0\xf4\x9e\x8c\xa6\x6a\x41\x99\x5e\x78\x33\x51\xf9\x01", + "\x17\xc8\x6a\x8a\xbb\xb7\xe0\x03\xac\xde\x27\x99", "", + "\xb4\x29\xeb\x80\xfb\x8f\xe8\xba\xed\xa0\xc8\x5b\x9c\x33\x34\x58\xe7" + "\xc2\x99\x2e\x55\x84\x75\x06\x9d\x12\xd4\x5c\x22\x21\x75\x64\x12\x15" + "\x88\x03\x22\x97\xef\xf5\x67\x83\x74\x2a\x5f\xc2\x2d\x74\x10\xff\xb2" + "\x9d\x66\x09\x86\x61\xd7\x6f\x12\x6c\x3c\x27\x68\x9e\x43\xb3\x72\x67" + "\xca\xc5\xa3\xa6\xd3\xab\x49\xe3\x91\xda\x29\xcd\x30\x54\xa5\x69\x2e" + "\x28\x07\xe4\xc3\xea\x46\xc8\x76\x1d\x50\xf5\x92", + "\xd0\x10\x2f\x6c\x25\x8b\xf4\x97\x42\xce\xc3\x4c\xf2\xd0\xfe\xdf\x23" + "\xd1\x05\xfb\x4c\x84\xcf\x98\x51\x5e\x1b\xc9\xa6\x4f\x8a\xd5\xbe\x8f" + "\x07\x21\xbd\xe5\x06\x45\xd0\x00\x83\xc3\xa2\x63\xa3\x10\x53\xb7\x60" + "\x24\x5f\x52\xae\x28\x66\xa5\xec\x83\xb1\x9f\x61\xbe\x1d\x30\xd5\xc5" + "\xd9\xfe\xcc\x4c\xbb\xe0\x8f\xd3\x85\x81\x3a\x2a\xa3\x9a\x00\xff\x9c" + "\x10\xf7\xf2\x37\x02\xad\xd1\xe4\xb2\xff\xa3\x1c", + "\x41\x86\x5f\xc7\x1d\xe1\x2b\x19\x61\x21\x27\xce\x49\x99\x3b\xb0", 1, + 0, 776 }, + { 96, 256, 128, 75, + "\xaa\xbc\x06\x34\x74\xe6\x5c\x4c\x3e\x9b\xdc\x48\x0d\xea\x97\xb4\x51" + "\x10\xc8\x61\x88\x46\xff\x6b\x15\xbd\xd2\xa4\xa5\x68\x2c\x4e", + "\x46\x36\x2f\x45\xd6\x37\x9e\x63\xe5\x22\x94\x60", + 
"\xa1\x1c\x40\xb6\x03\x76\x73\x30", + "\xce\xb5\x34\xce\x50\xdc\x23\xff\x63\x8a\xce\x3e\xf6\x3a\xb2\xcc\x29" + "\x73\xee\xad\xa8\x07\x85\xfc\x16\x5d\x06\xc2\xf5\x10\x0f\xf5\xe8\xab" + "\x28\x82\xc4\x75\xaf\xcd\x05\xcc\xd4\x9f\x2e\x7d\x8f\x55\xef\x3a\x72" + "\xe3\xdc\x51\xd6\x85\x2b\x8e\x6b\x9e\x7a\xec\xe5\x7b\xe6\x55\x6b\x0b" + "\x6d\x94\x13\xe3\x3f\xc5\xfc\x24\xa9\xa2\x05\xad\x59\x57\x4b\xb3\x9d" + "\x94\x4a\x92\xdc\x47\x97\x0d\x84\xa6\xad\x31\x76", + "\x75\x45\x39\x1b\x51\xde\x01\xd5\xc5\x3d\xfa\xca\x77\x79\x09\x06\x3e" + "\x58\xed\xee\x4b\xb1\x22\x7e\x71\x10\xac\x4d\x26\x20\xc2\xae\xc2\xf8" + "\x48\xf5\x6d\xee\xb0\x37\xa8\xdc\xed\x75\xaf\xa8\xa6\xc8\x90\xe2\xde" + "\xe4\x2f\x95\x0b\xb3\x3d\x9e\x24\x24\xd0\x8a\x50\x5d\x89\x95\x63\x97" + "\x3e\xd3\x88\x70\xf3\xde\x6e\xe2\xad\xc7\xfe\x07\x2c\x36\x6c\x14\xe2" + "\xcf\x7c\xa6\x2f\xb3\xd3\x6b\xee\x11\x68\x54\x61", + "\xb7\x0d\x44\xef\x8c\x66\xc5\xc7\xbb\xf1\x0d\xca\xdd\x7f\xac\xf6", 1, + 64, 776 }, + { 96, 256, 128, 76, + "\xd7\xad\xdd\x38\x89\xfa\xdf\x8c\x89\x3e\xee\x14\xba\x2b\x7e\xa5\xbf" + "\x56\xb4\x49\x90\x48\x69\x61\x5b\xd0\x5d\x5f\x11\x4c\xf3\x77", + "\x8a\x3a\xd2\x6b\x28\xcd\x13\xba\x65\x04\xe2\x60", "", + "\xc8\x77\xa7\x6b\xf5\x95\x56\x07\x72\x16\x7c\x6e\x3b\xcc\x70\x53\x05" + "\xdb\x9c\x6f\xcb\xeb\x90\xf4\xfe\xa8\x51\x16\x03\x8b\xc5\x3c\x3f\xa5" + "\xb4\xb4\xea\x0d\xe5\xcc\x53\x4f\xbe\x1c\xf9\xae\x44\x82\x4c\x6c\x2c" + "\x0a\x5c\x88\x5b\xd8\xc3\xcd\xc9\x06\xf1\x26\x75\x73\x7e\x43\x4b\x98" + "\x3e\x1e\x23\x1a\x52\xa2\x75\xdb\x5f\xb1\xa0\xca\xc6\xa0\x7b\x3b\x7d" + "\xcb\x19\x48\x2a\x5d\x3b\x06\xa9\x31\x7a\x54\x82\x6c\xea\x6b\x36\xfc" + "\xe4\x52\xfa\x9b\x54\x75\xe2\xaa\xf2\x54\x99\x49\x9d\x8a\x89\x32\xa1" + "\x9e\xb9\x87\xc9\x03\xbd\x85\x02\xfe", + "\x29\x4a\x76\x4c\x03\x35\x3f\x5f\x4f\x6e\x93\xcd\x7e\x97\x74\x80\xd6" + "\xc3\x43\x07\x1d\xb0\xb7\xc1\xf0\xdb\x1e\x95\xb8\x5e\x60\x53\xf0\x42" + "\x31\x68\xa9\xc7\x53\x32\x68\xdb\x9a\x19\x4e\x76\x65\x35\x9d\x14\x48" + "\x9b\xc4\x71\x72\xa9\xf2\x13\x70\xe8\x9b\x0b\xd0\xe5\xef\x96\x61\x73" + "\x8d\xe2\x82\x57\x2b\xcc\x3e\x54\x12\x47\x62\x6e\x57\xe7\x5d\xec\x0f" + "\x91\xac\x5c\x53\x0b\xd1\xa5\x32\x71\x84\x29\x96\xdc\xd0\x4d\x86\x53" + "\x21\xb1\xec\xb6\xe7\x63\x01\x14\xfe\x78\x02\x91\xb8\xdc\x3e\x5d\x0a" + "\xbc\x8e\x65\xb1\xc5\x49\x3e\x9a\xf0", + "\xf2\xb9\x74\xca\x0f\x14\xfb\x9f\x92\x01\x4b\xff\x18\x57\x3c\xff", 1, + 0, 1024 }, + { 96, 256, 128, 77, + "\x80\xbe\x86\xfb\x6f\xc4\x9b\xc7\x34\x28\xca\xb5\x76\xf6\xad\x72\xff" + "\x6a\xca\x04\x00\x1b\x8b\x1c\x57\xa7\x12\x8b\xe7\x39\x00\xaa", + "\x90\x31\x88\x43\x3c\x1c\xe8\x97\x1a\xa1\x9b\x9d", + "\x05\x87\xaf\x85\x30\xad\x05\x47", + "\x67\xce\x49\x9c\xd8\xed\x68\xbd\x71\x7d\xfe\x61\xc6\x0f\x27\xd2\x60" + "\xb1\xc1\x63\xa7\x2e\x8c\xc8\x59\x72\x53\xd3\xd9\x87\xc2\xdb\xe1\xbf" + "\xf2\xe4\x4d\x9b\xd4\x76\x5d\x3e\x53\xd9\xc3\xf8\xeb\x3b\x90\xe7\x51" + "\xf4\x7c\x71\x57\xbd\xc1\x14\x2b\xc3\x3f\x58\x33\xac\x1c\xd1\x26\x2c" + "\xbb\x23\x90\x66\xb3\x34\xa4\xed\x99\xae\x82\xc7\x4f\x2b\x49\x54\x0f" + "\x1a\x61\x4b\xc2\x39\xd8\xfc\x5a\xdd\x8c\x17\x81\x84\xe4\x12\x81\xf6" + "\xe6\x6c\x5c\x31\x17\xfd\x95\x35\x47\xf7\xc8\x29\x42\x5b\x50\x82\xaa" + "\x69\x68\x68\x47\xea\xf5\x78\x46\x92", + "\x2b\x90\xb4\xf3\xde\x28\x0c\x44\x91\x3d\x19\x84\xbd\xd5\xdf\xa0\x56" + "\x6c\x6a\x14\xa0\x58\x65\x9a\x9b\x62\x32\x77\xb0\xbb\x6e\x82\x10\x1e" + "\x79\x39\x5d\x12\xe6\x43\xf6\x2d\x9a\x82\x2b\xae\x49\x79\x07\x49\x3e" + "\x4f\x82\x13\xfc\xf9\x9d\xa8\xa7\x8f\xdf\x86\x7a\xf3\x6b\xc8\xb0\x93" + "\x1c\x18\x86\xb4\xf0\xae\x57\x29\x98\x64\x94\xdb\xd5\x97\x37\xe9\x56" + 
"\xcd\x8f\x22\x6c\x7c\x52\x26\x89\xd0\x82\xf0\x23\x89\x4d\x54\xac\xab" + "\x0c\x4d\x60\x9f\x37\x46\xa6\x73\x69\xbb\x88\x76\x00\x8f\x7f\xd3\xdc" + "\x66\x81\xc5\xfb\x9d\x72\x8c\x59\x11", + "\xf0\x05\xeb\xe1\xc1\xad\xa7\x5a\x9c\xee\x8d\x63\x08\x81\xd5\xb8", 1, + 64, 1024 }, + { 96, 256, 128, 78, + "\x7d\x00\xb4\x80\x95\xad\xfa\x32\x72\x05\x06\x07\xb2\x64\x18\x50\x02" + "\xba\x99\x95\x7c\x49\x8b\xe0\x22\x77\x0f\x2c\xe2\xf3\x14\x3c", + "\x87\x34\x5f\x10\x55\xfd\x9e\x21\x02\xd5\x06\x56", "\x02", + "\xe5\xcc\xaa\x44\x1b\xc8\x14\x68\x8f\x8f\x6e\x8f\x28\xb5\x00\xb2", + "\x7e\x72\xf5\xa1\x85\xaf\x16\xa6\x11\x92\x1b\x43\x8f\x74\x9f\x0b", + "\x12\x42\xc6\x70\x73\x23\x34\x02\x9a\xdf\xe1\xc5\x00\x16\x51\xe4", 1, + 8, 128 }, + { 96, 256, 128, 79, + "\x64\x32\x71\x7f\x1d\xb8\x5e\x41\xac\x78\x36\xbc\xe2\x51\x85\xa0\x80" + "\xd5\x76\x2b\x9e\x2b\x18\x44\x4b\x6e\xc7\x2c\x3b\xd8\xe4\xdc", + "\x87\xa3\x16\x3e\xc0\x59\x8a\xd9\x5b\x3a\xa7\x13", "\xb6\x48", + "\x02\xcd\xe1\x68\xfb\xa3\xf5\x44\xbb\xd0\x33\x2f\x7a\xde\xad\xa8", + "\x85\xf2\x9a\x71\x95\x57\xcd\xd1\x4d\x1f\x8f\xff\xab\x6d\x9e\x60", + "\x73\x2c\xa3\x2b\xec\xd5\x15\xa1\xed\x35\x3f\x54\x2e\x99\x98\x58", 1, + 16, 128 }, + { 96, 256, 128, 80, + "\x7a\xfa\x0f\x59\xdf\xcb\x5a\xd3\xa7\x64\x90\xc5\xc8\x04\x32\x7c\x8d" + "\x05\x2b\xe7\x37\xa6\x0f\xa8\xbc\xbf\x0a\x2c\x36\x63\x0a\x43", + "\x25\xb7\xbd\xf4\xa6\xdc\xbf\x7c\x9a\x3e\xc2\xb3", "\x8b\x71\xac", + "\x62\x3e\x6b\xa6\xd3\x16\x6a\x33\x8b\xfc\xc7\xaf\x90\xa2\x30\xc8", + "\xd4\x6e\x82\x65\xa8\xc6\xa2\x53\x93\xdd\x95\x6b\xb4\x43\x97\xad", + "\xe2\x8f\x3a\xd9\xe3\xef\x4a\x3d\x94\xee\x07\xbf\x53\x8e\xaa\xfb", 1, + 24, 128 }, + { 96, 256, 128, 81, + "\x2e\xc2\x5b\x0e\xc7\xac\x24\x42\x24\xe9\xc7\xfc\x2f\xa5\xd3\xef\x17" + "\x80\x9e\x19\xfd\x6e\x95\x41\x58\xdd\x0d\x72\x73\x8a\x4c\xc8", + "\x6f\xb0\xd1\x41\x7c\xdf\xff\x4d\xf3\x7d\xb0\x8c", + "\x3a\x5d\xdf\x40", + "\xa1\xc9\x33\x76\x8a\x6d\x57\x3e\xbf\x68\xa9\x9e\x5e\x18\xda\xe8", + "\x2d\x3c\xb2\xd9\x30\x34\x91\xe2\x64\xf2\x90\x4f\x0e\x07\x53\xf4", + "\x6c\x1d\xb9\x59\x36\x2d\x21\x7b\x23\x22\xb4\x66\x53\x6b\xfe\xa0", 1, + 32, 128 }, + { 96, 256, 128, 82, + "\x0a\x2c\xf5\x23\x71\xcf\x9d\x9f\x95\xb1\x01\x08\xfc\x82\xb4\xfd\x61" + "\x10\xa8\xba\x9a\x88\xa2\x60\x83\x68\x5a\xd2\x98\x26\x89\x1a", + "\x25\x38\xfc\x67\xaf\xb9\xea\xb3\x33\xf8\x32\x90", + "\x9e\xec\x54\x0b\xb0", + "\x0d\x8c\x69\x1d\x04\x4a\x39\x78\xd7\x90\x43\x2d\xc7\x1d\x69\xf8", + "\xa9\x88\xc0\x3c\x71\xb9\x56\xff\x08\x6d\x04\x70\xd7\x06\xbd\x34", + "\xb3\x5d\x7c\xbf\x2b\xeb\x89\x4b\x0c\x74\x6e\x07\x30\x42\x9e\x15", 1, + 40, 128 }, + { 96, 256, 128, 83, + "\x30\x7e\x88\x6b\x38\xbb\x18\xb4\x45\xf8\xa2\xc6\xd6\xf8\x93\x24\x92" + "\xa9\xce\xa8\xd0\x41\xba\x72\xeb\x5e\xfd\xfa\x70\xd0\xb8\xd2", + "\xa0\x71\xbe\x99\x91\x51\xe2\xa1\xc4\x1c\x81\xe9", + "\x56\xe0\x14\xd9\x7c\x74", + "\x9a\xba\x22\xb4\x95\xcb\x7e\xc8\x87\xdd\xaa\x62\x01\x9a\xa1\x4d", + "\x32\xbf\x95\xd4\xc1\x95\xdb\xaf\x58\xd9\xaf\x40\x01\xc6\xe5\x7d", + "\x43\x93\x80\x87\x03\xd6\x7a\x90\x87\x05\x78\x04\x6c\xd8\xb5\x25", 1, + 48, 128 }, + { 96, 256, 128, 84, + "\xda\xcd\x51\xa8\xa8\xe4\xd5\x90\x5b\x4c\xbb\x94\x7e\xf4\x01\x3e\xb2" + "\x96\x88\x93\x53\xf3\xc9\xee\x35\xf5\x57\x7b\x26\x73\x7a\x51", + "\x3f\xa3\x78\xa1\xbe\xfd\xdd\xd6\x1a\xe6\x8c\xf4", + "\xbb\x5a\x38\x12\xf0\xae\xfd", + "\xe1\x48\x31\x38\x83\xa7\x7d\xa1\x21\x12\x4d\x06\xb1\xc7\x7d\xca", + "\x2a\x20\x7c\xa7\xe9\xda\x6b\x13\xa2\x29\x60\x43\x04\xd8\x7e\xb1", + "\x8a\x6b\x6a\xfe\xc8\x7d\x93\xec\x6e\x8d\xbe\x13\xd8\x4c\x0f\x8c", 1, + 56, 128 }, + { 96, 256, 128, 85, + 
"\x7b\x5f\xbb\xb2\x02\xc1\x61\x08\xfd\x13\x06\x64\x46\x85\x3a\x85\x0d" + "\x8b\x34\xe9\xda\x40\x51\x95\x80\xda\x44\x6a\x92\x2f\x91\x62", + "\xaa\x07\x7a\x5c\xe9\x16\x1b\xde\x8d\x8e\xdc\x40", + "\xf9\x4b\xb9\x2c\x1c\x66\x8a\x69\x5b", + "\xda\x47\x1c\xd6\x93\x5a\x0c\xa8\x30\x7d\xde\xdc\x6b\x95\x99\x62", + "\x54\x8a\x5c\xa0\xae\x49\x21\x1c\xdf\x30\xbb\xdc\xb1\x35\x2d\x31", + "\x20\x4d\xac\xb9\x8f\x8c\x89\x08\xcc\x5e\xa2\x2b\xb2\x3f\x90\x1f", 1, + 72, 128 }, + { 96, 256, 128, 86, + "\x1f\xfd\x10\x1e\xb9\x75\x31\xf6\xfa\xa8\x21\xec\x4d\x5c\x57\x02\x72" + "\x5d\xd0\x33\xd3\xb8\x30\xbb\x76\x0c\x4e\xf2\x7b\xa9\x83\xdf", + "\x59\x81\x14\xe8\xcf\x7f\xbd\xea\x8a\xd2\x96\x83", + "\x21\x55\x62\x7e\xc1\x5a\x97\x8f\xbc\xb2", + "\x28\x66\x8c\xa8\xdb\x53\x5c\x7e\x8e\xb2\x74\x91\xad\x0f\xb7\xcb", + "\x28\xce\xda\xc2\x4f\x14\xca\xa3\x26\xc7\xfe\x40\x1f\x68\xa8\x7c", + "\x2b\xf1\xb2\xc4\x3d\x30\x39\xf8\xf5\xce\x35\x9c\x11\x02\xf8\x79", 1, + 80, 128 }, + { 96, 256, 128, 87, + "\xd2\xd0\xa9\x73\xd5\x95\x1a\xf3\x52\xcb\xee\x57\xac\x9d\xab\x1c\x28" + "\x4c\x99\xaf\x3b\x99\x2c\xe0\x15\xf2\x19\x50\x6f\x64\x88\x8d", + "\x9a\xcd\x21\x35\x70\xce\x9b\xb9\xd8\x86\xc6\xef", + "\x37\xad\x66\x8d\x4d\x4f\xe8\x89\x94\x97\x63", + "\x3f\x3f\x00\x76\x25\x03\x52\xe1\xb6\xb5\xc1\x2c\xfa\x12\x62\x5e", + "\x72\x56\xe8\x56\x87\x2a\xd3\xa5\x4b\x34\xa2\xa6\xbd\xca\x88\x38", + "\x3b\x12\xe4\x58\x6e\x45\x22\x3f\x78\xa6\xee\xa8\x11\xef\xb8\x63", 1, + 88, 128 }, + { 96, 256, 128, 88, + "\xad\xcc\x52\x0b\x38\x13\x82\x23\x7d\x05\xa6\x40\x0a\x7d\xfb\xcd\x07" + "\x71\xb6\xaa\x9e\xdb\x79\x66\x13\x1d\xde\xf6\xaf\x21\xf1\xbe", + "\x91\x83\xcd\xf3\xa8\xba\x73\x97\xb6\xb2\xd5\xd5", + "\xb3\x34\x37\x54\x15\xf6\x21\x5c\x0b\xf8\x9a\x9a", + "\x95\x82\x95\x61\x9c\xf1\xb3\x6f\x0b\x47\x46\x63\xc0\xbc\x79\xeb", + "\x85\x2c\x14\x1b\x42\x39\xa3\x1f\xee\xda\x03\x55\x0d\x70\xa2\xbe", + "\x5f\xc5\x92\x87\xb9\x2d\x3f\xcf\x7d\x66\xf1\x3d\xef\xb1\x1b\x0d", 1, + 96, 128 }, + { 96, 256, 128, 89, + "\xbd\x53\x4f\x7a\xde\xca\x46\x68\x44\xfb\x3b\xa3\x46\x58\xbe\x80\x7f" + "\x15\xc5\x29\x1e\xd6\x02\x68\x60\xa2\x4f\x17\x9b\x71\x2c\x89", + "\x41\x2c\x3e\x13\xee\x1f\x78\x64\xbd\x15\xce\x39", + "\x28\x66\xaf\xff\x0b\xcc\x61\x35\xdc\x63\xaf\x88\xc8", + "\xd9\x2f\x8c\xe5\xd8\xd0\xad\x2e\xb5\xf1\x1a\xf0\x2e\xf6\x39\x49", + "\x89\xd6\xd0\x89\xc4\xa2\x55\x95\x2a\xca\x11\xb2\x4a\x01\xff\x95", + "\xf8\x8f\xa4\x53\x12\x04\xda\x31\x5e\x73\x17\x97\x02\x40\xce\x9e", 1, + 104, 128 }, + { 96, 256, 128, 90, + "\x91\x0a\xde\x7d\x32\x4d\x2c\x96\x88\x43\x9e\x1f\x14\x2e\x0e\x5f\x9d" + "\x13\x0f\xf8\x32\xe5\x07\xfe\x19\x85\xe5\xa2\x64\x52\xa6\xd0", + "\x9b\xe0\x90\xdb\xa9\x3d\xef\xf2\x7a\xdf\x99\xee", + "\xea\x25\x75\xf1\x23\x26\x8e\x93\x6c\x8e\x4c\x8c\x1b\xb8", + "\x6e\x35\x60\x94\xed\x9d\x9a\x70\x53\xc7\x90\x6c\x48\xba\x3d\x9f", + "\x01\xff\xb3\x43\xc7\x57\xb2\x78\x43\xd8\xa9\x00\xa3\x6c\xe3\x9d", + "\xa3\x15\x54\x1b\x7d\x63\x13\xc6\xfd\xdf\x64\xb3\x03\xd7\x1d\x60", 1, + 112, 128 }, + { 96, 256, 128, 91, + "\x8e\x34\xcf\x73\xd2\x45\xa1\x08\x2a\x92\x0b\x86\x36\x4e\xb8\x96\xc4" + "\x94\x64\x67\xbc\xb3\xd5\x89\x29\xfc\xb3\x66\x90\xe6\x39\x4f", + "\x6f\x57\x3a\xa8\x6b\xaa\x49\x2b\xa4\x65\x96\xdf", + "\xbd\x4c\xd0\x2f\xc7\x50\x2b\xbd\xbd\xf6\xc9\xa3\xcb\xe8\xf0", + "\x16\xdd\xd2\x3f\xf5\x3f\x3d\x23\xc0\x63\x34\x48\x70\x40\xeb\x47", + "\xc1\xb2\x95\x93\x6d\x56\xfa\xda\xc0\x3e\x5f\x74\x2b\xff\x73\xa1", + "\x39\xc4\x57\xdb\xab\x66\x38\x2b\xab\xb3\xb5\x58\x00\xcd\xa5\xb8", 1, + 120, 128 }, + { 96, 256, 128, 92, + "\xcb\x55\x75\xf5\xc7\xc4\x5c\x91\xcf\x32\x0b\x13\x9f\xb5\x94\x23\x75" + 
"\x60\xd0\xa3\xe6\xf8\x65\xa6\x7d\x4f\x63\x3f\x2c\x08\xf0\x16", + "\x1a\x65\x18\xf0\x2e\xde\x1d\xa6\x80\x92\x66\xd9", + "\x89\xcc\xe9\xfb\x47\x44\x1d\x07\xe0\x24\x5a\x66\xfe\x8b\x77\x8b", + "\x62\x3b\x78\x50\xc3\x21\xe2\xcf\x0c\x6f\xbc\xc8\xdf\xd1\xaf\xf2", + "\xc8\x4c\x9b\xb7\xc6\x1c\x1b\xcb\x17\x77\x2a\x1c\x50\x0c\x50\x95", + "\xdb\xad\xf7\xa5\x13\x8c\xa0\x34\x59\xa2\xcd\x65\x83\x1e\x09\x2f", 1, + 128, 128 }, + { 96, 256, 128, 93, + "\xa5\x56\x9e\x72\x9a\x69\xb2\x4b\xa6\xe0\xff\x15\xc4\x62\x78\x97\x43" + "\x68\x24\xc9\x41\xe9\xd0\x0b\x2e\x93\xfd\xdc\x4b\xa7\x76\x57", + "\x56\x4d\xee\x49\xab\x00\xd2\x40\xfc\x10\x68\xc3", + "\xd1\x9f\x2d\x98\x90\x95\xf7\xab\x03\xa5\xfd\xe8\x44\x16\xe0\x0c" + "\x0e", + "\x87\xb3\xa4\xd7\xb2\x6d\x8d\x32\x03\xa0\xde\x1d\x64\xef\x82\xe3", + "\x94\xbc\x80\x62\x1e\xd1\xe7\x1b\x1f\xd2\xb5\xc3\xa1\x5e\x35\x68", + "\x33\x35\x11\x86\x17\x96\x97\x84\x01\x59\x8b\x96\x37\x22\xf5\xb3", 1, + 136, 128 }, + { 96, 256, 128, 94, + "\x56\x20\x74\x65\xb4\xe4\x8e\x6d\x04\x63\x0f\x4a\x42\xf3\x5c\xfc\x16" + "\x3a\xb2\x89\xc2\x2a\x2b\x47\x84\xf6\xf9\x29\x03\x30\xbe\xe0", + "\xdf\x87\x13\xe8\x7e\xc3\xdb\xcf\xad\x14\xd5\x3e", + "\x5e\x64\x70\xfa\xcd\x99\xc1\xd8\x1e\x37\xcd\x44\x01\x5f\xe1\x94\x80" + "\xa2\xa4\xd3\x35\x2a\x4f\xf5\x60\xc0\x64\x0f\xdb\xda", + "\xe6\x01\xb3\x85\x57\x79\x7d\xa2\xf8\xa4\x10\x6a\x08\x9d\x1d\xa6", + "\x29\x9b\x5d\x3f\x3d\x03\xc0\x87\x20\x9a\x16\xe2\x85\x14\x31\x11", + "\x4b\x45\x4e\xd1\x98\xde\x11\x7e\x83\xec\x49\xfa\x8d\x85\x08\xd6", 1, + 240, 128 }, + { 96, 256, 128, 95, + "\x07\x74\x33\x02\x2a\xb3\x4d\x38\x0f\xc1\x92\xfc\x24\xc2\xed\xc6\x30" + "\x1f\xec\x6f\x24\x44\x2f\x57\x2a\x10\x87\xff\x2e\x05\xb3\x9a", + "\x28\xad\xcb\xc7\x43\x64\xf2\x6d\xd4\xb3\x10\x8b", + "\xe0\x10\x0e\xb1\x16\xcd\xc5\xe2\x2a\x3b\x9f\x9b\x41\x26\xc1\x49\x59" + "\x5e\x75\x10\x7f\x6e\x23\x7c\x69\xe8\x29\x60\x05\x22\x70", + "\x03\xc8\x74\xee\xaa\xa6\xfa\x9f\x0d\xa6\x2c\x75\x8f\xb0\xad\x04", + "\x1e\x96\x87\xb3\x5f\xbc\x8e\xaa\x18\x25\xed\x38\x47\x79\x8f\x76", + "\x07\x88\xbf\x70\xfd\x04\x03\x0e\xcd\x1c\x96\xd0\xbc\x1f\xcd\x5d", 1, + 248, 128 }, + { 96, 256, 128, 96, + "\x39\x37\x98\x6a\xf8\x6d\xaf\xc1\xba\x0c\x46\x72\xd8\xab\xc4\x6c\x20" + "\x70\x62\x68\x2d\x9c\x26\x4a\xb0\x6d\x6c\x58\x07\x20\x51\x30", + "\x8d\xf4\xb1\x5a\x88\x8c\x33\x28\x6a\x7b\x76\x51", + "\xba\x44\x6f\x6f\x9a\x0c\xed\x22\x45\x0f\xeb\x10\x73\x7d\x90\x07\xfd" + "\x69\xab\xc1\x9b\x1d\x4d\x90\x49\xa5\x55\x1e\x86\xec\x2b\x37", + "\xdc\x9e\x9e\xaf\x11\xe3\x14\x18\x2d\xf6\xa4\xeb\xa1\x7a\xec\x9c", + "\x60\x5b\xbf\x90\xae\xb9\x74\xf6\x60\x2b\xc7\x78\x05\x6f\x0d\xca", + "\x38\xea\x23\xd9\x90\x54\xb4\x6b\x42\xff\xe0\x04\x12\x9d\x22\x04", 1, + 256, 128 }, + { 96, 256, 128, 97, + "\x36\x37\x2a\xbc\xdb\x78\xe0\x27\x96\x46\xac\x3d\x17\x6b\x96\x74\xe9" + "\x15\x4e\xec\xf0\xd5\x46\x9c\x65\x1e\xc7\xe1\x6b\x4c\x11\x99", + "\xbe\x40\xe5\xf1\xa1\x18\x17\xa0\xa8\xfa\x89\x49", + "\xd4\x1a\x82\x8d\x5e\x71\x82\x92\x47\x02\x19\x05\x40\x2e\xa2\x57\xdc" + "\xcb\xc3\xb8\x0f\xcd\x56\x75\x05\x6b\x68\xbb\x59\xe6\x2e\x88\x73", + "\x81\xce\x84\xed\xe9\xb3\x58\x59\xcc\x8c\x49\xa8\xf6\xbe\x7d\xc6", + "\x7b\x7c\xe0\xd8\x24\x80\x9a\x70\xde\x32\x56\x2c\xcf\x2c\x2b\xbd", + "\x15\xd4\x4a\x00\xce\x0d\x19\xb4\x23\x1f\x92\x1e\x22\xbc\x0a\x43", 1, + 264, 128 }, + { 96, 256, 128, 98, + "\x9f\x14\x79\xed\x09\x7d\x7f\xe5\x29\xc1\x1f\x2f\x5a\xdd\x9a\xaf\xf4" + "\xa1\xca\x0b\x68\x99\x7a\x2c\xb7\xf7\x97\x49\xbd\x90\xaa\xf4", + "\x84\xc8\x7d\xae\x4e\xee\x27\x73\x0e\xc3\x5d\x12", + "\x3f\x2d\xd4\x9b\xbf\x09\xd6\x9a\x78\xa3\xd8\x0e\xa2\x56\x66\x14\xfc" + 
"\x37\x94\x74\x19\x6c\x1a\xae\x84\x58\x3d\xa7\x3d\x7f\xf8\x5c\x6f\x42" + "\xca\x42\x05\x6a\x97\x92\xcc\x1b\x9f\xb3\xc7\xd2\x61", + "\xa6\x67\x47\xc8\x9e\x85\x7a\xf3\xa1\x8e\x2c\x79\x50\x00\x87\xed", + "\xca\x82\xbf\xf3\xe2\xf3\x10\xcc\xc9\x76\x67\x2c\x44\x15\xe6\x9b", + "\x57\x63\x8c\x62\xa5\xd8\x5d\xed\x77\x4f\x91\x3c\x81\x3e\xa0\x32", 1, + 376, 128 }, + { 96, 256, 128, 99, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x65\xb6\x3b\xf0\x74\xb7\x28\x39\x92\xe2\x4b\x1a\xc0\xdf\x0d\x22\xb5" + "\x55\xdb\xe2\x25\x4d\x94\xa4\x3f\x1d\xe7\x48\xd3\xcc\x6f\x0d", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x39\xf4\xfc\xe3\x02\x6d\x83\x78\x9f\xfd\x1e\xe6\xf2\xcd\x7c\x4f", 1, + 128, 256 }, + { 96, 256, 128, 100, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x65\xb6\x3b\xf0\x74\xb7\x28\x39\x92\xe2\x4b\x1a\xc0\xdf\x0d\x22\xb5" + "\x55\xdb\xe2\x25\x4d\x94\xa4\x3f\x1d\xe7\x48\xd3\xcc\x6f\x0d\x20\xc1" + "\x42\xfe\x89\x8f\xbb\xe6\x68\xd4\x32\x43\x94\x43\x4c\x1b\x18\xb5\x8e" + "\xad\x71\x0a\xed\x9c\x31\xdb\x1f\x2a\x8a\x1f\x1b\xb2", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\xf5\xea\xa8\x04\x60\x5c\x3a\x47\x85\xf9\xd7\xf1\x3b\x6f\x67\xd6", 1, + 128, 512 }, + { 96, 256, 128, 101, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x65\xb6\x3b\xf0\x74\xb7\x28\x39\x92\xe2\x4b\x1a\xc0\xdf\x0d\x22\xb5" + "\x55\xdb\xe2\x25\x4d\x94\xa4\x3f\x1d\xe7\x48\xd3\xcc\x6f\x0d\x20\xc1" + "\x42\xfe\x89\x8f\xbb\xe6\x68\xd4\x32\x43\x94\x43\x4c\x1b\x18\xb5\x8e" + "\xad\x71\x0a\xed\x9c\x31\xdb\x1f\x2a\x8a\x1f\x1b\xb2\x44\x05\xc1\x83" + "\xaf\x94\xee\x1a\xd6\x30\xcd\x93\x11\x58\xa6\x21\x3d\x48\xc8\xff\xf1" + "\x0d\x0a\x1f\x9e\xf7\x60\x18\x8e\x65\x88\x02\xaa\xd5\x5e\x41\xa1\xd9" + "\x90\x69\xa1\x8d\xb5\x5c\x56\xaf\x7c\x10\xa6\xf2\x1e\xcc\x8a\xf9\xb7" + "\xce\x0a\x7e\xa0\xb6\x74\x26\xe9\x25", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x9b\x5c\x43\xa7\x8d\x95\x4e\x8a\x3c\x65\x9e\xeb\xc1\x3d\x5d\x55", 1, + 128, 1024 }, + { 96, 256, 128, 102, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + 
"\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x9a\x49\xc4\x0f\x8b\x48\xd7\xc6\x6d\x1d\xb4\xe5\x3f\x20\xf2\xdd\x4a" + "\xaa\x24\x1d\xda\xb2\x6b\x5b\xc0\xe2\x18\xb7\x2c\x33\x90\xf2", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x37\xe3\x39\x9d\x9c\xa6\x96\x79\x9f\x08\xf4\xf7\x2b\xc0\xcd\xd8", 1, + 128, 256 }, + { 96, 256, 128, 103, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x9a\x49\xc4\x0f\x8b\x48\xd7\xc6\x6d\x1d\xb4\xe5\x3f\x20\xf2\xdd\x4a" + "\xaa\x24\x1d\xda\xb2\x6b\x5b\xc0\xe2\x18\xb7\x2c\x33\x90\xf2\xdf\x3e" + "\xbd\x01\x76\x70\x44\x19\x97\x2b\xcd\xbc\x6b\xbc\xb3\xe4\xe7\x4a\x71" + "\x52\x8e\xf5\x12\x63\xce\x24\xe0\xd5\x75\xe0\xe4\x4d", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x3d\x52\x71\x0b\xec\x86\xd4\xea\x9f\xea\x2f\xf2\x69\x54\x91\x91", 1, + 128, 512 }, + { 96, 256, 128, 104, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x9a\x49\xc4\x0f\x8b\x48\xd7\xc6\x6d\x1d\xb4\xe5\x3f\x20\xf2\xdd\x4a" + "\xaa\x24\x1d\xda\xb2\x6b\x5b\xc0\xe2\x18\xb7\x2c\x33\x90\xf2\xdf\x3e" + "\xbd\x01\x76\x70\x44\x19\x97\x2b\xcd\xbc\x6b\xbc\xb3\xe4\xe7\x4a\x71" + "\x52\x8e\xf5\x12\x63\xce\x24\xe0\xd5\x75\xe0\xe4\x4d\xbb\xfa\x3e\x7c" + "\x50\x6b\x11\xe5\x29\xcf\x32\x6c\xee\xa7\x59\xde\xc2\xb7\x37\x00\x0e" + "\xf2\xf5\xe0\x61\x08\x9f\xe7\x71\x9a\x77\xfd\x55\x2a\xa1\xbe\x5e\x26" + "\x6f\x96\x5e\x72\x4a\xa3\xa9\x50\x83\xef\x59\x0d\xe1\x33\x75\x06\x48" + "\x31\xf5\x81\x5f\x49\x8b\xd9\x16\xda", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x51\x35\x63\x29\xe2\x80\xb1\x2d\x55\xd3\xd9\x8f\x0a\x58\x0c\xbe", 1, + 128, 1024 }, + { 96, 256, 128, 105, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80", + "\x65\xb6\x3b\x70\x74\xb7\x28\xb9\x92\xe2\x4b\x9a\xc0\xdf\x0d\xa2\xb5" + "\x55\xdb\x62\x25\x4d\x94\x24\x3f\x1d\xe7\xc8\xd3\xcc\x6f\x8d", + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00" + "\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80", + 
"\xc1\x52\xa4\xb9\x0c\x54\x8c\x71\xdc\x47\x9e\xde\xaf\x92\x11\xbf", 1, + 128, 256 }, + { 96, 256, 128, 106, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80", + "\x65\xb6\x3b\x70\x74\xb7\x28\xb9\x92\xe2\x4b\x9a\xc0\xdf\x0d\xa2\xb5" + "\x55\xdb\x62\x25\x4d\x94\x24\x3f\x1d\xe7\xc8\xd3\xcc\x6f\x8d\x20\xc1" + "\x42\x7e\x89\x8f\xbb\x66\x68\xd4\x32\xc3\x94\x43\x4c\x9b\x18\xb5\x8e" + "\x2d\x71\x0a\xed\x1c\x31\xdb\x1f\xaa\x8a\x1f\x1b\x32", + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00" + "\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00" + "\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00" + "\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80", + "\x40\xef\x63\x83\x05\x2d\x91\xc2\xe4\xb4\x61\x1b\x0e\x32\xc5\xff", 1, + 128, 512 }, + { 96, 256, 128, 107, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80", + "\x65\xb6\x3b\x70\x74\xb7\x28\xb9\x92\xe2\x4b\x9a\xc0\xdf\x0d\xa2\xb5" + "\x55\xdb\x62\x25\x4d\x94\x24\x3f\x1d\xe7\xc8\xd3\xcc\x6f\x8d\x20\xc1" + "\x42\x7e\x89\x8f\xbb\x66\x68\xd4\x32\xc3\x94\x43\x4c\x9b\x18\xb5\x8e" + "\x2d\x71\x0a\xed\x1c\x31\xdb\x1f\xaa\x8a\x1f\x1b\x32\x44\x05\xc1\x03" + "\xaf\x94\xee\x9a\xd6\x30\xcd\x13\x11\x58\xa6\xa1\x3d\x48\xc8\x7f\xf1" + "\x0d\x0a\x9f\x9e\xf7\x60\x98\x8e\x65\x88\x82\xaa\xd5\x5e\xc1\xa1\xd9" + "\x90\xe9\xa1\x8d\xb5\xdc\x56\xaf\x7c\x90\xa6\xf2\x1e\x4c\x8a\xf9\xb7" + "\x4e\x0a\x7e\xa0\x36\x74\x26\xe9\xa5", + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00" + "\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00" + "\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00" + "\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80" + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00" + "\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00" + "\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00" + "\x80\x00\x00\x00\x80\x00\x00\x00\x80", + "\xae\x9b\x54\x25\x41\xe8\x4f\xc7\x45\x42\xee\xd6\xbe\x63\x8f\xee", 1, + 128, 1024 }, + { 96, 256, 128, 108, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00", + "\xe5\xb6\x3b\xf0\xf4\xb7\x28\x39\x12\xe2\x4b\x1a\x40\xdf\x0d\x22\x35" + "\x55\xdb\xe2\xa5\x4d\x94\xa4\xbf\x1d\xe7\x48\x53\xcc\x6f\x0d", + "\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80" + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00", + "\x10\xfe\xe3\xec\xfb\xa9\xcd\xf7\x97\xba\xe3\x7a\x62\x6e\xc8\x3b", 1, + 128, 256 }, + { 96, 256, 128, 109, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00", + "\xe5\xb6\x3b\xf0\xf4\xb7\x28\x39\x12\xe2\x4b\x1a\x40\xdf\x0d\x22\x35" + 
"\x55\xdb\xe2\xa5\x4d\x94\xa4\xbf\x1d\xe7\x48\x53\xcc\x6f\x0d\xa0\xc1" + "\x42\xfe\x09\x8f\xbb\xe6\xe8\xd4\x32\x43\x14\x43\x4c\x1b\x98\xb5\x8e" + "\xad\xf1\x0a\xed\x9c\xb1\xdb\x1f\x2a\x0a\x1f\x1b\xb2", + "\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80" + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00" + "\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00" + "\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00", + "\x74\x90\x79\x5b\xdb\xbb\xf5\xd0\xae\xcb\x9a\x4f\x65\xaa\x37\x9f", 1, + 128, 512 }, + { 96, 256, 128, 110, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00", + "\xe5\xb6\x3b\xf0\xf4\xb7\x28\x39\x12\xe2\x4b\x1a\x40\xdf\x0d\x22\x35" + "\x55\xdb\xe2\xa5\x4d\x94\xa4\xbf\x1d\xe7\x48\x53\xcc\x6f\x0d\xa0\xc1" + "\x42\xfe\x09\x8f\xbb\xe6\xe8\xd4\x32\x43\x14\x43\x4c\x1b\x98\xb5\x8e" + "\xad\xf1\x0a\xed\x9c\xb1\xdb\x1f\x2a\x0a\x1f\x1b\xb2\xc4\x05\xc1\x83" + "\x2f\x94\xee\x1a\x56\x30\xcd\x93\x91\x58\xa6\x21\xbd\x48\xc8\xff\x71" + "\x0d\x0a\x1f\x1e\xf7\x60\x18\x0e\x65\x88\x02\x2a\xd5\x5e\x41\x21\xd9" + "\x90\x69\x21\x8d\xb5\x5c\xd6\xaf\x7c\x10\x26\xf2\x1e\xcc\x0a\xf9\xb7" + "\xce\x8a\x7e\xa0\xb6\xf4\x26\xe9\x25", + "\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80" + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00" + "\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00" + "\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00" + "\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80" + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00" + "\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00" + "\x00\x80\x00\x00\x00\x80\x00\x00\x00", + "\x1d\x10\x96\xa8\xca\x9e\x2b\xda\x27\x62\xc4\x1d\x5b\x16\xf6\x2f", 1, + 128, 1024 }, + { 96, 256, 128, 111, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f", + "\x9a\x49\xc4\x8f\x8b\x48\xd7\x46\x6d\x1d\xb4\x65\x3f\x20\xf2\x5d\x4a" + "\xaa\x24\x9d\xda\xb2\x6b\xdb\xc0\xe2\x18\x37\x2c\x33\x90\x72", + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff" + "\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f", + "\xaf\x84\x92\xc7\x92\xbf\x8d\x80\x62\xbe\x74\xff\x6e\xfb\x38\x69", 1, + 128, 256 }, + { 96, 256, 128, 112, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f", + "\x9a\x49\xc4\x8f\x8b\x48\xd7\x46\x6d\x1d\xb4\x65\x3f\x20\xf2\x5d\x4a" + "\xaa\x24\x9d\xda\xb2\x6b\xdb\xc0\xe2\x18\x37\x2c\x33\x90\x72\xdf\x3e" + "\xbd\x81\x76\x70\x44\x99\x97\x2b\xcd\x3c\x6b\xbc\xb3\x64\xe7\x4a\x71" + "\xd2\x8e\xf5\x12\xe3\xce\x24\xe0\x55\x75\xe0\xe4\xcd", + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff" + "\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff" + "\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff" + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f", + 
"\xf2\x4d\xb6\x8c\x46\xb6\x7d\x6f\x40\x2f\xa6\xc8\x97\x91\x33\x68", 1, + 128, 512 }, + { 96, 256, 128, 113, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f", + "\x9a\x49\xc4\x8f\x8b\x48\xd7\x46\x6d\x1d\xb4\x65\x3f\x20\xf2\x5d\x4a" + "\xaa\x24\x9d\xda\xb2\x6b\xdb\xc0\xe2\x18\x37\x2c\x33\x90\x72\xdf\x3e" + "\xbd\x81\x76\x70\x44\x99\x97\x2b\xcd\x3c\x6b\xbc\xb3\x64\xe7\x4a\x71" + "\xd2\x8e\xf5\x12\xe3\xce\x24\xe0\x55\x75\xe0\xe4\xcd\xbb\xfa\x3e\xfc" + "\x50\x6b\x11\x65\x29\xcf\x32\xec\xee\xa7\x59\x5e\xc2\xb7\x37\x80\x0e" + "\xf2\xf5\x60\x61\x08\x9f\x67\x71\x9a\x77\x7d\x55\x2a\xa1\x3e\x5e\x26" + "\x6f\x16\x5e\x72\x4a\x23\xa9\x50\x83\x6f\x59\x0d\xe1\xb3\x75\x06\x48" + "\xb1\xf5\x81\x5f\xc9\x8b\xd9\x16\x5a", + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff" + "\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff" + "\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff" + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f" + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff" + "\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff" + "\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff" + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f", + "\x43\xf6\x51\xab\x2e\x2e\xb0\xf0\x4b\xf6\x89\xa4\x0d\x32\xda\x24", 1, + 128, 1024 }, + { 96, 256, 128, 114, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff", + "\x1a\x49\xc4\x0f\x0b\x48\xd7\xc6\xed\x1d\xb4\xe5\xbf\x20\xf2\xdd\xca" + "\xaa\x24\x1d\x5a\xb2\x6b\x5b\x40\xe2\x18\xb7\xac\x33\x90\xf2", + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f" + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff", + "\x60\xd9\x52\x94\xa3\x69\x4c\xfa\xa6\x4b\x2f\x63\xbc\x1f\x82\xec", 1, + 128, 256 }, + { 96, 256, 128, 115, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff", + "\x1a\x49\xc4\x0f\x0b\x48\xd7\xc6\xed\x1d\xb4\xe5\xbf\x20\xf2\xdd\xca" + "\xaa\x24\x1d\x5a\xb2\x6b\x5b\x40\xe2\x18\xb7\xac\x33\x90\xf2\x5f\x3e" + "\xbd\x01\xf6\x70\x44\x19\x17\x2b\xcd\xbc\xeb\xbc\xb3\xe4\x67\x4a\x71" + "\x52\x0e\xf5\x12\x63\x4e\x24\xe0\xd5\xf5\xe0\xe4\x4d", + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f" + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff" + "\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff" + "\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff", + "\xbe\xac\xa0\xb4\x70\x27\x19\x61\x76\x18\x6d\x94\x40\x19\xc1\xc8", 1, + 128, 512 }, + { 96, 256, 128, 116, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff", + "\x1a\x49\xc4\x0f\x0b\x48\xd7\xc6\xed\x1d\xb4\xe5\xbf\x20\xf2\xdd\xca" + 
"\xaa\x24\x1d\x5a\xb2\x6b\x5b\x40\xe2\x18\xb7\xac\x33\x90\xf2\x5f\x3e" + "\xbd\x01\xf6\x70\x44\x19\x17\x2b\xcd\xbc\xeb\xbc\xb3\xe4\x67\x4a\x71" + "\x52\x0e\xf5\x12\x63\x4e\x24\xe0\xd5\xf5\xe0\xe4\x4d\x3b\xfa\x3e\x7c" + "\xd0\x6b\x11\xe5\xa9\xcf\x32\x6c\x6e\xa7\x59\xde\x42\xb7\x37\x00\x8e" + "\xf2\xf5\xe0\xe1\x08\x9f\xe7\xf1\x9a\x77\xfd\xd5\x2a\xa1\xbe\xde\x26" + "\x6f\x96\xde\x72\x4a\xa3\x29\x50\x83\xef\xd9\x0d\xe1\x33\xf5\x06\x48" + "\x31\x75\x81\x5f\x49\x0b\xd9\x16\xda", + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f" + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff" + "\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff" + "\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff" + "\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f" + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff" + "\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff" + "\xff\x7f\xff\xff\xff\x7f\xff\xff\xff", + "\xd4\x81\x10\x28\xa5\x77\xd4\xdd\x69\xd6\xb3\x5d\x71\x7f\x73\xe3", 1, + 128, 1024 }, + { 96, 256, 128, 117, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff", + "\x65\xb6\x3b\xf0\x8b\x48\xd7\xc6\x92\xe2\x4b\x1a\x3f\x20\xf2\xdd\xb5" + "\x55\xdb\xe2\xda\xb2\x6b\x5b\x3f\x1d\xe7\x48\x2c\x33\x90\xf2", + "\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00" + "\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff", + "\x10\xfb\x61\x27\x2b\x55\x5b\xee\x10\x4f\x5a\x71\x81\x87\x16\xd6", 1, + 128, 256 }, + { 96, 256, 128, 118, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff", + "\x65\xb6\x3b\xf0\x8b\x48\xd7\xc6\x92\xe2\x4b\x1a\x3f\x20\xf2\xdd\xb5" + "\x55\xdb\xe2\xda\xb2\x6b\x5b\x3f\x1d\xe7\x48\x2c\x33\x90\xf2\x20\xc1" + "\x42\xfe\x76\x70\x44\x19\x68\xd4\x32\x43\x6b\xbc\xb3\xe4\x18\xb5\x8e" + "\xad\x8e\xf5\x12\x63\x31\xdb\x1f\x2a\x75\xe0\xe4\x4d", + "\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00" + "\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00" + "\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00" + "\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff", + "\x47\x56\x76\x4e\x59\x58\x35\x04\x18\x28\x77\xd8\xc3\x31\x20\xf0", 1, + 128, 512 }, + { 96, 256, 128, 119, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff", + "\x65\xb6\x3b\xf0\x8b\x48\xd7\xc6\x92\xe2\x4b\x1a\x3f\x20\xf2\xdd\xb5" + "\x55\xdb\xe2\xda\xb2\x6b\x5b\x3f\x1d\xe7\x48\x2c\x33\x90\xf2\x20\xc1" + "\x42\xfe\x76\x70\x44\x19\x68\xd4\x32\x43\x6b\xbc\xb3\xe4\x18\xb5\x8e" + "\xad\x8e\xf5\x12\x63\x31\xdb\x1f\x2a\x75\xe0\xe4\x4d\x44\x05\xc1\x83" + "\x50\x6b\x11\xe5\xd6\x30\xcd\x93\xee\xa7\x59\xde\x3d\x48\xc8\xff\x0e" + "\xf2\xf5\xe0\x9e\xf7\x60\x18\x71\x9a\x77\xfd\xaa\xd5\x5e\x41\x5e\x26" + "\x6f\x96\xa1\x8d\xb5\x5c\xa9\x50\x83\xef\xa6\xf2\x1e\xcc\x75\x06\x48" + "\x31\x0a\x7e\xa0\xb6\x8b\xd9\x16\xda", + 
"\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00" + "\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00" + "\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00" + "\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00" + "\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff" + "\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff" + "\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff" + "\xff\x00\x00\x00\x00\xff\xff\xff\xff", + "\x95\xa2\xb1\x2a\x4a\x28\x00\x89\xd4\xbd\x4f\x90\x42\x53\xe7\x54", 1, + 128, 1024 }, + { 96, 256, 128, 120, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00", + "\x9a\x49\xc4\x0f\x74\xb7\x28\x39\x6d\x1d\xb4\xe5\xc0\xdf\x0d\x22\x4a" + "\xaa\x24\x1d\x25\x4d\x94\xa4\xc0\xe2\x18\xb7\xd3\xcc\x6f\x0d", + "\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff" + "\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00", + "\x60\xdc\xd4\x59\x74\xbe\xbe\x03\x2e\xb7\xb8\x6c\x9d\x06\x34\x52", 1, + 128, 256 }, + { 96, 256, 128, 121, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00", + "\x9a\x49\xc4\x0f\x74\xb7\x28\x39\x6d\x1d\xb4\xe5\xc0\xdf\x0d\x22\x4a" + "\xaa\x24\x1d\x25\x4d\x94\xa4\xc0\xe2\x18\xb7\xd3\xcc\x6f\x0d\xdf\x3e" + "\xbd\x01\x89\x8f\xbb\xe6\x97\x2b\xcd\xbc\x94\x43\x4c\x1b\xe7\x4a\x71" + "\x52\x71\x0a\xed\x9c\xce\x24\xe0\xd5\x8a\x1f\x1b\xb2", + "\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff" + "\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff" + "\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff" + "\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00", + "\xf0\xe6\xa3\xc1\xf2\x8a\xd9\x2d\x0d\xbc\x90\x0b\xe2\x91\xd8\x77", 1, + 128, 512 }, + { 96, 256, 128, 122, + "\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90" + "\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00", + "\x9a\x49\xc4\x0f\x74\xb7\x28\x39\x6d\x1d\xb4\xe5\xc0\xdf\x0d\x22\x4a" + "\xaa\x24\x1d\x25\x4d\x94\xa4\xc0\xe2\x18\xb7\xd3\xcc\x6f\x0d\xdf\x3e" + "\xbd\x01\x89\x8f\xbb\xe6\x97\x2b\xcd\xbc\x94\x43\x4c\x1b\xe7\x4a\x71" + "\x52\x71\x0a\xed\x9c\xce\x24\xe0\xd5\x8a\x1f\x1b\xb2\xbb\xfa\x3e\x7c" + "\xaf\x94\xee\x1a\x29\xcf\x32\x6c\x11\x58\xa6\x21\xc2\xb7\x37\x00\xf1" + "\x0d\x0a\x1f\x61\x08\x9f\xe7\x8e\x65\x88\x02\x55\x2a\xa1\xbe\xa1\xd9" + "\x90\x69\x5e\x72\x4a\xa3\x56\xaf\x7c\x10\x59\x0d\xe1\x33\x8a\xf9\xb7" + "\xce\xf5\x81\x5f\x49\x74\x26\xe9\x25", + "\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff" + "\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff" + "\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff" + "\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff" + "\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00" + "\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00" + 
"\x00\x00\xff\xff\xff\xff\x00\x00\x00\x00\xff\xff\xff\xff\x00\x00\x00" + "\x00\xff\xff\xff\xff\x00\x00\x00\x00", + "\x57\xef\xf4\xa5\x25\xee\xff\x2e\xbd\x7a\x28\xeb\x89\x42\x82\xbe", 1, + 128, 1024 }, + { 96, 256, 128, 123, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf5\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 124, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf6\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 125, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\x74\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 126, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x41\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 127, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\x37\x29\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 128, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x28\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 129, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x2b\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 130, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x88\x14\xac\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 131, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x08\x15\xac\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 132, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x08\x14\x8c\x51\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 133, + 
"\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x50\x40\x54\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 134, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x55\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 135, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x56\x32\x3f\x44", 0, + 24, 0 }, + { 96, 256, 128, 136, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x45", 0, + 24, 0 }, + { 96, 256, 128, 137, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x46", 0, + 24, 0 }, + { 96, 256, 128, 138, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\x04", 0, + 24, 0 }, + { 96, 256, 128, 139, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x08\x14\xac\x51\x40\x54\x32\x3f\xc4", 0, + 24, 0 }, + { 96, 256, 128, 140, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xf4\x40\x9b\xb7\x29\x03\x9d\x88\x14\xac\x51\x40\x54\x32\x3f\xc4", 0, + 24, 0 }, + { 96, 256, 128, 141, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 24, 0 }, + { 96, 256, 128, 142, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "", "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 24, 0 }, + { 96, 256, 128, 143, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x28\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 144, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x2b\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 145, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\xa9\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 146, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x90\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 147, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x87\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 148, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa7\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 149, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa4\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 150, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\x53\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 151, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\xd3\xf0\x09\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 152, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x29\xbb\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 153, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xba\xa2\x1c\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 154, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1d\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 155, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1e\xe9\xa7\xd6", 0, + 24, 128 }, + { 96, 256, 128, 156, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd7", 0, + 24, 128 }, + { 96, 256, 128, 157, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\xd4", 0, + 24, 128 }, + { 96, 256, 128, 158, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\x96", 0, + 24, 128 }, + { 96, 256, 128, 159, + 
"\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\xd3\xf1\x09\xbb\xa2\x1c\xe9\xa7\x56", 0, + 24, 128 }, + { 96, 256, 128, 160, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x29\x91\x40\x07\xa6\x11\x9d\x53\xf1\x09\xbb\xa2\x1c\xe9\xa7\x56", 0, + 24, 128 }, + { 96, 256, 128, 161, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 24, 128 }, + { 96, 256, 128, 162, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 24, 128 }, + { 96, 256, 128, 163, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x67\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 164, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x64\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 165, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + 
"\xe6\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 166, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x41\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 167, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x96\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 168, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe9\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 169, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xea\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 170, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\x3a\x92\xaa\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 171, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + 
"\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\xba\x93\xaa\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 172, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\xba\x92\x8a\x47\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 173, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x46\xf5\xce\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 174, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xcf\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 175, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xcc\xa5\x2b\x7a", 0, + 24, 264 }, + { 96, 256, 128, 176, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x7b", 0, + 24, 264 }, + { 96, 256, 128, 177, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + 
"\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x78", 0, + 24, 264 }, + { 96, 256, 128, 178, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\x3a", 0, + 24, 264 }, + { 96, 256, 128, 179, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\xba\x92\xaa\x47\xf5\xce\xa5\x2b\xfa", 0, + 24, 264 }, + { 96, 256, 128, 180, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x66\x40\x5a\x16\xe8\xb4\x4e\x3a\x92\xaa\x47\xf5\xce\xa5\x2b\xfa", 0, + 24, 264 }, + { 96, 256, 128, 181, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 24, 264 }, + { 96, 256, 128, 182, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", "\x00\x01\x02", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20", + "\xd0\x3b\xcb\x3c\xa5\x2d\x48\xd1\xd2\x03\xb1\xe7\xb1\xa5\x99\x5a\xf1" + "\xa0\x46\x6a\x61\xbb\x38\x6a\x2e\x12\xd1\x89\xa2\xc4\xea\x15\xe9", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 24, 264 }, + { 96, 256, 128, 183, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xdc\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x8c" + "\x38\x2c\xf0\x71\x74\x14\x2e\xa5\x64\x92\x06\x12\x99\x7b\x1c\x2e\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x0f\xf9\xbc" + "\x3b\x94\x58\x29\x7b\xa0\x96\x7d\x86\xed\x09\x0b\x43\x51\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x03\xf3\xb7\x35\xb7" + "\x34\x54\x28\xeb\x93\xb3\xdb\x1d\x9b\x51\x87\xce\xbb\x88\x9a\xa1\x77" + "\xd8\x3e\x4f\x63\xfc\x9a\x5c\x05\x96\xee\xd9\x39\x88\x3d\x06\xaa\xcd" + "\xfd\xea\x44\xfd\xec\xdf\x5c\xb7\xfc", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xc2\x96\x43\x62\x46\xc3\xa7\xc4\xb3\xba\x09\xab\x2a\x6a\x08\x89", 1, + 512, 1024 }, + { 96, 256, 128, 184, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x05\x1e\x93\x73", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x93\x12\x27\x27\x4a\x89\xd0\xb3\xaa\xde\x7f\xac\x62\xc9\x62\x62\xc1" + "\xe7\x7b\x8d\xaf\xd2\x48\xf1\x0a\xd3\x7c\x6c\xcb\x69\xcb\x71\x31\xb0" + "\x41\x59\x3c\x8b\xb8\xc3\xdb\x38\xf3\x9d\xd8\xa1\x24\xc4\x24\xfc\xe4" + "\x38\x9d\xed\xe1\xd3\xcb\x9d\x46\xcf\x95\x97\x0a\xea\x98\x56\xb6\xe3" + "\x13\xd7\x56\x19\x7b\xaf\x4f\xcb\x58\xdf\x27\x5b\xca\x8a\x21\x88\xf9" + "\xe8\xa1\xad\x04\x35\x4e\xde\x54\x2d\xdc\x30\xe8\xb7\x35\xb2\xf5\x90" + "\x5f\x58\x11\x79\x92\x82\xbe\x94\xae\x84\x2e\xc1\x26\xc5\x5d\x2e\x66" + "\x72\x35\xe9\xac\xf1\xd4\x87\x98\xf0", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x99\xa3\xb0\xff\xf6\xfd\xcb\xcc\xe9\xdc\x58\x20\xf2\xa6\x48\x61", 1, + 512, 1024 }, + { 96, 256, 128, 185, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x04\x8c\x3c\x5f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x0d\xf9\x1f\x31\x23\x0e\x89\x41\xe7\x00\xa7\x52\xfe\xf0\x8c\x89\x7c" + "\x51\x1e\xd6\x18\xfd\xf8\xa3\x78\xa1\xf4\x39\x01\x3b\x40\xa4\x8d\x46" + "\x34\xc2\x7d\x9a\xda\x7c\x0b\xb6\xf3\xfa\x92\xe3\x41\x42\x59\x03\xd7" + "\xec\xd0\xc4\x9b\xee\x4c\x77\xe8\x4b\x11\xf1\xc7\x21\x92\x23\x08\x64" + "\x28\x85\xb8\x13\xfa\xe3\x64\xda\x32\xea\xf1\x20\xd6\xa4\x3a\x74\xfb" + "\x16\x32\x44\x36\x67\xbf\xea\x6e\xef\x1b\xe7\x3e\xb1\xc3\xc0\xb5\xa5" + "\x7c\xee\x8d\xc4\xfe\xed\x4a\x1f\xb9\xae\x02\xf7\xb1\x69\x55\x88\xc3" + "\xc8\x78\x45\x1c\xb6\xee\x0c\xb3\xdc", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xea\xff\x8f\x47\xef\x92\x68\xfd\x0d\x94\xe8\xa9\xc4\xb7\x8d\x24", 1, + 512, 1024 }, + { 96, 256, 128, 186, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x1f\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x42" + "\x8a\x85\x43\x14\x30\xea\xda\x56\xa2\xc5\xdc\x94\x4b\x6a\xa6\xce\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x58\xe0\x94" + "\x3e\x30\xf9\x1b\xa4\x1b\x43\x62\xfa\x9e\xd6\x03\x7b\x21\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\xe2\x59\x0a\xf1\x20" + "\x1c\x7c\xfe\xc2\x29\x0c\xfc\xe9\x8a\x82\x2e\xbb\x8d\x1e\xd9\xdc\x4e" + "\x20\xd2\x41\x75\x5a\xff\x91\xcd\xfd\x10\xfd\xb6\x9e\xfa\x0d\x5c\x80" + "\x82\x69\x26\x01\xcb\xfb\xb9\x55\xc7", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x86\xed\x21\xfd\xa0\x80\xa7\xd1\x39\x81\x07\x8d\x86\xb3\xe3\xcd", 1, + 512, 1024 }, + { 96, 256, 128, 187, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x66\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x66" + 
"\xf8\xd1\xf9\x71\xda\x17\x43\x7a\x2b\x5e\x04\xfb\xca\x16\x71\xe8\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x50\x35\x4c" + "\x0c\x15\x80\xaf\x36\x62\xd5\xf8\x15\x1e\x3f\x7e\x82\x64\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\x98\x00\xac\x4e\xf9" + "\xd4\xe4\x01\x45\x24\xbc\x7c\xd3\x38\x72\x42\xe7\x74\xf4\xd1\xa7\xa0" + "\x52\x1e\x42\xec\x44\x84\x4d\x0b\xd8\xb9\xd7\x3f\xec\x95\x92\x12\xfd" + "\x7e\x8e\xac\xf4\xd9\x84\x99\x6d\x9b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x34\xf9\xe0\xfa\xa5\x15\xee\xe0\xe7\x84\xe6\xef\x26\x78\xbe\xfa", 1, + 512, 1024 }, + { 96, 256, 128, 188, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x26\xc6\x96\x1b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xe9\x72\x44\x25\x9a\xf5\xa3\x79\x23\x8d\xa0\xca\xd2\xa5\xf4\x93\x65" + "\x5e\xc0\xe5\x02\x4f\xd5\x53\xbb\xb3\xde\xb6\x6a\x94\x03\x6d\x10\x6c" + "\x3d\x51\x34\x07\xb2\xdd\x1c\xc5\x93\x6c\x4c\x9c\x1e\x4f\x4b\x37\xb5" + "\x4d\xec\x26\x1c\x60\x1d\xc9\x9e\x90\x68\x0e\x23\xe2\xdc\x5c\x9a\x8d" + "\x50\x3d\x8b\xea\x49\xa8\xcd\xca\x37\x06\xbf\xd2\xa3\xda\xa0\xaf\xb1" + "\x9a\x70\xfd\x3d\x35\x5f\xc3\x7c\x13\xf3\xf9\xe5\xc8\xd0\x86\x4a\x5f" + "\x80\xa7\x80\xb3\x6d\x46\x98\xec\x2c\xe9\xcc\xc2\x7b\x97\xec\xbe\x67" + "\x2e\x41\x62\x8e\xbd\x77\x3a\xcb\x81", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x3c\x94\xb9\xfe\x60\xbd\xb3\x5c\x6b\x7b\x73\xb7\x65\x08\x34\x92", 1, + 512, 1024 }, + { 96, 256, 128, 189, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x01\x3d\xa0\x60", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x94\x53\xaa\x15\x9c\x3d\x87\xf1\x7e\x21\xe8\x8a\xda\xbc\x37\xe5\x53" + "\xb9\x04\xd0\x0e\xef\xc6\x6b\x8e\x09\x05\xe2\x35\x76\xfb\xdc\x9c\x7b" + "\xea\x97\x77\xf3\xb8\x36\x84\x81\x93\x25\x34\xb3\x34\x4d\x30\x9e\x63" + 
"\x07\xcd\xdf\xe7\xb3\x54\x93\x00\xdd\x9c\xda\x7e\xfe\x9d\x43\xc8\xa1" + "\x15\x91\x2a\x39\x29\x04\x07\x9e\xe9\x2b\xcd\x33\x09\x9f\x70\x22\xea" + "\x94\xc1\xe7\x35\x3b\x89\xbf\xc5\x4d\xe3\xce\xb5\x6f\x52\x9a\x1a\x60" + "\x8b\xb5\xa9\x70\xe1\x35\x96\x09\xd1\xf5\x68\x06\xb3\x7f\x86\x05\xf4" + "\xc2\x74\x51\xda\x60\x66\xfc\x55\x7a", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x2b\x11\xcf\x9f\x8d\xb8\x49\x0d\x40\x9f\xc6\x2a\xfd\x73\x79\xf3", 1, + 512, 1024 }, + { 96, 256, 128, 190, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x07\xdb\x33\xde", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x2e\x18\x36\x64\x0d\x81\x0c\x27\x09\xfb\x83\xcc\xf1\xae\xf3\xa9\x71" + "\x08\x5d\x1b\xbf\xb5\x8a\x42\x5a\xbf\x75\xcc\xec\x70\xb3\xab\xde\x0e" + "\x80\x53\x9e\x83\xa8\x25\x46\xe7\x37\x2a\x19\x48\x15\x47\x05\x33\x08" + "\xdd\x78\x42\x67\x5e\x9c\x4f\x61\x30\x24\x26\xda\x0d\x71\xc1\xda\x31" + "\x02\x03\x10\x30\xed\x92\x81\x52\xbe\x00\x9b\x15\xb5\x2f\x71\xb5\x91" + "\x19\x91\xd3\x9f\x68\xa8\x65\x8d\x99\x72\x9d\xf2\xbb\xef\x31\xc8\x98" + "\x9f\x96\x04\x55\x8d\xf9\xf2\xab\xa4\xb3\x76\x6c\x58\xaa\xef\x35\x48" + "\xde\x54\x5e\xc1\xf0\x80\x22\x5a\x88", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xc9\xc8\x36\x69\x20\xf8\x83\x81\x40\x77\x12\xce\xc6\x1e\x66\x07", 1, + 512, 1024 }, + { 96, 256, 128, 191, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x02\xa1\x19\x42", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x0e\xcb\x4d\x85\xc9\x56\xb5\x26\x8c\x9b\x35\xa8\xc6\x3b\x4e\x9d\x3e" + "\x5c\xb7\x2b\x64\xef\x98\x77\x38\x41\xb9\x47\xbd\x7d\x59\xef\x7d\x0e" + "\xb0\xe1\xc0\x50\xd4\x9a\x54\x24\xce\x7d\xeb\x52\x7d\x76\x08\x7e\x47" + "\x46\x67\x4c\x95\x89\x65\xdf\x32\xd9\xe5\xfb\x03\xb4\x65\x01\x70\x61" + "\x28\xd4\x81\x21\x7a\xae\xae\x2f\x78\xf9\x25\x92\x73\x35\x8a\x29\x54" + 
"\xca\xc0\xbc\x2f\xbf\xe7\x74\x47\xd1\xd3\x87\xb9\x31\x4c\x65\x41\xb6" + "\x9f\x12\x70\xb3\x43\x8b\x10\x42\xb2\xb4\x66\x3e\x62\xba\x4d\x49\xc0" + "\x7a\xc6\xf1\x63\x03\x4a\xfa\x80\xaf", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x23\x73\xcf\xa2\xab\x24\x44\x6a\xd5\xa2\x36\x16\x7b\x80\x27\xfe", 1, + 512, 1024 }, + { 96, 256, 128, 192, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x3c\x0d\xf6\x37", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x2e\x8e\x45\xe9\x03\xbf\xab\x32\xf2\xf0\xd4\x9d\x9a\x3e\x44\x9b\xef" + "\x6f\x40\x93\xe2\x72\x2c\xda\xb2\xcf\x93\x5c\x18\x22\xb8\x30\xfb\x5a" + "\x40\x56\x51\x6d\x56\x0d\xfc\x86\x38\xc9\xa5\x7d\x29\x27\x20\x0a\x56" + "\xf0\xb6\x71\x53\x27\x1d\x49\x8e\x8f\x08\xdc\x88\x8c\x61\xef\x63\x4f" + "\x7a\xe4\x0f\x46\x08\xf9\x6f\x92\xfe\xa5\xa1\xe5\xbd\x45\x13\x11\x20" + "\x09\x8d\xc5\xde\x03\x78\xe5\x8f\x2d\xdb\x46\xfa\x4a\xa5\xad\xb3\x8f" + "\xe0\x06\xbb\x19\xb6\x91\x46\x38\x2f\x77\xa7\x9e\x06\x21\x4d\xef\x54" + "\x7c\xfb\x5c\xe3\x7a\x70\x08\xb9\xb6", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x5f\x93\x94\x64\x78\xd8\x08\x1e\x72\x47\xf4\x14\xad\x39\xa5\x15", 1, + 512, 1024 }, + { 96, 256, 128, 193, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x0b\xc6\x72\xc3", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x36\x19\xcb\x47\x0a\xf8\x6d\xce\xce\xb6\x94\x0f\x2d\x9a\xbb\x34\xc9" + "\xa9\x13\x14\x76\x05\x33\x87\x44\x5f\xfe\xbb\xe2\x40\xd4\xf9\x81\x83" + "\x77\x85\x56\x52\xf4\x6a\x82\x19\xc7\xf7\x1c\x35\x54\xf8\xac\xef\x82" + "\x58\xde\x4b\x7d\x17\xc0\xf3\xd3\x53\xac\x98\x1c\xc6\xa1\x32\x87\xbe" + "\x1e\x6b\x41\xdc\x6d\x13\x3d\xf4\xab\xab\xeb\xdf\x43\xd6\x65\xce\x7a" + "\x4a\x5c\x98\x2a\x0b\x13\x9c\xb8\x20\x2e\xeb\xc7\x41\x73\xe3\x22\x4a" + "\x44\x0e\x4c\x37\xd2\xb5\x95\xf3\x84\x29\x0e\x93\x9b\xa0\x16\xdf\x0d" + 
"\x49\xb3\x6c\xdb\x4b\xd9\x1c\x39", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\x13\x3f\xe6\x23\x91\x74\x4d\x11\xce\x44\x59\x4b\x96\xc5\x3b\xaf", 1, + 512, 1016 }, + { 96, 256, 128, 194, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x03\xe9\xb9\xa4", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xaf\x20\x5b\xda\x81\x9f\x74\x51\xbe\x0f\x28\x66\x7d\x4b\x01\xb5\x9f" + "\xf2\xda\xa8\x17\x3c\xab\x52\x04\x6c\x3c\x9e\x0d\x98\x98\x89\xc5\xe0" + "\x21\xef\x7a\xfd\x06\xe9\xce\x6c\xc3\x0e\x3a\x6e\xba\xb5\x09\x13\x4b" + "\xa1\x0d\x10\xe5\x70\xc5\x55\x87\xc1\x3e\xee\x53\xe7\x3b\xe5\x48\x04" + "\xc8\x53\x9f\xfb\xf2\x3b\x35\x92\x2b\x1c\xa3\x7b\x9e\x9b\xc2\x4e\xe2" + "\x04\x83\x7c\xa5\xa2\x94\xce\x05\xd1\x26\x00\xc7\xef\xf6\xae\xe3\x22" + "\x70\xdb\x2f\xef\xf4\x7d\xc5\xa0\x41\x76\x16\x9e\x15\x85\x06\x28\xe6" + "\x03\x5f\x78\x99\x4f\x9f\x56\x03", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\xe3\x45\x1a\xdb\x9d\x23\xa7\x71\x0a\x1a\xaf\xba\x26\xf5\x63\x87", 1, + 512, 1016 }, + { 96, 256, 128, 195, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x07\x00\xb9\x82", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x68\xc6\x72\x72\x03\x6f\xb6\x52\xa0\x18\x2e\xeb\x47\x81\x35\x8e\x47" + "\x04\xa4\xa7\x02\xfd\x73\x1b\xf3\xb3\xea\x99\x47\x17\x98\x9e\x7d\x91" + "\x04\xe0\xae\x81\x73\x2a\x8c\x7e\x9a\x82\xb3\xd3\x1d\x54\x17\x61\xa3" + "\x66\xb6\x7c\x33\x96\xf1\xa6\xc6\x7e\x29\x3d\xdb\x65\xa5\x9e\x42\x54" + "\x1d\xda\x14\x4d\xc6\xc7\x83\x88\xcf\xca\x98\x2e\x23\x35\x09\x58\xac" + "\x5b\x3d\x54\xa1\x72\x2f\xd6\x47\x33\x57\x78\x62\xe1\x87\x9c\x9e\x94" + "\x45\xeb\xde\xc5\x31\x5d\x17\x06\xdb\x7e\xbb\xed\xd4\xc7\x79\x93\x5e" + "\x72\x05\x7e\x5b\x0e\xcd\xe0\x81", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\xb0\xbb\x8a\x55\xff\x5f\x52\xa5\x04\x3c\x6e\x77\x95\x84\x75\x57", 1, + 512, 1016 }, + { 96, 256, 128, 196, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x01\x98\x36\xbb", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xc4\x83\xb7\x33\x4e\xbe\x2e\x87\x9b\x0c\x3f\x9d\xb4\xfc\xd9\xf5\x21" + "\x90\x62\x36\x0d\x6c\xe4\x4c\xda\xe0\xf9\x4e\x04\xc8\x34\x5e\xa7\xe3" + "\xae\x33\x85\x51\x18\x74\x1d\xca\xfe\x0d\xe4\xae\x98\xc4\xe4\x3a\xf7" + "\xb1\x2b\x04\xee\x8a\xb1\x75\x62\x58\x23\xac\x04\x0e\x5a\xba\xc4\x40" + "\x3f\x1d\x45\x23\x8a\xdc\xb8\xc0\xcf\x44\xbd\x56\x91\x7f\x9f\x5d\x93" + "\x97\x4c\x82\xb5\x69\x51\x98\x6a\x9c\x04\x50\xbd\x90\x47\xb5\xa6\x16" + "\xe8\x14\x52\x6a\xd0\x58\x0e\x3e\xcd\x81\x89\xc9\xfe\xf2\xcd\xb9\x79" + "\xa2\x2a\xd3\xa0\x19\x30\xfb\xd1", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\xf4\xfc\x25\xf4\xc5\x54\x3a\x9a\xfe\xe9\x81\x9e\x29\x04\xfb\x68", 1, + 512, 1016 }, + { 96, 256, 128, 197, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x1d\x59\xf2\x88", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xbc\x7f\x4f\x15\xfd\x1e\x4c\x13\x99\x74\x08\x36\x67\x0a\xbe\x39\xa0" + "\x57\x07\xbe\x19\x95\x6c\xe1\x69\xb3\x23\x21\x75\x9e\x0f\x21\x3a\xe1" + "\x9a\xd3\x4a\xa6\x12\xb3\xa2\x9f\x02\xc4\xbb\xac\x9f\x78\x5a\x55\xa3" + "\xad\xfe\x41\x9a\xb8\x91\xbb\xe0\xac\xee\x99\x21\x32\x2e\xa2\x10\x02" + "\xc9\xdd\x3d\xcd\xd1\x3a\x7f\x85\x54\xdd\xdc\x10\xf9\xb5\x29\xce\x94" + "\xbe\x70\x50\x93\x7d\xab\x76\x55\x7b\x7e\xb1\x7c\x68\x5a\xad\x8f\x07" + "\x97\xe3\x9d\x62\x55\x39\x88\x98\x9a\xab\x1d\x97\x64\xfe\x43\x1c\xc1" + "\xd4\xc5\x95\x06\x2c\xe9\x3c\xe9", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\x5e\x67\xa7\xb8\x73\x3e\x0e\x4b\x01\xac\x21\x78\xa2\x05\xae\x7e", 1, + 512, 1016 }, + { 96, 256, 128, 198, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x05\x52\xa4\x11", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xea\xcc\xaa\x77\x89\x35\xef\x24\x9e\x09\x00\x14\x9d\xd8\x89\x46\x2d" + "\x2a\x06\x14\x86\xba\x10\x2b\x8c\xae\xbe\x46\x5f\x39\x59\xfb\x31\x19" + "\xeb\xb5\x68\x96\x76\xff\xdd\x6d\x85\x1a\x26\x73\x9e\x77\x2b\x54\xa2" + "\xf5\xf4\x73\xea\x9c\x7e\x58\xcc\xbc\x4c\xfc\x95\x3e\x8c\x42\x0b\x21" + "\x75\xd9\xdd\x51\x92\x65\x63\x0b\xb7\x9b\xd8\x7a\x60\x1b\x11\x32\x31" + "\xa8\xb1\x6c\xe5\x4c\x33\x13\x47\xec\x04\xc2\xb1\xc9\x16\x0f\x38\x20" + "\x7a\xa4\x6e\x96\xfe\xb0\x6d\xee\x88\x3e\xb4\x22\xfa\x14\x90\x8d\xf3" + "\x00\xbb\x1a\x1e\xf7\x58\xc4\x08", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\x17\x7a\x77\xfc\xe1\x14\xa4\x34\x9c\x4f\x8d\x5e\xc8\x25\xd0\x6f", 1, + 512, 1016 }, + { 96, 256, 128, 199, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x0c\x80\x7a\x72", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xa7\x6c\x33\x0e\x01\x50\x60\xa1\x7e\x64\xcb\x7b\x6d\x75\x3f\x20\x1f" + "\x75\xbe\x87\x59\xfd\x75\x39\xfb\x92\xb2\x2a\xef\x54\xc9\xd3\x02\x9d" + "\xba\x0c\x15\xcb\xf7\xc9\x51\x35\x88\x83\x19\xc6\xb2\xe6\x27\x6d\xa2" + "\x1e\x0c\x35\x1f\xd5\x22\xb2\x9a\xab\xb5\x88\x3a\x32\x91\xd6\xf4\x27" + "\xde\x77\x3b\x12\x43\x90\xef\x6f\xd9\x66\x21\xff\xbc\x42\xdf\xbf\x7a" + "\x34\xda\x27\x2c\xbc\x9c\xcb\x1a\x49\x8d\x07\x80\x33\xd1\xac\x3b\xf7" + "\xe9\x27\x15\x94\x8b\x06\xd6\x9d\x5c\x50\x39\xe9\x16\x4b\xa9\xc3\xa0" + "\x22\x19\xec\x59\x08\x20\x6b\x3b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\x62\x3c\x7d\x44\x24\xf5\x49\x7a\xed\xfd\x13\x39\xcf\x8c\xec\xce", 1, + 512, 1016 }, + { 96, 256, 128, 200, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x03\x97\xa1\x43", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x22\x8a\x7e\x15\xbc\xce\x13\x05\x1d\xe9\x14\x5f\x77\xf7\xf4\xff\x79" + "\x21\x82\x8b\x4f\x99\xef\xc4\xff\x55\xee\x0d\x93\x44\x95\x5b\x69\xec" + "\x2d\x47\x98\xb0\x51\x7f\x02\x73\xc4\x45\x6a\xe5\xff\xc5\x92\x9c\xbe" + "\x74\xdd\xb0\xda\x51\xd4\xf2\xb4\xdf\x75\x78\xa3\x12\x40\xc8\x8a\xe9" + "\x22\xc3\xc5\xec\xa7\xb9\x7d\x72\xd4\x97\x06\x20\x50\xa5\x87\x44\x7c" + "\x56\x2b\x34\x3d\x5c\x71\x92\x19\x44\x87\x2f\x9f\xd0\x6b\x8f\x34\xb3" + "\xeb\x5d\x43\x41\xf5\xff\x8a\x90\x7d\xd7\xc2\xe1\x67\x6b\x81\x25\x27" + "\x26\xba\x54\x81\x4d\xa5\x1e\xab", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\x1c\x18\xb6\x93\x54\xb1\x89\x73\x1a\x1a\x83\xfe\x8f\x0d\x57\xc9", 1, + 512, 1016 }, + { 96, 256, 128, 201, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x08\xcb\x0f\x3f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xc7\xd8\x43\x18\x8a\xb1\x93\xdf\xef\x5c\x4d\xaf\x58\x3f\x95\x2c\xd4" + "\xb1\x95\xf2\x40\xfa\x2e\x70\x4d\x02\x17\x23\x02\x3c\x12\x33\x71\xa4" + "\x1e\x87\xdf\xc6\xe6\xc3\x87\x4a\x42\xf3\x31\xcf\x03\x59\x88\xa3\x8c" + "\x72\xba\x2d\xa8\x54\xb1\x20\x8f\x98\xbf\x8c\xc2\x99\x48\x16\x94\x81" + "\xab\x3a\x40\x2d\x5f\xcc\x7f\xf7\x8f\x9e\x31\x92\x55\x76\xdc\x39\x38" + "\x07\x4b\x8c\x5b\x27\x96\x0e\x3a\xfc\x75\x0a\xd6\x86\x56\x36\x88\xb7" + "\x44\x17\x87\x28\x8d\x52\x56\xc1\x30\x1d\x56\x3b\x77\x44\x84\x3b\xd1" + "\xab\x4e\xff\x5b\xe6\xf1\x65\x3d", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\x20\x45\x81\x5b\x82\x11\xb9\xa2\x99\x5e\xff\xe0\xb8\xed\x98\x68", 1, + 512, 1016 }, 
+ { 96, 256, 128, 202, + "\x9d\xe8\x36\xaa\x57\x95\x85\x08\x1f\x33\x0a\x7c\x40\x36\xe2\x0e\x38" + "\xef\x15\xef\xf3\x94\x51\x84\xd2\x31\x86\x7f\x50\x5f\xff\xdf", + "\x00\x00\x00\x00\x10\x11\x12\x13\x0d\x8f\xcf\x4e", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xcf\xc3\xdb\x86\x31\xc8\x1c\x69\x02\x3a\x3c\x8a\x9a\xd6\x6c\x35\x05" + "\x36\x85\x14\x4c\x4f\xa2\xa9\x51\x0a\xdd\x72\xe2\x11\xda\xd9\xca\x5b" + "\x98\x2e\x4c\x19\x45\x91\xfd\xb7\x41\x16\x28\x03\x11\xd1\x29\x9a\xd8" + "\x12\x27\x25\x8c\xb5\x2f\x07\x9b\xbc\xb1\x2a\xff\x16\x1d\x27\x8d\xec" + "\x33\xa3\x26\xd7\x12\x76\xb3\xde\x01\xa8\x32\x7e\xe7\xf4\x5f\x94\x17" + "\x9d\xff\x18\xa3\xfe\x64\x3e\x56\xc3\x0c\xfd\x03\x87\x1c\x81\x10\xab" + "\x00\xf6\x61\x2b\x9e\x17\xa4\x64\x73\x60\xd7\x84\x7b\xb6\x3a\x31\x22" + "\x61\x3c\x2e\x7c\xdd\xdd\x08\xae", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff", + "\x1a\xe2\xed\x84\xea\x97\x74\xd7\x8d\x78\x2b\xf8\xd9\x72\xa8\xb8", 1, + 512, 1016 }, + { 96, 256, 128, 203, + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50" + "\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x41" + "\x57\x71\xfd\xa4\xfb\xcc\x55\xc3\x77\xf7\x32\x03\xe6\x02\x26", + "\xe4\x8c\xaf\x8a\x76\x18\x33\x27\xc9\x56\x1a\x46\x51\xc0\x7c\x82\x2c" + "\xcd\x16\x42\xc0\x66\x07\xd0\xd4\xbc\x0a\xfb\x4d\xe1\x59\x15\xdb\xfa" + "\x3b\x0b\x42\x2e\x77\xe1\x5c\x64\xbf\x62\x47\x03\x1f\x15\xfd\xb6\x43" + "\x11\x78\x09\x82\x18\x70\x00\x0a\xdf\x83\x83\x4d\xa5", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", 1, + 256, 512 }, + { 96, 256, 128, 204, + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50" + "\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xf1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x61" + "\x5a\xf3\x9e\xdd\xb5\xfc\xd2\x51\x91\x90\xd5\x50\x7d\x3b\x06", + "\xe4\x8c\xaf\x8a\x76\x18\x33\x27\xc9\x56\x1a\x46\x51\xc0\x7c\x82\x2c" + "\xcd\x16\x42\xc0\x66\x07\xd0\xd4\xbc\x0a\xfb\x4d\xe1\x59\x15\xdb\xfa" + "\x3b\x0b\x42\x2e\x77\xe1\x5c\x64\xbf\x62\x47\x03\x1f\x15\xfd\xb6\x43" + "\x11\x78\x09\x82\x18\x70\x00\x0a\xdf\x83\x83\x4d\xa5", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 256, 512 }, + { 96, 256, 128, 205, + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50" + "\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xb5\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x76" + "\x4e\x5d\x82\xce\x7d\xa0\xd4\x41\x48\x48\x4f\xd9\x6a\x61\x07", + "\xe4\x8c\xaf\x8a\x76\x18\x33\x27\xc9\x56\x1a\x46\x51\xc0\x7c\x82\x2c" + "\xcd\x16\x42\xc0\x66\x07\xd0\xd4\xbc\x0a\xfb\x4d\xe1\x59\x15\xdb\xfa" + "\x3b\x0b\x42\x2e\x77\xe1\x5c\x64\xbf\x62\x47\x03\x1f\x15\xfd\xb6\x43" + "\x11\x78\x09\x82\x18\x70\x00\x0a\xdf\x83\x83\x4d\xa5", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 256, 512 }, + { 96, 256, 128, 206, + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50" + "\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xfd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2b" + "\xdb\xf1\x6d\x8e\xa4\xd3\x9d\xab\x8d\xcb\x3d\x4b\xc4\xe1\x04", + "\xe4\x8c\xaf\x8a\x76\x18\x33\x27\xc9\x56\x1a\x46\x51\xc0\x7c\x82\x2c" + "\xcd\x16\x42\xc0\x66\x07\xd0\xd4\xbc\x0a\xfb\x4d\xe1\x59\x15\xdb\xfa" + "\x3b\x0b\x42\x2e\x77\xe1\x5c\x64\xbf\x62\x47\x03\x1f\x15\xfd\xb6\x43" + "\x11\x78\x09\x82\x18\x70\x00\x0a\xdf\x83\x83\x4d\xa5", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80\x00\x00\x00\x80", 1, + 256, 512 }, + { 96, 256, 128, 207, + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50" + "\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xa9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xac" + "\xcd\x5e\xb3\x1d\x8f\xc9\x09\xe8\x4b\x0d\xe7\xde\x23\xbb\x08", + "\xe4\x8c\xaf\x8a\x76\x18\x33\x27\xc9\x56\x1a\x46\x51\xc0\x7c\x82\x2c" + "\xcd\x16\x42\xc0\x66\x07\xd0\xd4\xbc\x0a\xfb\x4d\xe1\x59\x15\xdb\xfa" + "\x3b\x0b\x42\x2e\x77\xe1\x5c\x64\xbf\x62\x47\x03\x1f\x15\xfd\xb6\x43" + "\x11\x78\x09\x82\x18\x70\x00\x0a\xdf\x83\x83\x4d\xa5", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f\xff\xff\xff\x7f", 1, + 256, 512 }, + { 96, 256, 128, 208, + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50" + "\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xd2\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xdd" + "\x4b\x93\x3e\x7b\x1a\x7e\xd9\x3c\xc7\xc0\x50\xdb\x71\xdc\x03", + 
"\xe4\x8c\xaf\x8a\x76\x18\x33\x27\xc9\x56\x1a\x46\x51\xc0\x7c\x82\x2c" + "\xcd\x16\x42\xc0\x66\x07\xd0\xd4\xbc\x0a\xfb\x4d\xe1\x59\x15\xdb\xfa" + "\x3b\x0b\x42\x2e\x77\xe1\x5c\x64\xbf\x62\x47\x03\x1f\x15\xfd\xb6\x43" + "\x11\x78\x09\x82\x18\x70\x00\x0a\xdf\x83\x83\x4d\xa5", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00", 1, + 256, 512 }, + { 96, 256, 128, 209, + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f\x50" + "\x51\x52\x53\x54\x55\x56\x57\x58\x59\x5a\x5b\x5c\x5d\x5e\x5f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa0" + "\x81\x64\x42\x5d\x76\x42\xe9\xe9\x0f\xc8\xd5\xc3\x2d\x2c\xf6", + "\xe4\x8c\xaf\x8a\x76\x18\x33\x27\xc9\x56\x1a\x46\x51\xc0\x7c\x82\x2c" + "\xcd\x16\x42\xc0\x66\x07\xd0\xd4\xbc\x0a\xfb\x4d\xe1\x59\x15\xdb\xfa" + "\x3b\x0b\x42\x2e\x77\xe1\x5c\x64\xbf\x62\x47\x03\x1f\x15\xfd\xb6\x43" + "\x11\x78\x09\x82\x18\x70\x00\x0a\xdf\x83\x83\x4d\xa5", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + "\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 256, 512 }, + { 96, 256, 128, 210, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xc6\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x7b" + "\xb6\x6f\x80\x90\xc1\x49\xe4\x52\xec\x7f\x20\x32\x7e\xb2\xea\x2e\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x0f\xf9\xbc" + "\x23\xc8\x97\xdf\x6b\x00\xaf\x86\x93\x1d\x6c\x81\x55\x51\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x03\xf3\xb7\x2d\xeb" + "\xfb\xa2\x38\x4b\xaa\x48\xce\xed\xfe\xdb\x91", + "\xe5\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x08" + "\x71\xbc\x8f\x1e\x4a\xa2\x35\x08\x77\x12\xd9\xdf\x18\x36\x09\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xe7\xa3\x30\x09\xef\x5f\xc6\x04\xea\x0f\x9a\x75\xe9\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe7\xa3" + "\x30\x09\xef\x5f\xc6\x04\xea\x0f\x9a\x75\xe9", + "\x35\x72\x16\x27\x77\x26\x2c\x51\x8e\xef\x57\x3b\x72\x0e\x8e\x64", 1, + 32, 768 }, + { 96, 256, 128, 211, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xc7\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x76" + "\x20\x9e\xef\x14\x16\x91\xfb\xa5\xd1\x0e\xaf\x58\x1a\xff\xe6\x2e\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x0e\x73\xd2" + "\xdc\x3b\xbd\x95\x49\x89\xcb\x84\x33\xb7\xd6\x59\x7b\x51\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x02\x79\xd9\xd2\x18" + "\xd1\xe8\x1a\xc2\xce\x4a\x6e\x47\x44\x03\xbf", + "\xe4\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x05" + 
"\xe7\x4d\xe0\x9a\x9d\x7a\x2a\xff\x4a\x63\x56\xb5\x7c\x7b\x05\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x75\x91" + "\x18\x50\x1a\x43\xcd\xd6\xa2\x06\x4a\xa5\x20\xad\xc7\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\x75\x91\x18\x50" + "\x1a\x43\xcd\xd6\xa2\x06\x4a\xa5\x20\xad\xc7", + "\x34\x72\x16\x37\x5f\x5b\x7b\x5c\x4e\x6b\xff\x49\x12\xfd\x94\x73", 1, + 32, 768 }, + { 96, 256, 128, 212, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xfc\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x8b" + "\x79\x40\x3d\xfa\xab\xc0\xd8\xc1\x8d\x23\xa3\x46\x9c\x13\xe6\x2e\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x0a\x4b\x94" + "\x1e\x6b\x66\xfc\xc2\xed\x7d\x8c\xb3\xe8\xcc\x7f\xfc\x51\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x06\x41\x9f\x10\x48" + "\x0a\x81\x91\xa6\x78\x42\xee\x18\x5e\x25\x38", + "\xdf\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf8" + "\xbe\x93\x32\x74\x20\x2b\x09\x9b\x16\x4e\x5a\xab\xfa\x97\x05\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfa\x4d\xd7" + "\xda\x00\xc1\x2a\x46\xb2\x14\x0e\xca\xfa\x3a\x8b\x40\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfa\x4d\xd7\xda\x00" + "\xc1\x2a\x46\xb2\x14\x0e\xca\xfa\x3a\x8b\x40", + "\x30\x72\x16\x77\xff\x2e\xb8\x89\x4e\x5a\x9d\x84\x92\xb7\xb0\xaf", 1, + 32, 768 }, + { 96, 256, 128, 213, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xfa\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x9b" + "\xcb\xb8\xda\x47\x7d\x58\x0d\x77\x2d\xe4\x22\x9b\xba\x7d\xe2\x29\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x1e\x9d\xed" + "\xf9\xdd\x64\xa0\x68\x1b\xac\x29\x69\x54\x94\x25\xbc\x56\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x12\x97\xe6\xf7\xfe" + "\x08\xdd\x3b\x50\xa9\xe7\x34\xa4\x06\x7f\x78", + "\xd9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe8" + "\x0c\x6b\xd5\xc9\xf6\xb3\xdc\x2d\xb6\x89\xdb\x76\xdc\xf9\x01\xf8\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xee\x9b\xae" + "\x3d\xb6\xc3\x76\xec\x44\xc5\xab\x10\x46\x62\xd1\x00\xf8\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xee\x9b\xae\x3d\xb6" + "\xc3\x76\xec\x44\xc5\xab\x10\x46\x62\xd1\x00", + "\x2b\x72\x16\xc7\x87\x37\x44\xc2\x0e\xc5\xe2\xcd\xb2\x60\xd3\xfa", 1, + 32, 768 }, + { 96, 256, 128, 214, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xee\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\xb9" + "\xf5\x5b\xd5\x6e\x0f\xd7\x4b\x46\x06\x3a\x96\x35\x4c\xfb\xee\x32\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x20\xc7\x88" + "\x86\xa6\xf6\x29\x2d\x6c\xc5\xfb\xdd\xb5\x46\xa2\xb0\x4d\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x2c\xcd\x83\x88\x85" + "\x9a\x54\x7e\x27\xc0\x35\x80\x45\xd4\xf8\x74", + "\xcd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xca" + "\x32\x88\xda\xe0\x84\x3c\x9a\x1c\x9d\x57\x6f\xd8\x2a\x7f\x0d\xe3\xff" + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd0\xc1\xcb" + "\x42\xcd\x51\xff\xa9\x33\xac\x79\xa4\xa7\xb0\x56\x0c\xe3\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd0\xc1\xcb\x42\xcd" + "\x51\xff\xa9\x33\xac\x79\xa4\xa7\xb0\x56\x0c", + "\x22\x72\x16\x57\xb0\x13\x0d\x28\xcf\x1e\xc6\x51\x53\xc4\x11\x82", 1, + 32, 768 }, + { 96, 256, 128, 215, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xef\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\xb4" + "\x6f\xca\x24\xd3\x53\xff\x5e\x49\xea\xc5\x15\x40\xe8\x40\xea\x30\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x3d\x31\x1e" + "\x57\x22\x02\x01\x1a\x75\xe9\x48\x58\x6f\xe2\x68\xb4\x4f\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x31\x3b\x15\x59\x01" + "\x6e\x7c\x49\x3e\xec\x86\x05\x9f\x70\x32\x70", + "\xcc\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc7" + "\xa8\x19\x2b\x5d\xd8\x14\x8f\x13\x71\xa8\xec\xad\x8e\xc4\x09\xe1\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xcd\x37\x5d" + "\x93\x49\xa5\xd7\x9e\x2a\x80\xca\x21\x7d\x14\x9c\x08\xe1\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xcd\x37\x5d\x93\x49" + "\xa5\xd7\x9e\x2a\x80\xca\x21\x7d\x14\x9c\x08", + "\x21\x72\x16\x67\x98\x48\x5c\x33\x8f\x9a\x6d\x60\xf3\xb2\x18\x91", 1, + 32, 768 }, + { 96, 256, 128, 216, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xf5\x9d\x56\x15\x1d\xe2\x8b\xef\x83\x50\x5f\x6d\x89\xc0\xb0\xf7\xf7" + "\x5b\x2f\xa8\xe6\xdc\xe3\x86\x07\x5d\xb2\x83\xec\x85\xee\x62\x55\x5b" + "\xaf\xfa\xd4\x23\xaf\x25\xf6\x60\x69\xbb\x69\xfb\x6f\x4d", + "\xd6\xee\x4e\xe2\x5d\x3b\xde\xa8\x1e\x76\xde\x89\x34\xcc\x51\xfb\x84" + "\x9c\xfc\xa7\x68\x57\x08\x57\x5d\xc6\xdf\x7a\x01\xe3\x6a\x81\x84\x9c" + "\xfc\xa7\x68\x57\x08\x57\x5d\xc6\xdf\x7a\x01\xe3\x6a\x81", + "\x83\x13\x12\xcb\xb0\xf1\x65\xdc\x3e\x8f\xf5\x21\x25\xf4\x86\x40", 1, + 32, 384 }, + { 96, 256, 128, 217, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xf7\x17\xf8\xd5\xb2\x80\x32\xd5\xc8\xe8\x06\x1c\xd4\x4d\x71\xe4\xf2" + "\xd5\x5d\xe7\x72\xfe\x7a\x91\xce\x85\xe4\x10\xdb\x3e\x2d\x8d\x50\xd5" + "\xdd\xb5\x40\x01\x36\x32\x3f\xb8\x3f\x28\x5e\x40\xac\xa2", + "\xd4\x64\xe0\x22\xf2\x59\x67\x92\x55\xce\x87\xf8\x69\x41\x90\xe8\x81" + "\x12\x8e\xe8\xfc\x75\x91\x40\x94\x1e\x89\xe9\x36\x58\xa9\x6e\x81\x12" + "\x8e\xe8\xfc\x75\x91\x40\x94\x1e\x89\xe9\x36\x58\xa9\x6e", + "\x82\x13\x12\xdb\x98\x26\xb5\xe7\xfe\x0a\x9d\x30\xc5\xe2\x8d\x4f", 1, + 32, 384 }, + { 96, 256, 128, 218, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xf2\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\xe6" + "\x8a\x92\x2c\x92\x19\xd3\x0f\x07\x55\x4d\x7d\x99\xf2\xbd\xe9\x2c\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\xe2\x4c\x07" + "\xdd\x98\xf9\xb2\x53\xab\x0c\x31\x8d\x9b\x14\xf6\xb1\x53\x03\x47\x8f" + 
"\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\xee\x46\x0c\xd3\xbb" + "\x95\xcf\x00\xe0\x09\xff\xd0\x6b\x86\xac\x75", + "\xd1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x95" + "\x4d\x41\x23\x1c\x92\x38\xde\x5d\xce\x20\x84\x74\x94\x39\x0a\xfd\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x12\x4a\x44" + "\x19\xf3\x5e\x64\xd7\xf4\x65\xb3\xf4\x89\xe2\x02\x0d\xfd\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x12\x4a\x44\x19\xf3" + "\x5e\x64\xd7\xf4\x65\xb3\xf4\x89\xe2\x02\x0d", + "\xc1\x04\x57\x69\xd4\x87\xd5\x45\xce\xf3\xf0\xd3\x4b\x7a\x87\x33", 1, + 32, 768 }, + { 96, 256, 128, 219, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xdc\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x2e" + "\x67\x84\xd8\x57\xdf\x07\x54\x3d\x0d\xc7\x2f\x17\x99\x35\xfb\xed\xe8" + "\xc8\xba\xf0\x1e\xe2\x04\x4b\x16\x2c\xbb\x34\x3b\x35\x5a\xcc\x29\xd8" + "\x23\x27\xcd\x93\xf2\xbf\xd9\x18\x03\x4e\xd5\xc4\x2a", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5d" + "\xa0\x57\xd7\xd9\x54\xec\x85\x67\x96\xaa\xd6\xfa\xff\xb1\x18\x3c\x2f" + "\x9b\xe7\x4c\x6a\x45\x76\xe0\xb0\x9a\x7a\x5c\x23\x30\x96\x3c\x2f\x9b" + "\xe7\x4c\x6a\x45\x76\xe0\xb0\x9a\x7a\x5c\x23\x30\x96", + "\x64\xe7\xef\xd2\x45\x16\xa8\x3e\x2c\x87\xe0\x6a\x76\xe2\xde\xa3", 1, + 32, 512 }, + { 96, 256, 128, 220, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xf7\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x4c" + "\x6e\xad\x26\xf8\x4a\x02\x25\xd5\x57\x74\x5d\x32\xfc\x72\xe7\x2c\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x64\xdb\x33" + "\x4b\x69\xbe\xe5\x79\x38\x3e\x61\xae\x74\x2c\x71\xbb\x53\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x68\xd1\x38\x45\x4a" + "\xd2\x98\x2a\x73\x3b\xaf\xf3\x84\xbe\x2b\x7f", + "\xd4\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3f" + "\xa9\x7e\x29\x76\xc1\xe9\xf4\x8f\xcc\x19\xa4\xdf\x9a\xf6\x04\xfd\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x94\xdd\x70" + "\x8f\x02\x19\x33\xfd\x67\x57\xe3\xd7\x66\xda\x85\x07\xfd\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x94\xdd\x70\x8f\x02" + "\x19\x33\xfd\x67\x57\xe3\xd7\x66\xda\x85\x07", + "\xe6\xcc\x67\x29\xd7\x9b\xa5\x58\xcd\x73\xb0\x3c\xba\x54\xd6\x60", 1, + 32, 768 }, + { 96, 256, 128, 221, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xf0\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x4f" + "\xd8\xc3\x75\x7c\x9f\x29\x38\xdc\x3b\x07\xd8\x58\x98\xbf\xe2\x2a\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x61\x55\x41" + "\x24\x15\xcb\xdd\x76\x01\x42\xb6\x2c\x2e\xc8\x3f\xbf\x55\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x6d\x5f\x4a\x2a\x36" + "\xa7\xa0\x25\x4a\x47\x78\x71\xde\x5a\x65\x7b", + "\xd3\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3c" + "\x1f\x10\x7a\xf2\x14\xc2\xe9\x86\xa0\x6a\x21\xb5\xfe\x3b\x01\xfb\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x91\x53\x02" + 
"\xe0\x7e\x6c\x0b\xf2\x5e\x2b\x34\x55\x3c\x3e\xcb\x03\xfb\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x91\x53\x02\xe0\x7e" + "\x6c\x0b\xf2\x5e\x2b\x34\x55\x3c\x3e\xcb\x03", + "\xe5\xcc\x67\x39\xbf\xd0\xf4\x63\x8d\xef\x57\x4b\x5a\x43\xdd\x6f", 1, + 32, 768 }, + { 96, 256, 128, 222, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xf2\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\xdf" + "\x03\xca\x84\x08\x2f\x7f\x70\xad\x8e\x40\x04\xca\xbd\x2c\xe4\x2b\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x28\xfd\x41" + "\x3c\xaa\xb1\xd0\x2b\xf1\xc6\x57\x53\xaa\x2a\xd3\xb9\x54\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x24\xf7\x4a\x32\x89" + "\xdd\xad\x78\xba\xc3\x99\x0e\x5a\xb8\x89\x7d", + "\xd1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xac" + "\xc4\x19\x8b\x86\xa4\x94\xa1\xf7\x15\x2d\xfd\x27\xdb\xa8\x07\xfa\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd8\xfb\x02" + "\xf8\xc1\x16\x06\xaf\xae\xaf\xd5\x2a\xb8\xdc\x27\x05\xfa\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd8\xfb\x02\xf8\xc1" + "\x16\x06\xaf\xae\xaf\xd5\x2a\xb8\xdc\x27\x05", + "\x0f\xca\x70\x22\x28\x81\x7d\x53\xee\x64\xd1\x42\xb1\x92\xe6\x65", 1, + 32, 768 }, + { 96, 256, 128, 223, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xf3\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x1f" + "\xfc\x31\xae\x69\x39\x93\x94\xb8\xc3\x38\x67\x4c\x3d\xfd\xe9\x29\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x47\x7e\xc8" + "\xcf\x3e\xa3\xd4\xd5\xd7\x6d\x85\xad\x2b\x7f\x0b\xb8\x56\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x4b\x74\xc3\xc1\x1d" + "\xcf\xa9\x86\x9c\x68\x4b\xf0\xdb\xed\x51\x7c", + "\xd0\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x6c" + "\x3b\xe2\xa1\xe7\xb2\x78\x45\xe2\x58\x55\x9e\xa1\x5b\x79\x0a\xf8\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb7\x78\x8b" + "\x0b\x55\x04\x02\x51\x88\x04\x07\xd4\x39\x89\xff\x04\xf8\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb7\x78\x8b\x0b\x55" + "\x04\x02\x51\x88\x04\x07\xd4\x39\x89\xff\x04", + "\xef\xc3\xb0\x35\xde\xd6\xb4\x60\xbf\xce\x6f\x49\x49\x55\xe6\x77", 1, + 32, 768 }, + { 96, 256, 128, 224, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\x2b\xfd\x0d\x56\xec\xe9\x87\x71\x75\x6d\x60\xd9\xd9\x10\x6c\xd0\xc6" + "\xfc\x10\x69\x36\xc7\xef\x34\x7c\x07\x8f\xd7\x1c\x54\x22\x81\x64\xfc" + "\x90\x3b\x04\x38\xa3\x97\x8d\x3a\x54\xef\x99\x2a\xa3\xae", + "\x08\x8e\x15\xa1\xac\x30\xd2\x36\xe8\x4b\xe1\x3d\x64\x1c\x8d\xdc\xb5" + "\x3b\xc3\x66\xb8\x4c\x04\xe5\x26\x9c\xe2\x2e\xf1\x32\xa6\x62\xb5\x3b" + "\xc3\x66\xb8\x4c\x04\xe5\x26\x9c\xe2\x2e\xf1\x32\xa6\x62", + "\x34\x5f\xc9\xfe\x57\x3c\x13\x6c\x1b\xe8\x37\x30\x50\x0c\xe6\x62", 1, + 32, 384 }, + { 96, 256, 128, 225, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + 
"\xff\xff\xff\xff", + "\xf6\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x7c" + "\xc2\x25\x5d\xec\xdf\x8e\x0f\xe1\x37\x35\x91\xda\x0e\x28\xe4\x28\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\xe2\x91\xfb" + "\x48\x38\x01\x9c\x51\xdf\xb7\x14\x15\x15\xbb\x53\xb1\x57\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\xee\x9b\xf0\x46\x1b" + "\x6d\xe1\x02\x94\xb2\xda\x48\xe5\x29\x09\x75", + "\xd5\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x0f" + "\x05\xf6\x52\x62\x54\x65\xde\xbb\xac\x58\x68\x37\x68\xac\x07\xf9\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x12\x97\xb8" + "\x8c\x53\xa6\x4a\xd5\x80\xde\x96\x6c\x07\x4d\xa7\x0d\xf9\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x12\x97\xb8\x8c\x53" + "\xa6\x4a\xd5\x80\xde\x96\x6c\x07\x4d\xa7\x0d", + "\x33\x6f\x97\xa5\xfa\xa9\x95\xa2\xa0\x37\x81\xb5\x91\x58\x8d\xa8", 1, + 32, 768 }, + { 96, 256, 128, 226, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xc6\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x7a" + "\xb6\x6f\x80\x90\xc1\x49\xe4\x52\xec\x7f\x20\x32\x7e\xb2\xea\x04\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x8d\x26\x13" + "\xea\x0e\xf8\xb6\x56\xb2\x47\x37\x3e\xce\xc0\x15\xbc\x7b\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x81\x2c\x18\xe4\x2d" + "\x94\xcb\x05\xf9\x42\xf9\x63\x3e\x52\x4f\x78", + "\xe5\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x09" + "\x71\xbc\x8f\x1e\x4a\xa2\x35\x08\x77\x12\xd9\xdf\x18\x36\x09\xd5\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7d\x20\x50" + "\x2e\x65\x5f\x60\xd2\xed\x2e\xb5\x47\xdc\x36\xe1\x00\xd5\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7d\x20\x50\x2e\x65" + "\x5f\x60\xd2\xed\x2e\xb5\x47\xdc\x36\xe1\x00", + "\x93\x51\xc6\x80\xc8\xa5\xd3\x48\x82\xd4\x21\x45\xe8\x97\x45\xc4", 1, + 32, 768 }, + { 96, 256, 128, 227, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xc6\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x74" + "\xb6\x6f\x80\x90\xc1\x49\xe4\x52\xec\x7f\x20\x32\x7e\xb2\xea\x2e\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\xac\xd9\xec" + "\x85\x9e\x08\x66\x62\x0c\xc2\x4c\x8a\x97\xd5\xd9\xf5\x51\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\xa0\xd3\xe7\x8b\xbd" + "\x64\x1b\x31\x47\xc7\x82\xd7\x67\x47\x83\x31", + "\xe5\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x07" + "\x71\xbc\x8f\x1e\x4a\xa2\x35\x08\x77\x12\xd9\xdf\x18\x36\x09\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5c\xdf\xaf" + "\x41\xf5\xaf\xb0\xe6\x53\xab\xce\xf3\x85\x23\x2d\x49\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5c\xdf\xaf\x41\xf5" + "\xaf\xb0\xe6\x53\xab\xce\xf3\x85\x23\x2d\x49", + "\xd7\x92\x66\xcd\x25\xa7\x84\x59\x9a\x0a\x8e\x31\xfc\x84\xd6\x04", 1, + 32, 768 }, + { 96, 256, 128, 228, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xf7\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x42" 
+ "\x51\xcd\x29\xb0\xaa\xa9\x60\x55\x7c\x9e\xa2\x82\x83\x34\xe4\xe4\xe2" + "\x31\xdb\x0a\x27\xfa\xc9\xec\x9e\x74\x48\x86\xeb\x01\x33\xc5\x23\x21" + "\x42\xdd\xf4\x8b\x3f\x18\x51\x40\xf0\xfc\x05\xf0\x43", + "\xd4\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x31" + "\x96\x1e\x26\x3e\x21\x42\xb1\x0f\xe7\xf3\x5b\x6f\xe5\xb0\x07\x35\x25" + "\x62\x86\xb6\x53\x5d\xbb\x47\x38\xc2\x89\xee\xf3\x04\xff\x35\x25\x62" + "\x86\xb6\x53\x5d\xbb\x47\x38\xc2\x89\xee\xf3\x04\xff", + "\x9d\x67\x1d\x40\x7d\x76\x60\x45\x9d\x5d\x58\x2d\x83\x91\x5e\xfe", 1, + 32, 512 }, + { 96, 256, 128, 229, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xf5\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x73" + "\xbd\x9f\x01\xbf\x33\x31\xb1\x2e\x31\xdd\x14\xcf\x11\xfe\xee\x1d\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x62\x5c\x69" + "\x65\xf6\x1a\x1c\x36\x11\x8c\x74\x70\x76\xd5\xb7\xb7\x62\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x6e\x56\x62\x6b\xd5" + "\x76\x61\x65\x5a\x89\xba\x2d\x86\x47\xed\x73", + "\xd6\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00" + "\x7a\x4c\x0e\x31\xb8\xda\x60\x74\xaa\xb0\xed\x22\x77\x7a\x0d\xcc\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x92\x5a\x2a" + "\xa1\x9d\xbd\xca\xb2\x4e\xe5\xf6\x09\x64\x23\x43\x0b\xcc\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x92\x5a\x2a\xa1\x9d" + "\xbd\xca\xb2\x4e\xe5\xf6\x09\x64\x23\x43\x0b", + "\x7b\x20\x7c\x2c\x32\x78\xc6\x4f\x0d\x6b\x91\x3f\xe3\x71\xfe\x63", 1, + 32, 768 }, + { 96, 256, 128, 230, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xdc\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\xec" + "\x09\x33\xf0\xbf\xb9\x12\x18\xce\xa0\xd7\x4e\x06\x1f\x55\x9e\x2d\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x8d\x5b\x67" + "\xe0\xac\xee\x53\x4c\xe2\xd9\x79\x14\x87\xb1\xec\xb2\x52\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x81\x51\x6c\xee\x8f" + "\x82\x2e\x1f\xa9\xdc\xb7\x49\x77\x23\xb6\x76", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x9f" + "\xce\xe0\xff\x31\x32\xf9\xc9\x94\x3b\xba\xb7\xeb\x79\xd1\x7d\xfc\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7d\x5d\x24" + "\x24\xc7\x49\x85\xc8\xbd\xb0\xfb\x6d\x95\x47\x18\x0e\xfc\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7d\x5d\x24\x24\xc7" + "\x49\x85\xc8\xbd\xb0\xfb\x6d\x95\x47\x18\x0e", + "\x36\x72\x16\x2b\xb1\xf3\xff\x53\x7e\xce\x01\x3f\x1a\xca\x4f\x68", 1, + 32, 768 }, + { 96, 256, 128, 231, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xdc\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\xee" + "\x83\xa1\x4f\x48\xdb\x69\x62\x91\x08\x0e\xdf\xcc\x89\x8b\x88\x2b\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x8a\xd5\xf6" + "\xb0\x28\x3a\x8b\x39\xeb\xed\xce\x92\x78\x5d\xa9\xb6\x54\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x86\xdf\xfd\xbe\x0b" + "\x56\xf6\x6a\xa0\xe8\x00\xcf\x88\xcf\xf3\x72", + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x9d" + "\x44\x72\x40\xc6\x50\x82\xb3\xcb\x93\x63\x26\x21\xef\x0f\x6b\xfa\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7a\xd3\xb5" + "\x74\x43\x9d\x5d\xbd\xb4\x84\x4c\xeb\x6a\xab\x5d\x0a\xfa\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7a\xd3\xb5\x74\x43" + "\x9d\x5d\xbd\xb4\x84\x4c\xeb\x6a\xab\x5d\x0a", + "\x35\x72\x16\x3b\x99\x28\x4f\x5f\x3e\x4a\xa9\x4d\xba\xb8\x56\x77", 1, + 32, 768 }, + { 96, 256, 128, 232, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xdc\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\xe8" + "\x7d\xd0\x8e\xd4\xe4\xe0\x4c\x58\x77\x61\x6c\xbb\x02\xca\xbb\x29\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x87\x4f\x04" + "\x01\xd4\x57\xe3\x36\xf4\x31\x1f\x11\x52\xf9\x57\xba\x56\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x8b\x45\x0f\x0f\xf7" + "\x3b\x9e\x65\xbf\x34\xd1\x4c\xa2\x6b\x0d\x7e", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x9b" + "\xba\x03\x81\x5a\x6f\x0b\x9d\x02\xec\x0c\x95\x56\x64\x4e\x58\xf8\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x77\x49\x47" + "\xc5\xbf\xf0\x35\xb2\xab\x58\x9d\x68\x40\x0f\xa3\x06\xf8\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x77\x49\x47\xc5\xbf" + "\xf0\x35\xb2\xab\x58\x9d\x68\x40\x0f\xa3\x06", + "\x34\x72\x16\x4b\x81\x5d\x9e\x6a\xfe\xc5\x50\x5c\x5a\xa7\x5d\x86", 1, + 32, 768 }, + { 96, 256, 128, 233, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xc8\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x6b" + "\xe4\x36\xe3\x46\xf8\xf2\xb3\x2f\x4c\xbb\xae\xf9\x51\x50\xef\x04\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x2f\xb7\x6b" + "\x51\x32\xe9\x30\xf6\xd0\xac\xf7\x08\x75\xe9\x77\xb5\x7b\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x23\xbd\x60\x5f\x11" + "\x85\x4d\xa5\x9b\xa9\x39\x55\x85\x7b\x2d\x71", + "\xeb\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x18" + "\x23\xe5\xec\xc8\x73\x19\x62\x75\xd7\xd6\x57\x14\x37\xd4\x0c\xd5\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xdf\xb1\x28" + "\x95\x59\x4e\xe6\x72\x8f\xc5\x75\x71\x67\x1f\x83\x09\xd5\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xdf\xb1\x28\x95\x59" + "\x4e\xe6\x72\x8f\xc5\x75\x71\x67\x1f\x83\x09", + "\x3a\x72\x16\xd7\xee\x1d\xa0\x18\xce\x84\x12\xf2\x51\x65\x6b\x19", 1, + 32, 768 }, + { 96, 256, 128, 234, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xc5\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x78" + "\x3c\xf9\x30\x2c\x7d\x22\x91\x4b\x38\xac\xa2\xe7\xd3\x74\xef\x1d\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x22\x8f\x2d" + "\x23\x59\x76\x40\xd5\x74\xf8\xe2\x0c\x4f\x6b\x6b\xb5\x62\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x2e\x85\x26\x2d\x7a" + "\x1a\x3d\x86\x3f\xfd\x2c\x51\xbf\xf9\x31\x71", + "\xe6\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x0b" + 
"\xfb\x2a\x3f\xa2\xf6\xc9\x40\x11\xa3\xc1\x5b\x0a\xb5\xf0\x0c\xcc\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd2\x89\x6e" + "\xe7\x32\xd1\x96\x51\x2b\x91\x60\x75\x5d\x9d\x9f\x09\xcc\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd2\x89\x6e\xe7\x32" + "\xd1\x96\x51\x2b\x91\x60\x75\x5d\x9d\x9f\x09", + "\x36\x72\x16\x17\x8f\xf1\xdc\x45\xce\x73\xb0\x2c\xd2\x1f\x87\x55", 1, + 32, 768 }, + { 96, 256, 128, 235, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xdc\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x5d" + "\xb7\x2f\x89\xd1\x40\x2b\x1a\x03\x73\xff\x0a\x9c\x5c\xd4\x4b\x6d\x67" + "\xaf\x40\x79\x8f\x54\x55\x50\x17\x92\x95\x32\x48\xec\x23\x4c\xa6\xbf" + "\xd9\xae\x5c\x25\xa3\xa4\xd8\xa6\x2d\x48\xa6\x1d\x53", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2e" + "\x70\xfc\x86\x5f\xcb\xc0\xcb\x59\xe8\x92\xf3\x71\x3a\x50\xa8\xbc\xa0" + "\xfc\x1d\xc5\xfb\xf3\x27\xfb\xb1\x24\x54\x5a\x50\xe9\xef\xbc\xa0\xfc" + "\x1d\xc5\xfb\xf3\x27\xfb\xb1\x24\x54\x5a\x50\xe9\xef", + "\x0b\x49\x61\xc9\x52\x5e\xa2\xf2\xcd\xad\x62\x73\xe1\xc7\x82\x4c", 1, + 32, 512 }, + { 96, 256, 128, 236, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xdc\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\x5f" + "\x21\x5e\xc8\x7d\x62\xa2\x64\xca\xdb\x51\x9b\x4a\xc9\x0a\x76\x68\xd1" + "\xdd\x03\xe5\x6e\xda\x63\x99\xac\x78\x03\xe7\xdd\x22\x11\x49\x10\xcd" + "\x9a\x32\xbd\xab\x95\x6d\x63\x4c\xbb\x9d\x33\xd3\x61", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2c" + "\xe6\x8d\xc7\xf3\xe9\x49\xb5\x90\x40\x3c\x62\xa7\xaf\x8e\x95\xb9\x16" + "\x8e\x5e\x59\x1a\x7d\x11\x32\x0a\xce\xc2\x8f\xc5\x27\xdd\xb9\x16\x8e" + "\x5e\x59\x1a\x7d\x11\x32\x0a\xce\xc2\x8f\xc5\x27\xdd", + "\x0a\x49\x61\xd9\x3a\x93\xf1\xfd\x8d\x29\x0a\x82\x81\xb6\x89\x5b", 1, + 32, 512 }, + { 96, 256, 128, 237, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x10\xab\xb1\x65", + "\xff\xff\xff\xff", + "\xdc\x8c\xe7\x08\xbf\x26\xaa\xb8\x62\xd9\x7e\x1b\x42\xf3\x1e\xf3\xd1" + "\x5a\xd5\x90\xdd\x0f\x40\xba\x18\xac\xd1\x68\xf6\xac\x77\x7a\x0f\x38" + "\xac\xa2\x43\x8b\x58\x8d\x54\x59\x49\x3e\x97\xe7\xfa\x33\x93\x2a\x09" + "\x7f\x1d\x39\xa0\x4a\xd3\x0f\x1b\x6c\x65\x02\x60\xbf\x70\x03\x47\x8f" + "\x28\x69\xb9\x3e\xe2\x9c\x83\x7e\x95\xfb\x6b\x99\x9f\x20\x02\x71\x3e" + "\x55\xdd\x19\x98\x0a\xd5\x31\x95\x90\x3a\x7b", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa2" + "\x9d\x06\x9f\x53\x84\xab\x6b\x42\x37\xbc\x91\x1b\xca\xf3\x99\xde\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x63\x2c\x4a" + "\xbb\x76\x9e\x76\xce\x8c\x66\x99\x15\x77\xf4\x94\x03\xde\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x63\x2c\x4a\xbb\x76" + "\x9e\x76\xce\x8c\x66\x99\x15\x77\xf4\x94\x03", + "\x35\x72\x16\x13\x55\x24\x09\x43\xde\x94\x06\x29\x2a\x64\xc5\x51", 1, + 32, 768 }, + { 96, 256, 128, 238, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + 
"\xff\xff\xff\xff", + "\x40\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x39" + "\x0e\xf9\x3a\xeb\x61\xaa\x30\x7f\x14\x13\x23\xc3\x8e\x06\x85\xfa\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x25\x94\x45" + "\xf4\xff\xc3\x1b\xce\x54\x01\x90\xed\xd6\xad\x20\x78\x76\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xed\xa1\xa5\xb6\x13" + "\x97\x50\xf9\x73\xf0\xd4\x84\x1b\xaa\x2c\xb8", + "\xd9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa0" + "\x09\xd7\x3c\x65\x44\x42\x8c\xfa\xc0\xb2\xd8\xc7\xbb\xef\x0b\xed\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x8a\x5e\xf6" + "\x07\x15\xbc\x4b\x07\xc9\x2b\x97\x07\x37\x6d\xa1\x05\xed\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x8a\x5e\xf6\x07\x15" + "\xbc\x4b\x07\xc9\x2b\x97\x07\x37\x6d\xa1\x05", + "\x19\x53\x2d\x9f\xa0\xb5\xfb\xd5\x82\xaa\xed\xa8\x30\x60\x2f\x1d", 1, + 32, 768 }, + { 96, 256, 128, 239, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x49\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\xe0" + "\x2b\x87\xae\xae\x8c\x3d\xa8\x89\x5f\x8c\xb0\xf6\xb9\xcc\x80\xf4\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\xcc\x4b\x7b" + "\x80\x3a\x5f\x8f\x46\x47\xdf\x16\x90\x80\xfe\x56\x7a\x78\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\x04\x7e\x9b\xc2\xd6" + "\x0b\xc4\x71\x60\x2e\x52\xf9\x4d\xf9\x5a\xba", + "\xd0\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x79" + "\x2c\xa9\xa8\x20\xa9\xd5\x14\x0c\x8b\x2d\x4b\xf2\x8c\x25\x0e\xe3\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x63\x81\xc8" + "\x73\xd0\x20\xdf\x8f\xda\xf5\x11\x7a\x61\x3e\xd7\x07\xe3\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x63\x81\xc8\x73\xd0" + "\x20\xdf\x8f\xda\xf5\x11\x7a\x61\x3e\xd7\x07", + "\xad\xbd\x2c\xaf\xc8\xc8\xf0\xe5\x12\x50\xe7\xb8\x1c\x9d\x0a\x2d", 1, + 32, 768 }, + { 96, 256, 128, 240, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x43\xea\xda\xe0\x36\xf7\x33\xea\x9b\x5b\x7e\xb2\x2a\xee\x39\x5d\xb6" + "\xf5\x1a\x4d\x10\xbc\x24\x60\x81\x0c\x22\x96\x51\x55\x6a\xcf\x38\x4a" + "\xd8\x2e\x3e\x28\x0c\xad\x69\xf0\xdf\x25\xb4\x2b\x83\xb0", + "\xda\x04\x7b\x78\x25\xdb\x18\x02\xe8\xe8\xe1\xaa\xc6\xba\x88\xfc\x2f" + "\xf2\x34\x4b\x9e\x99\xcc\xdc\x04\xd8\x83\x6d\x55\x60\x83\x41\x2f\xf2" + "\x34\x4b\x9e\x99\xcc\xdc\x04\xd8\x83\x6d\x55\x60\x83\x41", + "\x97\x3e\x27\x0a\x7a\xfc\xab\x75\x34\x8e\x14\xdb\xe1\x9c\x51\x56", 1, + 32, 384 }, + { 96, 256, 128, 241, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x66\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x89" + "\x1b\x79\x75\x21\xba\x92\x5b\x24\x09\x0a\xaf\x6c\x44\x82\xba\xe8\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x6d\x50\xc3" + "\x2d\x05\xa9\x46\xcb\x8c\xea\x57\xc9\xf1\x44\x2c\xb1\x64\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xa5\x65\x23\x6f\xe9" + "\xfd\x0d\xfc\xab\x1b\x13\xa0\x3c\x43\x20\x71", + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x10" + "\x1c\x57\x73\xaf\x9f\x7a\xe7\xa1\xdd\xab\x54\x68\x71\x6b\x34\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x9a\x70" + "\xde\xef\xd6\x16\x02\x11\xc0\x50\x23\x10\x84\xad\xcc\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc2\x9a\x70\xde\xef" + "\xd6\x16\x02\x11\xc0\x50\x23\x10\x84\xad\xcc", + "\xe1\x7c\x27\x3f\x31\x75\x8e\x75\x23\x22\xae\x48\x69\xc1\xbf\xbb", 1, + 32, 768 }, + { 96, 256, 128, 242, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x6a\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x51" + "\x9c\xcc\xeb\xf7\x25\x73\xdb\xee\x8c\x12\xf7\x42\x55\xd1\x8c\x0a\xdd" + "\x10\x35\x86\x1f\xfc\x0b\x7f\x40\x07\x9b\x96\x9f\x8c\x63\xb2\xaf\x4f" + "\xa3\xcc\xd1\x6c\xb3\x8f\x42\x5c\x39\x96\x14\x0d\xef", + "\xf3\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc8" + "\x9b\xe2\xed\x79\x00\x9b\x67\x6b\x58\xb3\x0c\x46\x60\x38\x02\x1d\x65" + "\xfc\x50\x26\xae\x3c\x7a\x12\x68\x5b\xd3\x77\xd4\x8c\x92\x1d\x65\xfc" + "\x50\x26\xae\x3c\x7a\x12\x68\x5b\xd3\x77\xd4\x8c\x92", + "\xa2\x23\x90\x22\x4c\x5d\xb0\xf0\x16\x96\x74\x3d\x87\x07\x25\xc5", 1, + 32, 512 }, + { 96, 256, 128, 243, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\xe2\x35\xb8\xc2\x13\x84\x55\x70\x85\xc3\xf2\xeb\x2a\x8f\xa3\x60\x58" + "\xcf\xfd\x2a\xf7\x43\xda\xcf\x96\xb4\xae\x4d\x51\xb4\xe4\x88\xd6\x70" + "\x3f\x49\xd9\xd7\xf2\x02\x7e\x48\x53\xfe\xb4\xca\x0d\xf7", + "\x7b\xdb\x19\x5a\x00\xa8\x7e\x98\xf6\x70\x6d\xf3\xc6\xdb\x12\xc1\xc1" + "\xc8\xd3\x2c\x79\x66\x32\x73\x13\x60\x0f\xb6\x55\x81\x0d\x06\xc1\xc8" + "\xd3\x2c\x79\x66\x32\x73\x13\x60\x0f\xb6\x55\x81\x0d\x06", + "\x43\x7d\x1e\xfa\xd2\x1b\x08\x65\xa5\x41\xb5\xca\xb6\x2e\x2a\x44", 1, + 32, 384 }, + { 96, 256, 128, 244, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x66\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x8f" + "\xab\x58\x57\x4a\x32\x2b\xac\x6f\x39\x44\x74\xe4\xce\x7e\xae\xc3\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x71\x53\x2d" + "\xfb\x0e\x91\x41\xb0\x09\x83\x39\x47\x22\x82\x9e\x7c\x4f\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xb9\x66\xcd\xb9\xe2" + "\xc5\x0a\x87\x2e\x72\x7d\x2e\xef\x85\x92\xbc", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x16" + "\xac\x76\x51\xc4\x17\xc3\x10\xea\xed\xe5\x8f\xe0\xfb\x97\x20\xd4\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xde\x99\x9e" + "\x08\xe4\xee\x11\x79\x94\xa9\x3e\xad\xc3\x42\x1f\x01\xd4\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xde\x99\x9e\x08\xe4" + "\xee\x11\x79\x94\xa9\x3e\xad\xc3\x42\x1f\x01", + "\xac\xf4\xff\xa2\x0c\x0d\x06\xd6\x1a\x18\xe9\xa8\xd4\xc8\x4d\x1d", 1, + 32, 768 }, + { 96, 256, 128, 245, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + 
"\x61\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x5e" + "\xfe\x67\x9b\xa1\x73\x84\xc5\x5e\xb8\xcc\x19\x36\x66\xfe\x8d\x04\x60" + "\x8c\x35\x03\xd2\x17\xaa\x3f\x90\xa9\xb0\xe1\xb3\xb3\x13\xbc\x12\xd3" + "\xa3\x49\x1c\x87\x12\xcf\x92\xf2\x12\xe1\x38\x32\x9f", + "\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc7" + "\xf9\x49\x9d\x2f\x56\x6c\x79\xdb\x6c\x6d\xe2\x32\x53\x17\x03\x13\xd8" + "\x60\x50\xa3\x63\xd7\xdb\x52\xb8\xf5\xf8\x00\xf8\xb3\xe2\x13\xd8\x60" + "\x50\xa3\x63\xd7\xdb\x52\xb8\xf5\xf8\x00\xf8\xb3\xe2", + "\xcd\x46\x6d\x06\xe7\x5b\x7f\xd1\x8d\x5f\xe2\x1d\x92\x27\xd9\xa7", 1, + 32, 512 }, + { 96, 256, 128, 246, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x90\x64\xb8\x8a\x28\x20\x52\xa1\xee\x44\xdf\x05\xad\x21\x3d\xa6\x79" + "\xf8\xd1\xf9\x71\xda\x17\x43\x7a\x2b\x5e\x04\xfb\xca\x16\x71\x51\xb2" + "\x65\x0e\xc9\x45\xfe\xc7\x05\x88\xbc\x65\xa6\x16\xa5\xf2\x4f\x35\x4c" + "\x0c\x15\x80\xaf\x36\x62\xd5\xf8\x15\x1e\x3f\x7e\x82\xdd\x55\x7e\xc8" + "\xa4\xd6\x3d\xf7\x27\x45\x94\x36\x7b\xef\x09\xcd", + "\x09\x8a\x19\x12\x3b\x0c\x79\x49\x9d\xf7\x40\x1d\x41\x75\x8c\x07\xe0" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x46\x0a" + "\x89\x6b\x69\xf4\x3e\xb6\x68\xa0\xe0\x2d\x47\x5d\xa5\x03\xe0\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x46\x0a\x89\x6b" + "\x69\xf4\x3e\xb6\x68\xa0\xe0\x2d\x47\x5d\xa5\x03", + "\xce\x8a\x3d\x4d\x88\x7d\x95\x61\x3d\x82\x9b\x53\x8e\xd0\x11\x96", 1, + 32, 640 }, + { 96, 256, 128, 247, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x43\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\xee" + "\xf6\x7b\xd4\x79\x5b\x74\x01\x5a\x34\x93\x90\x5d\x54\x4a\x86\xe8\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x31\x97\xbe" + "\x28\xef\xf8\x43\x59\x2b\xd8\xfc\x8d\x57\x84\x21\xd6\x64\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xf9\xa2\x5e\x6a\x03" + "\xac\x08\x6e\x0c\x29\xb8\xe4\x9a\x83\x2d\x16", + "\xda\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x77" + "\xf1\x55\xd2\xf7\x7e\x9c\xbd\xdf\xe0\x32\x6b\x59\x61\xa3\x08\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x9e\x5d\x0d" + "\xdb\x05\x87\x13\x90\xb6\xf2\xfb\x67\xb6\x44\xa0\xab\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x9e\x5d\x0d\xdb\x05" + "\x87\x13\x90\xb6\xf2\xfb\x67\xb6\x44\xa0\xab", + "\x08\x28\x9f\x51\x99\xdf\x47\x6f\xe9\x04\x75\xcb\x95\x22\x55\x66", 1, + 32, 768 }, + { 96, 256, 128, 248, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x6b\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x1e" + "\x34\x41\x2a\xb0\xa0\x56\xe8\x09\xd5\xd4\xb9\x2b\xe1\x12\x8a\x4b\x2a" + "\x65\x1a\x62\xae\xab\x26\xcf\x43\x7f\xb1\x95\x40\x75\x74\xf3\x58\x3a" + "\x8c\x28\x60\x3b\x9e\x3f\x41\x24\x13\x95\xcb\xf4\xf8", + "\xf2\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x87" + "\x33\x6f\x2c\x3e\x85\xbe\x54\x8c\x01\x75\x42\x2f\xd4\xfb\x04\x5c\x92" + "\x89\x7f\xc2\x1f\x6b\x57\xa2\x6b\x23\xf9\x74\x0b\x75\x85\x5c\x92\x89" + 
"\x7f\xc2\x1f\x6b\x57\xa2\x6b\x23\xf9\x74\x0b\x75\x85", + "\x06\xdf\x93\xf6\x51\xea\x5c\xc5\x69\x11\xf3\x0d\x3e\x58\xf9\x97", 1, + 32, 512 }, + { 96, 256, 128, 249, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x3f\xe6\x06\x10\x8f\x35\x86\x9d\xf4\xc7\xaa\x01\x28\x46\x4a\x12\x65" + "\xf8\xd1\xf9\x71\xda\x17\x43\x7a\x2b\x5e\x04\xfb\xca\x16\x71\xfd\xbe" + "\x84\x3a\x0a\xd9\xbe\x25\x05\x59\x92\xab\x6d\xcb\xc9\xf1\x53\x35\x4c" + "\x0c\x15\x80\xaf\x36\x62\xd5\xf8\x15\x1e\x3f\x7e\x82\x71\x59\x9f\xfc" + "\x67\x4a\x7d\x15\x27\x94\xba\xf8\xb0\x32\x65\xce", + "\xa6\x08\xa7\x88\x9c\x19\xad\x75\x87\x74\x35\x19\xc4\x12\xfb\xb3\xfc" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xea\x06" + "\x68\x5f\xaa\x68\x7e\x54\x68\x71\xce\xe3\x8c\x80\xc9\x00\xfc\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xea\x06\x68\x5f" + "\xaa\x68\x7e\x54\x68\x71\xce\xe3\x8c\x80\xc9\x00", + "\x92\x64\xfc\x0f\x47\xfe\xbb\x30\x66\x12\x54\xda\xf9\xa0\x61\x89", 1, + 32, 640 }, + { 96, 256, 128, 250, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x6e\x8e\xb9\x8c\xf7\xff\xfe\x4c\xd6\x83\x56\x8c\xf8\x92\x99\x15\x64" + "\xf8\xd1\xf9\x71\xda\x17\x43\x7a\x2b\x5e\x04\xfb\xca\x16\x71\xc7\x0f" + "\x5d\x8b\x30\xc6\x4b\xf2\xe6\xd1\xd6\x13\xf4\x0e\x0b\xf0\x52\x35\x4c" + "\x0c\x15\x80\xaf\x36\x62\xd5\xf8\x15\x1e\x3f\x7e\x82\x4b\xe8\x46\x4d" + "\x5d\x55\x88\xc2\xc4\x1c\xfe\x40\x29\xf7\xa7\xcf", + "\xf7\x60\x18\x14\xe4\xd3\xd5\xa4\xa5\x30\xc9\x94\x14\xc6\x28\xb4\xfd" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd0\xb7" + "\xb1\xee\x90\x77\x8b\x83\x8b\xf9\x8a\x5b\x15\x45\x0b\x01\xfd\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd0\xb7\xb1\xee" + "\x90\x77\x8b\x83\x8b\xf9\x8a\x5b\x15\x45\x0b\x01", + "\x69\xa1\x24\xfc\x7f\x96\xe2\x20\xd1\xa0\x31\xce\xd5\x52\x72\x79", 1, + 32, 640 }, + { 96, 256, 128, 251, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x4f\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x41" + "\x56\x26\x9f\xe3\xda\x10\x1e\xeb\x0a\xbf\x8d\xda\x20\xfe\x8f\xff\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x6a\xec\xe9" + "\x83\xe6\x4f\x97\xe4\x3f\xf5\x29\x5b\xc8\x84\xfa\x77\x73\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xa2\xd9\x09\xc1\x0a" + "\x1b\xdc\xd3\x18\x04\x6d\x32\x05\x83\xf6\xb7", + "\xd6\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd8" + "\x51\x08\x99\x6d\xff\xf8\xa2\x6e\xde\x1e\x76\xde\x15\x17\x01\xe8\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc5\x26\x5a" + "\x70\x0c\x30\xc7\x2d\xa2\xdf\x2e\xb1\x29\x44\x7b\x0a\xe8\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc5\x26\x5a\x70\x0c" + "\x30\xc7\x2d\xa2\xdf\x2e\xb1\x29\x44\x7b\x0a", + "\x3e\xa8\xf9\xb2\x01\x23\x21\xe6\x3d\x5f\xb5\xbc\x2c\x5d\x33\x2d", 1, + 32, 768 }, + { 96, 256, 128, 252, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + 
"\xff\xff\xff\xff", + "\x66\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x18" + "\xf1\x25\xef\x37\x4c\x14\x54\xb6\x80\xe2\x34\x27\xe7\xdc\x69\xe4\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x85\x8b\x08" + "\xeb\x1d\x58\x15\x70\xa7\xcd\x1e\x48\x59\x3b\x75\x75\x68\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\x4d\xbe\xe8\xa9\xf1" + "\x0c\x5e\x47\x80\x3c\x5a\x21\x94\x3c\x79\xb5", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x81" + "\xf6\x0b\xe9\xb9\x69\xfc\xe8\x33\x54\x43\xcf\x23\xd2\x35\xe7\xf3\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2a\x41\xbb" + "\x18\xf7\x27\x45\xb9\x3a\xe7\x19\xa2\xb8\xfb\xf4\x08\xf3\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2a\x41\xbb\x18\xf7" + "\x27\x45\xb9\x3a\xe7\x19\xa2\xb8\xfb\xf4\x08", + "\xdf\xaf\x8a\x3a\x15\xd4\x5e\x7f\x4c\x34\x30\x04\x8d\x85\x89\xf0", 1, + 32, 768 }, + { 96, 256, 128, 253, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\xb0\x2a\xb7\x47\xa3\x10\xd6\xa3\xbb\xdb\x97\x01\x8a\x3b\xe8\xb3\x41" + "\xf8\xd1\xf9\x71\xda\x17\x43\x7a\x2b\x5e\x04\xfb\xca\x16\x71\xb7\xa3" + "\x38\xbc\x34\x23\x89\x5f\x0f\xd9\x6c\xdb\x27\xa7\x87\xf2\x77\x35\x4c" + "\x0c\x15\x80\xaf\x36\x62\xd5\xf8\x15\x1e\x3f\x7e\x82\x3b\x44\x23\x7a" + "\x59\xb0\x4a\x6f\x2d\x14\x44\x88\xfa\x5e\x2b\xcd", + "\x29\xc4\x16\xdf\xb0\x3c\xfd\x4b\xc8\x68\x08\x19\x66\x6f\x59\x12\xd8" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa0\x1b" + "\xd4\xd9\x94\x92\x49\x2e\x62\xf1\x30\x93\xc6\xec\x87\x03\xd8\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa0\x1b\xd4\xd9" + "\x94\x92\x49\x2e\x62\xf1\x30\x93\xc6\xec\x87\x03", + "\x34\x08\xeb\x2b\x13\xa9\xb7\x6b\xef\xce\xdf\x69\x94\x22\xd6\x1f", 1, + 32, 640 }, + { 96, 256, 128, 254, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x40\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x38" + "\x0e\xf9\x3a\xeb\x61\xaa\x30\x7f\x14\x13\x23\xc3\x8e\x06\x85\xf6\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x3f\x76\x9a" + "\x30\xe8\x95\x1f\xf2\xfb\x36\x5f\xa7\x80\xfd\xde\x7e\x7a\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xf7\x43\x7a\x72\x04" + "\xc1\x54\xc5\xdc\xc7\x1b\xce\x4d\xfa\xd2\xbe", + "\xd9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa1" + "\x09\xd7\x3c\x65\x44\x42\x8c\xfa\xc0\xb2\xd8\xc7\xbb\xef\x0b\xe1\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x90\xbc\x29" + "\xc3\x02\xea\x4f\x3b\x66\x1c\x58\x4d\x61\x3d\x5f\x03\xe1\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x90\xbc\x29\xc3\x02" + "\xea\x4f\x3b\x66\x1c\x58\x4d\x61\x3d\x5f\x03", + "\x09\xf4\xf2\xa3\x93\x6d\x74\x61\xa6\x7c\xe0\x22\x17\x6b\xb8\xdd", 1, + 32, 768 }, + { 96, 256, 128, 255, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x40\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x06" + "\x0e\xf9\x3a\xeb\x61\xaa\x30\x7f\x14\x13\x23\xc3\x8e\x06\x85\xee\x47" + 
"\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x2b\xca\x70" + "\xbf\xcd\xf1\x17\x1a\xb6\x11\xd1\x2b\xed\x5d\x62\x7a\x62\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xe3\xff\x90\xfd\x21" + "\xa5\x5c\x2d\x91\xe0\x95\x42\x20\x5a\x6e\xba", + "\xd9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x9f" + "\x09\xd7\x3c\x65\x44\x42\x8c\xfa\xc0\xb2\xd8\xc7\xbb\xef\x0b\xf9\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x84\x00\xc3" + "\x4c\x27\x8e\x47\xd3\x2b\x3b\xd6\xc1\x0c\x9d\xe3\x07\xf9\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x84\x00\xc3\x4c\x27" + "\x8e\x47\xd3\x2b\x3b\xd6\xc1\x0c\x9d\xe3\x07", + "\x2e\xb2\x67\x9a\xad\xfd\x82\x4a\x5f\xd8\xfa\x2e\x4a\x55\xa6\x5c", 1, + 32, 768 }, + { 96, 256, 128, 256, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x56\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x6c" + "\x7e\x13\x12\xc6\x77\x4f\xae\x7d\x1e\x5d\x0c\xc6\x09\x02\x8f\xf5\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x81\xc9\xe6" + "\x1c\xbe\xee\xd5\x54\x6b\x1c\xe5\xd8\xfe\xf2\x1a\x7a\x79\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\x49\xfc\x06\x5e\x52" + "\xba\x9e\x63\x4c\xed\xa1\xb1\x33\xf5\x16\xba", + "\xcf\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf5" + "\x79\x3d\x14\x48\x52\xa7\x12\xf8\xca\xfc\xf7\xc2\x3c\xeb\x01\xe2\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2e\x03\x55" + "\xef\x54\x91\x85\x9d\xf6\x36\xe2\x32\x1f\x32\x9b\x07\xe2\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2e\x03\x55\xef\x54" + "\x91\x85\x9d\xf6\x36\xe2\x32\x1f\x32\x9b\x07", + "\x5e\x89\x34\x9f\x6b\x01\x1c\xd6\xe2\x4e\xe6\xac\x2f\x59\x0c\x21", 1, + 32, 768 }, + { 96, 256, 128, 257, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x2e\xa8\x41\x0b\x4d\xca\x8c\x9d\x53\x69\xa0\x33\xd8\xdb\x61\xe4\x6c" + "\xf8\xd1\xf9\x71\xda\x17\x43\x7a\x2b\x5e\x04\xfb\xca\x16\x71\xf0\xf5" + "\x8e\x8b\xba\x6c\xf1\xa5\x21\x46\x27\x3d\x8f\xe0\xc4\xfc\x5a\x35\x4c" + "\x0c\x15\x80\xaf\x36\x62\xd5\xf8\x15\x1e\x3f\x7e\x82\x7c\x12\x95\x4d" + "\xd7\xff\x32\x95\x03\x8b\x0f\x6e\x52\x19\x68\xc3", + "\xb7\x46\xe0\x93\x5e\xe6\xa7\x75\x20\xda\x3f\x2b\x34\x8f\xd0\x45\xf5" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe7\x4d" + "\x62\xee\x1a\xdd\x31\xd4\x4c\x6e\x7b\x75\x6e\xab\xc4\x0d\xf5\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe7\x4d\x62\xee" + "\x1a\xdd\x31\xd4\x4c\x6e\x7b\x75\x6e\xab\xc4\x0d", + "\xb2\x45\x37\xfc\xb0\xdc\xb6\x20\x0b\x02\x85\xca\xfc\x9c\x3a\x7d", 1, + 32, 640 }, + { 96, 256, 128, 258, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x17\x05\x9a\x7c\x88\x83\xa2\x8b\x90\xbd\x94\xae\x44\xd1\x54\x36\x62" + "\xf8\xd1\xf9\x71\xda\x17\x43\x7a\x2b\x5e\x04\xfb\xca\x16\x71\xa2\x30" + "\x18\xbf\x8e\x68\xe4\x13\xe9\x9a\xc2\xd4\xab\x3f\x8d\xf1\x54\x35\x4c" + "\x0c\x15\x80\xaf\x36\x62\xd5\xf8\x15\x1e\x3f\x7e\x82\x2e\xd7\x03\x79" + "\xe3\xfb\x27\x23\xcb\x57\xea\x87\x76\xc6\x21\xce", + 
"\x8e\xeb\x3b\xe4\x9b\xaf\x89\x63\xe3\x0e\x0b\xb6\xa8\x85\xe5\x97\xfb" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb5\x88" + "\xf4\xda\x2e\xd9\x24\x62\x84\xb2\x9e\x9c\x4a\x74\x8d\x00\xfb\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb5\x88\xf4\xda" + "\x2e\xd9\x24\x62\x84\xb2\x9e\x9c\x4a\x74\x8d\x00", + "\x43\x30\x04\x00\xea\x36\xe7\x20\x36\x11\x53\xce\x0c\x5d\x63\x7d", 1, + 32, 640 }, + { 96, 256, 128, 259, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\xaa\xa1\xb2\x58\xfd\x4b\x54\xb4\x97\xb5\x20\x80\x6a\x66\xd7\xaa\x68" + "\xf8\xd1\xf9\x71\xda\x17\x43\x7a\x2b\x5e\x04\xfb\xca\x16\x71\x99\x13" + "\x2a\x23\x4a\x8c\x78\x9b\xf8\x54\x45\x47\x94\x0e\xc3\xf3\x5e\x35\x4c" + "\x0c\x15\x80\xaf\x36\x62\xd5\xf8\x15\x1e\x3f\x7e\x82\x15\xf4\x31\xe5" + "\x27\x1f\xbb\xab\xda\x99\x6d\x14\x49\xf7\x6f\xcc", + "\x33\x4f\x13\xc0\xee\x67\x7f\x5c\xe4\x06\xbf\x98\x86\x32\x66\x0b\xf1" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x8e\xab" + "\xc6\x46\xea\x3d\xb8\xea\x95\x7c\x19\x0f\x75\x45\xc3\x02\xf1\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x8e\xab\xc6\x46" + "\xea\x3d\xb8\xea\x95\x7c\x19\x0f\x75\x45\xc3\x02", + "\xd7\x9a\x03\x10\x12\x4a\xdc\x30\xc6\xb6\x4c\xde\xf8\x99\x3e\x8d", 1, + 32, 640 }, + { 96, 256, 128, 260, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x4c\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\xbb" + "\x53\x57\xed\x31\x4a\xd7\x40\xb9\x91\x0f\xad\x6f\x01\xd7\x81\xf0\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\xc8\x04\x2b" + "\x41\x4f\xdd\x1b\xba\x3a\x6c\x93\x6b\x7e\xd6\x78\x79\x7c\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\x00\x31\xcb\x03\xa3" + "\x89\x50\x8d\x1d\x9d\xd7\x02\xb3\xd1\x74\xb9", + "\xd5\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x22" + "\x54\x79\xeb\xbf\x6f\x3f\xfc\x3c\x45\xae\x56\x6b\x34\x3e\x0f\xe7\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x67\xce\x98" + "\xb2\xa5\xa2\x4b\x73\xa7\x46\x94\x81\x9f\x16\xf9\x04\xe7\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x67\xce\x98\xb2\xa5" + "\xa2\x4b\x73\xa7\x46\x94\x81\x9f\x16\xf9\x04", + "\xe6\x02\x2c\xc3\xba\x20\xe3\xf9\x06\x5f\xdf\xcc\x43\xa9\xdc\x40", 1, + 32, 768 }, + { 96, 256, 128, 261, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x66\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\xf6" + "\x42\x96\x97\x5a\xf7\xfc\xed\x16\x81\x81\xf7\x6c\x65\x08\xe1\xc9\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x49\x75\x06" + "\x0f\x7d\xde\xf4\xa0\x98\x69\x93\x33\xb3\x0f\xbf\x7c\x45\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\x81\x40\xe6\x4d\x91" + "\x8a\xbf\x97\xbf\x98\xd7\x5a\x7e\x08\xb3\xbc", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x6f" + "\x45\xb8\x91\xd4\xd2\x14\x51\x93\x55\x20\x0c\x68\x50\xe1\x6f\xde\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe6\xbf\xb5" + "\xfc\x97\xa1\xa4\x69\x05\x43\x94\xd9\x52\xcf\x3e\x01\xde\xff\xff\xff" + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe6\xbf\xb5\xfc\x97" + "\xa1\xa4\x69\x05\x43\x94\xd9\x52\xcf\x3e\x01", + "\x35\x3e\x30\x4f\xd8\x55\x32\x86\xb2\x6e\x0d\x59\x94\x2f\xe7\xcd", 1, + 32, 768 }, + { 96, 256, 128, 262, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x98\x41\xcf\xc9\x27\xa5\x7d\xc4\x91\xab\x35\x42\x7f\xf9\x35\xe6\x6e" + "\xf8\xd1\xf9\x71\xda\x17\x43\x7a\x2b\x5e\x04\xfb\xca\x16\x71\xa6\x83" + "\xc8\xf9\xf9\xe6\x78\x0f\xda\x49\x40\xdd\xed\xd7\x6b\xf2\x58\x35\x4c" + "\x0c\x15\x80\xaf\x36\x62\xd5\xf8\x15\x1e\x3f\x7e\x82\x2a\x64\xd3\x3f" + "\x94\x75\xbb\x3f\xf8\x84\x68\x8e\x30\x2e\xc7\xcd", + "\x01\xaf\x6e\x51\x34\x89\x56\x2c\xe2\x18\xaa\x5a\x93\xad\x84\x47\xf7" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb1\x3b" + "\x24\x9c\x59\x57\xb8\x7e\xb7\x61\x1c\x95\x0c\x9c\x6b\x03\xf7\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb1\x3b\x24\x9c" + "\x59\x57\xb8\x7e\xb7\x61\x1c\x95\x0c\x9c\x6b\x03", + "\x0a\xeb\x04\xec\xf7\xde\xf4\x0c\x42\x02\x5b\xba\xe5\x50\x91\x69", 1, + 32, 640 }, + { 96, 256, 128, 263, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x42\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x0b" + "\x61\xbf\x9b\x7c\xaf\x83\xcc\x34\xda\x62\x55\x93\x51\x42\x89\xe8\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x69\x6a\x5c" + "\x7f\xb9\xda\x9c\xd4\xa3\x9c\x85\x91\x08\x6d\xb4\x2d\x64\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xa1\x5f\xbc\x3d\x55" + "\x8e\xd7\xe3\x84\x6d\xc1\xf8\xc5\x6a\xb8\xed", + "\xdb\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x92" + "\x66\x91\x9d\xf2\x8a\x6b\x70\xb1\x0e\xc3\xae\x97\x64\xab\x07\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc6\xa0\xef" + "\x8c\x53\xa5\xcc\x1d\x3e\xb6\x82\x7b\xe9\xad\x35\x50\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc6\xa0\xef\x8c\x53" + "\xa5\xcc\x1d\x3e\xb6\x82\x7b\xe9\xad\x35\x50", + "\x8f\xc4\xf7\x7a\x6e\xe0\x52\xa4\xc3\x14\x78\x0b\x8d\xf9\xa2\xd0", 1, + 32, 768 }, + { 96, 256, 128, 264, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x4b\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\xf2" + "\x8e\x4d\x0f\x20\xca\x16\x44\x47\x0c\x9c\xda\xc6\x00\x08\x87\xed\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x14\x64\x77" + "\x5b\xac\xd5\xc6\x9f\xe2\x6e\x1a\x74\x96\x8e\xa2\x7e\x61\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xdc\x51\x97\x19\x40" + "\x81\x8d\xa8\xc5\x9f\x5e\x1d\x5b\x89\xae\xbe", + "\xd2\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x6b" + "\x89\x63\x09\xae\xef\xfe\xf8\xc2\xd8\x3d\x21\xc2\x35\xe1\x09\xfa\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xbb\xae\xc4" + "\xa8\x46\xaa\x96\x56\x7f\x44\x1d\x9e\x77\x4e\x23\x03\xfa\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xbb\xae\xc4\xa8\x46" + "\xaa\x96\x56\x7f\x44\x1d\x9e\x77\x4e\x23\x03", + "\x23\x2f\xf7\x8a\x96\xf3\x47\xb4\x53\xba\x71\x1b\x79\x36\x7e\xe0", 1, + 32, 768 }, + { 96, 256, 128, 265, 
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x2d\xd4\xcd\x40", + "\xff\xff\xff\xff", + "\x4d\x11\x5e\x67\xec\xd3\xd4\x17\x8c\x4c\x60\xe7\x13\xab\x4e\x5e\x6e" + "\xe6\x28\xfc\x4b\x58\x30\x18\x4c\xd2\x93\x36\x4a\x21\x3e\x84\xfe\x47" + "\x13\x9a\x5f\x4e\x3f\x8e\x92\xd7\xa3\xb7\x1e\xb4\xff\x0e\x29\xdb\x95" + "\x3a\xd5\x45\x8f\xea\x61\xf0\x13\xea\x18\x54\xfe\x75\x72\xa0\x08\x5c" + "\x32\xdd\xfc\xbe\xb0\x1a\x8b\xe4\xc3\x4d\x53\x31\xe1\xee\x75\x78\x39" + "\x11\xc4\xdd\x46\x01\x57\x83\xd5\x53\xf2\xb5", + "\xd4\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf7" + "\xe1\x06\xfa\xc5\x7d\xd8\xa4\xc9\x06\x32\xcd\x4e\x14\xd7\x0a\xe9\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x86\x11\x26" + "\xc9\x3f\x3a\xdf\x23\xfc\xda\x14\x00\xf9\x94\x7f\x08\xe9\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x86\x11\x26\xc9\x3f" + "\x3a\xdf\x23\xfc\xda\x14\x00\xf9\x94\x7f\x08", + "\xe0\x0d\x2e\x8b\xae\x5d\x09\xc2\x8e\x9b\xf5\x94\x09\x54\x5d\x09", 1, + 32, 768 }, + { 96, 256, 128, 266, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x19\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xbc" + "\x28\x6f\xd9\x79\x80\x79\x51\xb1\x83\xa1\x88\x93\x0a\xd1\x5e\xdc\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x89\x0e\x65" + "\x9f\xd3\x02\x8c\x90\x4e\x65\x01\x8f\xdf\xd6\x03\x83\x33\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x33\xb7\xfb\x50\xc3" + "\xe7\xeb\xca\x97\x0f\x6f\x89\xa8\x8a\x82\xd6", + "\xf9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01" + "\x5d\x15\x65\x92\x4f\x6c\x74\x18\xde\x9b\xab\xf8\xbe\x44\x07\xed\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2e\x11\x0e" + "\x5e\x1c\x04\x68\xcb\xaa\xd9\x9c\x8a\xbe\xff\xff\x07\xed\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x2e\x11\x0e\x5e\x1c" + "\x04\x68\xcb\xaa\xd9\x9c\x8a\xbe\xff\xff\x07", + "\x47\xe5\xd4\x29\x42\x39\xdb\x73\xb8\x36\xc0\x40\x70\xff\x5b\x2d", 1, + 32, 768 }, + { 96, 256, 128, 267, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x1f\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x83" + "\x9f\x81\x1a\xd0\x31\x0c\x77\x05\x2f\x45\x32\x0b\x0d\x95\x60\xc4\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x70\xd6\xb1" + "\x4f\xd2\x09\xfe\xdf\x26\x1f\xd1\xd2\x50\xd3\x47\x8d\x2b\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\xca\x6f\x2f\x80\xc2" + "\xec\x99\x85\xff\x75\xbf\xd4\x27\x8f\xc6\xd8", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3e" + "\xea\xfb\xa6\x3b\xfe\x19\x52\xac\x72\x7f\x11\x60\xb9\x00\x39\xf5\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd7\xc9\xda" + "\x8e\x1d\x0f\x1a\x84\xc2\xa3\x4c\xd7\x31\xfa\xbb\x09\xf5\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd7\xc9\xda\x8e\x1d" + "\x0f\x1a\x84\xc2\xa3\x4c\xd7\x31\xfa\xbb\x09", + "\x23\x2c\x88\x2f\x7a\x1a\x2f\x80\x8c\xcf\x26\x49\x6c\xff\x5b\x3d", 1, + 32, 768 }, + { 96, 256, 128, 268, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x97\x31\x1c\xd6\xe2\xd2\x5a\x7b\x4e\xaa\x16\xf0\xa6\x1c\xa6\x24\x6b" + "\x8a\x85\x43\x14\x30\xea\xda\x56\xa2\xc5\xdc\x94\x4b\x6a\xa6\x95\x13" + "\x63\x10\xb6\xb6\xb5\xc1\x7c\x9f\x8c\x02\xba\x7d\x0a\xeb\x71\xe0\x94" + "\x3e\x30\xf9\x1b\xa4\x1b\x43\x62\xfa\x9e\xd6\x03\x7b\x7a\x32\x9e\xe1" + "\xa0\xaf\x16\x0f\xc7\x6d\x3d\xe7\xe9\x91\x02\xc3", + "\x77\x10\x78\xb7\xd5\x9f\xe2\x50\x9a\xeb\x0b\x0e\x34\x84\x4c\x61\xd6" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa4\x1c" + "\x2c\xb9\xeb\xa7\x86\x6f\x50\x68\x4b\x1b\x05\xe3\xab\x00\xd6\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xa4\x1c\x2c\xb9" + "\xeb\xa7\x86\x6f\x50\x68\x4b\x1b\x05\xe3\xab\x00", + "\xd7\x1b\xc7\x0d\x5a\xdc\x74\xe7\xdf\xd8\x94\x06\xfc\x15\xf0\x44", 1, + 32, 640 }, + { 96, 256, 128, 269, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x34\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x74" + "\xcf\x7e\x9d\x82\xb7\xe8\xed\x9e\xc9\x65\xf6\xea\x31\x09\x51\xdc\x10" + "\x49\x40\xe0\x8a\x42\x22\x55\x68\x28\xeb\xa4\x59\xf6\x5a\x4a\x00\x6d" + "\x28\x72\x9d\x95\xd7\x9d\x23\x72\xf7\x7a\xee\xab\x35", + "\xd4\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc9" + "\xba\x04\x21\x69\x78\xfd\xc8\x37\x94\x5f\xd5\x81\x85\x9c\x08\xed\x1f" + "\x06\xe9\xbd\x9b\x71\x8c\x79\x9f\xef\xf2\x1b\xc7\x57\xb1\xed\x1f\x06" + "\xe9\xbd\x9b\x71\x8c\x79\x9f\xef\xf2\x1b\xc7\x57\xb1", + "\x21\xe6\x39\x87\xd4\x94\x67\x3f\x30\x40\xae\x9d\xe2\xbc\x0d\xa0", 1, + 32, 512 }, + { 96, 256, 128, 270, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\xe7\x2b\x83\x51\x4e\x5e\x50\x50\x90\x70\x35\x9c\x1c\xac\x7e\x1c\x42" + "\x8a\x85\x43\x14\x30\xea\xda\x56\xa2\xc5\xdc\x94\x4b\x6a\xa6\xda\xd3" + "\x59\x50\xd8\xa9\xb5\x5a\x47\x2f\x9b\xb8\x86\x0a\x52\x63\x58\xe0\x94" + "\x3e\x30\xf9\x1b\xa4\x1b\x43\x62\xfa\x9e\xd6\x03\x7b\x35\xf2\xa4\xa1" + "\xce\xb0\x16\x94\xfc\xdd\x2a\x5d\xd5\xe6\x5a\x4b", + "\x07\x0a\xe7\x30\x79\x13\xe8\x7b\x44\x31\x28\x62\x8e\x34\x94\x59\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xeb\xdc" + "\x16\xf9\x85\xb8\x86\xf4\x6b\xd8\x5c\xa1\x39\x94\xf3\x88\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xeb\xdc\x16\xf9" + "\x85\xb8\x86\xf4\x6b\xd8\x5c\xa1\x39\x94\xf3\x88", + "\xe4\xfb\x94\x5d\x6a\x2d\x0b\x94\x78\x34\x31\x7c\xc4\x15\xf0\x24", 1, + 32, 640 }, + { 96, 256, 128, 271, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x8c\x61\x65\xf4\x45\x44\x35\x88\x04\x1b\x6e\x04\x4f\xb6\xba\xae\x72" + "\x8a\x85\x43\x14\x30\xea\xda\x56\xa2\xc5\xdc\x94\x4b\x6a\xa6\x88\x1a" + "\x54\xc0\x95\x16\xa1\xf1\xca\xe7\xb9\xdd\x71\x13\x0e\xe1\x68\xe0\x94" + "\x3e\x30\xf9\x1b\xa4\x1b\x43\x62\xfa\x9e\xd6\x03\x7b\x67\x3b\xa9\x31" + "\x83\x0f\x02\x3f\x71\x15\x08\x38\x22\xff\x06\xc9", + "\x6c\x40\x01\x95\x72\x09\x8d\xa3\xd0\x5a\x73\xfa\xdd\x2e\x50\xeb\xcf" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb9\x15" + 
"\x1b\x69\xc8\x07\x92\x5f\xe6\x10\x7e\xc4\xce\x8d\xaf\x0a\xcf\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb9\x15\x1b\x69" + "\xc8\x07\x92\x5f\xe6\x10\x7e\xc4\xce\x8d\xaf\x0a", + "\xc0\x42\x48\x63\xa2\x0e\x5f\xa0\x4c\xcd\x97\x84\xc0\x15\xf0\x34", 1, + 32, 640 }, + { 96, 256, 128, 272, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x18\xe3\x61\x74\x54\x5f\xa7\xec\x9e\xa9\xf0\x5d\x70\x57\xc5\xca\x63" + "\x8a\x85\x43\x14\x30\xea\xda\x56\xa2\xc5\xdc\x94\x4b\x6a\xa6\x43\x4e" + "\x1c\x5e\x71\x00\x5b\x69\x0c\xa5\xcb\x8d\x58\x0b\x89\xed\x79\xe0\x94" + "\x3e\x30\xf9\x1b\xa4\x1b\x43\x62\xfa\x9e\xd6\x03\x7b\xac\x6f\xe1\xaf" + "\x67\x19\xf8\xa7\xb7\x57\x7a\x68\x0b\xe7\x81\xc5", + "\xf8\xc2\x05\x15\x63\x12\x1f\xc7\x4a\xe8\xed\xa3\xe2\xcf\x2f\x8f\xde" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x72\x41" + "\x53\xf7\x2c\x11\x68\xc7\x20\x52\x0c\x94\xe7\x95\x28\x06\xde\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x72\x41\x53\xf7" + "\x2c\x11\x68\xc7\x20\x52\x0c\x94\xe7\x95\x28\x06", + "\xaa\x72\x93\xff\xe5\xdb\x30\xa3\x1f\x25\x81\xe0\xe7\xae\x56\xed", 1, + 32, 640 }, + { 96, 256, 128, 273, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x12\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x54" + "\x30\x5d\xff\x6b\x61\xc4\x0b\x77\x5c\x35\x2d\x02\x5c\x1a\x56\xd7\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\xbc\xe5\x74" + "\xe9\xe1\x1a\xfe\xdb\xdc\xa0\x21\xe5\x3b\xb9\x18\x83\x38\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x06\x5c\xea\x26\xf1" + "\xff\x99\x81\x05\xca\x4f\xe3\x4c\xe5\x99\xd6", + "\xf2\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xe9" + "\x45\x27\x43\x80\xae\xd1\x2e\xde\x01\x0f\x0e\x69\xe8\x8f\x0f\xe6\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1b\xfa\x1f" + "\x28\x2e\x1c\x1a\x80\x38\x1c\xbc\xe0\x5a\x90\xe4\x07\xe6\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1b\xfa\x1f\x28\x2e" + "\x1c\x1a\x80\x38\x1c\xbc\xe0\x5a\x90\xe4\x07", + "\x42\xe5\xd4\x3d\x1e\x80\x8e\x79\xf0\x17\x14\x4d\x44\x98\xc2\x35", 1, + 32, 768 }, + { 96, 256, 128, 274, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x1f\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xdf" + "\x05\x99\x19\x4b\x0c\xe8\x90\xcc\x1d\x8e\xb3\x83\xb5\x7f\x38\xdc\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x35\xdf\x81" + "\x07\x7d\x06\x80\x77\xce\x80\x5e\xa5\x92\xf6\xf8\x88\x33\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x8f\x66\x1f\xc8\x6d" + "\xe3\xe7\x2d\x17\xea\x30\xa3\xe5\xaa\x79\xdd", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x62" + "\x70\xe3\xa5\xa0\xc3\xfd\xb5\x65\x40\xb4\x90\xe8\x01\xea\x61\xed\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x92\xc0\xea" + "\xc6\xb2\x00\x64\x2c\x2a\x3c\xc3\xa0\xf3\xdf\x04\x0c\xed\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x92\xc0\xea\xc6\xb2" + "\x00\x64\x2c\x2a\x3c\xc3\xa0\xf3\xdf\x04\x0c", + 
"\x6c\xf2\xf9\x23\x0a\xf8\x67\x9e\x7e\xcb\x19\x42\x13\x62\xfc\xe3", 1, + 32, 768 }, + { 96, 256, 128, 275, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x39\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x40" + "\x92\xe1\xf9\xa2\x2c\x8b\x18\x18\x4d\x80\x5c\x12\x8a\xde\x57\xc7\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x64\xfe\x8b" + "\x9b\xdd\x21\x5a\x62\x09\x73\xaf\xfe\xfe\x93\x39\x85\x28\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\xde\x47\x15\x54\xcd" + "\xc4\x3d\x38\xd0\x19\xc1\xf8\x89\xcf\xb8\xd0", + "\xd9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfd" + "\xe7\x9b\x45\x49\xe3\x9e\x3d\xb1\x10\xba\x7f\x79\x3e\x4b\x0e\xf6\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc3\xe1\xe0" + "\x5a\x12\x27\xbe\x39\xed\xcf\x32\xfb\x9f\xba\xc5\x01\xf6\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc3\xe1\xe0\x5a\x12" + "\x27\xbe\x39\xed\xcf\x32\xfb\x9f\xba\xc5\x01", + "\x6d\x46\xd2\x23\x0a\x98\x48\xd5\x18\xf9\xd9\x4b\xb2\xc4\x9c\xaa", 1, + 32, 768 }, + { 96, 256, 128, 276, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x12\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x32" + "\x7f\x3a\x1b\xef\xb4\x28\x7c\x17\x45\x03\x91\xed\x0e\xb8\x54\xd6\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x14\x60\xd3" + "\x54\x5c\x29\xdd\xc7\x90\x71\x1b\x8e\x75\x33\x69\x85\x39\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\xae\xd9\x4d\x9b\x4c" + "\xcc\xba\x9d\x49\x1b\x75\x88\x02\x6f\xe8\xd0", + "\xf2\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x8f" + "\x0a\x40\xa7\x04\x7b\x3d\x59\xbe\x18\x39\xb2\x86\xba\x2d\x0d\xe7\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb3\x7f\xb8" + "\x95\x93\x2f\x39\x9c\x74\xcd\x86\x8b\x14\x1a\x95\x01\xe7\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xb3\x7f\xb8\x95\x93" + "\x2f\x39\x9c\x74\xcd\x86\x8b\x14\x1a\x95\x01", + "\x74\xdd\xa1\x2e\x05\x58\x87\x7b\xc0\xe4\x0c\x3e\xac\xe0\xaf\x29", 1, + 32, 768 }, + { 96, 256, 128, 277, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x1b\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x85" + "\xb6\x76\x64\xee\x49\xfa\x34\x7f\xbf\xd2\xdd\x92\x00\x7c\x57\xde\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\xfb\x27\xee" + "\x07\x5b\x3c\x0f\x0f\x68\x2b\xab\xdd\xe6\x3d\xad\x87\x31\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x41\x9e\x70\xc8\x4b" + "\xd9\x68\x55\xb1\x41\xc5\xdb\x91\x61\x2c\xd2", + "\xfb\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x38" + "\xc3\x0c\xd8\x05\x86\xef\x11\xd6\xe2\xe8\xfe\xf9\xb4\xe9\x0e\xef\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5c\x38\x85" + "\xc6\x94\x3a\xeb\x54\x8c\x97\x36\xd8\x87\x14\x51\x03\xef\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x5c\x38\x85\xc6\x94" + "\x3a\xeb\x54\x8c\x97\x36\xd8\x87\x14\x51\x03", + "\x50\x24\x55\x34\x3d\x39\xdb\x87\x94\x7d\x73\x46\xa8\xe0\xaf\x39", 1, + 32, 768 }, + { 96, 
256, 128, 278, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x36\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x11" + "\x32\x81\x1b\x2f\x18\x32\x1b\xa9\x9b\x12\x43\x2c\x7f\x86\x5a\xa3\x35" + "\x2c\xd2\xd7\xac\x70\xb4\xc6\xf5\x41\x97\x67\x92\x6e\x20\x35\x25\x08" + "\xba\x45\xbb\xa7\x41\x0e\xbe\x1b\x8b\xb9\x25\x33\x4f", + "\xd6\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xac" + "\x47\xfb\xa7\xc4\xd7\x27\x3e\x00\xc6\x28\x60\x47\xcb\x13\x03\x92\x3a" + "\x63\x7b\x8a\xbd\x43\x1a\xea\x02\x86\x8e\xd8\x0c\xcf\xcb\x92\x3a\x63" + "\x7b\x8a\xbd\x43\x1a\xea\x02\x86\x8e\xd8\x0c\xcf\xcb", + "\x14\xfb\xa1\x49\xd1\xc0\xed\xc8\xaa\x66\x58\x51\x12\x6b\x5a\xfd", 1, + 32, 512 }, + { 96, 256, 128, 279, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x1f\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xf9" + "\x99\x46\x10\x58\xf6\xd7\x73\x3e\x5c\xd0\xd1\x63\x9d\x90\x25\xcb\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x52\x0a\x0d" + "\xa5\x04\x39\xdb\x00\xe2\x89\xe1\x79\x13\x42\x06\x8e\x24\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\xe8\xb3\x93\x6a\x14" + "\xdc\xbc\x5a\x3b\xe3\x8f\x7f\x64\x1e\x87\xdb", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x44" + "\xec\x3c\xac\xb3\x39\xc2\x56\x97\x01\xea\xf2\x08\x29\x05\x7c\xfa\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf5\x15\x66" + "\x64\xcb\x3f\x3f\x5b\x06\x35\x7c\x7c\x72\x6b\xfa\x0a\xfa\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xf5\x15\x66\x64\xcb" + "\x3f\x3f\x5b\x06\x35\x7c\x7c\x72\x6b\xfa\x0a", + "\xbf\x7f\xbd\x42\x2c\xbf\x0e\x70\x0f\xd1\x60\x5b\xe8\xfd\x21\x2f", 1, + 32, 768 }, + { 96, 256, 128, 280, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x15\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xcc" + "\x16\x29\xa4\x0c\xd1\x1e\xaf\xdf\x04\x13\x8b\x45\xaf\xe4\x58\xef\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x34\x0a\xc9" + "\xb4\x5a\x58\x96\xa4\x18\xa8\xce\xe8\x03\x2e\x07\x8f\x00\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x8e\xb3\x57\x7b\x4a" + "\xbd\xf1\xfe\xc1\xc2\xa0\xee\x74\x72\x86\xda", + "\xf5\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x71" + "\x63\x53\x18\xe7\x1e\x0b\x8a\x76\x59\x29\xa8\x2e\x1b\x71\x01\xde\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x93\x15\xa2" + "\x75\x95\x5e\x72\xff\xfc\x14\x53\xed\x62\x07\xfb\x0b\xde\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x93\x15\xa2\x75\x95" + "\x5e\x72\xff\xfc\x14\x53\xed\x62\x07\xfb\x0b", + "\xc6\xf2\x32\x04\x86\x5b\x0a\xdd\xe0\x07\x00\x37\xd6\x53\x8d\xd3", 1, + 32, 768 }, + { 96, 256, 128, 281, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x31\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xff" + 
"\x74\x6e\xf5\x3e\xc3\x35\x7c\xbc\x3c\x3c\xe4\xab\x1d\x2d\x51\xed\x9e" + "\xb4\x56\xdc\x9d\x9b\x59\xf6\x56\xa5\xd2\xd9\x74\xd2\x6a\x7b\x8e\x90" + "\x3e\x4e\x8a\x4c\xac\x3e\x1d\xff\xce\x07\xc3\x8f\x05", + "\xd1\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x42" + "\x01\x14\x49\xd5\x0c\x20\x59\x15\x61\x06\xc7\xc0\xa9\xb8\x08\xdc\x91" + "\xfb\xff\x81\x8c\xa8\xf7\xda\xa1\x62\xcb\x66\xea\x73\x81\xdc\x91\xfb" + "\xff\x81\x8c\xa8\xf7\xda\xa1\x62\xcb\x66\xea\x73\x81", + "\x8c\xff\x61\xb7\xb3\x91\x9e\xd6\xbd\xe7\x2b\x36\xe0\xd3\x13\x26", 1, + 32, 512 }, + { 96, 256, 128, 282, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x19\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xbf" + "\x28\x6f\xd9\x79\x80\x79\x51\xb1\x83\xa1\x88\x93\x0a\xd1\x5e\xce\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x64\x41\x3d" + "\x71\x93\x9b\x9c\xb0\xa4\xd3\x2e\xf1\x15\xda\x9e\x10\x21\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\xde\xf8\xa3\xbe\x83" + "\x7e\xfb\xea\x7d\xb9\x40\xf7\x62\x86\x1f\x45", + "\xf9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x02" + "\x5d\x15\x65\x92\x4f\x6c\x74\x18\xde\x9b\xab\xf8\xbe\x44\x07\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc3\x5e\x56" + "\xb0\x5c\x9d\x78\xeb\x40\x6f\xb3\xf4\x74\xf3\x62\x94\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc3\x5e\x56\xb0\x5c" + "\x9d\x78\xeb\x40\x6f\xb3\xf4\x74\xf3\x62\x94", + "\x36\x9c\xf1\x70\x11\xca\xe4\x75\x39\xe2\x72\x3f\x01\x0c\xf9\x80", 1, + 32, 768 }, + { 96, 256, 128, 283, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x19\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xbd" + "\x28\x6f\xd9\x79\x80\x79\x51\xb1\x83\xa1\x88\x93\x0a\xd1\x5e\xe3\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\xf2\x5e\x78" + "\xfe\x1b\x53\xae\x41\x6d\x1f\xbc\x69\x85\x22\x61\x8f\x0c\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x48\xe7\xe6\x31\x0b" + "\xb6\xc9\x1b\xb4\x75\xd2\x6f\xf2\x7e\xe0\xda", + "\xf9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00" + "\x5d\x15\x65\x92\x4f\x6c\x74\x18\xde\x9b\xab\xf8\xbe\x44\x07\xd2\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x55\x41\x13" + "\x3f\xd4\x55\x4a\x1a\x89\xa3\x21\x6c\xe4\x0b\x9d\x0b\xd2\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x55\x41\x13\x3f\xd4" + "\x55\x4a\x1a\x89\xa3\x21\x6c\xe4\x0b\x9d\x0b", + "\x53\x2e\xb8\xe2\x72\xa8\xd1\x71\x37\x8b\x0d\x42\xdf\xf2\xbe\xd9", 1, + 32, 768 }, + { 96, 256, 128, 284, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x32\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x25" + "\x8d\x5d\x3e\x44\x16\x83\xf5\x46\xbe\xba\x2e\x23\x75\x5f\x5c\xce\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x9d\x13\xfd" + "\xf8\xfa\x89\x98\x36\xfa\x5c\x41\x0d\x4c\xcd\x25\xea\x21\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x27\xaa\x63\x37\xea" + "\x6c\xff\x6c\x23\x36\x2f\x0b\x3b\x91\xa4\xbf", + 
"\xd2\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x98" + "\xf8\x27\x82\xaf\xd9\x96\xd0\xef\xe3\x80\x0d\x48\xc1\xca\x05\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x0c\x96" + "\x39\x35\x8f\x7c\x6d\x1e\xe0\xdc\x08\x2d\xe4\xd9\x6e\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x3a\x0c\x96\x39\x35" + "\x8f\x7c\x6d\x1e\xe0\xdc\x08\x2d\xe4\xd9\x6e", + "\xd1\xbe\x74\x26\xcd\x12\x44\x6f\xe5\x2e\x8d\x45\x33\x1e\x08\x35", 1, + 32, 768 }, + { 96, 256, 128, 285, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x1f\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xd6" + "\x4a\xdd\x2a\xa3\xc5\xa3\x0a\x31\xd9\xe6\x5e\x90\xf9\x3a\xd1\xcb\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\xde\x9a\xea" + "\xb8\x61\x44\xd5\x46\x48\x11\xb2\x37\x3b\xa4\xcc\x83\x24\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x64\x23\x74\x77\x71" + "\xa1\xb2\x1c\x91\x7b\xdc\x31\x4c\xf8\x4d\xd6", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x6b" + "\x3f\xa7\x96\x48\x0a\xb6\x2f\x98\x84\xdc\x7d\xfb\x4d\xaf\x88\xfa\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x79\x85\x81" + "\x79\xae\x42\x31\x1d\xac\xad\x2f\x32\x5a\x8d\x30\x07\xfa\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x79\x85\x81\x79\xae" + "\x42\x31\x1d\xac\xad\x2f\x32\x5a\x8d\x30\x07", + "\x62\x63\x0c\x18\xde\x8c\x10\x87\x6a\xdb\x9f\x30\xf3\x00\x96\x3f", 1, + 32, 768 }, + { 96, 256, 128, 286, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x1f\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xcc" + "\x34\x92\x27\x2b\x8a\x4b\x11\x2a\x4e\x7d\x7c\xcf\x09\x26\x92\xce\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x30\xce\x67" + "\x8e\x93\x75\xb2\xaf\x0b\x82\xc2\xd2\xfb\xd7\x92\x8c\x21\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x8a\x77\xf9\x41\x83" + "\x90\xd5\xf5\xd2\xe8\xac\xd4\x8c\x8b\x13\xd9", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x71" + "\x41\xe8\x9b\xc0\x45\x5e\x34\x83\x13\x47\x5f\xa4\xbd\xb3\xcb\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x97\xd1\x0c" + "\x4f\x5c\x73\x56\xf4\xef\x3e\x5f\xd7\x9a\xfe\x6e\x08\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x97\xd1\x0c\x4f\x5c" + "\x73\x56\xf4\xef\x3e\x5f\xd7\x9a\xfe\x6e\x08", + "\xfe\xb6\x41\x2b\x90\x31\xf0\x76\xed\xdc\xd9\x42\x6f\xff\x5b\x31", 1, + 32, 768 }, + { 96, 256, 128, 287, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x34\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x72" + "\x2b\x65\x49\xc9\xdf\x0f\x4b\x04\xb5\xf7\x43\x22\x03\xfa\x54\xce\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x87\xde\x18" + "\x6c\xd2\x8e\x43\x54\x4c\x73\xde\x62\x8f\xd1\xd6\x0e\x21\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x3d\x67\x86\xa3\xc2" + "\x6b\x24\x0e\x95\x19\xb0\x64\xf8\x8d\x57\x5b", + "\xd4\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xcf" + 
"\x5e\x1f\xf5\x22\x10\x1a\x6e\xad\xe8\xcd\x60\x49\xb7\x6f\x0d\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x20\xc1\x73" + "\xad\x1d\x88\xa7\x0f\xa8\xcf\x43\x67\xee\xf8\x2a\x8a\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x20\xc1\x73\xad\x1d" + "\x88\xa7\x0f\xa8\xcf\x43\x67\xee\xf8\x2a\x8a", + "\xda\xfd\xf4\x30\xc8\x12\x44\x83\xc1\x75\x40\x4b\x6b\xff\x5b\x41", 1, + 32, 768 }, + { 96, 256, 128, 288, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x3d\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xc5" + "\x62\x96\x99\xcf\xd4\xd9\x03\x6c\xef\x47\x8e\xd7\x05\xbe\x56\x50\xf5" + "\x75\x88\x2c\x38\x00\xf7\x57\xea\x6e\x0f\x8c\x6d\x47\xac\xc6\xe5\x51" + "\xe0\xbe\x2f\xd7\x02\x9f\xa1\x34\x13\x52\xda\x1a\xc3", + "\xdd\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x78" + "\x17\xec\x25\x24\x1b\xcc\x26\xc5\xb2\x7d\xad\xbc\xb1\x2b\x0f\x61\xfa" + "\x3a\x21\x71\x29\x33\x59\x7b\x1d\xa9\x16\x33\xf3\xe6\x47\x61\xfa\x3a" + "\x21\x71\x29\x33\x59\x7b\x1d\xa9\x16\x33\xf3\xe6\x47", + "\xf8\x80\x0c\x5b\x62\x83\xdd\xdf\xc4\x1f\x93\x5c\x01\xbd\x0d\x24", 1, + 32, 512 }, + { 96, 256, 128, 289, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x1f\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x66" + "\xd6\x24\xf2\x88\xf5\x29\x41\xca\x24\x86\x5c\xe9\x6f\x0d\x97\x36\xff" + "\x33\xa2\x7c\x23\xf4\x97\x6f\xc7\x4f\x1f\xcd\x82\xf5\xcc\xa0\xef\x17" + "\xca\xee\x34\x23\x62\xa7\x8c\x15\x03\x13\x35\xa8\xa3", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xdb" + "\xa3\x5e\x4e\x63\x3a\x3c\x64\x63\x79\xbc\x7f\x82\xdb\x98\xce\x07\xf0" + "\x7c\x0b\x21\x32\xc7\x39\x43\x30\x88\x06\x72\x1c\x54\x27\x07\xf0\x7c" + "\x0b\x21\x32\xc7\x39\x43\x30\x88\x06\x72\x1c\x54\x27", + "\x38\xbf\xb8\x31\x8c\x62\x7d\x86\xc3\x4b\xab\x1f\x1e\xbd\x0d\xb0", 1, + 32, 512 }, + { 96, 256, 128, 290, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\xf4\xeb\xbe\x3f\xca\x96\xbc\x48\x85\xb3\x55\x82\xc4\x3e\x0e\xb3\x58" + "\x8a\x85\x43\x14\x30\xea\xda\x56\xa2\xc5\xdc\x94\x4b\x6a\xa6\xb4\x57" + "\x0e\x84\x46\xe8\x86\xbc\xbf\xf8\x2a\x24\xf4\x9b\xe5\xed\x42\xe0\x94" + "\x3e\x30\xf9\x1b\xa4\x1b\x43\x62\xfa\x9e\xd6\x03\x7b\x5b\x76\xf3\x75" + "\x50\xf1\x25\x72\x04\x0a\x9b\xc1\xa7\x77\xed\xc5", + "\x14\xca\xda\x5e\xfd\xdb\x04\x63\x51\xf2\x48\x7c\x56\xa6\xe4\xf6\xe5" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x85\x58" + "\x41\x2d\x1b\xf9\xb5\x12\x93\x0f\xed\x3d\x4b\x05\x44\x06\xe5\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x85\x58\x41\x2d" + "\x1b\xf9\xb5\x12\x93\x0f\xed\x3d\x4b\x05\x44\x06", + "\xaf\x72\x93\xeb\x09\x95\x7d\x9d\xe7\x43\x2d\xd4\x13\x16\xf0\xe4", 1, + 32, 640 }, + { 96, 256, 128, 291, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x1a\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x57" + 
"\x1a\x3f\xca\x3c\xda\x7d\xef\x4c\x93\xd4\xa3\x82\xca\x3a\x57\xea\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\x76\xcd\xdb" + "\xee\x2f\x18\x57\x76\x17\x4f\x6d\xf3\xbb\xe5\xb3\x81\x05\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\xcc\x74\x45\x21\x3f" + "\xfd\x30\x2c\xce\x25\x03\xf5\xcc\xb9\x32\xd4", + "\xfa\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xea" + "\x6f\x45\x76\xd7\x15\x68\xca\xe5\xce\xee\x80\xe9\x7e\xaf\x0e\xdb\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd1\xd2\xb0" + "\x2f\xe0\x1e\xb3\x2d\xf3\xf3\xf0\xf6\xda\xcc\x4f\x05\xdb\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xd1\xd2\xb0\x2f\xe0" + "\x1e\xb3\x2d\xf3\xf3\xf0\xf6\xda\xcc\x4f\x05", + "\xe1\x78\xb0\xd5\xeb\x9b\xc5\x51\xfa\x64\x5c\x49\xf9\xf1\x76\x67", 1, + 32, 768 }, + { 96, 256, 128, 292, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x1f\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\xbe" + "\x31\xa5\x01\x53\x6a\x7c\x91\xe4\xa1\x02\xcc\x27\xcd\xfe\x09\xd2\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\xdd\x94\x16" + "\xa1\x2e\x2f\x81\xbd\xee\x02\x3d\x46\x2f\xee\xf7\x83\x3d\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x67\x2d\x88\x6e\x3e" + "\xca\xe6\xe7\x37\x68\x53\x40\x58\xb2\x76\xd6", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x03" + "\x44\xdf\xbd\xb8\xa5\x69\xb4\x4d\xfc\x38\xef\x4c\x79\x6b\x50\xe3\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7a\x8b\x7d" + "\x60\xe1\x29\x65\xe6\x0a\xbe\xa0\x43\x4e\xc7\x0b\x07\xe3\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x7a\x8b\x7d\x60\xe1" + "\x29\x65\xe6\x0a\xbe\xa0\x43\x4e\xc7\x0b\x07", + "\xbd\xbf\x63\xdb\x23\x7d\x19\x5e\xce\xfd\xc2\x51\xf5\xf1\x76\x77", 1, + 32, 768 }, + { 96, 256, 128, 293, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x03\xe7\x6f\x6f", + "\xff\xff\xff\xff", + "\x3e\xde\x9b\x9e\xc8\xb2\x47\xd4\x2b\xbe\xe2\x01\x6d\x67\x15\xba\x85" + "\x67\xa7\xfd\xe8\x12\xa3\xaa\x2f\x55\x2a\x33\xc1\x71\x8c\x58\xe2\xf0" + "\xb0\x56\xa2\xee\xcc\x51\xd3\x08\x38\xe6\x40\x61\x5e\x14\xbb\x87\x29" + "\xfd\x14\x8f\x23\xb2\xa9\x16\xb7\xf4\x0f\x2f\x29\x81\x0d\xd1\x4d\xa7" + "\xb4\xf7\x6f\x9f\x68\xfa\x89\x03\x13\x8d\x56\x3c\x01\x3e\xb7\x32\x04" + "\x6a\x44\xe8\x70\x7c\xd9\xf2\x78\x73\xa8\xd4", + "\xde\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x38" + "\x12\xdd\x41\x03\xdd\xb6\x8f\x86\x08\x10\x10\xaa\xc5\x19\x01\xd3\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1c\x98\x42" + "\x3c\xdb\x89\xc7\xe9\x4d\xaa\x2a\xf1\x6e\x06\xd5\x05\xd3\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x1c\x98\x42\x3c\xdb" + "\x89\xc7\xe9\x4d\xaa\x2a\xf1\x6e\x06\xd5\x05", + "\xb4\xcc\xb4\x22\xbc\x5f\x72\x64\xaf\xf7\x3f\x36\x75\xff\x5b\x19", 1, + 32, 768 }, + { 0, 256, 128, 294, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", "", "", "", "", 0, 0, 0 }, + { 64, 256, 128, 295, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07", "", "", "", 
"", 0, 0, 0 }, + { 88, 256, 128, 296, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a", "", "", "", "", 0, 0, + 0 }, + { 104, 256, 128, 297, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c", "", "", "", + "", 0, 0, 0 }, + { 112, 256, 128, 298, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d", "", "", + "", "", 0, 0, 0 }, + { 128, 256, 128, 299, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "", "", "", "", 0, 0, 0 }, + { 160, 256, 128, 300, + "\x20\x21\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30" + "\x31\x32\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "", "", "", 0, 0, 0 }, + { 0, 0, 0, 0, NULL, NULL, NULL, NULL, NULL, NULL, 0, 0, 0 } +}; diff --git a/test/wycheproof/gmac_test.json.c b/test/wycheproof/gmac_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..8730d33a6799f51f88ae850e5dc1e56887e95477 --- /dev/null +++ b/test/wycheproof/gmac_test.json.c @@ -0,0 +1,3263 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* AES-GMAC, 0.8r12 */ +#include "mac_test.h" +const struct mac_test gmac_test_json[] = { + { 128, 128, 1, + "\x98\xb0\x8a\x72\xff\xde\x0d\xed\x4b\xec\x9d\x2a\x8d\xb5\x72\x35", + "", + "\x51\x18\xcc\x71\x50\x1c\x82\x73\xa4\x36\x62\xb9\x81\x19\x17\x50", 1, + 0, "\x15\x95\x24\x87\x35\x31\x0e\xb7\x10\x51\x9c\x2b", 96 }, + { 128, 128, 2, + "\xf0\xcf\xce\x28\x06\x56\xfa\xbd\x93\xf6\x8b\xa6\xb3\xa3\xad\x6e", + "\x4b", + "\x86\x77\xa0\x16\x0a\x92\x3c\xe7\x43\x7c\xa9\x4b\x8d\xe9\x7d\xa5", 1, + 8, "\x0a\x38\xca\x62\x6b\x43\x0e\xd8\x4a\x2a\x8d\xfe", 96 }, + { 128, 128, 3, + "\xfd\x3c\x53\x81\xf5\x88\xbf\xe3\x32\x70\xe3\x36\xa5\xb0\x28\x96", + "\x02\x6f", + "\xeb\xc6\x96\x93\x10\x51\x0a\x2e\xb8\xac\xb9\xec\x3d\x63\x1f\x29", 1, + 16, "\x02\xd9\x16\x63\x1f\xba\xcf\x27\xc2\x74\xb7\x4c", 96 }, + { 128, 128, 4, + "\x53\x10\x7d\x29\xdc\x58\x4d\x32\xd3\xef\x32\x1a\x85\xee\xc2\x57", + "\x03\x9e\x0f\x5b", + "\x8c\x99\xcb\xf2\x8c\x43\xf9\x0f\xc3\x60\x91\x26\xbd\xf3\x0f\x4b", 1, + 32, "\xdd\x91\xa3\x3d\xce\x80\x9e\x7b\xfe\x68\xd8\x43", 96 }, + { 128, 128, 5, + "\x8e\x2c\x0c\xc6\x24\x72\x8a\xf2\x19\x5d\x46\x77\x55\x02\xec\x13", + "\xf7\xef\xb0\x72\x10\x0c\xc6", + "\x2b\x4e\x06\xa5\x58\xa4\x05\x42\xc0\xfd\xc7\x14\x96\x78\xc8\xb8", 1, + 56, "\xe8\xd4\x3d\x56\x46\xbb\x7d\xde\x8a\x1e\x93\x74", 96 }, + { 128, 128, 6, + "\xf5\x43\xfb\x6a\x66\x73\xe9\xdf\x99\x8a\x30\x58\x08\x4d\xcc\xc1", + "\x80\x63\x71\x9e\x25\xc0\xbc\x9a", + "\xd3\x2c\x12\xb4\xb7\x84\x5a\xda\xca\xda\xbe\x23\x87\xe2\x43\xd4", 1, + 64, "\x0e\x78\xb2\xa2\x25\x0b\x5d\xd1\xda\xeb\xcb\x7b", 96 }, + { 128, 128, 7, + "\xfe\x3f\x26\x6f\x5f\x23\x66\x69\x58\x2b\xd8\x06\x18\x76\x05\x43", + "\xdc\x25\xf0\x4a\xcf\xbd\xbc\x98\x6a\x10\x70\x43\x28\x6e\xae", + "\x60\x39\x2a\xd4\x1d\xeb\xec\x0d\xb4\x3d\x97\xdf\xb3\x79\x8c\x91", 1, + 120, "\x01\x43\x13\x72\x3d\x18\x60\xbf\x8f\x4b\x11\xfd", 96 }, + { 128, 128, 8, + "\x1d\x4a\x92\x39\x4c\x73\x56\xa6\xf5\x28\xcf\xb3\xad\xb8\xf2\x53", + "\x19\xb6\x9d\x41\x78\xd4\x9c\x20\xb4\xee\x58\x46\xcb\x44\x0c\x99", + "\x37\x90\xb3\x2d\x5c\xda\x9f\x8e\xf7\x2b\x64\x3d\x70\x10\x77\xd3", 1, + 128, "\x02\x19\x4e\xce\x3b\xc5\x0c\x51\xc5\x2b\xdd\x83", 96 }, + { 128, 128, 9, + "\xc9\x85\x9c\x8b\x39\xd8\xd7\xe8\x11\xd8\xae\x45\xd9\xb8\x58\x82", + "\x36\x71\x70\x76\x74\x8d\x93\x51\xe5\xa3\xcc\x67\xb9\xe8\x83\x3e" + "\x07", + "\xf0\xb2\x6b\x17\xb2\x8e\xa9\x27\x08\xb1\x2b\x87\x1f\x3b\x30\x71", 1, + 136, "\x11\x0d\x55\x01\x19\xd7\x1a\x09\x45\xbf\xec\xbb", 96 }, + { 128, 128, 10, + "\x8b\x4f\xd3\xd3\x05\x57\x46\xcb\x2d\xcd\x9b\x08\xc2\x98\x38\x6f", + "\x83\xe5\xab\xbb\xfe\xd5\xee\xb3\x53\xb2\x5f\x36\xe4\xad\xc0\xf2\x90" + "\x60\xc5\x5c\x08\xae\x4f\x80", + "\x2c\x9c\x99\xe0\x71\xf1\xd2\x78\x3a\x47\xc7\xa8\x47\xa3\xb7\x6a", 1, + 192, "\x1a\xac\x5c\xca\x3d\x02\xd4\x0f\x57\xfa\xbe\xcf", 96 }, + { 128, 128, 11, + "\x46\xc3\x3f\xff\x88\x98\xc4\xa4\x98\x55\x99\xcc\xc0\x5d\x05\x71", + "\x03\x85\x37\xbf\x94\xe1\xd7\xe1\x4f\x68\xb8\xd5\x45\x82\x41\xe3\x4f" + "\x51\x58\xde\xa6\xf8\x05\x20\x49\xb9\xda\xd8\xfb\x66\xcb\x6e\x0b\xa6" + "\xcc\x22\x3f\x67\x56\x14\xe9\x5d\x15\x27\xc7\x46\xe6\x50\xe9\xfc\x6a" + "\xca\x69\xe6\x82\xd9\xe3\x0a\xc0\x6e\x0a\x48\xc0\xa0\x42\x8e\xc1\xae" + "\x23\x73\x9d\x82\xfc\x24\x6e\x4c\xd6\xbc\x27\xcd\x1d\x54\xc0\xe6\x30" + "\xc1\x62\x4f\xe3\xdb\xd0\xa8\xce\xa3\xb7\xc8\xf2\xd7\xc1\xcc\xa8\xb8" + "\xe0\x86\xca\xb0\x21\x53\xbe\x76\x2d\x59\xe4\x9c\x53\x3c\xb3\x9e\x65" + 
"\xab\x37\xf6\xca\xd2\x29\x0d\x0d\xfe\x2a", + "\xfe\xee\xc6\x27\x6f\x89\x39\x3b\x5a\x52\x22\xe0\xeb\xb1\x60\xfd", 1, + 1032, "\x01\xda\x1c\xa7\x2e\x26\xf6\xd6\xfa\x9c\xe2\x81", 96 }, + { 128, 128, 12, + "\xd0\x96\xbe\xd9\x70\xd1\x7a\x34\x00\xcd\x60\xce\xf5\x7b\x4e\x49", + "\x30\xf8\xa0\xee\xa9\x37\x56\x7b\xfe\xc3\xb1\x5d\x1c\x58\xa1\x70\xb4" + "\xf8\xaa\xe9\x49\x17\x8a\xd0\x4d\x8e\xa4\x6b\x6e\x62\x5b\x50\xdc\xe2" + "\x52\xdc\x38\x0e\x9e\xff\x53\xca\x20\x57\x22\x80\x39\xed\xd7\x8a\xe3" + "\xe2\x28\x19\xaf\x6e\x3b\xdc\x83\xb1\xf0\x73\x6a\xbb\x76\xeb\x40\x44" + "\xe4\x74\x34\x64\xfb\xc6\x1c\x62\xb0\x68\x39\xe3\x72\x28\x9a\xa8\x21" + "\x8e\xa0\xa6\xe4\x43\xa3\x7a\x65\xc9\x48\x85\x73\x80\x31\x8f\xe0\x7b" + "\xd4\x82\xdf\xba\xef\x86\x9c\xdf\x75\xfd\x2d\x95\x75\xf6\x04\x0f\x90" + "\xf2\x0b\xb4\x1b\x39\x0d\x4d\x1c\xda\xa3\xb7\xb6\xa5\xd9\xa9\x47\xbc" + "\x71\xf0\x6a\x74\xdb\x8f\x65\x13\x5f\x59\x82\xb7\x5b\x21\xa0\xdc\xc3" + "\x3e\xaf\x7c\x6b\x26\xda\x62\xbf\x0f\xe9\x07\x4f\x3f\x96\x1e\x73\x9f" + "\x22\x92\x19\x2e\x33\x09\x39\x7d\x19\xde\xd4\xd4\x4a\x6c\xe7\x48\xf3" + "\x53\x0c\x7b\xee\xc7\x6d\x65\xd4\x0c\x6b\xc8\xd2\x81\xf7\xc2\x3d\x56" + "\x37\x07\x10\xd8\x95\x2d\x7f\x68\xfc\xac\x5e\x8f\x53\xb5\x8d\x07\x14" + "\x23\x8a\x9c\x79\xc8\x36\xd9\xb7\x91\x54\x1e\xd5\x27\xa9\xd6\xef\x87" + "\xbc\xef\x11\x42\x65\x3e\x73\x4e\x66\x60\x1d\x2a\x8a\x37\xdd\x20\x07" + "\x16", + "\x01\x4c\x13\xda\x92\xa0\xe9\x93\x29\xa8\x8c\x9c\xc8\x4e\x08\xd4", 1, + 2048, "\x23\x18\x54\x32\x3b\xc9\x56\x13\xa1\x5d\xd7\xd5", 96 }, + { 128, 128, 13, + "\x6f\x8b\x47\x45\x2e\xf0\x25\x02\x85\x60\xf6\x16\x01\xb6\x4e\x46", + "\xf3\x6e\x40\x72\xc2\x04\x5b\x89\x95\xed\x8f\xb9\x90\x7b\xc9\x85\x60" + "\x6b\x01\xd1\x92\x0b\x09\xe3\x93\xe3\x79\x26\x37\x7f\x89\x47\x8e\x6e" + "\xd5\x53\x14\xc5\x4f\x29\xff\x0b\xd2\x5f\xf7\x5e\x99\xf1\xde\xa9\xeb" + "\x7a\xe0\x86\x05\x93\xe5\x96\x7c\x57\xa8\x35\x2f\x13\x5a\x33\xc0\x46" + "\x4b\x90\xdd\x6d\x5e\x8f\x9a\xef\xb9\x42\x83\x91\xce\xff\xf6\x19\xf2" + "\x93\x08\x4d\xc9\x59\x2b\x0b\x72\xad\x03\x4f\xad\x93\x09\x00\x18\x70" + "\xcc\x1f\x58\x97\xeb\x94\xb6\x8a\xd1\x85\x61\xc8\x3b\x7f\xb5\x55\x36" + "\xb4\xbe\x80\x4b\x4a\xe8\xe9\x80\x05\xc4\xf1\x18\x0b\xad\xd7\xe4\xd9" + "\x8a\x6e\x0f\x62\xa5\x90\xc1\xbf\xae\xbb\x2e\xf7\xca\x56\x96\x24\x5a" + "\x69\xb3\x48\x63\xf9\x95\x26\x96\xd3\x18\xb6\x9d\x14\xa1\xed\x7c\xe7" + "\x04\x64\x54\x2f\x3c\xfd\xa2\xe9\x3b\x22\x86\xab\x3d\x1a\x43\x54\x4a" + "\x24\xa4\x91\xde\x66\x04\x3c\x0e\x83\xc6\x99\x13\x8f\xa2\x94\xe6\xfd" + "\x29\x3d\x20\xb5\x96\xf9\x28\x4e\x75\x52\x85\xd3\x6e\x32\xd1\x1e\x3d" + "\x51\x83\x0f\x55\x40\x5a\x91\xcb\x13\xce\xc0\xef\x4e\xb3\x68\x3b\x0c" + "\x28\x8b\x72\xbc\xc0\x4e\x8e\x53\x59\xb1\xae\x44\xc3\x34\x0c\x64\x04" + "\x42\x6d\x9a\x3b\xa5\x05\x9c\xbb\xa0\x50\x6a\x3a\x69\x87\xa5\x9e\x9a" + "\x0a\x2e\xac\x96\xf9", + "\x53\x7a\x54\x05\xae\x5d\xfd\x3a\x83\xad\x6e\x9e\x4b\x4d\x0a\x1d", 1, + 2216, "\xc8\x99\x82\xa6\xa7\x87\xca\xa3\x8f\x16\x6e\xc7", 96 }, + { 128, 128, 14, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8c\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 15, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7c\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 16, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8f\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 17, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7f\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 18, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x0d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 19, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xfd\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 20, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf6\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 21, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x03\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 22, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\x6d\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 23, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x03\xb7\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 24, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb8\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 25, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb6\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 26, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xbb\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 27, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb5\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 28, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\x7a\xad\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 29, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x12\xcc\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 30, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xac\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 31, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcd\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 32, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\x2d\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 33, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\x4c\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 34, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x3b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 35, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcc\x41\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 36, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x02\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 37, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x53\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 38, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x52\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 39, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x49\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 40, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x51\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 41, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x4a\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 42, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\xd3\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 43, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\xc8\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 44, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xe9", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 45, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x05", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 46, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xea", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 47, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x06", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 48, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\xa8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 49, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x44", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 50, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\xfa\xad\x1b\x03\x8c\x53\xb3\x20\x68", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 51, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x92\xcc\x61\x52\x8e\x48\xcb\x02\x84", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 52, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8c\xf7\xd8\xed\xb9\x91\x65\xfa\xac\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 53, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7c\x02\x8e\x83\xb7\x27\xda\x92\xcd\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 
96 }, + { 128, 128, 54, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\x6d\xb9\x91\x65\x7a\xad\x1b\x03\x8c\x53\xb3\x20\xe8", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 55, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x03\xb7\x27\xda\x12\xcc\x61\x52\x8e\x48\xcb\x02\x04", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 56, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x8d\xf7\xd8\xed\xb9\x91\x65\x7a\xad\x1b\x03\x8c\x53\xb3\x20\x68", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 57, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7d\x02\x8e\x83\xb7\x27\xda\x12\xcc\x61\x52\x8e\x48\xcb\x02\x84", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 58, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x72\x08\x27\x12\x46\x6e\x9a\x05\x52\xe4\xfc\x73\xac\x4c\xdf\x17", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 59, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x82\xfd\x71\x7c\x48\xd8\x25\x6d\x33\x9e\xad\x71\xb7\x34\xfd\xfb", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 60, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 61, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 62, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 63, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x0d\x77\x58\x6d\x39\x11\xe5\x7a\x2d\x9b\x83\x0c\xd3\x33\xa0\x68", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 65, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xfd\x82\x0e\x03\x37\xa7\x5a\x12\x4c\xe1\xd2\x0e\xc8\x4b\x82\x84", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 66, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + 
"\x8c\xf6\xd9\xec\xb8\x90\x64\xfb\xac\x1a\x02\x8d\x52\xb2\x21\xe9", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 67, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x7c\x03\x8f\x82\xb6\x26\xdb\x93\xcd\x60\x53\x8f\x49\xca\x03\x05", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 128, 128, 68, + "\x82\x6b\xa4\x9b\x4a\xff\x2a\xdc\x6b\x22\xdd\x4a\x84\xf2\x99\x41", + "\x32\x72\x5b\xc6\x38\x59\x49\xb2\x98\x75\xf8\x39\xe7\x5c\x06\x71", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 128, "\xce\x88\xbe\x4f\x89\xc9\x46\x02\x7d\x7c\x54\x2a", 96 }, + { 128, 128, 69, + "\x82\x6b\xa4\x9b\x4a\xff\x2a\xdc\x6b\x22\xdd\x4a\x84\xf2\x99\x41", + "\x4a\x41\x61\x2e\xec\x3b\x21\x56\x4f\xf6\x23\x91\xeb\xae\x07\x6c", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 128, "\xce\x88\xbe\x4f\x89\xc9\x46\x02\x7d\x7c\x54\x2a", 96 }, + { 128, 128, 70, + "\x82\x00\x8d\xa5\xb6\x5a\x6e\x99\xa3\xe9\x78\xab\x5a\x98\xe9\xb0", + "", + "\x82\xf2\xab\xe9\xa8\x97\xcc\xda\x3d\x2c\xbd\x39\x57\x97\xc8\x3e", 1, + 0, "\x63\x8c\x5f\x29\xff\x29\x23\x33\xab\x0e\xb7\xcd\x66\x56\xde\x69", + 128 }, + { 128, 128, 71, + "\x7c\x3f\x22\x46\xf4\xa3\x26\xae\x60\x05\x4f\x41\x7c\x20\xe9\xc1", + "\x2d", + "\xa0\x79\xf6\x04\x8f\xed\x16\xf0\x04\x1f\xa0\x4d\x7d\x70\x92\x5e", 1, + 8, "\x16\xa0\xff\x55\x39\xe1\x0a\x86\xec\x54\x33\x76\x4d\xa7\x1b\x59", + 128 }, + { 128, 128, 72, + "\x0f\x62\x4e\x31\x8b\x51\xa9\xcc\x6c\x30\xcf\x4e\x45\xa2\x87\xdb", + "\x0e\x45", + "\xd0\x7a\x83\xbb\xd5\x44\xf0\xb0\x79\xee\xc6\x32\xf8\x09\x74\xc9", 1, + 16, + "\x0d\x4d\xf0\x21\x63\x3d\xce\xbb\x58\xf2\xa3\x89\xe1\x91\x1b\x66", + 128 }, + { 128, 128, 73, + "\x57\x43\x6a\xb1\x0e\xe8\x28\x55\x2e\x63\xe1\xbf\xbf\x83\x81\xdd", + "\x95\x8a\x56\x55", + "\x66\x5b\x87\xde\x5e\x3e\xec\x09\x8f\x9e\x6a\xd7\x40\x12\xf2\x36", 1, + 32, + "\x0c\x5d\x7c\x05\xd7\x21\xf4\x22\xda\x93\xd6\x01\xe9\xfe\xbe\xf0", + 128 }, + { 128, 128, 74, + "\x18\xd6\xdf\x42\x42\xd1\x00\xb5\x71\x21\x57\xc7\x72\xc8\xe1\x28", + "\xc9\xc7\xd4\x6f\xd7\x9e\x72", + "\x92\xe3\xc1\x24\xab\xdd\x0f\x9c\x10\xce\xbe\xa8\x4f\xd8\x5e\x0a", 1, + 56, + "\xee\xad\x99\xcd\xb2\x04\x42\x2d\xb1\xa0\x08\xfe\x6d\xea\x6a\x3a", + 128 }, + { 128, 128, 75, + "\xf9\x81\x9a\x0d\x58\x53\xfe\x80\x56\x45\x23\x1e\x43\x33\x62\xce", + "\x37\x4d\xd4\xb4\x66\xa5\x1b\xf5", + "\x56\xb0\x62\x73\x96\x08\xc6\x7a\xe6\xbd\x1a\xa6\x1e\x83\xbe\x2b", 1, + 64, + "\x23\x07\xa6\x4b\x29\xef\x75\xd4\x64\xc0\x51\x06\x1d\x06\x9b\xbe", + 128 }, + { 128, 128, 76, + "\x47\xff\x15\xa9\xa9\xf5\x65\xdf\x93\x25\x57\x74\xa1\x29\x6b\x11", + "\xf3\x5d\xf7\xb3\x38\x71\x7e\x4e\xc9\x97\x45\x48\xb9\x9f\x21", + "\x46\x08\xea\x9a\x7e\x40\xa7\x29\xd2\x62\x59\xa9\xc6\x2b\xc5\x86", 1, + 120, + "\xeb\xff\xd4\x1f\x0f\x5b\x3b\xdc\x64\x7d\xa8\x03\x6b\xe5\xe4\xdd", + 128 }, + { 128, 128, 77, + "\x75\x91\x85\xe9\x95\xfb\x6d\xee\xc8\x01\xba\x11\x9d\xb8\x7b\xcc", + "\xde\x69\xd4\x92\x6f\xbc\x7d\xaa\xc4\x1a\xdd\xdf\x69\x2f\xe1\x6b", + "\xd2\x12\xc3\xe2\x57\x83\x74\x81\xe9\x7b\x31\xe7\xa8\x99\xb1\x26", 1, + 128, + "\x80\x86\x24\x48\x2a\xd1\x9d\x2a\x8b\x76\xac\x96\xa3\x09\x05\x13", + 128 }, + { 128, 128, 78, + "\x77\x22\x74\xb7\x24\x0c\x3a\xda\x54\x68\xfa\x3f\x12\xbc\xe8\x1d", + "\xdf\x69\x01\x0d\xc9\x2f\xc4\xaf\x5e\xc1\x9d\xd1\x65\xb0\xd2\x6a" + "\x97", + "\xb6\x85\x46\x3b\xed\xda\xd5\x56\x68\xe4\x48\xb0\xc1\x3f\xb3\x2f", 1, + 136, + 
"\xd0\x7e\xfa\x93\xb8\x88\x8c\xd7\xf0\xd3\xfe\x53\xb3\x59\x2b\x62", + 128 }, + { 128, 128, 79, + "\xf2\xd0\xfd\x91\xee\xca\x7f\x55\xe5\xf7\xb5\xc5\x7f\x59\xc5\x77", + "\x7c\x51\x95\x7d\x97\x4a\x84\x78\x77\xe8\xb7\xf7\x16\x5d\xe4\x6e\xc0" + "\x50\xcb\x5c\xb0\x3c\xd5\x6f", + "\xa5\xbf\x64\x54\x74\x86\x54\xfb\xd9\x01\xca\x5c\x28\xc0\x09\xd6", 1, + 192, + "\x27\xbb\x91\x06\x32\x86\x93\x14\x97\x9d\xea\x4a\xa6\x80\x16\x13", + 128 }, + { 128, 128, 80, + "\x97\xca\xc4\x81\x2b\x0d\x1e\xbc\x7f\x26\xf2\xa7\xb8\x11\x81\x2b", + "\xa9\xd9\x97\xfa\x61\x44\x43\x93\xce\x45\xf9\xaa\xcb\x2c\x9d\xcc\x50" + "\x98\xae\xdc\xc4\xd5\x69\xcd\x92\xa6\x75\x6b\x4e\x53\x9b\x28\x37\xf7" + "\xec\x79\x8c\x05\x61\xf8\x93\xcf\xf8\x8b\x16\x0a\x68\xd2\xcd\xe2\xcb" + "\x09\x77\x89\x60\xf6\x68\xa9\x8e\xc3\xd2\x15\x20\xa3\x56\xb9\x12\x82" + "\x14\x28\x3f\xa7\xb1\x3d\xef\x37\xb9\x2c\xa3\x33\x60\xc6\xaf\x90\x6d" + "\x9f\x2d\x5b\x94\xd4\x00\x7b\xfa\xe0\xf9\x5f\xcb\xe6\x04\xe4\x22\x36" + "\x01\xf6\x4b\xae\xe1\x50\xfa\xaa\xfd\x7e\x9c\xbc\xdc\x67\xbb\x9d\xfd" + "\x8f\x70\x14\xd4\x91\xd9\xae\x5f\x67\x7a", + "\x03\x50\x77\x2a\x98\xaa\x49\xdf\x0a\x66\xc6\xd4\x6d\x4a\x60\x34", 1, + 1032, + "\xe1\x2b\x41\x5a\x79\x1b\x51\xdb\x9d\x35\xf7\x35\xac\x50\x78\xc6", + 128 }, + { 128, 128, 81, + "\xcb\xf3\x1f\xf2\x9c\x06\x80\x99\x32\x93\x50\x1c\x2e\x1f\xf3\x4c", + "\x7b\xfb\x18\x64\xf9\x2d\x4a\x92\x58\xf0\xac\x72\x1e\xb8\x74\xcf\xf7" + "\xd7\x34\x27\x23\x7c\xea\x9f\x4d\x0e\x28\x39\x84\x85\xc4\xed\xa7\x83" + "\x7c\x60\x44\xb1\x5d\x3d\x30\xe1\xea\x75\x07\x47\x0e\xdf\x0b\x46\x5a" + "\x4f\x48\xa8\x6d\xe4\x59\xc3\xbb\x32\x55\x3c\x51\x54\x3c\x7c\xc6\xbb" + "\x9a\x3c\x28\x40\x8f\x56\x5e\xcf\x9f\x16\xb3\x9a\xb0\xc7\x86\x73\xeb" + "\x2d\xda\x8c\x18\xc2\xfb\x96\x5c\x82\x5a\xc6\x11\x6e\xf3\xd7\x5f\x9e" + "\x7d\x8d\xd8\x7e\x89\xd3\x9e\x29\x4e\x63\x09\xe8\xe0\xbf\x33\x02\xd2" + "\x03\xf3\xe9\x40\x1e\x77\x18\x44\x45\x81\xff\x54\xfd\xa6\x66\x48\x91" + "\xbc\x3d\x7e\xc5\xa0\x97\x92\xf0\xc5\xeb\x7d\x2a\xb6\x0c\x68\x3b\xb1" + "\x8d\x19\x15\x9f\xe5\x6a\x40\x20\x7b\x8b\x1c\x1a\x0a\x47\xc6\x66\x4c" + "\x9d\xc6\x3a\x75\xc7\x14\xb6\xd3\x4b\x95\x75\x9d\x4c\x1c\xb6\x81\x13" + "\xfa\xab\x3f\x63\xcd\x0a\xf1\x00\xcf\x66\x02\x2c\xe3\x51\xf8\xc4\xf4" + "\x27\xee\xa9\x77\xca\x67\x34\x3c\x4d\xd2\xda\xf2\x36\x62\x30\x93\x3f" + "\x16\xa0\x76\xca\x65\x3a\xff\x91\x25\x7f\xc9\x48\x74\xbb\x56\x4b\xfd" + "\xf3\x9e\xa4\xce\x06\x1f\x56\x2b\xaf\xbf\x67\x40\xe2\x45\x3d\xef\xae" + "\x5f", + "\x40\x12\xf2\xc6\xf3\x12\x93\xc1\xd3\x90\x53\x5d\xfe\xbc\xa6\x18", 1, + 2048, + "\x85\x0a\xea\x39\xc6\x6d\x85\xc5\x9a\x4a\x65\xa0\x67\xe9\xd6\xfc", + 128 }, + { 128, 128, 82, + "\xde\x20\xc3\x9c\x12\x01\x1e\x61\xfb\x40\xc9\xd7\xdc\x1a\x26\xdb", + "\x2e\x11\xd7\x8f\x73\xc8\x30\xc1\xd3\xd3\xf7\x87\x47\x9b\xc3\x58\x48" + "\x91\xdc\xe8\xae\xa3\x7c\x72\xcf\x87\x6f\x9f\x37\xc9\x25\x5d\x28\xd3" + "\xad\x4e\xfb\xdf\xc9\x63\xf2\xc4\xec\x4d\xfa\xe4\xeb\x56\x8e\x1a\x3f" + "\xac\x31\x38\xcd\x2e\xfc\x28\x66\x91\x9e\x42\x52\xa5\xd5\xcf\x07\x0c" + "\xe4\x83\xd6\x09\x74\x2b\x65\x42\xa4\x49\xdc\xe4\x49\xa1\xf5\x18\x4b" + "\x92\x04\x2b\x65\x77\x91\x6c\x11\x50\xa8\xe2\x70\x56\xc1\xa3\x3a\x65" + "\xbd\xb2\x51\x69\x02\x74\x0a\xcd\xb0\x9f\x90\xce\xb6\x71\x3f\xb9\x92" + "\xfd\x81\x36\xee\xf0\xeb\xe9\xb5\xd7\xdf\x10\x2e\xef\x2b\x34\xa4\x4f" + "\x18\x4a\x9f\x8b\xdc\x92\x3c\x0c\x7d\x46\x3d\xbd\xd3\x23\xa5\x00\xdf" + "\xb5\xb7\xde\x8f\x43\xfc\xea\x5e\x7e\x73\x26\xd4\xee\x0d\x1c\xea\x46" + "\xc9\x39\x3e\x7a\x96\x0f\x84\xda\x95\xbb\x14\x10\xbc\xe1\x3c\xbb\x88" + "\x28\x0d\xd7\xd6\x46\xf9\x93\xf1\x7e\x89\xc1\x16\x74\x24\x27\x43\xfe" + 
"\x1a\x7a\xf4\x53\xde\x01\xbe\x2c\x64\x36\x06\xe4\x65\x1a\xc0\x59\x5a" + "\x65\x48\xba\x47\x4f\x25\x95\x1e\xfb\xda\x00\x95\x3e\x12\x22\x09\x3c" + "\x86\x3d\x0f\xb0\x08\x7e\xcc\xc0\x45\x7d\x51\xe6\x20\x30\x6b\xaf\xea" + "\x0a\xb3\xfb\x72\x82\x7e\xd5\x70\x51\xa8\xcf\x6f\x15\xe5\x4b\x2e\x84" + "\xb3\xec\x8a\x33\xbb", + "\x10\x6a\x8b\x1d\x9a\xed\x24\x63\x87\x59\x5c\xe9\xae\xcf\x16\x3f", 1, + 2216, + "\xce\xb4\x9c\x91\xd2\x09\x42\x78\x86\x23\xc8\x56\xa5\xf4\x44\xd4", + 128 }, + { 128, 128, 83, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xdf\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 84, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2f\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 85, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xdc\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 86, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2c\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 87, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x5e\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 88, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xae\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 89, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc6\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 90, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x33\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 91, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\xb8\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 92, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\xd6\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 93, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8b\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 94, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x85\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 95, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x88\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 96, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x86\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 97, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\xd3\xff\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 98, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\xbb\x9e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 99, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xfe\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 100, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9f\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 101, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\x7f\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 102, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x1e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 103, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x3f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 104, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9e\x45\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 105, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x85\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 106, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd4\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 107, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x58\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 108, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x43\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 109, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x5b\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 110, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x40\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 111, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\xd9\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 112, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\xc2\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 113, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xf6", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 114, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x1a", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 115, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xf5", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 116, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x19", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 117, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\xb7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 118, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x5b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 119, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\x53\xff\x1f\x84\xf1\x59\x99\x74\x77", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 120, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\x3b\x9e\x65\xd5\xf3\x42\xe1\x56\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 121, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xdf\xc7\xf8\x38\x8a\x5a\x84\x53\xfe\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 122, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2f\x32\xae\x56\x84\xec\x3b\x3b\x9f\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 123, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\xb8\x8a\x5a\x84\xd3\xff\x1f\x84\xf1\x59\x99\x74\xf7", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 124, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\xd6\x84\xec\x3b\xbb\x9e\x65\xd5\xf3\x42\xe1\x56\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 125, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xde\xc7\xf8\x38\x8a\x5a\x84\xd3\xff\x1f\x84\xf1\x59\x99\x74\x77", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 126, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2e\x32\xae\x56\x84\xec\x3b\xbb\x9e\x65\xd5\xf3\x42\xe1\x56\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 127, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x21\x38\x07\xc7\x75\xa5\x7b\xac\x00\xe0\x7b\x0e\xa6\x66\x8b\x08", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd1\xcd\x51\xa9\x7b\x13\xc4\xc4\x61\x9a\x2a\x0c\xbd\x1e\xa9\xe4", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 129, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 130, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 131, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 132, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 133, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x5e\x47\x78\xb8\x0a\xda\x04\xd3\x7f\x9f\x04\x71\xd9\x19\xf4\x77", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 134, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xae\xb2\x2e\xd6\x04\x6c\xbb\xbb\x1e\xe5\x55\x73\xc2\x61\xd6\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 135, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xdf\xc6\xf9\x39\x8b\x5b\x85\x52\xfe\x1e\x85\xf0\x58\x98\x75\xf6", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 136, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x2f\x33\xaf\x57\x85\xed\x3a\x3a\x9f\x64\xd4\xf2\x43\xe0\x57\x1a", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 128, 128, 137, + "\xb0\x91\x32\xf1\xb7\x5e\xef\x72\x9e\x4f\x61\x66\xc2\x64\x90\xdb", + "\x20\x08\xb1\x1d\x8a\x75\x6a\xfc\x3d\x63\x40\x14\x1d\xf6\x77\x5b", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 128, + "\x2a\xda\x15\x3c\x42\x3a\x82\xf8\x45\xdb\x0c\xaa\xa9\x4b\x96\x6d", + 128 }, + { 128, 128, 138, + "\xb0\x91\x32\xf1\xb7\x5e\xef\x72\x9e\x4f\x61\x66\xc2\x64\x90\xdb", + "\x17\xc0\x16\x62\x31\x2d\x93\xbc\xe1\x5b\x1e\xcb\xa9\x68\x2b\x2d", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 128, + "\x2a\xda\x15\x3c\x42\x3a\x82\xf8\x45\xdb\x0c\xaa\xa9\x4b\x96\x6d", + 128 }, + { 192, 128, 139, + "\xde\xd6\xff\x22\x55\x65\x87\x32\xf0\xfe\xf0\x84\x07\x13\x68\x93\xf1" + "\x2e\xdf\xeb\xdf\x86\xa2\x14", + "", + "\x59\xa1\x46\x0f\x62\x83\x16\x8b\x32\xc7\xf5\x27\xb7\xa7\x6f\x12", 1, + 0, "\x05\x36\xe9\xff\x30\x45\x94\xf2\xad\xfe\x5e\x02", 96 }, + { 192, 
128, 140, + "\x6a\xda\x91\xc9\x74\x2d\xc0\x1e\x95\x3e\xf1\x20\xb2\x2a\x26\x49\xc0" + "\xbe\x93\x04\x38\x65\x15\xa2", + "\x0d", + "\xb1\xd2\xa3\xdb\x72\x4f\xf1\xef\x11\xaa\x76\x95\x4e\x39\x49\x8e", 1, + 8, "\xe1\x9f\xaa\x23\xf8\x71\xfc\x66\x58\xf5\xb5\x96", 96 }, + { 192, 128, 141, + "\x9b\xee\x48\xdc\x40\xa8\xb5\x22\xbd\x7a\xbd\xe7\x99\x9a\xde\xdd\x70" + "\xba\xb2\xef\xf8\x00\xa0\x50", + "\x94\xc8", + "\x36\xb0\x21\xf1\x9c\x12\x4c\x46\x50\x0e\x40\x7f\xc8\xe0\xcd\x5e", 1, + 16, "\x6f\xaa\xe4\x20\x1f\x79\xc9\xd5\x32\xf6\x3d\x85", 96 }, + { 192, 128, 142, + "\xf7\x9e\x59\x0f\x07\x38\x81\x24\xc8\x00\xdd\x4a\xda\xda\xc2\x54\x6c" + "\x62\x67\x96\xc4\x6c\x3d\x2d", + "\xea\x09\x34\x8f", + "\x5f\xa2\x25\x2f\x2b\xbe\x35\x08\xc8\xa9\x19\xc2\x68\xb9\x51\x4a", 1, + 32, "\xc2\x36\x00\x65\x82\x07\xbf\x2d\xc3\x08\x08\x7c", 96 }, + { 192, 128, 143, + "\x35\x6a\xe8\x0d\x9f\xf5\x1f\x72\x92\x0a\x0b\xcc\x80\x78\x37\xcd\x79" + "\x77\x68\xad\xba\x48\x75\x1c", + "\xa8\xab\x4d\x4d\x07\xc7\x1e", + "\x04\x31\xff\x77\x84\xb4\x55\x6e\xc6\x31\x24\xce\xa0\x3e\xb0\x39", 1, + 56, "\xae\x0a\x4c\x45\x2d\x43\xa2\xc8\x41\xd6\xe0\xfd", 96 }, + { 192, 128, 144, + "\x26\x8b\x5c\xb7\x84\x37\xe8\x7a\xf7\x5f\x27\xab\xc7\x53\x90\x9b\xda" + "\xa2\x34\x1f\x88\x91\xd7\x7e", + "\xc4\x2a\xb5\x35\xa8\x76\x94\x2b", + "\x93\xb3\xff\x81\x65\x4e\xe9\x57\x91\xc8\x66\x82\xdb\x37\x89\x4a", 1, + 64, "\xee\x1a\x14\xe5\x50\x24\x2a\x65\x3f\xd4\x50\x9f", 96 }, + { 192, 128, 145, + "\x9c\x42\x6c\x09\xc0\xa0\xc5\x19\xa5\x54\x84\x1e\xb7\x69\x88\x6a\xa3" + "\xd8\x27\x25\xe0\x1b\x25\x05", + "\xe6\x56\x80\xb9\xae\xeb\x43\xbb\x41\x44\x90\x08\x00\xf3\xbd", + "\x0f\x39\x2a\x50\xfe\x0f\x18\x3a\xd9\x41\x1a\x61\x65\xd5\x4b\xda", 1, + 120, "\x8f\x64\x41\xff\xb9\x77\xde\x17\x6c\x4f\x53\x36", 96 }, + { 192, 128, 146, + "\x60\x31\xb3\xa1\x77\xb4\x30\x45\xae\x58\x7f\xfb\x0d\x22\x8b\x04\x14" + "\xff\x26\xb7\x20\x6c\x30\xf5", + "\xdc\x89\xf1\xdd\x15\xf3\x25\x8b\x7e\x92\x11\x73\x65\x0e\xb9\xba", + "\x68\xa1\x24\x9d\x64\x87\xf4\x6c\x10\x7b\xdb\x84\x17\x27\x1e\x26", 1, + 128, "\x8a\xf8\xef\x74\xbd\x25\x21\xf6\xe7\x11\x95\x0b", 96 }, + { 192, 128, 147, + "\x36\x42\x89\xec\xcf\xf5\x3e\xfb\x98\xda\x8e\xe4\x37\xd6\xd3\x89\x77" + "\x5a\x3c\xf2\xb4\xc9\x70\xcd", + "\x66\x47\xb5\xda\x47\xd1\x69\x83\x20\x5c\xe2\x44\x88\x0c\xa6\x05" + "\x19", + "\x73\xb9\x8c\x93\x98\xc7\x0a\x83\x3e\x3d\xbe\xf4\x55\x6d\x8f\xa7", 1, + 136, "\x72\x45\xa5\x15\x65\x6a\x22\x36\xc3\x8c\x85\xaa", 96 }, + { 192, 128, 148, + "\x4c\x41\xb2\xfe\x3e\x60\xbc\xe3\xa9\x45\xe2\xfa\xcd\x97\x54\xc4\xf6" + "\x0b\x19\x2d\xa1\xfe\x11\x0e", + "\x11\xc1\x07\xe4\xb3\x02\xfd\x91\xea\x92\x19\xd7\xfc\x00\x79\xe8\xac" + "\x4b\xdc\xfc\x71\xff\x02\x7a", + "\xfa\xcd\x4d\x12\xdf\xaf\xd6\xbd\x1f\x42\x1a\x5c\x4c\x62\x17\xac", 1, + 192, "\x4a\xb5\x1f\xc4\x59\xe4\xf4\xa5\xb4\x4d\xcf\xa3", 96 }, + { 192, 128, 149, + "\x22\x61\x96\x23\x65\xe2\xeb\x22\xe1\x20\x02\xe0\x53\x95\x0b\x14\x29" + "\x53\xa6\xce\x60\x2d\x19\x59", + "\x44\x21\x60\xc2\x8d\x0c\x08\x32\x87\x32\x19\xdf\x81\xf1\x93\x23\x34" + "\x2b\xd5\x1b\x56\x68\x45\xaa\x3b\xff\x39\xd3\x02\xdb\x3f\x09\x85\xc9" + "\x16\xf7\xc7\x7c\x6d\x63\x87\xd9\x3c\xbc\xdc\x40\xbd\x6e\x45\x8d\x67" + "\xea\x5e\x79\x3a\xc6\xda\x12\x2d\x32\x31\x49\xf1\xb8\x4b\x8c\xd9\x94" + "\xb6\xd8\xbf\x01\x5d\xfa\xcf\x47\x5f\xc9\xc5\x09\xec\x20\xad\xae\x68" + "\x3e\x57\xc2\xb9\x96\x8a\x64\xcf\x56\xde\x90\xcb\xac\x6b\x85\xb9\x67" + "\x92\x96\xa3\x09\x7d\xfc\x76\xfb\xfd\x67\xcb\xc3\x37\x69\x7f\x93\x15" + "\xc2\x3f\xe0\xa4\xc0\xd4\x4d\x79\xc4\x73", + "\x54\xb1\x0f\x53\xa3\x04\xfd\x1b\xee\x6f\x9f\xb2\x46\xca\x40\xef", 
1, + 1032, "\x35\xcb\xde\x01\xe0\x7a\x10\xda\x05\xba\x2a\x34", 96 }, + { 192, 128, 150, + "\x5a\x94\x6c\x7e\x78\xad\x42\x7e\x00\xbc\x52\x91\x7f\xd2\x7f\x00\x9c" + "\x98\x7a\x87\x1b\x22\xf6\xf8", + "\x8a\xd8\xea\xf0\xec\xc7\x29\x3f\x9a\x50\x42\xc9\xc7\xb7\xc0\x40\xa2" + "\x39\xa8\xd8\xc6\xee\xd4\xcf\x1c\x2c\x5c\x97\xb5\x68\x62\x67\x11\x10" + "\xdd\xc0\x27\xaf\x7b\x9c\x6f\x87\x49\x66\x3e\xbc\xe7\x34\xa4\x42\xd6" + "\x6d\x1e\xc2\x13\x3f\xc1\xe9\x23\xd9\xf6\xb5\xd0\x9b\x06\x9a\xde\x86" + "\x61\x05\x10\x05\xe5\x4f\x24\xe2\x04\x5a\x40\xd6\x5a\xd4\xa1\x2e\xd0" + "\x57\x9d\xd1\x92\xaf\x76\x11\x8b\xa3\x95\x33\x5f\xb4\xa9\x46\x64\x81" + "\x2b\xe3\xe5\xde\x91\xf4\xa4\xa6\x19\x6e\xae\x48\xc6\x39\xa8\xd2\xba" + "\xf5\x4b\xc9\xe4\x7b\xf1\x44\x86\x2f\x03\x77\xd5\x1b\x87\xab\x3e\xf6" + "\x49\xbe\xc7\xe7\x8e\xf2\x78\xf6\x2a\xea\xe1\xb5\xdf\x61\xd2\x1b\xec" + "\x30\x4a\xb1\x79\x3b\x2b\x95\x8d\xc9\xd9\xc5\xaa\xe3\x8d\x35\x43\xbf" + "\x6d\x29\xef\xf2\x5f\xae\xcb\x7c\xf8\xca\x1e\x75\x84\x70\xb4\xb3\x0b" + "\x4f\xc0\xba\xe2\x56\xff\x83\x01\xf6\x31\x30\xc0\x28\x74\xe6\x7c\xe7" + "\x25\x87\x06\xea\x3a\x45\xf2\xac\x11\x66\x4a\xcb\x43\xed\x90\xff\xc8" + "\xf7\xbd\x73\x6b\x50\x27\x56\x5c\x1e\xb9\x8e\x54\xd9\xf0\x0b\x34\xc8" + "\xf3\x89\x62\x6f\x83\xda\x9f\xfb\x9f\xd0\xfd\x48\x45\xab\x58\x41\x56" + "\x37", + "\x5a\x79\xd3\x22\x8c\x67\x30\x5d\xfc\xf1\x5d\x68\xae\x03\x28\x60", 1, + 2048, "\xbf\x96\x93\x96\x00\x66\x0e\x86\x25\x42\x03\x62", 96 }, + { 192, 128, 151, + "\x57\xc0\x7c\x71\xe6\x13\x61\xa2\xe7\x5a\xf1\x84\x82\xd9\xbd\xe5\x83" + "\xff\x2b\x09\xb5\xc1\xda\x77", + "\x94\x6c\xcc\xb8\xff\x9b\x67\xb3\x56\x50\x66\x12\x5a\x59\x52\x98\x57" + "\xac\x62\xc3\xe3\x93\xd4\x8d\xe1\x74\x1e\xfe\x35\x1c\xa1\x94\x85\xfa" + "\x4e\x45\xb4\xcf\x08\x31\xbf\x30\xcc\xef\x3d\x38\x9c\x0b\xb1\xcd\x16" + "\x25\x7a\xc6\x47\xa7\x7b\x96\x5a\xd9\x03\x62\xcf\x27\xbd\xa5\xe4\x2f" + "\x16\x00\x5c\x69\x7e\x9e\x46\x5c\xc1\x28\x73\x88\xab\xd4\x5e\xd7\x24" + "\xc6\xf0\x1a\x87\x01\xd8\x6e\xb2\x24\x39\xd0\xb4\xe6\x09\xf7\xf8\x10" + "\x37\xed\x59\xa1\x36\xe2\x0e\x8b\xb8\x70\xfe\x57\x74\x44\xf6\xde\xa2" + "\x15\x3b\x4e\x2c\x0c\x0c\x30\x0c\x14\x04\xe8\x31\x51\x0b\x93\x0b\x68" + "\x19\x82\xb9\xcd\xd5\x13\xb3\x94\x7f\xcf\x9e\x07\x9d\xac\xd2\x0f\xa7" + "\x8a\x08\x19\x9c\x77\x72\x75\x56\xd7\x24\x80\x93\x2f\x15\xe2\x01\x2f" + "\xf0\x3e\x59\x3b\x98\x7c\x89\xa1\xdd\x1a\xb5\xae\x0b\xd2\x3d\xe0\xd9" + "\xfc\x3c\x1c\x37\xe0\xfd\x6c\x46\xc0\xec\xb1\x49\x5d\xc3\xe6\x2d\xc6" + "\x7b\xd7\xc5\x15\x0d\xbf\x5f\xcb\xc6\xf6\xfe\xcc\xd5\xd4\x7b\x8d\x6b" + "\x59\x3a\x14\x3d\xf5\x43\x91\xed\x1f\xcc\x8e\xaf\xe5\x45\x67\xef\xc6" + "\x30\x87\x49\x4e\xad\xaa\xdb\x0b\x61\x98\x19\x4e\x1f\x5a\x5d\x9b\xcf" + "\x1d\xe6\x08\xa5\x3f\x42\x2b\xae\x1a\x40\xc9\xa6\x6d\xfb\x47\xc2\x6e" + "\xfa\x85\xb8\x46\xa2", + "\x8f\x13\x64\xd1\xd9\xe5\x82\x6a\x79\x25\x4c\xa6\x02\xbb\x1e\x4c", 1, + 2216, "\x7c\xc9\x7a\x3e\x4f\x65\x71\xae\xdd\x69\xee\x95", 96 }, + { 192, 128, 152, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x35\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 153, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8d\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 154, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x36\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 155, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8e\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 156, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xb4\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 157, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 158, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xec\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 159, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x76\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 160, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x98\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 161, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\x45\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 162, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x01\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 163, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x57\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 164, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x02\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 165, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x54\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 166, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x8f\x11\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 167, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\x3e\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 168, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x10\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 169, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2a\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 170, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x91\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 171, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\xab\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 172, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x11\x34\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 173, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xfc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 174, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbd\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 175, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5b\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 176, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + 
"\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe2\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 177, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xce\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 178, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe1\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 179, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcd\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 180, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\x63\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 181, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\x4f\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 182, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x76", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 183, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7e", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 184, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x75", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 185, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7d", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 186, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\x37", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 187, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\x3f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 188, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x0f\x11\x14\xbc\xd4\xe3\x4b\xab\xf7", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 189, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\xbe\x2b\xdc\x5a\x8f\xcf\xc9\x38\xff", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 190, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x35\xed\x91\x18\x00\xfd\xfa\x0f\x10\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 191, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8d\x77\x1d\xc5\x56\x3b\x63\xbe\x2a\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 192, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x98\x00\xfd\xfa\x8f\x11\x14\xbc\xd4\xe3\x4b\xab\x77", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 193, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\x45\x56\x3b\x63\x3e\x2b\xdc\x5a\x8f\xcf\xc9\x38\x7f", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 194, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x34\xed\x91\x18\x00\xfd\xfa\x8f\x11\x14\xbc\xd4\xe3\x4b\xab\xf7", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 195, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8c\x77\x1d\xc5\x56\x3b\x63\x3e\x2b\xdc\x5a\x8f\xcf\xc9\x38\xff", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 196, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xcb\x12\x6e\xe7\xff\x02\x05\xf0\xee\xeb\x43\x2b\x1c\xb4\x54\x88", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 197, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x73\x88\xe2\x3a\xa9\xc4\x9c\x41\xd4\x23\xa5\x70\x30\x36\xc7\x80", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 198, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 64, 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 199, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 200, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 201, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 202, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xb4\x6d\x11\x98\x80\x7d\x7a\x8f\x91\x94\x3c\x54\x63\xcb\x2b\xf7", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 203, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0c\xf7\x9d\x45\xd6\xbb\xe3\x3e\xab\x5c\xda\x0f\x4f\x49\xb8\xff", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 204, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x35\xec\x90\x19\x01\xfc\xfb\x0e\x10\x15\xbd\xd5\xe2\x4a\xaa\x76", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 205, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8d\x76\x1c\xc4\x57\x3a\x62\xbf\x2a\xdd\x5b\x8e\xce\xc8\x39\x7e", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 192, 128, 206, + "\x17\x5b\x5a\x75\xe4\xfd\x7f\x9c\x1f\x99\x77\x81\xb9\x76\xa7\x2a\x01" + "\x0d\x69\x55\x9a\xbb\x0d\x1e", + "\xfc\xcc\x74\x87\xa8\xae\x2a\x85\xf6\x05\x5f\x02\x11\xa7\x3d\x0b", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 128, "\x55\x0e\x0a\x2d\x5c\xb9\x87\xba\xf4\xb9\xc3\x24", 96 }, + { 192, 128, 207, + "\x17\x5b\x5a\x75\xe4\xfd\x7f\x9c\x1f\x99\x77\x81\xb9\x76\xa7\x2a\x01" + "\x0d\x69\x55\x9a\xbb\x0d\x1e", + "\xc2\xeb\xff\x3f\x88\xc5\xd3\x7f\x41\x80\x02\x11\xd2\xbc\x60\xdd", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 128, "\x55\x0e\x0a\x2d\x5c\xb9\x87\xba\xf4\xb9\xc3\x24", 96 }, + { 192, 128, 208, + "\x9f\x93\x4f\xa4\xd1\xf3\xcc\xa7\x44\xc8\x1c\x87\x93\x72\x45\xd7\x20" + "\x76\x25\xb6\xb2\xac\x3d\x84", + "", + "\x9c\x04\x53\x1b\xce\xaf\x43\x8d\xc1\x34\x2f\x3c\x5f\x7f\x62\xb8", 1, + 0, "\x19\x12\x4f\xf0\x81\x47\x17\xab\xea\xa2\xe1\xfc\x9f\xc8\x86\xed", + 128 }, + { 192, 128, 209, + "\x0a\xe0\xba\x46\x74\xc5\xc6\x85\x35\x4e\x24\x1b\xc8\x60\x34\xcb\xef" + "\x80\xf8\x8a\xcf\x1f\x74\x88", + "\xc7", + "\xf2\xf5\xcd\xa5\x95\xfb\x7d\x7b\x0a\x7d\x69\x9b\x6a\xbc\x7f\x52", 1, + 8, "\x08\x32\x66\xeb\xda\x2b\x3d\xb8\xb1\x4c\x23\x5e\xb2\x97\x59\x88", + 128 }, + { 192, 
128, 210, + "\x9f\x47\x9e\x91\x18\xe5\x60\x3b\xf1\x22\xfc\x5e\x2b\x8b\xf3\xe4\x48" + "\xac\xb9\xfb\xde\x5e\x16\xdc", + "\x96\xaf", + "\x25\xfc\x87\x7b\xc2\xfd\x80\x04\x92\x84\x33\x16\xa0\x9b\x55\x82", 1, + 16, + "\xd2\x64\x27\xfc\xa9\xc8\xe1\x71\x7a\xcd\x34\xad\x3c\xc5\xd5\x42", + 128 }, + { 192, 128, 211, + "\x41\x9a\x56\xe8\xf4\x1f\x33\x7d\xee\x4a\x63\xed\x38\x6a\x8e\xd7\xe8" + "\x5b\x25\x96\x44\x94\xb0\x75", + "\x8a\x3d\xc8\xad", + "\x3d\x13\x1a\xa1\x25\x38\xc6\x33\x22\x83\x2e\xfb\xc8\x35\x13\xdf", 1, + 32, + "\xab\x25\x03\x31\x65\xc6\xcf\x97\x60\xd1\xa5\xa4\x43\x91\xe7\x7a", + 128 }, + { 192, 128, 212, + "\xee\xc6\x72\xd9\xe7\xb9\xbd\x93\x3e\x5e\xe3\x6a\xe3\x81\xe6\x5b\x87" + "\x2a\xc8\x37\x3c\x3e\x56\x78", + "\x80\x9d\xe8\xdb\xe2\x81\x47", + "\xb6\xb7\x69\xa1\x1d\x31\x9c\xb2\x2b\xe9\x2f\x1d\x33\xad\x8c\x4c", 1, + 56, + "\x51\xe0\x64\x2b\x8e\x49\x45\x81\xdd\x04\xc5\xb1\x6b\xd8\x8b\x89", + 128 }, + { 192, 128, 213, + "\x41\x5c\x12\xb7\x84\x05\xdc\x19\x16\xb9\xf6\x5c\xc1\xd9\x60\xb6\x41" + "\x06\x2a\xb5\x89\x26\x09\x9a", + "\xb5\xde\x1b\x30\xf7\x22\x9f\x1d", + "\x2d\x71\x22\xb7\x4f\xeb\xd2\xe5\x7b\xd7\xe1\xc0\xcc\xf7\x22\x9f", 1, + 64, + "\x80\xd3\x37\x0f\x38\xc8\xd9\xc6\x74\x16\x05\x00\xa3\x36\x27\xf3", + 128 }, + { 192, 128, 214, + "\x8c\xf2\x66\x62\x23\x06\x0e\x2a\xde\xce\x28\x96\x9a\x8d\x88\x9e\xce" + "\x7f\xe3\x2c\x0a\xe3\xb9\xc6", + "\xfb\xfd\x57\x3c\x9f\x12\x14\xf7\xc6\x50\xbc\xf5\xe7\x2b\xac", + "\xbd\x53\xa4\xc7\xda\xa8\xd0\x8e\xe1\x01\xca\xc5\x86\x1c\x7f\xf9", 1, + 120, + "\xcf\x0b\xd9\xd8\x76\x7a\x3e\xae\x62\x15\xaf\x33\xb7\x21\x0f\x99", + 128 }, + { 192, 128, 215, + "\x90\xd1\x19\x6f\xf0\x04\xc2\xc1\x84\xe6\x9f\xbe\x4c\x51\x2a\xfe\x5d" + "\xd5\x30\x60\xd0\xaa\x29\xee", + "\xe0\x3b\xa4\xce\x91\xba\xc7\x71\x50\xda\x73\x11\x25\xb4\x63\x4e", + "\xae\xc2\x77\x21\xe0\xe5\x51\x30\x75\xa6\x89\x53\xcc\x97\xca\xb1", 1, + 128, + "\xb1\x65\x71\x49\x8f\x84\x36\x8f\x1c\x24\xb6\x31\x78\xb8\x8a\x5b", + 128 }, + { 192, 128, 216, + "\x2f\x08\x4b\x35\x67\xbe\xca\xf9\x1d\x1a\x9a\x39\x1a\xac\x5e\x45\x76" + "\xf9\x03\x4e\x4a\x11\x5f\x8c", + "\x8f\x86\x08\x47\xf1\x8d\x38\x7b\x64\x66\x1c\xec\x46\x20\x8c\x70" + "\x50", + "\x28\x3b\x96\x34\x4c\x2d\xe9\x37\xf4\xa7\x27\x9a\x65\x98\xe6\xbd", 1, + 136, + "\x82\x5e\x71\x3c\x0b\xcb\xe7\x89\x2d\x61\x7d\xe7\x98\xab\x9b\xd9", + 128 }, + { 192, 128, 217, + "\xe4\xdb\x14\xa3\xfc\xdb\x48\xfa\x98\x49\xf0\x3b\xf5\x2a\x4b\x44\xd3" + "\x9e\x9e\x6b\x33\x97\xdb\x59", + "\xd7\xd6\x63\x79\x83\x17\x60\x3e\x56\xbc\x8b\x22\xfc\xec\x7a\xf3\xa6" + "\xc1\x5a\x87\x92\x68\xcb\xc7", + "\x73\xc9\x5c\x43\x0d\x20\x7c\x1b\x53\x32\x68\x65\xb7\x98\x0c\x04", 1, + 192, + "\x5d\x53\x8b\x3a\xc4\x1f\xcf\xc5\x60\x65\xe3\xfa\x75\xc5\x33\x85", + 128 }, + { 192, 128, 218, + "\xa1\x4e\x5d\x0b\x28\x23\xed\x9e\x3a\x19\x1a\x3f\x89\x60\x05\xd9\xbf" + "\x18\x65\x19\xdb\xb2\x61\xb5", + "\x6e\x9f\xab\x68\xc0\xb2\x44\x37\x8d\x50\x3b\xf5\x82\xf4\x9a\x43\xbe" + "\x6c\xb2\x06\xed\xf6\xee\x02\xbb\x55\xa3\x6b\x9c\x35\x33\x52\xc1\x9a" + "\xd0\xd2\xa2\x04\x88\x09\xe4\xdd\xc1\x60\x48\x5a\x2f\xa9\xf5\x43\x88" + "\x51\xeb\x9a\xa5\x0b\x15\x3e\x4a\x2d\x9b\xe1\xb1\xa1\x77\x2e\xae\x7b" + "\x4d\xce\x9d\xd6\x49\xad\x43\x57\xa2\xe2\xfb\x4e\x90\x24\x53\x9b\x01" + "\x4c\x94\xd2\x00\x5b\x5f\xa3\x2e\x47\xc5\x0e\x9f\x91\x4f\x08\x4e\xec" + "\x58\xf6\xe1\x08\xa9\xde\x3c\x29\xcc\xe5\xd9\x29\x80\xf4\xf4\x91\x1f" + "\xc0\x0e\x96\x13\xb2\x63\xcc\xa8\xa5\x97", + "\x8a\xdb\x4c\x0c\x1d\xc1\x5e\xcb\xf6\x6b\x43\x1a\x03\x0e\xc5\x14", 1, + 1032, + "\x6d\xac\xfb\x15\xf7\xe6\xfb\x26\xe4\x23\xc0\xee\x05\xf6\x86\xba", + 128 }, + { 192, 128, 
219, + "\x0c\xf9\x1c\xfd\xf3\xb0\x64\x58\x1d\xd0\x15\x34\xa3\x04\x29\x67\x74" + "\xc1\x7b\xa3\x0c\x5e\xa4\xee", + "\xb4\x57\x70\xb4\x39\xc4\x87\x75\x76\x4d\x12\xc9\xad\xbf\xcd\x2b\xe5" + "\x9a\xab\xc9\x8e\x74\x06\xdd\xd0\xc0\x5c\xb2\xde\xbb\xda\x43\x2a\x95" + "\x80\x3c\x35\x87\xe0\x1d\x67\x99\x0d\xd4\x93\xf9\x6f\xbd\x3f\x4a\x33" + "\xae\xca\x61\x71\xde\xc4\x22\x0b\xb8\x26\x47\xb3\x44\xd0\x22\x58\x56" + "\x73\xd3\xfc\x40\x78\x7f\xb3\xcc\xbf\xf5\x1c\x16\x62\xb9\xf9\xda\x1b" + "\x79\xbe\xff\x7b\x6f\x05\x87\xc6\xcd\xa6\x64\xb7\x3b\x48\xce\xfd\xcf" + "\x7a\x41\x57\x2f\xdd\x51\x49\x09\x7c\x31\xa6\x5c\x74\x82\xf3\xc7\x69" + "\x49\x3d\x6f\xb9\xbf\xca\xf7\xee\x20\x80\x59\x32\xb9\xba\x2b\xff\xee" + "\x44\xc1\xd1\x8b\x9d\x59\xad\x80\x8b\x71\x46\x69\xca\xe5\x36\x03\xa4" + "\x51\x88\x13\xb3\x1d\x7d\x4f\x03\x79\xcc\xcb\x1e\x0d\xdd\x09\xf0\x0d" + "\x9c\x06\xff\x2c\x69\xfd\xb4\x2a\xe4\x61\x67\xd6\xa5\x1e\x96\x78\x1e" + "\x6b\x91\xbe\xd4\x22\x90\xf3\x60\xb5\x01\x97\x09\x16\xa3\x66\x13\xba" + "\x10\x54\xf6\x38\xba\x59\x2e\xc8\x69\xc0\x85\x75\x7b\x94\x2d\x40\xd4" + "\xdc\xff\x3e\xeb\x3b\x99\xbd\x81\x81\xb4\x0e\x7c\xec\x75\x28\xbd\x48" + "\x73\xe4\x27\xbb\x67\xc8\xfa\x8b\xc5\xf4\x7e\xba\x7e\x55\xc9\x05\xe2" + "\x24", + "\xd5\xe0\x9f\x52\x98\x6b\x33\xaf\x35\x77\xe9\x75\xdd\x9a\x90\x16", 1, + 2048, + "\xa9\x8d\x89\x33\x2c\x05\xb1\x3d\xb1\x87\x48\x48\x11\x19\x8a\xb9", + 128 }, + { 192, 128, 220, + "\x93\xce\xcc\xe9\x6d\x37\x40\x62\x86\xa8\x81\xdb\x0d\x0d\x53\xce\x8f" + "\x7b\xf5\x3f\x4b\xdc\x74\x6a", + "\xcc\xca\x5c\x74\x61\xb8\x10\xd4\x14\xe2\x4f\xec\xf4\xd3\xee\xac\x3a" + "\x38\xc2\x96\xf7\xb8\x88\xf4\x36\x87\xd8\x8c\x8b\xa8\x19\x17\x98\xfc" + "\x43\x1f\x8b\x6c\x05\x76\xad\x1a\xa8\x41\xd3\xc9\x54\x50\x60\xdf\xfb" + "\x76\x75\xc8\xcf\xa2\xfa\x41\x98\x89\x57\x2b\x39\xe5\xed\xf1\xa4\x0b" + "\x54\xa6\x44\x30\xa6\x06\xac\x7f\x19\xf4\xeb\x28\x7a\xd2\x60\x9e\x23" + "\x80\xd7\xfe\x48\x34\xf5\xae\xac\x98\x32\xb5\xed\x0c\x66\x99\x4a\xc1" + "\x7e\x0d\x11\xbb\x57\x64\x3a\xa5\xf6\xad\x44\xcf\x57\xc5\x4d\xcd\x94" + "\x93\x1e\x52\x85\xf9\x3e\x03\x10\x79\x13\xae\xa5\x04\x8a\xd0\x4a\x4e" + "\xe8\xe3\x51\x0a\xc0\x30\x5a\x13\xa5\x74\xd7\xc9\x55\xf2\x91\x2e\xf2" + "\x39\xb4\x0d\xcf\x5a\xd7\x43\x8a\x19\xad\x7b\xec\xe2\xe7\x9e\xbd\x13" + "\x5e\x16\xed\x2b\xa5\x9f\x7e\x78\x57\xdc\x64\x80\xf7\x80\x30\x31\x57" + "\x89\x57\x84\xa7\x9f\x04\xf7\x84\x15\x07\x07\x88\x05\x2d\xcd\x74\x58" + "\xd9\xff\x86\xa7\x05\x68\x4b\xce\x83\xd2\x9f\xc8\xc0\x96\xfa\xc2\x98" + "\x53\x60\x35\x00\xb4\x9a\xce\xc5\xd4\x38\xe7\x60\x72\xde\x71\x84\x11" + "\x93\xf8\x44\x09\x81\x69\x3b\x62\x5b\x30\x82\xe2\xba\x89\x8d\xcc\x79" + "\x23\xa6\x0c\x8d\x95\x3c\xb3\x9f\xa5\xcb\xdd\x42\xea\xb1\x37\x33\x26" + "\x12\x33\x8c\xe2\x0d", + "\x66\x0b\x84\x72\x7a\x22\x9e\x09\x22\xb5\x61\xe8\xd0\xdd\xbd\x5e", 1, + 2216, + "\xbf\x13\x68\xbb\x09\x37\x3b\x1e\x0e\xed\x89\xee\xea\x93\x21\xc1", + 128 }, + { 192, 128, 221, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa0\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 222, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x18\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 223, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa3\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 224, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x1b\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 225, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x21\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 226, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x99\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 227, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1c\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 228, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x86\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 229, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x1a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 230, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\xc7\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 231, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x87\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 232, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd1\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 233, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + 
"\xa1\x1d\xa3\x9a\x84\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 234, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd2\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 235, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\xfe\xbf\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 236, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\x4f\x85\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 237, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbe\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 238, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x84\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 239, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\x3f\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 240, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x05\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 241, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x3c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 242, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xf4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 243, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb2\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 
244, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x54\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 245, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf5\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 246, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd9\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 247, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf6\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 248, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xda\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 249, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\x74\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 250, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\x58\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 251, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x92", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 252, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x9a", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 253, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x91", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 254, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x99", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 255, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\xd3", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 256, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\xdb", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 257, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\x7e\xbf\x1c\xb3\x87\xf4\xcf\xdc\x13", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 258, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\xcf\x85\xd4\x55\xdc\xd8\x4d\x4f\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 259, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa0\x1d\xa3\x9a\x86\xf6\x46\x7e\xbe\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 260, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x18\x87\x2f\x47\xd0\x30\xdf\xcf\x84\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 261, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x1a\x86\xf6\x46\xfe\xbf\x1c\xb3\x87\xf4\xcf\xdc\x93", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 262, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\xc7\xd0\x30\xdf\x4f\x85\xd4\x55\xdc\xd8\x4d\x4f\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 263, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa1\x1d\xa3\x9a\x86\xf6\x46\xfe\xbf\x1c\xb3\x87\xf4\xcf\xdc\x13", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 264, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x19\x87\x2f\x47\xd0\x30\xdf\x4f\x85\xd4\x55\xdc\xd8\x4d\x4f\x1b", 0, + 128, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 265, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x5e\xe2\x5c\x65\x79\x09\xb9\x81\x40\xe3\x4c\x78\x0b\x30\x23\x6c", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 266, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe6\x78\xd0\xb8\x2f\xcf\x20\x30\x7a\x2b\xaa\x23\x27\xb2\xb0\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 267, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 268, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 269, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 270, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 271, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x21\x9d\x23\x1a\x06\x76\xc6\xfe\x3f\x9c\x33\x07\x74\x4f\x5c\x13", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 272, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x99\x07\xaf\xc7\x50\xb0\x5f\x4f\x05\x54\xd5\x5c\x58\xcd\xcf\x1b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 273, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa0\x1c\xa2\x9b\x87\xf7\x47\x7f\xbe\x1d\xb2\x86\xf5\xce\xdd\x92", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 274, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x18\x86\x2e\x46\xd1\x31\xde\xce\x84\xd5\x54\xdd\xd9\x4c\x4e\x9a", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 192, 128, 275, + "\xdf\x23\xcd\x79\x69\x38\x87\x21\x43\x7b\xa1\x3d\x56\x2a\xc4\x59\x39" 
+ "\x2a\x8f\xb8\x8f\x51\x92\x5e", + "\x3c\x11\x58\x1c\x4b\x96\x64\x92\x6d\x77\xe1\xa7\xd1\x87\xb0\x0a", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 128, + "\x73\x5c\xdb\x81\xfb\xbd\x61\x72\xcb\x7f\xc0\xca\xe1\x3b\x7a\xc9", + 128 }, + { 192, 128, 276, + "\xdf\x23\xcd\x79\x69\x38\x87\x21\x43\x7b\xa1\x3d\x56\x2a\xc4\x59\x39" + "\x2a\x8f\xb8\x8f\x51\x92\x5e", + "\x69\x58\xfb\xba\x11\x73\xef\xad\x3c\x90\x03\xba\xae\x27\x8d\x3d", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 128, + "\x73\x5c\xdb\x81\xfb\xbd\x61\x72\xcb\x7f\xc0\xca\xe1\x3b\x7a\xc9", + 128 }, + { 256, 128, 277, + "\xd7\x96\xa0\x5c\xef\x10\x9d\x0e\xf3\xa8\x11\x07\xa1\x1b\xd3\x69\x15" + "\x93\xc1\x1a\x0c\x98\xeb\x4d\x31\xad\x91\x3b\x1e\x4e\x93\xbe", + "", + "\x75\xb0\x98\xb6\x02\x51\x55\xfd\x69\x9e\x66\x90\x8c\x40\x01\x56", 1, + 0, "\x36\xdc\x70\x0a\xbd\x99\xd9\x9d\x8f\x5b\xc0\xce", 96 }, + { 256, 128, 278, + "\x31\xe3\x9f\x00\x79\xa1\x82\xc9\xb2\xba\x38\xb8\xbc\xc4\x85\x18\xa2" + "\xdf\xf1\xdf\x88\x52\x85\xfc\x05\x42\xf8\x3b\x25\x5a\x52\x0c", + "\x45", + "\x1e\xe0\x76\xf7\x08\xe6\x4c\xc5\x5c\x71\xd8\x63\x3e\xc2\x92\x31", 1, + 8, "\x2c\xc7\x45\x9f\xa6\xa5\x3f\x8c\x9a\xf1\x41\x54", 96 }, + { 256, 128, 279, + "\xcc\xa7\x77\xcf\x57\x59\x24\x60\x27\xd6\xe4\xc7\x3b\xaf\x3d\xb9\xb9" + "\xd8\x34\xc3\x70\x32\x03\x57\xd1\x69\xe5\x31\x93\x60\x61\xf4", + "\xc0\x0b", + "\xeb\xe4\xe5\x69\x51\xe1\xef\x7c\x72\x1f\x42\x9e\xec\x78\xc1\x5e", 1, + 16, "\x5b\x48\x47\xb7\x7b\xab\x2a\x6e\x54\x60\xda\x65", 96 }, + { 256, 128, 280, + "\xa2\x91\x2f\xfd\x04\xdd\x0c\x12\xf9\xb1\x44\x31\x9a\x46\x5a\xf5\x72" + "\xb4\xbe\x8a\xa0\x91\x6b\xaa\xa6\xed\xb9\x62\x05\xee\x8b\xb1", + "\xff\xc2\xfd\x8c", + "\x2d\x8b\x37\x38\x32\xd0\x29\x3f\x10\x37\x40\x9f\x6a\xd0\xc4\xf1", 1, + 32, "\xac\x26\xc6\xc0\x1e\xc2\xdf\x59\x07\xb7\x1a\xf2", 96 }, + { 256, 128, 281, + "\x45\x76\xc1\xe2\x47\x0b\x55\x7f\x85\xae\x2d\xef\xc0\x78\xcb\x9e\xea" + "\xc5\x2b\x32\x35\x13\x7f\x8b\xce\xfd\x9c\x11\x6b\x80\x4d\x2c", + "\x4e\x28\x4a\xb6\xf6\xe4\x9b", + "\xeb\x91\x0a\xfe\xc0\x79\xb3\xda\x46\x01\xfe\x6c\x9d\xe3\x29\x69", 1, + 56, "\x20\xd2\x9d\xd5\x28\xb2\x6a\x71\x6b\x11\x22\xd4", 96 }, + { 256, 128, 282, + "\xbe\x85\x5e\x5f\x44\x77\xa5\xf0\x6e\x64\xe9\x69\xcf\x0f\x7a\xad\x23" + "\x99\xd1\x0a\xc6\x30\x1b\xd2\x96\x18\xb5\x30\x31\x53\x04\xaa", + "\xb3\x5e\xa0\x0e\x01\xf3\x04\x38", + "\xba\xdb\xcd\xfb\x66\xe6\x8b\x94\x9b\xcb\x31\x4b\x36\x7c\x82\xcf", 1, + 64, "\x1d\xc8\xf6\xcb\xa3\x82\x8c\xb8\x86\x2e\xa9\x0d", 96 }, + { 256, 128, 283, + "\x30\xf1\x06\x09\xfc\xf8\xae\x39\x8f\xa5\x0f\xb2\x80\x7e\x0b\x60\x5f" + "\xe1\xf5\x96\x2f\x7c\xd7\xd6\x63\xfa\x2c\x50\xbb\x0e\xd5\x37", + "\xc6\x6f\x86\xfb\xee\x5e\xda\x6a\x0a\xc4\xf6\x4d\x7e\xf4\xb8", + "\xb7\xe5\xec\xd2\x35\xa4\x06\x15\x2e\xdd\x49\x09\xf1\x63\x52\x06", 1, + 120, "\xf2\x51\x34\x5f\x80\x11\xb4\xf7\xfd\xd5\x9a\xa2", 96 }, + { 256, 128, 284, + "\xb5\xca\x16\x87\x71\x37\xe3\x59\x5d\x05\x60\x01\xb1\x82\xeb\x16\x51" + "\xe4\xae\x6a\xf0\x80\xce\x7e\xe0\xa0\xc5\x65\x1a\x09\x46\x03", + "\xea\x94\xe0\x62\xb1\x0e\x5d\xfd\x2e\xa9\x93\xcb\x6a\x10\x3d\x98", + "\x21\x43\x9f\xb4\x07\x24\x7a\xc0\xa9\x8a\x30\x2d\x6c\xff\x4b\x0f", 1, + 128, "\x1f\xc2\x12\x8c\xf2\x47\xfe\xcb\x74\x68\x59\xfc", 96 }, + { 256, 128, 285, + "\xb5\x68\x42\x55\x46\x3e\x57\x88\xbe\xc2\xd7\x5c\x8c\x46\x3a\x65\x8c" + "\x79\x42\x8d\x49\xfb\x2a\xf5\xf1\x25\x6c\x6b\xb1\x71\x1f\x33", + "\x32\x9f\x30\x4b\x5d\x32\xe4\x81\x86\x82\x23\x06\xd6\x64\x1c\x09" + "\x0a", + "\xff\x54\xec\xd2\x6a\xa9\x4a\xac\xd6\xd9\x2b\xd9\xf6\x32\x3f\xf9", 1, + 136, 
"\x93\x88\xc4\xbf\x74\x15\x7c\x59\x01\x80\xe0\xc1", 96 }, + { 256, 128, 286, + "\x76\x07\x4f\xef\xee\x14\x8c\xd8\x87\x3d\x23\x51\x17\x5b\x8f\x0b\x46" + "\xba\x38\x81\xf0\x7d\x5b\xd7\xe6\x7a\x65\x9e\x01\x83\x0a\x40", + "\x8c\x03\x37\xfc\x94\x01\xe6\xa5\x8e\x40\x8a\x11\xdf\xf5\x7a\xb2\xdd" + "\xc1\x7c\x16\x82\x77\x8f\xa9", + "\x36\x86\xf1\xf8\xc1\x8a\x94\x4c\xb9\x70\xf0\x89\xc9\x39\xcc\xd5", 1, + 192, "\x0c\x5b\x68\xa8\xc3\xfd\xf6\x45\xe3\xac\x1e\x56", 96 }, + { 256, 128, 287, + "\xe2\xa7\x2f\x64\x30\x1e\x4d\xb6\xe8\xc0\x5c\x31\x2f\x2c\xea\x92\xfb" + "\x8b\x06\x3e\xb7\xfe\xc2\x3e\xa4\xe9\x8c\x46\xfd\x04\x84\xd6", + "\xc7\x0f\xb4\x3d\xe3\xaf\xf1\x1d\xbc\xab\x9b\x6c\x26\x7c\x4b\x5b\x35" + "\xcd\x9c\x08\xec\x69\x99\x41\x9a\x67\x71\xbc\xc7\x34\xae\x86\x50\xa5" + "\x6a\x42\x99\xc2\x10\x5b\x32\xbe\x02\x18\x1e\xaa\xeb\xe7\x9f\x07\x47" + "\x76\x15\x73\x95\x9d\x2b\xd7\x1b\x08\x74\xde\x54\xec\x2f\xd1\x7b\xfd" + "\x87\x1a\xcd\x76\x6d\x53\x13\xcb\xff\xbe\x26\xce\xd0\x83\xf5\x22\x4f" + "\x27\x77\xcd\x65\xac\x4d\x2d\xb0\x8f\x21\x3c\x0f\x7a\x5b\xcc\xbc\x19" + "\xb9\xbd\x42\xab\x64\x11\x61\x42\xf4\xd9\x4b\x09\x28\x0e\xe5\x84\x74" + "\x40\x55\xf8\x1e\x2f\xef\x29\x1b\x36\xae", + "\x54\x54\x83\x20\xe2\x80\x1d\xad\x45\x68\x32\x97\x56\x58\x67\x06", 1, + 1032, "\x89\x98\xac\x05\xe1\x1c\x96\x4f\x22\x09\x0c\xe7", 96 }, + { 256, 128, 288, + "\xd8\xd2\x86\xaf\x2b\x74\xab\x17\xbe\x1b\x23\x94\x84\x32\x82\xd2\x4a" + "\x19\xa2\x72\xa7\x1b\x0a\xc9\xb0\x5a\xbe\x82\x6e\xc7\xb9\xcd", + "\xa9\xdb\x39\x76\x5a\xd3\x0e\xa7\x18\xa2\xf7\x46\xe7\xe5\xc9\xc2\xb5" + "\xbb\xc8\xb5\xd7\x5f\x83\xde\xb6\x6d\x79\x73\xc9\x7f\x93\x62\x27\x48" + "\x71\x91\xb7\x18\x99\xc1\xaa\x7a\x32\xf9\xf4\xfd\x69\x97\x9a\x9d\x17" + "\xe8\xe3\xbf\x18\xa2\x8a\xc2\x74\x88\xdf\xa8\xf1\xc6\x96\x12\xbd\x3a" + "\x5d\xe0\x7e\x1d\x6b\x57\xb6\xab\x4a\x1c\xef\x60\xb8\x04\xac\x64\x6d" + "\x8f\x22\xcc\x47\xa1\x53\x99\x05\x86\xab\xdd\x61\xce\x8b\x8f\xb8\x4d" + "\x05\x1e\x56\x37\x81\xb5\x32\x21\xc2\xe8\x30\x22\x84\x4a\x1f\xb4\x22" + "\x5e\x60\xe8\xd7\x43\x6f\xf2\x05\x5b\x83\xec\x55\xcd\xa7\xb8\x4c\xb3" + "\x6e\x8a\x92\x62\xa5\x5e\x08\x07\x42\xc2\xda\xa9\x40\x1e\x51\x1f\x6c" + "\xec\x98\xce\xaf\xe3\xae\xa6\xe1\x07\xa6\x51\x8c\x76\x3e\xa1\x07\xd2" + "\x54\x9b\xa3\xf6\xa6\x6e\x64\xd3\x21\x0f\x96\x29\x79\x60\xf7\xac\xde" + "\x14\xb4\x67\x7a\xc3\x38\x5d\x26\x2a\xe9\x87\x0a\xe9\x73\x71\xba\xac" + "\x2e\xc7\xe9\xad\x98\x1f\x4f\x37\xaa\xdb\xff\x8d\x2f\xaf\xd1\x61\xa6" + "\x8e\x5f\x16\xb8\x21\x9f\x15\x72\xf8\x39\x07\xc4\x46\x44\x25\xf1\x43" + "\x3b\xec\xdc\xaf\x29\x09\xc5\xb6\x02\xf9\xe2\x82\x2e\x59\x5e\x8c\x9d" + "\xfc", + "\x1c\xaf\x54\x2b\xee\xa4\xbb\x14\x6d\x98\x93\xa4\xf0\x44\xf6\x93", 1, + 2048, "\x14\x3e\x28\x5a\x5c\x31\x0a\xc1\xe9\xa0\x18\x1a", 96 }, + { 256, 128, 289, + "\xdd\xba\xfd\xc9\x5b\x4c\x1c\x19\x2d\x3a\xc6\x8b\x03\xd5\x67\x4a\x36" + "\xef\x50\x77\x17\x49\xf4\xfc\xef\x6d\xf9\xac\xaf\x2e\xd0\xc2", + "\x87\xed\x96\x03\x9e\x80\xae\xe7\x91\xdc\x89\x10\xb8\xfe\xbc\x4f\x51" + "\xf2\x97\x79\x4a\x6a\x47\x38\x0b\x80\x1d\x45\x5e\x89\x37\x90\x67\xb6" + "\x9e\xe5\x6a\x52\x85\x5d\x5e\x35\xd4\x21\x12\x0b\xe0\x62\xf5\xa9\xf9" + "\xf4\x9a\x82\x9a\x9a\x19\x62\x63\x11\x63\x2e\x6a\x16\x67\x25\x79\x5b" + "\x66\x46\x57\x6d\x48\x12\x47\x0a\x9f\x20\xf0\x81\x6d\xf2\x32\x0a\x47" + "\xae\x77\x74\xc5\x3b\xda\xc5\xc9\xa1\x3a\xaa\x91\x5e\x4a\xb8\xac\x8c" + "\x9e\xb4\x5f\x06\x9f\x05\xb1\x11\x43\x10\xb7\xb2\xc2\xc8\x48\x04\x07" + "\x65\xd8\xea\xa9\xe9\x11\x22\xbb\x77\x01\x88\xeb\x14\xb1\x69\xc0\x0d" + "\x14\x11\x0d\x98\x3b\xd2\x98\xce\xbb\x26\x8c\x01\x64\xcb\xfd\x75\x70" + 
"\x4e\x83\xa0\x59\x7c\x0c\xcf\x5b\x83\xcb\x58\x05\x68\x98\x47\xb4\xec" + "\xa6\x45\x7e\xc1\x2e\xd7\x4d\xd7\xd2\x61\xdb\xb0\x6f\x11\x40\x14\xaf" + "\x14\xab\xc1\xd7\x6b\xd3\x0a\xb6\x9b\xe2\x80\x87\x29\x48\x96\x7d\xca" + "\xc0\xb5\x5a\x02\x08\x3a\xfc\xc4\x02\x0c\x94\x6e\x97\x09\xc3\x99\x47" + "\x70\x36\xef\xb4\xb8\xc5\x4f\xc0\x0a\x35\xfd\xcd\x28\x83\xcc\xd5\xdf" + "\x6f\x03\x3d\xc0\x13\x0a\xda\x4b\x9a\xd3\xae\xe6\x97\xa7\x00\x36\xb5" + "\xf3\x04\x83\xa0\xef\x8c\xfa\xe4\x9a\xb4\x91\xff\xee\x22\xf5\xad\xb0" + "\x24\xed\x3a\x18\xea", + "\xd8\xee\x23\xf7\x79\xc0\xb5\x4b\x7a\xb3\x83\xcb\x10\x7b\x00\x98", 1, + 2216, "\xcb\x86\x6b\x2f\x56\x88\x38\xb9\x3c\xec\xec\x38", 96 }, + { 256, 128, 290, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa6\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 291, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 292, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa5\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 293, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x53\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 294, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x27\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 295, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd1\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 296, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc3\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 297, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x95\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 298, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x81\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 299, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x14\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 300, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd1\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 301, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x22\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 302, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd2\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 303, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x21\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 304, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x99\x87\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 305, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\xb6\xd6\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 306, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x86\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 307, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd7\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 308, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x07\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 309, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\x56\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 310, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x87\x89\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 311, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x4e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 312, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa8\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 313, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x05\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 314, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8e\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 315, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x53\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 316, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8d\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 317, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x50\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 318, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x0f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 319, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\xd2\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 320, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x17", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 321, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x10", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 322, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x14", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 323, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x13", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 324, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x56", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 325, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x51", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 326, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x19\x87\xa9\xa9\xc6\x8f\xe6\x85\x96", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 327, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\x36\xd6\x6e\x04\xb3\x52\xa9\x63\x91", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 328, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa6\xc2\xf2\x01\xd0\xdf\xea\x19\x86\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 329, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x94\xf6\x94\x23\x7f\x3f\x36\xd7\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 330, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x81\xd0\xdf\xea\x99\x87\xa9\xa9\xc6\x8f\xe6\x85\x16", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 331, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x14\x23\x7f\x3f\xb6\xd6\x6e\x04\xb3\x52\xa9\x63\x11", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 332, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa7\xc2\xf2\x01\xd0\xdf\xea\x99\x87\xa9\xa9\xc6\x8f\xe6\x85\x96", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 333, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x51\x94\xf6\x94\x23\x7f\x3f\xb6\xd6\x6e\x04\xb3\x52\xa9\x63\x91", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 334, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x58\x3d\x0d\xfe\x2f\x20\x15\xe6\x78\x56\x56\x39\x70\x19\x7a\xe9", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 335, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xae\x6b\x09\x6b\xdc\x80\xc0\xc9\x29\x91\xfb\x4c\xad\x56\x9c\xee", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 336, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 337, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 338, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 339, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 340, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x27\x42\x72\x81\x50\x5f\x6a\x99\x07\x29\x29\x46\x0f\x66\x05\x96", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 341, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd1\x14\x76\x14\xa3\xff\xbf\xb6\x56\xee\x84\x33\xd2\x29\xe3\x91", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 342, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xa6\xc3\xf3\x00\xd1\xde\xeb\x18\x86\xa8\xa8\xc7\x8e\xe7\x84\x17", 0, + 64, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 343, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x50\x95\xf7\x95\x22\x7e\x3e\x37\xd7\x6f\x05\xb2\x53\xa8\x62\x10", 0, + 128, "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b", 96 }, + { 256, 128, 344, + "\x79\xed\xd4\x40\xcc\x8a\x32\xb8\xb6\xd4\x63\x05\xc4\xce\x36\x34\x65" + "\x85\x0e\x28\xba\xc4\xb5\xf6\x40\x0f\xcc\xa9\x0e\x3a\xf0\xaf", + "\x81\xdb\x9c\xba\x83\xd9\x70\x4f\x30\xdb\x67\x32\xa0\x08\x21\xbd", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 128, "\x30\x57\x26\x28\x5d\x7b\x1f\x62\x2a\x37\x91\x07", 96 }, + { 256, 128, 345, + "\x79\xed\xd4\x40\xcc\x8a\x32\xb8\xb6\xd4\x63\x05\xc4\xce\x36\x34\x65" + "\x85\x0e\x28\xba\xc4\xb5\xf6\x40\x0f\xcc\xa9\x0e\x3a\xf0\xaf", + "\x37\x78\xdb\xc5\xc2\xe8\xc0\xe0\x62\x24\x9c\x79\x21\x1e\x64\x52", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 128, "\x30\x57\x26\x28\x5d\x7b\x1f\x62\x2a\x37\x91\x07", 96 }, + { 256, 128, 346, + "\x71\x08\x96\x35\xa9\xd7\xd3\x56\x6e\x8e\xe2\x22\x1c\xd1\x59\x60\xbd" + "\x83\xd1\x8c\x30\x34\x52\x36\x01\xed\x51\x48\x3c\x79\x28\x10", + "", + "\x16\x7b\x6a\x32\xc4\xac\x98\xfe\x73\xbf\xaf\xe4\x64\xba\x8c\xd2", 1, + 0, "\xe7\xe2\xe5\x65\xcb\x4c\x29\x91\x6c\xf5\x23\xbf\x32\xf9\xc8\xf8", + 128 }, + { 256, 128, 347, + "\x26\x2f\x1c\x95\x04\xff\x79\x67\xac\x50\x30\x15\xe3\x97\xff\x30\xab" + "\x0e\x88\xff\x74\xbc\xd0\xdf\x34\x41\x1f\x11\x0f\x60\xcb\xae", + "\x2c", + "\x96\x6a\x58\x80\x2c\x30\x57\x28\x12\x78\xa8\x75\x20\x86\x21\x90", 1, + 8, "\xdb\xc0\x1d\xa6\xe5\x11\x48\x18\x7e\x95\x38\x36\x25\xec\xb0\xf1", + 128 }, + { 256, 128, 348, + "\xb8\x0e\x88\x77\xe3\xf0\x2e\x5a\x1b\x92\xee\x64\xb8\x0a\x7a\xeb\x1b" + 
"\xe3\xb0\x18\x48\xaf\xbd\x8b\x10\xac\xf3\x46\x8f\xa7\x1c\x95", + "\x40\x12", + "\xa1\x57\x98\xc2\x75\x14\x59\xce\x9c\x1e\x7e\x35\x8e\xac\x0b\x06", 1, + 16, + "\x26\xba\x26\x1f\xd1\xe8\x95\x48\xd8\x19\x08\xe5\xa7\x4c\x11\x11", + 128 }, + { 256, 128, 349, + "\x3a\x4a\xff\xa4\x4c\x51\xed\xb8\x1d\xe1\x67\x68\xdc\x8d\x6d\x99\xbd" + "\x36\x42\x31\xc4\x45\xb0\xfa\xac\x6e\xa2\x87\x8d\x41\xf8\x25", + "\x33\x7c\x5b\xa3", + "\x86\x3f\x2e\x17\xd4\x14\x3a\x34\x88\x3e\x0f\x5c\xa9\xb0\xd6\xc5", 1, + 32, + "\x10\x80\x9d\xea\x9b\x6b\xf5\x9a\x16\xf7\x44\xe3\x69\x9e\x06\x82", + 128 }, + { 256, 128, 350, + "\xc1\x14\x77\x48\x32\xbb\xec\x17\x02\x74\x55\x74\x54\x0b\xb1\xd0\x04" + "\x48\x51\x94\xb8\xcb\x2c\x4c\x15\x9c\x26\xef\x21\x48\x18\xc6", + "\xdd\x19\xd0\x7e\x15\xcc\xd8", + "\x5f\x94\xe8\xf4\x41\xc7\x22\xb8\x57\x74\x60\xd8\x31\x33\x93\x0d", 1, + 56, + "\xb6\x44\xc2\x94\xe1\x50\x21\xd1\xbc\x95\x44\x78\x59\xce\xc0\xbf", + 128 }, + { 256, 128, 351, + "\x9e\x98\xbf\xfd\x38\x39\x22\x68\xa9\xea\x7c\x1b\x8a\xed\x18\x58\x66" + "\x66\xcc\x41\x9c\x03\x86\x09\x0f\xf8\x70\xc4\x59\x7e\x1a\x51", + "\x3f\x9c\x92\x3b\xf3\x83\x43\xa8", + "\xde\x5b\xa4\x4c\x9f\x61\x7a\x16\x32\x2f\x08\x7a\x49\x21\xad\x26", 1, + 64, + "\x9e\xc3\xb1\xf1\xcf\xa9\x7b\xa6\x0f\xae\xe6\xcf\x12\x02\x4f\x68", + 128 }, + { 256, 128, 352, + "\xe6\xd5\x91\x3e\x00\xcd\x10\xf2\xd1\x16\x8d\xc6\x6f\x45\xaf\x51\x1d" + "\x16\x11\xcc\x17\x31\xc0\xc5\x38\x9a\x99\xaf\x8a\xd7\xc4\x06", + "\xab\xb1\xbd\x62\x14\x0a\x6e\xeb\xff\x9a\x18\x62\xbc\x37\xd1", + "\xc4\x79\xcf\xd3\xc3\x41\x50\x94\x39\x76\xec\xdd\x53\x94\xd4\x92", 1, + 120, + "\x04\xf4\x49\xb5\xf0\x94\xc7\x21\xd9\xaa\x2f\x97\x02\xfe\x74\xb2", + 128 }, + { 256, 128, 353, + "\x84\x67\x5e\xd6\xeb\xa9\x25\x69\x0f\x1c\xc2\x1a\x69\xc8\x94\x3b\x82" + "\xe3\x44\x3a\x0b\x28\x23\x01\xbc\x7b\x6b\xf2\xba\xf6\xb2\x29", + "\x99\x22\x19\xd1\xbc\x60\xba\x0e\x1b\xa7\x2d\xb8\x57\xc9\xcf\x80", + "\x81\x3f\x5b\xe9\x5d\x2c\x66\x41\x2d\xf7\x12\x2f\xfc\xc9\x81\xa7", 1, + 128, + "\x02\x6e\xa8\x7e\x04\xe0\x4e\xad\xaf\xba\x49\xca\x4e\xac\x61\x0e", + 128 }, + { 256, 128, 354, + "\xaf\xf9\x02\xdd\xaf\x53\xdb\xb0\x7e\xc0\xd0\x61\xbb\x26\x66\xb7\x84" + "\xdb\xf8\x38\x66\xa4\x0f\x09\x8d\x53\xbc\x6b\xa3\x21\xc2\x31", + "\x60\x75\x42\xe0\xf7\x36\xfd\x4c\x8a\xd3\x49\xcf\xb1\x73\x07\xb1" + "\xc6", + "\xd9\x1a\x6f\xc3\x11\x0c\x4f\x82\xad\x5f\x31\x78\xe0\x6e\x72\x4d", 1, + 136, + "\x8c\xb2\x53\xaf\x95\x90\x90\x83\xa5\x73\x6c\x71\x00\xc1\xf2\x97", + 128 }, + { 256, 128, 355, + "\x49\x07\x7e\xe5\x6e\x7a\xad\x04\xd9\x02\x75\xb6\x86\xd9\xcc\x3a\x99" + "\xaf\xbb\xbf\xad\x5c\x0a\xf1\xd1\x18\xdb\xd9\xbb\x6b\x04\xbf", + "\x8a\xfc\xba\xc9\x7b\xa9\xe9\x9c\xe0\x12\x6b\xa0\x18\x69\x93\x1d\x04" + "\x1c\xa8\x1f\x49\xd8\x09\x3a", + "\xd9\x4e\x5c\x57\xfc\x36\x42\x22\xad\x35\xbe\x5b\x9e\x3d\x9a\x87", 1, + 192, + "\xa2\x0c\x60\xc2\x60\xf1\xae\xcb\xb2\xd2\x2e\xc8\x82\x9b\xf9\xf6", + 128 }, + { 256, 128, 356, + "\x7e\x4c\x3e\x47\xb9\x7f\xa6\x36\x2f\xe7\xf6\x60\xd2\x91\x67\x2e\x66" + "\xb5\x53\x58\x9e\x19\x40\x4b\x5c\x90\xa2\xff\x43\x84\x69\x14", + "\x0e\xf9\xd4\xbb\xb9\xed\xd5\x2c\x6e\x4d\x2e\x1d\xf0\x84\xc3\xda\x04" + "\x11\xfb\xd9\x60\xf9\x79\x3f\x87\x5b\xd8\xc1\xbc\x75\x1c\x7d\x78\xc0" + "\x07\x18\xce\x34\x4e\x11\xa6\xfb\x14\x87\x63\x4e\xdb\xfe\x52\xaf\x80" + "\x4c\x65\xa4\x6e\x91\x8b\xdc\x6a\xee\x98\xbe\xe3\x1f\x0b\x98\x7a\xde" + "\x33\xa0\x97\x6e\x1d\xb2\x66\xb2\xc1\x33\x45\x3e\x02\x7b\xd8\x65\x19" + "\x98\x17\xdf\x26\x89\xac\x09\xa7\x59\xd3\xbe\x19\x5c\xd4\x56\x52\x8a" + "\xbd\xef\x2f\x69\x38\xf8\x71\xe2\x55\xd0\xd0\x4e\x13\x0e\x19\x0b\x93" + 
"\x45\x21\x98\xc8\x5b\x0e\x0b\x9f\xc0\xce", + "\xdd\x37\xb1\xcc\x99\xa4\x81\x45\x41\x63\x36\xb6\x1d\x3b\x1f\x39", 1, + 1032, + "\x28\x00\xc4\xab\x25\xd2\xb5\x62\xce\x76\xa3\x5c\x03\x54\x2f\x66", + 128 }, + { 256, 128, 357, + "\x3c\xfa\x7a\xe7\x45\x0c\xa9\xd5\x14\x50\xd4\x81\xf6\xa8\xd4\x2a\x42" + "\x54\x07\x0d\xd8\x8d\xf3\x4e\x9d\x43\x02\x8f\xad\x1d\xad\x90", + "\x03\x92\x1a\xc5\x7a\x10\x12\xfa\x1f\x5b\x99\xfc\x96\xf1\x81\xd7\xc7" + "\x17\x29\x03\xe8\x0b\x52\xe7\xb9\x68\xa2\xdc\x96\x05\xec\x39\x07\x83" + "\xcf\xb8\xbe\x86\x7e\x6e\xab\xfc\xa3\x9c\xef\x4d\x1c\x53\xd4\x3e\xe6" + "\x1b\x18\x96\x35\xd3\x9a\x77\x9e\x00\xd4\xdc\x54\x28\x11\xc1\x1a\x3b" + "\x88\x03\xe5\x72\x99\x31\x4e\xf0\xb3\x11\x99\x3f\xa1\x57\x0e\xc2\x8a" + "\x9a\x83\xfe\x2c\xc9\x59\xc6\xd3\x0e\x1d\xaa\xa7\x1c\x81\x73\x54\x84" + "\xc6\x1a\x05\xf9\x45\x1e\x49\x41\x1b\xc6\x33\xb5\xe7\xe6\x38\xba\xe7" + "\xe3\xc9\x07\x46\xe8\xe1\xef\x79\x6d\x34\x9e\x5c\x90\x00\xc8\x7e\xfa" + "\xbf\x33\x73\xa9\xb5\x2c\xea\x26\x99\x5a\xf3\xcd\x7c\xf9\x84\xb7\x9a" + "\xcb\x79\x37\xf4\x1c\xd4\xaf\xe8\x67\xc3\x77\x81\xdb\xa2\x75\xc4\x17" + "\x6e\xe1\xa3\xfe\x7d\xe1\x1b\xb8\x6e\x28\x72\x46\x5e\xe8\xb4\x94\xca" + "\x64\x98\xc2\x34\x78\xe3\x1b\x45\x1f\x38\x28\x88\x2b\xcc\xf0\x4d\x83" + "\x29\xfb\x7d\x35\xa4\x90\x39\xc4\x54\xb6\x8f\x8f\xd2\x7c\xeb\xee\x84" + "\xcc\xd7\x19\xc5\x74\x1f\x57\xe6\xa9\x35\xfe\x3e\x95\xef\x47\xfd\xa7" + "\xb0\x90\x72\x8f\x81\x1b\x0b\x06\x3e\x50\x87\x61\xd4\x9f\x26\x3f\xfc" + "\xf2", + "\x3b\xf9\xef\xd9\x51\x02\xa4\x19\xd0\x6d\x75\xd3\xaf\x5d\xa6\x3c", 1, + 2048, + "\x8b\x71\x1c\x1b\x5b\x50\x14\xf3\x1c\x3f\xb5\x18\x3e\x09\x6f\xcd", + 128 }, + { 256, 128, 358, + "\x9c\xe7\xb1\xd9\x3f\x41\x1a\x38\xd4\x13\xf6\x33\xf9\x0c\xd4\x49\x34" + "\xa0\x76\xd6\x46\x34\xb4\x7b\x9f\x1b\xcb\xed\xc4\x07\xe3\xb5", + "\xe8\x4a\xa2\x87\x13\xdc\x0f\x02\x98\x26\x99\x8b\x18\xcc\xe8\x95\xcc" + "\xfe\xf5\xde\x60\x1c\xa1\xbc\x5a\x6f\x4b\x86\x1a\xe3\x85\x03\x1f\x89" + "\xdd\x32\x52\xc0\xaf\xcd\xfd\x5d\xce\x87\x03\xef\x12\x52\xbc\x9c\xf5" + "\x33\x85\x7e\x16\xe2\x80\xbd\x8f\x0c\x1c\xcf\x1d\xe2\xcf\x88\xe5\x3e" + "\x0f\x27\x54\xca\xd2\x61\xf1\x52\xb6\x44\xc6\xdb\x09\x87\xc8\xb1\x05" + "\x9c\x1e\xf3\x4a\xaf\x57\x30\xec\xea\x80\xbe\xec\x84\x26\x2e\xbd\xf9" + "\x72\x62\x19\x3a\x4e\x04\x30\x5d\x82\x13\x2e\x80\xf3\x30\x32\xd6\xac" + "\x43\x88\x6f\x39\xd7\xc6\x86\x50\x95\x52\xa8\xa7\x2e\xfa\x65\x81\x1a" + "\xee\xfb\x82\x06\xfd\x02\xbd\x8c\xd9\xfa\x90\xf7\x36\xa2\x9c\xc1\xe7" + "\x8f\xa7\xb3\x27\x70\x7e\xb4\x75\xd7\x0c\xa9\x2a\x4e\x39\x66\x7b\xed" + "\x7f\xed\x3c\xeb\x60\x85\xae\xf1\x30\xa2\xc0\xb0\xf9\x82\xeb\x01\xfd" + "\xcf\x9c\xdd\xbf\x33\xa6\x79\x45\x5d\x16\x90\x5a\x0f\xd6\x4d\x52\x71" + "\x06\x59\x0e\xb4\x15\x1a\x62\xf3\xc6\x5f\xb9\x1f\x5f\xe0\x02\x62\x46" + "\xa5\x18\x2f\xca\x77\x8e\x43\x04\x3f\x66\x18\xe5\x73\x7e\x8f\xe8\xd8" + "\x82\xd2\xa1\x16\x2c\x5f\x65\xf8\x20\xce\x99\xad\x44\x32\x7d\xcf\xcb" + "\x54\xbb\xd5\x89\xfa\xa3\x29\xad\x29\x57\x00\x72\x4b\xf7\x45\x5d\x97" + "\xfe\xbf\xb2\x5d\xb9", + "\x97\x0b\xf2\x36\xc7\x57\x6c\x97\x7f\xdc\xf7\x2c\x5d\xb6\x27\x76", 1, + 2216, + "\x6e\xee\x90\x6c\x6f\xe2\x6b\xcb\x3e\x63\x0e\x49\xb7\x51\x76\x33", + 128 }, + { 256, 128, 359, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4d\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 360, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbb\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 361, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4e\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 362, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xb8\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 363, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xcc\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 364, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x3a\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 365, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe3\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 366, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb5\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 367, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x19\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 368, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x8c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 369, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + 
"\x4c\xe2\x08\x99\x74\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 370, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x87\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 371, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x77\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 372, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x84\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 373, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x86\x50\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 374, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\xa9\x01\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 375, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x51\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 376, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x00\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 377, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\xd0\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 378, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x81\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 379, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x50\xe6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 380, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x21\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 381, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x40\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 382, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xed\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 383, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa7\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 384, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7a\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 385, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa4\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 386, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x79\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 387, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\x26\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 388, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\xfb\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 389, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x62", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 390, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x65", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 391, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x61", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 392, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x66", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 393, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\x23", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 394, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\x24", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 395, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x06\x50\xc6\x41\xaa\xa6\x70\x63\xe3", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 396, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\x29\x01\x01\xec\xdf\x7b\x3f\x85\xe4", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 397, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4d\xe2\x08\x99\x75\xdb\xbd\x06\x51\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 398, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbb\xb4\x0c\x0c\x86\x7b\x68\x29\x00\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 399, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x19\x75\xdb\xbd\x86\x50\xc6\x41\xaa\xa6\x70\x63\x63", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 400, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x8c\x86\x7b\x68\xa9\x01\x01\xec\xdf\x7b\x3f\x85\x64", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 401, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4c\xe2\x08\x99\x75\xdb\xbd\x86\x50\xc6\x41\xaa\xa6\x70\x63\xe3", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 402, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xba\xb4\x0c\x0c\x86\x7b\x68\xa9\x01\x01\xec\xdf\x7b\x3f\x85\xe4", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 403, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xb3\x1d\xf7\x66\x8a\x24\x42\xf9\xaf\x39\xbe\x55\x59\x8f\x9c\x9c", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 404, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x45\x4b\xf3\xf3\x79\x84\x97\xd6\xfe\xfe\x13\x20\x84\xc0\x7a\x9b", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 405, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 406, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 407, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 408, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 409, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\xcc\x62\x88\x19\xf5\x5b\x3d\x86\xd0\x46\xc1\x2a\x26\xf0\xe3\xe3", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 410, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x3a\x34\x8c\x8c\x06\xfb\xe8\xa9\x81\x81\x6c\x5f\xfb\xbf\x05\xe4", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 411, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07", + "\x4d\xe3\x09\x98\x74\xda\xbc\x07\x51\xc7\x40\xab\xa7\x71\x62\x62", 0, + 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 412, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbb\xb5\x0d\x0d\x87\x7a\x69\x28\x00\x00\xed\xde\x7a\x3e\x84\x65", 0, + 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 128 }, + { 256, 128, 413, + "\xaf\xb8\x06\x35\x37\x60\xe5\x9a\x08\xbd\x78\x70\xed\xeb\xce\xd4\xb8" + "\x72\x3e\xe3\x1d\x7d\x4f\x96\x1e\x4e\xf2\x18\x6a\x7d\x3a\xbf", + "\x9c\x33\xf8\x42\xdb\xa5\x15\x3e\x8d\x65\xb9\x17\x77\x11\x2c\x69", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 1, + 128, + "\x3b\x44\x52\xec\xb8\xa0\xb1\x77\x2c\x0e\x50\xc1\xf5\x94\x09\xbf", + 128 }, + { 256, 128, 414, + "\xaf\xb8\x06\x35\x37\x60\xe5\x9a\x08\xbd\x78\x70\xed\xeb\xce\xd4\xb8" + "\x72\x3e\xe3\x1d\x7d\x4f\x96\x1e\x4e\xf2\x18\x6a\x7d\x3a\xbf", + "\xba\x32\xdf\xc0\x83\x55\xf8\xde\x70\xb6\xab\xf7\x32\xf2\x88\xd0", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 1, + 128, + "\x3b\x44\x52\xec\xb8\xa0\xb1\x77\x2c\x0e\x50\xc1\xf5\x94\x09\xbf", + 128 }, + { 0, 128, 415, "", "\x62\xe8\xe0\xda\x60\x00\xc6\x42", "", 0, 64, + "\xa2\x3c\xe8\xec\xb4\x4b\xab\x25\x46\xf3\x94\xc7", 96 }, + { 0, 128, 416, "", + "\x64\x46\xb9\xa0\x2b\x16\x76\xbe\x12\x46\x5f\xad\x38\x8e\x39\xfe", + "", 0, 128, "\xc9\xae\x3d\xc4\x39\xf2\xcd\x95\x4f\xff\xd0\xf8", 96 }, + { 0, 128, 417, "", "\x43\xf6\x3d\x68\x0d\x70\x12\xfe", "", 0, 64, + "\xc8\x46\xfa\x33\x6e\x6c\xe3\xf2\x04\x2f\xc9\x1b\x4a\x27\xb0\xfb", + 128 }, + { 0, 128, 418, "", + "\x8a\x27\x59\xaf\xa8\xa7\x77\xc4\x15\x27\xa6\x0b\xc5\x45\x5e\xfd", + "", 0, 128, + "\xf3\x7b\x6d\x4c\x49\xbd\xda\xc0\xbc\xe7\xf7\xdf\xb2\x76\x06\x9f", + 128 }, + { 8, 128, 419, "\x64", "\x22\x8b\xd5\xce\x6a\x59\x3d\xcd", "", 0, 64, + "\xf9\xd8\xb3\xd1\xaa\x47\x75\x5c\x9f\x31\x2a\x00", 96 }, + 
{ 8, 128, 420, "\x23", + "\x8f\x5e\x20\x0b\x5c\x89\xad\x35\x8e\xed\xa4\xd0\xa3\xd4\x89\x17", + "", 0, 128, "\x1a\x72\x79\xd7\x41\x9a\x6a\xaa\x02\x64\x20\x4d", 96 }, + { 8, 128, 421, "\x96", "\xc2\x58\x27\xed\xb2\xf2\xee\xeb", "", 0, 64, + "\xf7\xf4\x52\xbf\xc3\x1b\xe5\x01\x2f\xbc\xdc\x79\xbd\xb5\x77\x82", + 128 }, + { 8, 128, 422, "\x71", + "\x3e\xb1\x29\x97\x19\xb9\x14\x3e\xed\xf2\x38\xe4\xc2\xde\x30\xf4", + "", 0, 128, + "\x97\xf0\xfc\x55\x34\x07\xfb\x52\xfa\xc2\x5a\xb5\xc6\x46\xfc\xf8", + 128 }, + { 64, 128, 423, "\x20\x93\xeb\x4f\x07\x02\x15\x15", + "\x2b\x1c\x35\xc5\x9f\x83\x59\x10", "", 0, 64, + "\x7c\xaa\x60\x94\xbc\xc0\x3e\x30\xdc\xb6\x63\x8a", 96 }, + { 64, 128, 424, "\x1d\x24\x5f\xe8\xb1\x2c\x12\x04", + "\x6c\xdc\x1a\xd1\x1e\x26\x57\x66\x20\x2b\xed\x63\x03\xf4\x5c\xc3", + "", 0, 128, "\xb0\x21\xda\x22\x34\xab\xde\x1c\x6e\x0b\xd4\x81", 96 }, + { 64, 128, 425, "\x8c\x4d\xcd\x56\xf3\x28\x0b\x42", + "\x65\xba\x8a\xb5\x45\xd1\xf7\xfb", "", 0, 64, + "\x62\x9c\x59\x6e\x66\x31\xcd\x62\x3d\xf9\x8f\x72\xcc\xf0\x94\xf1", + 128 }, + { 64, 128, 426, "\x5c\xda\xe7\x09\xce\x51\x7e\x61", + "\x9c\xa3\xd0\x42\xb9\xd9\x1c\x6a\xf0\xd2\x47\xcf\x28\x9f\x20\xd8", + "", 0, 128, + "\x1a\x96\x86\x28\xa0\x67\xbc\x3a\x1d\xc9\x5b\x06\x4d\x78\xbc\x3d", + 128 }, + { 160, 128, 427, + "\x1e\x09\x98\x1a\xd6\x52\x0d\xf2\x78\x96\x2c\x22\x5d\x14\x65\x2a\x14" + "\x4b\x04\x8b", + "\xa3\x4e\xc8\x9b\x01\x7a\x10\xd3", "", 0, 64, + "\xb8\xe4\xbd\xcd\x90\x1f\x5f\x56\x24\x7f\x27\xe4", 96 }, + { 160, 128, 428, + "\x09\x7f\x44\x2a\xcb\xba\xc6\xe3\xa8\x58\xc9\xbd\xf7\x6a\x36\xf0\x6a" + "\x10\x37\x06", + "\x6e\x9e\xb6\xa5\x8f\x91\xf9\x23\x1a\x94\x94\x3d\x78\x5c\x67\xa3", + "", 0, 128, "\x09\x8d\x00\xea\x1d\x92\x96\xd9\x3a\x03\x1d\x63", 96 }, + { 160, 128, 429, + "\xf1\xc1\x0c\x5a\x1c\x9a\x0a\x4a\x3c\x07\x43\x6f\xa6\xcf\xa9\xd4\xd8" + "\x78\x85\xb2", + "\x38\x05\x7f\x87\x9c\xa7\xcc\xe9", "", 0, 64, + "\x0a\xb9\xbb\xf8\xc2\xc3\xbf\xbd\xac\xd9\x56\xd8\x7b\xcb\xc5\x9c", + 128 }, + { 160, 128, 430, + "\x8e\x8d\x6c\x52\x26\xaa\x3c\x39\x83\xcb\x6a\x8e\x21\x1a\x5d\xca\x49" + "\xcb\xb3\x70", + "\x7e\xb1\xcf\xab\x0d\x9d\xea\x4d\x04\x1f\xc5\x90\x58\x55\xa3\xe4", + "", 0, 128, + "\x25\x49\xf2\x0a\x79\xac\x25\x67\x21\x47\x20\x16\x3b\x6c\x68\xef", + 128 }, + { 320, 128, 431, + "\x01\xe4\x6c\xc7\x91\x6f\xec\x9b\x53\x29\xb4\x22\xfc\xe3\x9b\x5e\xd1" + "\x4d\x21\x50\x7d\x75\x66\xf4\x98\x9d\x08\x7d\x5b\x00\xe7\x5a\x9c\xfa" + "\x03\x5f\xde\x39\x8b\x1c", + "\x76\xb0\xa2\x2e\x13\xf7\x3e\x7e", "", 0, 64, + "\x0d\x3a\xbf\xc7\x18\x44\x31\x47\x35\x52\x48\x9a", 96 }, + { 320, 128, 432, + "\x0d\x4d\x91\x11\x88\x54\x34\x2e\x7c\x26\x18\x66\xe9\xd4\x9a\x22\xfd" + "\xee\x0f\x28\xc5\xe5\xba\xa2\xcd\x74\xc9\xfd\x67\xbe\x9d\x3f\x14\xfa" + "\xaf\x0c\x60\x7c\xea\x94", + "\x91\x23\x96\x98\xee\xd6\xa6\x67\x12\x3c\xd0\xd9\xae\x85\xba\xf8", + "", 0, 128, "\xd3\x61\xf8\xeb\xfd\x7f\xb3\x43\x76\x3f\x5e\xdd", 96 }, + { 320, 128, 433, + "\x72\xdf\x7d\x65\xab\xa3\xb9\x7d\x2b\x31\xec\x7f\x39\x6c\xf2\x98\x3f" + "\x7b\x27\x97\xe3\x66\xb1\x3c\x5b\x0e\xf4\x46\x7b\xa7\xb7\xb8\x4a\xc7" + "\xc1\x82\x64\x43\x75\xee", + "\x9a\xdf\x89\x89\x56\x5b\xa6\x26", "", 0, 64, + "\x71\x08\x0a\xae\xef\x17\x2a\x0e\x3b\xf7\x32\xe5\xc4\x66\x60\x61", + 128 }, + { 320, 128, 434, + "\x85\xc3\x62\xd4\xc0\x53\xef\xcb\x5a\x72\x89\xad\x80\xfa\xf2\xa2\x78" + "\xf5\x1b\x83\x6b\x2a\x7d\x16\x41\x0b\x9f\xbc\xed\xe3\xea\xd6\x9a\x9b" + "\x1d\x3c\x96\x65\xd8\x77", + "\x06\x4d\x36\x5c\xd0\x6b\xc6\xc3\x35\x23\x62\x32\xe7\x2e\xae\x49", + "", 0, 128, + 
"\x22\x0f\x44\xcb\xe8\x3b\x7f\x75\x70\x24\x14\x29\x3b\x4b\x97\xed", + 128 }, + { 128, 128, 435, + "\x1a\xbc\x93\x3c\x4f\xe2\x3a\x4b\x49\x60\x5c\x3a\x9d\x30\x99\x7d", + "\x76\xc2\xb2\xb1\xd6\x92\xbc\xed\xe9\x11\x52\xb3\x45\xf6\x88\x15", + "\x36\x79\x73\xf1\x93\xfc\xc3\x9e\x23\x03\xca\x01\x93\x9b\x4e\x77", 0, + 128, "", 0 }, + { 192, 128, 436, + "\x1e\x39\xd9\x1e\xf4\xd0\x5e\xd9\xbd\x5d\x8d\x88\x6f\xbb\x93\x7e\x35" + "\xa5\x44\x73\xdf\x5d\x0c\x25", + "\xec\xf0\x19\x90\x48\x0f\xdc\xe0\xd2\x8c\x15\x53\xb8\x0e\xe1\x28", + "\x73\x2c\x22\x4d\x31\xb6\x1b\xe2\xe4\xdb\x36\x5d\x5a\x53\x3c\x1c", 0, + 128, "", 0 }, + { 256, 128, 437, + "\xc2\x92\xbb\xa4\x54\xcc\x13\xfc\x32\x4e\x19\xff\xf7\xa4\x63\x53\x4b" + "\xa9\x3a\xcb\x98\xe4\xc1\xb5\x1c\x21\xd5\xb7\xef\x53\x9f\x3a", + "\xc1\x60\xf1\x7c\x5a\xa9\x2c\xa6\x8a\x83\x7d\xc7\x51\xc0\x6f\x10", + "\x5a\xb3\x90\xc6\x7b\xde\x51\x0c\xf2\x7f\x4c\x77\x42\x5b\xff\x5a", 0, + 128, "", 0 }, + { 128, 128, 438, + "\x4f\x5d\xcb\xc6\xa2\xd7\x74\x40\xba\x17\x98\xc9\xb7\xf3\x02\xeb", + "\xd1\x0d\x2a\x06\xb3\xaf\x2e\xc1\x3b\xff\x20\xaa\xb2\x93\x97\x3b", + "\x6e\x9d\xe0\xb3\x12\x89\x25\x40\x43\x6e\x93\x59\x09\xa3\xfb\x48", 1, + 128, + "\xf1\xac\xc9\x29\x9a\xfe\x3c\x45\x48\x95\xfe\x6a\x42\x77\x11\xc0\x3f" + "\x15\xd8\xaa", + 160 }, + { 128, 128, 439, + "\x74\x00\xc7\x2f\xb3\x4b\x80\xd3\x45\xad\x4d\x17\x95\x7e\x7f\x96", + "\x6e\x0a\x7d\x3d\x9e\xda\x7d\x71\xdc\x47\x69\x81\x85\x24\x04\xdb", + "\x45\x27\x54\xf4\x89\x7f\x23\x58\xa0\xa4\xab\x02\x47\x67\xf3\x9c", 1, + 128, + "\xf8\xcb\x7e\x3d\xb0\x96\x2c\x17\xf9\x27\xde\x88\xfc\x90\xe4\xe6\xf8" + "\x06\x4f\xf0\xaf\x0d\x9d\xa3\x9d\x59\xf1\x92\xdd\x56\x13\x7e", + 256 }, + { 128, 128, 440, + "\xa9\xff\xf5\x5a\x0d\x06\x75\x32\xb2\x23\x36\xee\x01\xd2\xa7\xfc", + "\x7d\x77\xd4\x88\x4a\x13\x21\xd6\x58\x8d\xd1\xd4\x8d\x6e\x5b\xeb", + "\x68\xc9\xb2\x0e\xcf\x36\xd5\x66\xa4\x63\x07\x03\x57\x42\x39\x23", 1, + 128, + "\xef\x1d\xc5\xdc\x06\xc3\x21\x1a\x36\x66\xb6\xf1\xbc\xac\x32\xf0\x0d" + "\x00\x0f\xa2\xce\x9f\x39\xfe\xb2\x94\xe3\x1f\xb6\xc9\xd9\x83\xfa\x9e" + "\x4e\x2f\x24\x42\xf1\x79\xfa\x65\x28\xc4\x02\x64\x09\xd7\xd4\x62\x9a" + "\xdf\x38\x14\x4d\xc0\x60\x88\x7e\xf7\x2b\x5a\xd6\x82", + 512 }, + { 128, 128, 441, + "\x08\xfc\x93\x83\xee\x79\x86\xa1\xf0\xf3\x8d\xdd\x4e\x5b\xde\x90", + "\x72\xea\x2e\x0d\x0c\x5d\x7d\xc7\x63\x4e\xe0\x93\x59\xe9\xd9\x81", + "\xfb\x89\x5c\x06\xb6\xa7\xa5\x1e\xbc\x9e\xb1\x29\xd0\xeb\x9e\x1a", 1, + 128, + "\xee\xf7\x29\xba\xec\xd8\x92\x78\xda\x7c\xb9\x39\xb3\x45\xc2\x87\x2e" + "\xfc\x44\x3c\x22\xad\xdf\xb6\x8e\x42\x91\x63\x65\x6c\x56\xb3\xa3\x92" + "\x00\xd9\xe0\x78\x71\x01\x33\x00\xbb\xa9\xf9\xb0\xad\xa6\xd3\x91\x64" + "\x77\x16\x1d\xe0\x8a\xdc\xbc\x4f\xdc\x47\x67\x96\x15\x90\x1e\xfa\xf6" + "\xc2\xfd\xac\x7e\x02\x03\xca\x35\x2f\xc8\x44\x07\xb6\xa3\x76\x21\xcc" + "\x0d\x8a\x4c\x75\x26\xf5\x0f\xb2\x71\x9f\xad\x08\x56\x7e\xae\xa5\x6c" + "\x59\x88\x05\xa3\x36\xa0\xcc\x3d\x43\x09\xa1\xe2\xa2\xb8\x6d\x8d\x39" + "\x39\xce\x2c\xf3\x13\x8f\xff\xa2\x01", + 1024 }, + { 192, 128, 442, + "\x8e\xe3\x34\x45\x5b\xf9\x6b\x75\x1a\x6e\xbb\x9a\x97\x82\xfc\x0e\x46" + "\x9f\x5c\x69\xc2\x42\xad\x23", + "\x4b\xe6\x12\xa3\x5b\x8c\x98\x50\x2d\xad\x7c\x2d\x40\xc5\x1f\x34", + "\x59\xd3\x16\x61\xd8\xd4\x96\xa9\xea\x9b\xd3\x41\x2a\xe3\x3d\xa6", 1, + 128, + "\x15\xfc\xd4\x0d\x5d\x37\x43\x6e\xf3\x71\x4e\x25\x20\x6e\xfc\x23\xe4" + "\x63\x47\xce", + 160 }, + { 192, 128, 443, + "\x8a\x95\x0a\x59\x1d\xca\x1a\x24\x61\x50\x0c\x1b\xe5\x4a\x9f\x35\x40" + "\xdd\x79\x30\x0c\xd3\x21\x0b", + "\x59\x13\xaf\x7d\xe6\xbd\x1a\x2a\x64\xc6\x07\x37\x97\x8b\xfb\xaa", + 
"\x40\x76\x03\xda\x8b\x15\xea\x69\xc9\x51\xb2\xfb\x6d\xbf\xcb\x03", 1, + 128, + "\xa5\xa6\xa2\xbc\xee\x80\x2e\x8d\x49\xa7\x2f\x53\x6a\x49\x2c\x1a\x0a" + "\x9b\x8e\xf5\xd7\x9f\xe8\x11\x67\x24\x3f\xe0\x73\x04\xdd\xa4", + 256 }, + { 192, 128, 444, + "\x6a\x7c\x14\xce\x86\x05\xc9\x97\x8a\x0a\x7d\x9e\xba\x80\xa3\xb3\x72" + "\x39\xdb\x12\x9d\x2e\x0a\xbf", + "\xce\xfd\x73\xac\xfa\x57\xf8\xd8\x7c\xa4\x7d\xc7\x38\xa3\x0c\xdb", + "\x1a\x33\x5a\x24\x69\xd1\x54\xb0\x43\xd8\xa3\x90\x40\xd0\x71\x23", 1, + 128, + "\x04\x21\x59\xc9\xaa\xbe\xc0\x64\x41\xca\x01\xea\xc9\x27\x2d\xa4\xec" + "\x40\xb3\x9a\xa9\x6a\x53\xf0\xad\xaf\xad\x5a\x6f\xff\x86\x3a\x12\x6d" + "\xb9\x15\x0a\xd7\xa0\x26\x26\x72\xa4\xa5\x50\xfb\xcb\x10\x29\x95\xd4" + "\x86\x4e\x12\x4c\x6a\x9b\xd3\x3d\xaa\x37\xb1\xb1\x17", + 512 }, + { 192, 128, 445, + "\x24\x37\x2b\xbb\x35\xd3\xe0\xda\xfc\xf4\x45\x5a\x47\xa0\x41\x2b\x15" + "\x24\xcf\x8d\xbf\xdd\xa5\xf7", + "\x12\x1b\xf5\xdb\xe3\x61\x0a\xd5\xa0\x68\xfb\xa0\xaa\x7f\x60\x5a", + "\x5b\x46\xdd\xbf\x3f\x1e\xc8\xb1\x63\xdf\x0c\x72\xff\x47\xa8\xdc", 1, + 128, + "\xae\xd0\xf6\x78\x72\x17\xaa\x66\xb3\x8a\xac\x94\x8b\x57\x4d\x99\xf1" + "\x99\x2b\x62\x1e\x6e\x6b\xa0\x1f\x9b\x3e\xa1\xe3\x1c\x90\x16\xb9\x75" + "\x54\xca\x9a\xfd\xe1\x1d\xb8\x31\x02\xed\xbb\x76\xcd\x67\xeb\x13\x96" + "\x3a\x97\x6e\x10\x92\x99\xf7\x46\xc2\x8e\x91\x78\x32\x51\x75\xfc\xd1" + "\xac\x76\x69\x77\xf2\x97\x97\xff\x5f\xc0\x6a\x93\x4a\xed\x8e\x7e\x61" + "\xbb\x72\x3f\x89\x97\x17\x1d\x73\xdc\x61\x8e\xa6\x99\x80\x68\x97\xa3" + "\xf8\x96\x0e\x78\xe6\xfc\x14\x93\x23\x79\x0c\xf1\x05\x6a\xd3\xf1\x3f" + "\xb7\xd8\x07\x9a\xfb\x33\xf7\xe8\x36", + 1024 }, + { 256, 128, 446, + "\xa6\xe7\xfd\xbc\xd0\x72\x5e\x1d\x5d\x0f\x9d\xae\x2a\xa4\x4c\x0e\x8a" + "\x54\x2f\x4b\xf6\x25\x08\xf4\xd4\x27\x75\x08\x70\xb9\xb0\x88", + "\x91\x8d\x49\x1e\x78\x50\x2d\x60\x56\x22\x58\x25\xe7\xf7\x9f\x71", + "\xd2\x6d\x71\xab\x70\x69\x85\x52\x7a\x53\x56\xb9\xde\x56\x29\x77", 1, + 128, + "\xb4\x27\x89\x8e\xd3\xec\x7b\xf5\x2b\x62\x74\x06\x3d\xa8\x74\xc2\x5f" + "\x7d\xb0\x60", + 160 }, + { 256, 128, 447, + "\xb9\x42\x4e\xff\xc7\x76\x51\x0c\x1c\x49\x5a\x0a\x93\xd3\x7f\x3b\x1c" + "\x23\xb5\x44\x1d\xd0\x23\x51\xf2\x8c\x13\x75\xd8\x49\xe3\x43", + "\xea\x62\x21\xae\xc1\xe2\xb4\x35\x80\x65\xbf\xa1\xd8\xce\x1e\x9a", + "\xc3\x55\x60\x3c\x17\x89\x2b\x89\xd3\x0b\xed\x73\x91\x50\xad\xf1", 1, + 128, + "\xf6\xfd\xd3\x87\xe2\xec\x76\x89\xd7\x24\x18\xcb\xba\xa1\x40\x2f\x39" + "\xd9\x19\x2f\x01\x73\x46\x75\xc0\x46\x59\x70\xb6\x61\xbd\x69", + 256 }, + { 256, 128, 448, + "\x9b\xff\x9b\x26\xdc\xd6\xe6\x20\x72\x64\xea\x43\x3b\xfc\xb6\x13\xcf" + "\xa0\x02\x70\x42\xac\x7e\xc1\xd9\x86\xbb\xfc\xf4\x65\x38\xa2", + "\xc7\x7f\x51\xe5\x76\x66\xed\x25\xb4\x68\x4a\xa2\x0c\xef\x34\x91", + "\x05\xdc\x92\xc1\xe2\xc8\x0e\x0c\x7c\x9f\x1e\x82\xa8\x7d\x5b\x18", 1, + 128, + "\xa3\xc3\x85\xd0\x96\x88\x34\x4b\x76\x34\x8d\x17\xb0\x6a\xfc\x76\x4d" + "\x27\xcd\xec\xd2\x79\xcb\x6f\xab\x5e\xd2\xbc\x3c\xad\x1d\xe6\x51\xc1" + "\x43\x4b\xf3\x52\xe7\x02\xa4\x0f\x9b\x03\x38\x47\x74\x19\x93\x80\xcb" + "\x03\x79\xb6\x56\x0d\x77\x3b\xef\xd7\x8b\x11\x60\x92", + 512 }, + { 256, 128, 449, + "\x27\xef\x59\x44\x06\xaf\xad\xb5\x72\x6d\x85\xa6\xba\x85\x49\x6e\xd7" + "\xc2\xbc\xb6\xa1\xb1\x23\x65\x56\x55\xd0\xf6\xfe\x0e\x60\xb5", + "\x97\x00\x3e\x85\x74\xcd\xaa\x71\x8d\x50\x9e\x53\x32\xf2\xa5\x18", + "\x46\x5d\x99\x83\xcd\x5f\x73\x2b\x47\x5a\x9f\x72\x79\x25\x44\x33", 1, + 128, + "\xe0\x74\xf3\x26\xd3\x42\x79\x9d\xec\xe4\xe1\xff\xed\xf5\x95\x4c\xd8" + "\x0f\x4d\x45\x39\x41\x05\x06\xc3\x0c\x50\x97\x53\x45\x0e\x07\x30\x7e" + 
"\xae\xb4\x20\x3a\x73\x71\x75\xf6\xad\x81\xd9\x3e\x62\x5b\xc6\x0d\x78" + "\x63\xf5\x01\xac\xff\xb8\x9a\x5d\x8a\x31\xe0\xa4\x8e\x07\x37\x97\x1d" + "\x05\x7f\x24\x49\x71\x7c\x9b\x7b\xab\x24\xc9\xe4\xa8\xff\xff\x0b\x03" + "\xd9\x67\x1e\xf3\x7b\x80\x90\x32\x7d\x31\x86\x96\xc2\xb9\x6c\x40\x31" + "\x06\x4f\x95\x09\x94\x26\x17\xea\x7a\x03\x2d\x54\xbb\xc9\x7b\x82\xfa" + "\xc5\xb6\x54\xdd\x34\x89\xad\x13\x23", + 1024 }, + { 0, 0, 0, NULL, NULL, NULL, 0, 0, NULL, 0 } +}; diff --git a/test/wycheproof/hmac_sha1_test.json.c b/test/wycheproof/hmac_sha1_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..d0c1d36edb07db6b9c5e310f870113c6d2a961f6 --- /dev/null +++ b/test/wycheproof/hmac_sha1_test.json.c @@ -0,0 +1,1119 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* HMACSHA1, 0.8rc21 */ +#include "mac_test.h" +const struct mac_test hmac_sha1_test_json[] = { + { 160, 160, 1, + "\x06\xc0\xdc\xdc\x16\xff\x81\xdc\xe9\x28\x07\xfa\x2c\x82\xb4\x4d\x28" + "\xac\x17\x8a", + "", + "\x7d\x91\xd1\xb4\x74\x80\x77\xb2\x89\x11\xb4\x50\x97\x62\xb6\xdf\x24" + "\x36\x58\x10", + 1, 0, NULL, 0 }, + { 160, 160, 2, + "\x4c\xd6\x4e\xfd\xb7\x6d\xf5\xa8\x5d\xce\x3d\x34\x70\x12\xca\xd0\x6b" + "\x0c\x3d\xb4", + "\x6c", + "\x6d\x3d\x37\xaf\x55\xc7\x5d\x87\x2d\x2d\xa0\x7b\x9b\x90\x7b\xa2\x2a" + "\xd4\x87\xd4", + 1, 8, NULL, 0 }, + { 160, 160, 3, + "\x52\xe1\x99\x50\x25\x29\x7f\xe7\xb7\x93\xdc\x8e\x1e\x4f\x7d\x31\x2f" + "\xee\x27\x00", + "\x29\xdf", + "\x82\xcb\x24\xbf\xa3\x8f\xbd\xc9\x1d\x1e\xea\x2d\x2d\xc1\xce\x6e\x60" + "\xff\x88\x1e", + 1, 16, NULL, 0 }, + { 160, 160, 4, + "\xf3\xed\xfa\x00\x3d\x89\xc4\xe2\xa6\x42\x2e\x77\xa0\x1b\x8a\xdb\xd7" + "\xac\x26\xe4", + "\xb0\x15\xb7", + "\xcb\x24\x4c\xa6\xad\x23\x39\x47\x37\x84\x36\x07\x6f\xbf\xd2\x0c\x9c" + "\x8b\x84\x2b", + 1, 24, NULL, 0 }, + { 160, 160, 5, + "\x4b\x07\xed\x4e\x0c\x8d\xda\xa1\xf7\x6c\xf0\x01\x07\x28\x67\x9c\x88" + "\x57\xe1\x8b", + "\x3b\x2c\x1a\xfe", + "\x92\x41\x25\x53\x2e\x6b\x62\x5e\x7c\x5a\x8d\xcd\x16\x14\xe0\x43\x34" + "\xc0\x67\xcd", + 1, 32, NULL, 0 }, + { 160, 160, 6, + "\x7f\x53\x2c\x8e\xc8\x3c\xb2\x1d\xc9\x8a\xf7\x73\x4c\x64\xf5\xfd\x91" + "\x67\xec\x30", + "\xa3\x3c\x6f\x98\x26", + "\x0d\x25\xbc\x40\xf6\x0f\xbe\xd3\x6d\x8d\x7a\x10\x45\xff\xa6\x0d\x88" + "\x48\x4d\x56", + 1, 40, NULL, 0 }, + { 160, 160, 7, + "\x99\xe6\x0c\x1f\xc0\xcb\x3e\x6e\xd8\x36\x61\x97\x75\xe3\x7b\xf1\x5b" + "\x2c\xb9\x3f", + "\xb1\x29\xbb\x88\xce\xaa", + "\x69\x24\xd8\x33\xa3\xe7\x4b\x48\xf9\x91\xe6\xc4\x41\x73\x56\x5f\xdf" + "\x8c\x74\x70", + 1, 48, NULL, 0 }, + { 160, 160, 8, + "\x53\x84\x5f\x10\x34\x4b\x7f\x39\xed\xdb\xd3\xe4\x42\x31\xfa\x80\x2d" + "\x7e\x1a\xca", + "\xc6\xf5\xb1\xce\xe3\x10\x33", + "\xfd\x4b\x28\x27\x3d\x3e\xe8\xcc\x24\xde\x2d\x8d\xad\x23\xad\x4f\x35" + "\x52\x40\xc7", + 1, 56, NULL, 0 }, + { 160, 160, 9, + "\xe3\x22\x07\x00\xce\x24\xa0\x10\xcf\x62\x3f\x60\x89\x1e\x4f\x29\x8f" + "\xf2\x6b\x11", + "\xc9\x7a\xfb\x50\x63\xa9\xdd\x0d", + "\x38\x3b\x10\x3c\xe9\x05\x4c\xb7\x4a\x04\x31\xd1\x6d\xa9\x9d\x82\x33" + "\xe9\x4f\xc2", + 1, 64, NULL, 0 }, + { 160, 160, 10, + "\x46\x6c\x06\x1d\xdc\xf3\xd9\xb2\x85\xa2\x90\x0f\x87\x25\x97\x1b\x73" + "\x3f\x85\x0f", + "\x89\x02\x4c\xed\xa7\xde\x3c\x11\x4e", + "\x1b\x81\xf1\x12\x76\x35\x23\x33\x83\xb6\xea\x5b\xa8\xfd\x68\xeb\x51" + "\x12\xef\x0a", + 1, 72, NULL, 0 }, + { 160, 160, 11, + "\xa8\x1f\x9f\x51\xb0\x41\xff\x29\xb8\xd7\x05\xbb\x40\x8f\x85\x4c\xcb" + "\xd7\xe5\xab", + "\x03\x2d\x86\x6a\x27\x07\x62\xcb\xae\x24", + "\xb7\x2b\xa0\xc8\x9d\x01\x02\x15\xa8\xf2\x80\x61\x6a\xcb\xd8\x64\x0f" + "\xe8\x6c\xec", + 1, 80, NULL, 0 }, + { 160, 160, 12, + "\xe6\x0d\x0b\x14\x88\x6f\xe6\xfa\x2c\x83\x93\x29\x20\x4d\x84\xd8\x10" + "\x26\xb7\xab", + "\xfd\xa7\xf4\x8c\x11\x10\x12\x55\xe0\x2c\x8d", + "\x2b\xe7\xbb\x54\x1c\xed\xe9\x78\xf5\x41\xe2\xca\xc0\xab\x64\x51\x06" + "\x0e\x3e\x83", + 1, 88, NULL, 0 }, + { 160, 160, 13, + "\xc0\x90\xef\x12\x2a\x29\x34\x87\x40\xcc\xd5\x71\xd9\x84\x07\x76\x4b" + "\x2a\xda\xac", + "\xd1\xfe\x3d\xfa\x80\xad\xe7\x08\x7e\xfa\xbb\x52", + "\xe6\xc1\xe0\xc3\xeb\xb7\x75\x0d\x66\xa5\x0b\x6a\xbc\xcf\xde\xf9\xc2" + "\x59\x90\x08", + 1, 96, NULL, 0 }, + { 160, 160, 14, + 
"\x56\x4a\x56\x29\x0e\x1a\xea\x05\x22\xf1\x90\x88\xa8\x8a\xb4\xdc\xe4" + "\xc7\xcd\xf2", + "\x42\x13\xbd\x3c\xda\xeb\xbb\x1e\xc1\xcc\x81\x86\x6a", + "\x6d\x1d\x58\x08\xc0\x85\xad\x51\x24\x87\xde\xbb\x57\xfb\x93\x51\x4b" + "\x20\x50\x75", + 1, 104, NULL, 0 }, + { 160, 160, 15, + "\xf8\x98\x45\x9d\x27\x2f\xd5\xe4\x3b\x06\x21\x56\xf4\x49\x58\xd8\x5d" + "\x97\xea\x3f", + "\x5e\x86\xb0\x55\x22\xeb\x65\xa4\xfb\x7b\x93\x2c\xec\xd5", + "\xb7\x78\xf4\x21\xc2\xd1\xe2\x70\x1e\x75\xda\x6b\xd1\xbc\x65\x37\x9b" + "\x80\xe8\x79", + 1, 112, NULL, 0 }, + { 160, 160, 16, + "\x7d\x5c\xc5\x3f\x46\x4e\x75\x94\x38\xee\x90\xb4\x7f\x2f\xe6\x7a\xa8" + "\x3d\x6b\x52", + "\x9f\x38\xea\x80\x12\x2b\x40\xf7\x42\xa0\x0c\x2e\x83\xe0\x85", + "\xc8\x0c\xe6\xd3\x3f\xe8\x68\x43\x2c\x26\x27\x66\xfd\x23\xbf\x43\x1e" + "\x31\x38\x82", + 1, 120, NULL, 0 }, + { 160, 160, 17, + "\x33\xe9\x14\x01\x75\x51\x9b\x2f\x16\x19\xb4\x48\x48\x33\x17\x63\xc7" + "\x56\xfa\xd4", + "\x7d\xe0\xfc\xcc\x83\xb5\x1c\x29\xe5\xeb\x1b\x65\x8c\x10\x24\x38", + "\x34\xde\x6b\x8f\x47\x95\x23\x87\x0b\x8f\x90\x56\x84\x67\x26\x17\x66" + "\x9b\x06\x07", + 1, 128, NULL, 0 }, + { 160, 160, 18, + "\x0e\xf2\x9e\x7c\x96\x1d\xa3\x7a\xfa\xea\x81\x82\xf2\x87\x38\xd2\x2c" + "\x34\x02\x32", + "\x16\x5b\xb8\xe5\xc6\xf0\xa3\xae\x40\x94\x6d\xc8\x07\xae\xe8\x46" + "\x45", + "\x78\xe6\xfa\x53\xec\x21\x3e\x90\x19\xd4\x7e\xe7\x52\x9d\x96\x3a\x8a" + "\x25\x29\x42", + 1, 136, NULL, 0 }, + { 160, 160, 19, + "\x20\x3c\xfa\xd9\x21\xe6\x05\xc8\x0d\x7a\xa8\xb6\x4d\x3b\xf1\x83\x28" + "\xb7\xa7\xa0", + "\xd2\x89\xc7\xcd\x10\xd9\x96\xd5\xda\xca\x14\x10\xc3\x78\x15\xb2\x37" + "\xf7\x49\x29\x58\x8c\x5a\xe4", + "\x27\xd9\x6d\xa4\x18\x95\xbf\x53\xd1\x50\xac\x15\xe7\xc3\x18\x53\xf5" + "\x6a\xe3\x63", + 1, 192, NULL, 0 }, + { 160, 160, 20, + "\x8e\xb7\x41\x6e\xfd\x0c\x73\xc8\x6b\x91\xdf\x0d\x58\x89\x1f\xdb\x73" + "\x8f\x40\xdf", + "\xb4\x15\xcb\x7c\xd3\x84\xa1\x03\x5d\x2b\xac\x1f\x7b\x96\xae\x85\x8d" + "\xfd\x44\xc4\x67\x03\x0f\x30\x4e\x81\x7d\x11\xb9\xf9\xc6\x06", + "\x24\xcb\x16\x32\x3b\x7e\xc4\x7e\x3a\xdd\x8f\x55\xcb\x99\x20\xaa\x7c" + "\x16\x55\xde", + 1, 256, NULL, 0 }, + { 160, 160, 21, + "\x6f\xbb\x3c\x55\xe9\x35\xe0\xa0\x02\xc1\x70\xa9\x12\x2f\x1f\x70\x37" + "\xbc\x0c\x59", + "\x36\xac\x9a\x8c\xf0\x22\x3c\xcf\x5d\x90\x48\xbe\x9a\x65\xdf\x4a\x1f" + "\x40\xaa\xa8\x57\xce\x13\xd6\x21\xf6\x01\xbd\xee\x1f\xbe\x80\x31\x71" + "\x00\x2d\x1f\xa6\x34\xa1\x97\x7d\xc2\x3d\x9a\xa8\xfd", + "\xe2\xfe\x8b\x34\x3c\xef\x4b\x97\x54\x30\x84\x08\x93\x05\x26\x15\x95" + "\x37\xec\xc8", + 1, 376, NULL, 0 }, + { 160, 160, 22, + "\xe4\x0f\x62\x06\x10\x5f\x78\x00\xa1\xf1\x90\x60\x2b\xb6\xdd\xe8\x05" + "\x7c\x3a\x87", + "\x10\x46\x3b\x77\x1f\xa5\x86\xc5\xed\x5c\x1f\x64\x88\xd7\x93\x29\x9d" + "\xb4\x0f\xdd\x4f\x3e\x53\x33\x4a\xe3\xff\x8e\x09\xe5\xa8\x79\xda\x06" + "\xeb\x46\xd2\x10\xee\x0a\xf0\xc8\x25\x1e\x6c\x07\xaa\x1d", + "\xb5\xdf\xce\x59\x98\xd2\xe3\x21\x80\x0e\x0e\x42\x76\x2e\x62\xec\x7a" + "\x81\x44\x8f", + 1, 384, NULL, 0 }, + { 160, 160, 23, + "\xf0\x4c\xc6\x41\xff\x67\xab\xa4\xac\x2d\x17\xe6\xa0\x42\xb6\xcc\xf8" + "\x6a\xe1\xd2", + "\x73\xc1\x79\xac\xc2\x6f\xfd\x07\x10\xb6\xcb\x3f\x73\x57\x07\x02\xc9" + "\xc0\x59\xbf\x68\x56\x14\xbb\x0b\xa7\x97\x3a\xb8\x75\xff\x88\x2d\x9a" + "\xee\xce\xa4\xef\x45\x2c\x88\x93\x22\x44\x72\xcf\xa5\xb6\x1c", + "\xb7\xd1\xe6\x3f\xae\x54\x63\x80\x82\xa9\xcb\x58\xc6\x9f\xac\x9e\xfc" + "\xbe\xe1\x74", + 1, 392, NULL, 0 }, + { 160, 160, 24, + "\xf6\x1c\x1a\x87\x85\x50\xd2\x7a\xa4\x59\xb3\x01\x6b\x31\x73\x1b\x89" + "\x63\x0d\x36", + 
"\x82\xb3\x78\xd4\x0c\xa0\x4a\xd4\x78\xa9\x80\xd7\xb4\x6e\x56\xc9\x96" + "\x7b\xc4\xe1\x10\xa7\xad\xd8\xbc\xbd\xa4\x11\xc1\x2d\xe3\x84\xf4\x13" + "\x24\xe9\xdf\x88\x8d\x81\x70\x2f\xf2\xb9\xe8\x75\x29\x86\xba\x08\x13" + "\x63\xea\xcc\x2e\x39\x6f\x6b\x5f\xb0\x1b\xf8\x42\x35\x8f\x01\x45\xd5" + "\x69\xd3\x4f\xb3\xb4\xe2\x4e\xe9\xdc\x91\x03\x28\x4d\x74\x3c\x52\xea" + "\x86\x61\x50\x4b\x2d\xb4\x2f\x22\x1b\x6d\x49\xb6\x05\xfd\xe3\x4a\xa5" + "\x55\xe3\x3a\xb0\xa1\x40\xf6\x1f\x3c\xda", + "\xe6\x88\x19\x94\x89\xc9\xd3\x93\x8f\x2e\x33\xd7\xcb\x3f\xc8\x1b\xad" + "\x4f\xfb\x8c", + 1, 896, NULL, 0 }, + { 160, 160, 25, + "\x8c\x29\xeb\x66\x1f\xb6\x33\x08\x7f\x24\x52\xd0\x57\xf9\x8d\x55\x3d" + "\x28\x46\xf1", + "\x02\x1b\x96\x8c\x4c\xe3\x37\x59\x51\x54\xd9\x0e\x44\x22\x99\x80\xf0" + "\xe2\xb6\x47\x76\xf5\x62\xea\x25\xb2\x48\x81\x63\x7b\x44\x37\x5b\xde" + "\x65\xe5\xf9\x41\x8b\xf1\x63\xe2\xaa\xcd\x37\xbd\x10\x31\x97\x29\xac" + "\x59\x66\x15\xa3\x5c\xb6\x32\xe0\xff\xc3\x16\x93\x6a\x68\xac\xf4\xc7" + "\xae\x3a\xd3\x60\x26\x12\x4c\xee\x6d\x20\x4f\x10\x43\x2f\x08\x15\x7c" + "\xc3\x2c\x5f\x4b\xca\xda\xee\x67\xbd\x42\xbb\xeb\x82\x6a\x9e\x9c\x8a" + "\xf9\xf5\x54\xf7\x41\x9f\xb2\x65\x33\x8d\x22\xba\xe2\x19\x0b\xb6\x44" + "\xb3\x2f\xe9\xbb\x6a\x22\x87\xaa", + "\xf9\x40\xdf\x33\xb0\x99\x65\xa3\x11\x8c\x84\x7c\x2a\xe1\x59\x16\x90" + "\xd0\x40\x5f", + 1, 1016, NULL, 0 }, + { 160, 160, 26, + "\x63\x16\x29\x8f\x3a\xad\xad\xc6\x64\xed\xa2\xce\xdf\x17\x66\x9b\xc8" + "\x0d\x44\xae", + "\xba\xb8\x07\xdf\x54\xc0\x09\x61\x0a\x5c\x3f\x1e\x81\x60\x5f\x6b\xf7" + "\xd7\x6b\x29\x9d\x7e\xbd\xef\xa7\x0f\x5e\x2e\x0b\x97\x90\x11\xd1\x91" + "\xea\xd3\x9c\x3b\xbe\x5d\xd2\x65\x83\x47\xeb\x17\x29\x50\xa1\xe0\x3a" + "\x01\x55\x2b\xb3\x8a\xdd\x33\xba\xc8\x32\xb7\x17\x7a\x77\xb0\x8e\xb1" + "\x1c\xc1\xaf\xe3\xae\x84\xda\xff\xe4\xc4\xe8\x8b\xc4\x41\xe5\x4e\x4d" + "\xcb\xae\x3e\x0d\x56\x39\xf6\x35\x22\x8d\x81\x1f\x0a\x04\x3b\x13\xd5" + "\xc9\x18\x99\xc2\x6b\xce\x2d\xa2\xdd\xab\xd2\x1b\x2e\xe6\x68\xa2\x1b" + "\x45\x49\x28\x91\x5d\x65\x85\x40\x8d", + "\x23\xb9\xa6\xd6\xa9\xc7\xce\xf6\xdc\xe5\x37\x72\x2f\x45\x57\xb6\x5d" + "\xcd\xde\x99", + 1, 1024, NULL, 0 }, + { 160, 160, 27, + "\xf2\x91\x69\x6b\xf4\xf9\x65\x5a\x00\xc9\xa2\x38\x2b\xd1\x48\x73\x42" + "\x35\x87\x14", + "\x32\xa6\x50\xb5\x30\x7d\x94\xb2\x31\x39\xbe\x64\xd4\x70\xef\x14\x92" + "\xd5\x7c\xa7\xaf\x98\x20\x5b\xf9\xbc\xe8\x85\x4b\xa8\xf5\x20\x48\x80" + "\xb2\xe9\xd5\x8d\xdb\xe2\xe7\xbb\x21\xe6\xe0\x67\x3f\x5e\x1a\x39\xf5" + "\x05\x90\x92\x27\x47\x5e\x41\xc1\xd5\x9c\x73\xa9\x33\xb1\x3f\x4b\x07" + "\xa7\x5c\xb9\xf3\x27\x9c\xc3\xbf\x61\xa6\xc0\x9e\x3b\x9f\x75\x59\x07" + "\x49\x1b\x9e\x74\x5b\xfd\xa5\x8a\xd4\xe2\x30\x4f\xf7\x52\x5b\x41\x50" + "\x7a\x51\xa2\xfd\x66\x4a\x2e\xe9\x8c\xea\x00\x14\x8a\x36\x63\xd7\x7c" + "\x47\xbe\xb0\x55\xbd\x45\xe7\xdf\x48\xf6\xa0\xce\x66\xc0\xa2\xd9\xa8" + "\x48\x76\x1a\x45\x75\xd1\x95\xd7\x4e\xef\x5b\xb7\x8c\x09\x93\x55\x7a" + "\x25\xad\x7c\xa3\x2e\x0a\x96\xb2\x51\x8d\x9d\x8a\x18\x0f\x35\x74\x02" + "\xa4\x42\x17\xf1\xe3\x6a\x91\x38\xc0\x90\x9f\xaf\xfd\x0e\x9a\x90\x70" + "\x48\x58\x4b\xb0\x3a\x4e\x06\xfc\x69\xc4\x63\xf3\x95\x42\xdd\x2c\x7c" + "\x81\x46\x7d\x37\x28\x48\x1b\xbf\x6b\xb6\x02\x59\x60\x4a\xa3\x3a\x2d" + "\x4c\x61\x95\x01\x2f\xdc\x7a\xec\x99\xe2\x17\x5a\xeb\x2d\x0c\x1f\x68" + "\x09\x64\xd6\x3e\xe1\x14\x18\xcd\x4d\x26\xe7\x7e\xc1\x31\x10\x84" + "\x17", + "\x33\x46\xbf\x23\xe5\x22\x31\xa4\xae\xd7\x73\xfd\x73\xe5\x8d\x91\x85" + "\x80\xed\xe5", + 1, 2040, NULL, 0 }, + { 160, 160, 28, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" 
+ "\x11\x12\x13", + "", + "\x07\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 29, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe5\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 30, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x04\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 31, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe6\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 32, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x86\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 33, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x64\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 34, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe9\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 35, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x65\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 36, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\xd0\xfc\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 37, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x80\xce\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 38, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfd\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 39, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xcf\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 40, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfe\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 41, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xcc\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 42, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x02\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 43, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x92\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 44, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x37\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 45, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x07\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 46, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\xb6\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 47, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x86\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 48, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x41\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 49, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x1d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 50, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd8\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 51, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xeb\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 52, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe3\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 53, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x06\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 54, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe0\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 55, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x05\x88\x78\x5f\x56" + 
"\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 56, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\x62\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 57, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x87\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 58, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd8", + 0, 0, NULL, 0 }, + { 160, 160, 59, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x21", + 0, 128, NULL, 0 }, + { 160, 160, 60, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xdb", + 0, 0, NULL, 0 }, + { 160, 160, 61, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x22", + 0, 128, NULL, 0 }, + { 160, 160, 62, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\x99", + 0, 0, NULL, 0 }, + { 160, 160, 63, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x60", + 0, 128, NULL, 0 }, + { 160, 160, 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\x59", + 0, 0, NULL, 0 }, + { 160, 160, 65, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\xa0", + 0, 128, NULL, 0 }, + { 160, 160, 66, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x07\xe8\xad\x50\xfc\x10\x35\x82\x37\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 67, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe5\x64\x16\x00\xce\xdd\x7e\x12\x07\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 68, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\xd0\xfc\x10\x35\x02\x36\x61\xd9\x79\xe2\x96\x89\x68\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 69, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xe4\x64\x16\x80\xce\xdd\x7e\x92\x06\x3d\xea\xea\x07\x88\x78\x5f\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 70, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x06\xe8\xad\x50\xfc\x10\x35\x02\x36\x61\xd9\x79\xe2\x96\x89\xe8\xce" + "\xcd\x03\xd9", + 0, 0, NULL, 0 }, + { 160, 160, 71, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x92\x06\x3d\xea\xea\x07\x88\x78\xdf\x56" + "\x11\x35\x20", + 0, 128, NULL, 0 }, + { 160, 160, 72, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\xf9\x17\x52\xaf\x03\xef\xca\x7d\xc9\x9e\x26\x86\x1d\x69\x76\x97\x31" + "\x32\xfc\x26", + 0, 0, NULL, 0 }, + { 160, 160, 73, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x1b\x9b\xe9\xff\x31\x22\x81\xed\xf9\xc2\x15\x15\xf8\x77\x87\xa0\xa9" + "\xee\xca\xdf", + 0, 128, NULL, 0 }, + { 160, 160, 74, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00", + 0, 0, NULL, 0 }, + { 160, 160, 75, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00", + 0, 128, NULL, 0 }, + { 160, 160, 76, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff", + 0, 0, NULL, 0 }, + { 160, 160, 77, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff", + 0, 128, NULL, 0 }, + { 160, 160, 78, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x86\x68\x2d\xd0\x7c\x90\xb5\x02\xb6\xe1\x59\xf9\x62\x16\x09\xe8\x4e" + "\x4d\x83\x59", + 0, 0, NULL, 0 }, + { 160, 160, 79, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x64\xe4\x96\x80\x4e\x5d\xfe\x92\x86\xbd\x6a\x6a\x87\x08\xf8\xdf\xd6" + "\x91\xb5\xa0", + 0, 128, NULL, 0 }, + { 160, 160, 80, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", + "\x07\xe9\xac\x51\xfd\x11\x34\x83\x37\x60\xd8\x78\xe3\x97\x88\x69\xcf" + "\xcc\x02\xd8", + 0, 0, NULL, 0 }, + { 160, 160, 81, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe5\x65\x17\x01\xcf\xdc\x7f\x13\x07\x3c\xeb\xeb\x06\x89\x79\x5e\x57" + "\x10\x34\x21", + 0, 128, NULL, 0 }, + { 160, 80, 82, + "\x5e\xce\x07\x69\x74\x2f\xea\xbb\x66\x44\x46\x9c\x9b\x26\x43\x26\xb3" + "\xde\xb1\x26", + "", "\x34\x4f\x83\x51\xf1\xd2\x77\x3c\xae\x9e", 1, 0, NULL, 0 }, + { 160, 80, 83, + "\x4e\xe9\xf9\xa9\x3b\x2d\xdf\xe5\x51\x28\x1b\x39\x7c\xce\xf8\x44\xfc" + "\x21\xaf\x3a", + "\x2d", "\x3a\xab\x1a\x2c\x9a\x2f\x2b\x8a\xc8\x40", 1, 8, NULL, 
0 }, + { 160, 80, 84, + "\xdf\xee\xbe\x9a\x5c\x18\x1a\xfc\x60\x5f\xf6\x3b\x22\xbf\x34\x9e\xbd" + "\xb6\xc7\xfb", + "\xa5\xf3", "\xc3\xb7\x15\x22\x30\xda\xca\xe4\xef\x48", 1, 16, NULL, + 0 }, + { 160, 80, 85, + "\xc5\x14\x7e\xcd\x59\xb7\xd4\x23\x15\xd5\xe3\xa5\x5e\xc8\xb3\xa3\x20" + "\xc8\xd6\x15", + "\x37\x17\x77", "\x0a\xbb\x78\xa2\xc6\x7c\x56\x5f\x89\xb1", 1, 24, + NULL, 0 }, + { 160, 80, 86, + "\xdb\x0d\xa5\x65\x9b\xa6\x9c\xe1\x95\xa6\x95\x24\x50\x8e\x43\x7c\x68" + "\x8f\x71\x47", + "\x4e\xf4\xec\x44", "\x40\x62\xfa\xeb\x0b\x40\x66\x98\xb7\x40", 1, 32, + NULL, 0 }, + { 160, 80, 87, + "\x49\x5d\x6c\x11\x91\x85\x2e\xca\xf0\x57\x3e\x6a\x77\x61\x0c\x32\xac" + "\xf5\xa1\x17", + "\x15\x66\xae\x63\xce", "\x45\xcb\xef\xcd\x9c\x7e\xee\xe3\x7a\xe5", 1, + 40, NULL, 0 }, + { 160, 80, 88, + "\xe5\xcf\xe7\xdc\x67\x51\x4c\x4c\x75\xd2\x8b\xb8\x05\xd0\x70\x0b\xde" + "\xa0\xd6\x69", + "\xaf\x1a\x6b\x15\xb6\x22", + "\xe8\x67\x26\x9d\x50\x6a\x37\xe8\xa6\x2d", 1, 48, NULL, 0 }, + { 160, 80, 89, + "\xcf\x1b\x8b\x90\x25\x12\x18\x6e\x38\xc3\x81\x65\xd6\xe5\x87\xbc\xee" + "\xca\xe8\x7f", + "\xa5\x95\x12\x15\x2c\x72\x21", + "\x1a\x12\x5b\x21\x41\x2c\xdc\x59\x68\x94", 1, 56, NULL, 0 }, + { 160, 80, 90, + "\xd0\x15\x7f\xb4\x0c\x77\x39\xae\x50\x6a\xad\x7d\xe6\x0f\x32\xcc\xc3" + "\x32\x55\x83", + "\x12\x92\xdf\x8d\x53\xd1\x6f\x3c", + "\x92\x24\xf1\x1b\xff\x0e\x49\xb9\xaa\x95", 1, 64, NULL, 0 }, + { 160, 80, 91, + "\xfd\x55\xb8\x1e\xdd\x55\xa1\x5b\xff\x40\x91\x29\xe9\x93\x0f\x1b\xa1" + "\x76\x3c\x33", + "\x05\x22\x0a\x69\x97\x53\x3c\x69\x9b", + "\x3b\x2d\x07\xdc\x8c\xa2\x06\xba\x16\xc4", 1, 72, NULL, 0 }, + { 160, 80, 92, + "\x07\x99\x37\xcf\x3b\xd4\x28\x64\xd5\xb1\x5c\x62\xbd\xd9\x2f\x27\x55" + "\x97\x31\x6d", + "\x5a\xc1\x3c\xe1\xb1\xf7\x77\x24\xe2\x81", + "\x0c\x24\xaf\xcd\xad\xd8\x53\x89\x77\xb1", 1, 80, NULL, 0 }, + { 160, 80, 93, + "\x54\x5b\x13\xe1\xf3\x9f\x0b\x7c\xa9\x25\x2b\xc5\x96\x27\x72\x78\x16" + "\x6a\xd4\x10", + "\x08\xa7\xbc\x90\x73\x2d\x54\x38\x1b\x6e\x30", + "\x9b\xf7\xf1\x21\x36\x5a\x82\xc2\xac\x69", 1, 88, NULL, 0 }, + { 160, 80, 94, + "\x25\x38\x5e\x9f\x89\xb6\x60\x98\xee\x81\x62\xae\xca\x03\xbb\x45\xb3" + "\x13\x56\x1f", + "\xbf\xf2\x36\xaa\xd7\x1f\xb5\xda\xf7\xfc\x43\xb8", + "\x44\xdb\x86\xe7\xa1\x47\x62\x26\xdd\x86", 1, 96, NULL, 0 }, + { 160, 80, 95, + "\x9b\x68\x13\x9d\x93\xa8\x8f\xe3\x4c\xf9\xf8\x30\x06\xc0\x3b\x31\x64" + "\xb6\x04\x68", + "\xad\x67\x2b\x97\x19\xc1\x08\x63\xfd\x6f\xa8\xdb\x88", + "\x2d\x17\xa8\x8d\x87\xaa\xe7\xdb\xac\xed", 1, 104, NULL, 0 }, + { 160, 80, 96, + "\xc3\xb7\x85\x91\x5e\x13\x75\x44\xda\xc5\x42\xcb\x4b\xdb\x16\xd5\x30" + "\x36\xfb\x11", + "\xf8\xc1\xed\xb4\x69\xb9\x3c\x07\x3b\x6f\x6b\xf7\x4c\xca", + "\x16\x71\x3d\x61\xfb\xb4\x14\x9f\x50\x0b", 1, 112, NULL, 0 }, + { 160, 80, 97, + "\xda\x67\x47\x51\x85\xb3\x61\x50\x55\xf9\x71\x81\x9d\xb2\x78\x71\xb2" + "\x3c\x75\xd0", + "\xa1\x76\x53\x33\x19\xbe\xe5\xe4\x3d\x8f\x0e\xaf\xb7\x7b\xb3", + "\xfc\xee\xce\x89\x28\x52\xd4\xa2\x60\x70", 1, 120, NULL, 0 }, + { 160, 80, 98, + "\x0c\xf1\x46\xca\x7a\x25\x4d\xb1\xe0\x01\xa2\x9a\xd0\x3c\x5e\x6d\xcb" + "\xe7\x14\x0a", + "\xa8\x3d\xf5\xd0\x99\x85\x4e\xb6\xea\xd7\x03\x1c\x51\x46\x03\x57", + "\x9b\xcf\x75\x13\x20\x6e\x27\xa4\x69\x7d", 1, 128, NULL, 0 }, + { 160, 80, 99, + "\xe0\x38\xdf\xf0\x28\x22\x7d\xc4\xb4\xd7\x45\x3d\xb3\x07\x01\x08\x46" + "\x5d\xd5\xb2", + "\x7a\xe4\xe3\x08\x34\xdb\x44\x9e\x42\x44\xa9\xfc\x03\x22\x19\x3e" + "\x7a", + "\x9a\xa8\x54\x4a\x9a\xfd\xd9\x20\xc0\xf2", 1, 136, NULL, 0 }, + { 160, 80, 100, + 
"\xb3\x99\xfc\xfd\x1a\xd3\x21\x40\x87\x9a\xa0\x55\x6a\xc3\x4d\x8b\x5a" + "\xc2\x67\xf2", + "\x0e\x3f\x0f\xc5\xcb\x14\x56\xfe\xde\x99\xf8\x6a\x05\x6f\x64\x0b\x8f" + "\x5e\x5e\x1b\x61\x2f\x25\xf6", + "\xfa\x09\x5c\x6f\xae\xd0\xf0\x86\xb2\x15", 1, 192, NULL, 0 }, + { 160, 80, 101, + "\x2b\xf7\xd2\x01\xef\x44\x24\x1a\x22\xae\x4b\x81\xaa\xb9\x10\xd2\x2c" + "\x2d\xb9\x18", + "\xaa\x0a\xfa\xf3\xaf\x36\x54\x82\x27\x34\x9a\xdc\xfc\xb6\xbf\x99\x8a" + "\x7f\xa7\x8d\x29\xb8\x7a\x0f\x50\x60\x9c\x42\xed\xcd\xb3\xdd", + "\x70\x8e\xc4\x5d\x41\x0b\x1f\xe0\x75\xc8", 1, 256, NULL, 0 }, + { 160, 80, 102, + "\x48\xcc\xc3\x90\x7c\x36\x12\xa1\x82\x94\xfd\xdf\x26\x60\xe3\x3d\x9c" + "\xb7\x87\xfc", + "\xed\xbb\x68\x02\x43\xa8\x25\x06\x8e\xef\xe5\xba\x18\x4e\x5e\xed\x4b" + "\x7f\x85\xca\x3b\x51\x1a\x42\xd6\x55\xbe\x3e\x05\xd8\xff\x12\x45\x41" + "\xb3\xd5\x6a\x10\xa3\x5c\xff\x8d\xa8\xb6\x22\x9a\xc1", + "\x3c\xb7\xfc\xe2\x0d\xf8\x38\x5c\xf6\xbb", 1, 376, NULL, 0 }, + { 160, 80, 103, + "\x22\x7d\x79\x6b\x78\x67\x40\x9d\xb3\xde\x1f\xfa\x3c\xfe\x37\x67\x04" + "\x04\x4f\x01", + "\xb6\x39\x3a\xb1\x83\x76\xc0\x25\xe2\xd8\xe0\x0c\xa7\x74\xa5\x1a\xec" + "\x19\xdc\x4a\x89\xcf\x6a\x9f\x8f\xc4\xab\xa8\x1d\x73\xb3\x90\x7e\xfe" + "\xf1\xa0\xd0\x18\xa5\x3c\xb8\xb8\xca\x10\x32\xe3\x15\x83", + "\x52\x53\x87\xc8\x1c\x2a\xb6\x7a\xca\x74", 1, 384, NULL, 0 }, + { 160, 80, 104, + "\x57\x18\xe7\x00\xc4\x8a\x79\x71\x35\x0d\x8a\x11\xb3\x77\x54\xae\x55" + "\xa9\xaa\xd2", + "\xcc\x95\x28\x6e\x9b\x3c\xa9\x36\x19\x1a\xff\x87\x31\xe6\xa1\x78\x06" + "\xa0\x95\x8b\x0b\x1a\x39\x97\x7c\x46\x39\x52\x40\x64\x1e\x97\xd5\x39" + "\x5a\x9c\x8a\x9d\x36\x28\x1e\xba\x82\x5a\x94\xe8\xb1\xad\x79", + "\xe5\x47\x82\x11\x0d\x40\xef\xb5\x43\x43", 1, 392, NULL, 0 }, + { 160, 80, 105, + "\xf5\x6a\xa7\x92\x79\x5e\xb0\x3a\xe0\x99\x04\x40\x71\x4a\xb1\x6c\xc4" + "\xad\x18\xc3", + "\x7a\x02\x4d\x99\x5a\xdd\xd3\x8d\x96\x7b\xc3\xb4\x16\x41\x73\x8b\x69" + "\x89\x7d\x8c\x52\xb7\xaf\xf9\x61\xa7\x00\xcb\x68\xfa\x74\x81\xda\x0a" + "\x36\x90\xa1\x51\xce\x09\xc9\x5b\x4d\xa6\x0f\x7c\xf2\x89\x90\x01\x72" + "\x92\x89\x3b\xbb\x2f\x81\xa4\xdd\xa4\x5f\xe8\x63\x98\x77\xac\x5a\xba" + "\xeb\xbe\x00\xc1\xfd\x17\x9e\xaa\xf7\xdf\xb4\xd5\x09\x29\x37\x1b\x9a" + "\xb8\xb7\xd3\x53\x1a\x63\xab\x18\x8d\x7b\x99\x16\x00\x60\x47\x5c\x33" + "\xe8\x33\x51\xf6\x5d\x5e\x32\x9e\xe8\xbf", + "\x1e\x22\x05\xd1\x7a\xd4\xde\x3f\x1e\xc1", 1, 896, NULL, 0 }, + { 160, 80, 106, + "\x00\xbe\x00\x34\xd3\x26\x99\xb1\x33\x5d\x8d\x4e\x50\x62\x35\xee\x4f" + "\x07\xbe\xf6", + "\x63\x9e\x82\x8d\x88\xbf\x06\x42\xbe\x0a\x54\x1b\x1c\x3f\xca\x07\x60" + "\x9e\xb9\x8d\x23\xa8\xb2\xcd\x4e\x60\xe1\x39\x51\x5e\x4f\xf4\x40\xdf" + "\xda\x1b\xc1\x93\x92\xfe\xff\xed\x74\x16\x4d\x6a\x9d\x8f\x5b\xaf\xe5" + "\x3f\xd3\x97\xcb\x5e\xe1\xdc\xdf\x9b\xfc\x86\x16\x9f\x1b\xc3\x8b\xa5" + "\x7f\x88\xd7\xe8\xc6\x72\x8c\x35\xfc\x07\x12\x8a\xb6\xc3\x96\xbb\x3e" + "\xf3\xc1\x4d\x13\xa0\x5f\x8c\x34\x53\x35\x3e\x85\x0d\xc1\xb2\x91\xac" + "\x70\x61\xab\x52\xf1\x21\x66\x3f\x18\xb0\x24\xe5\xcc\x00\x68\x32\x8c" + "\x88\xf5\x2c\x20\xcd\x21\x79\x3a", + "\x6e\x98\x97\x3d\x3a\x77\x5a\xc5\x08\xe7", 1, 1016, NULL, 0 }, + { 160, 80, 107, + "\xc5\xba\xa7\x50\xa8\x42\x44\x50\xf1\xb4\xd4\x53\xc5\x8e\x29\xc4\x62" + "\xe5\x26\x39", + "\x13\x7c\x72\x27\xa1\x92\xbe\xd2\x6d\x08\xda\x88\x64\x30\xf0\x10\x09" + "\x42\x43\xb5\xc4\x68\x6e\x68\x31\xe4\x8d\xb4\x50\x04\x5a\xa1\xd7\xe3" + "\xae\xcf\x19\x3e\xaa\xa1\xa7\x39\x05\xf5\xf1\x19\x06\x59\xa4\x3e\xd4" + "\xd1\x0b\xfc\xa5\x66\x8e\xbe\xb3\x43\xb2\x1f\xf7\x1d\x07\x37\xf8\x1f" + 
"\x67\x39\x2b\x64\x59\xaa\x95\xf9\x44\x1f\x69\x9b\xf4\x5f\xee\x24\x86" + "\x7a\x98\xa8\xa6\xc5\x7f\x97\x2a\xbe\x3e\x40\x0f\xd6\x4c\xe3\xe5\xb4" + "\x86\x22\xa0\xe9\x9e\x08\xd4\x24\x25\x0f\xa0\x0e\xd0\xdf\xa1\x19\x3f" + "\x93\x6c\x78\xaf\x27\x6a\x4b\x44\x2b", + "\x49\x0d\xfd\x2d\x5e\x6e\xa1\x30\xf6\xa1", 1, 1024, NULL, 0 }, + { 160, 80, 108, + "\x6b\xd4\x86\xce\x93\x4c\x2f\x5f\xe3\x8a\x19\x42\x3d\x25\x7b\xc5\xd8" + "\x08\xe3\x67", + "\xb8\xa6\x84\xad\xa0\xa0\x14\x05\x61\x4b\x1f\xa6\x6e\xba\xb8\xb0\x35" + "\x6e\x33\xb8\x89\xb8\x1b\x3e\xb6\x8d\x13\xb0\x5c\x4e\x60\xc7\x24\x78" + "\x5e\x63\x4c\x4e\xc0\x08\x1c\xc6\xbb\xdf\x21\x3d\xb7\x25\x4f\x92\xc0" + "\xa8\x58\xbf\xcc\x3d\x63\xa4\xe4\xdf\xd9\xe7\x5b\xd4\x83\x9a\xc0\x57" + "\x51\xc2\x3c\xb5\x99\x45\xf4\xc3\x66\x0d\x2b\x30\x09\xf5\xb0\x8a\x59" + "\x6b\xdc\x33\x07\x02\x44\xbc\xee\xa1\x11\x80\xac\x09\x06\x40\x45\x18" + "\xb0\x9d\x5d\x86\x12\xe0\xd8\xe6\x9f\x4b\x9e\x55\xbb\xc0\x53\xb5\x65" + "\x74\x71\x1b\x02\x95\x6d\xb3\xf3\xbd\x8f\x6c\x42\x06\x58\x71\x25\x58" + "\x54\xa1\x61\xe5\x71\x00\xad\xb8\x2c\xba\x79\x89\x3a\xad\x71\x5d\xc3" + "\xdf\x14\x88\xb3\xed\xb5\x6e\x58\xb8\x9c\x0b\xe3\xcf\xab\x09\xa3\xdf" + "\x40\x52\x4d\x2d\x32\x51\xb0\xfc\xf7\xfa\xab\xfc\x75\xf5\x00\x26\x79" + "\x50\x60\xc1\xd6\x28\x72\x57\x4a\x76\x9e\x3d\xa0\xe1\x9a\xf1\xb5\xe2" + "\x55\x14\xae\x17\xa1\x60\xc8\xd1\xeb\x25\x3c\x9f\x66\xec\x3d\xf7\x89" + "\xec\x0c\x6c\x70\x4a\x9e\x2f\xe8\xef\x7e\x9b\xf8\xe8\x16\x4f\x86\xd0" + "\x9d\x2a\x23\x69\x87\x33\xa8\xe4\x0a\x27\x9c\xd5\xfe\x02\xc2\x95" + "\xff", + "\xa5\x94\xd2\x6d\x98\xb5\x3b\x40\x63\xb2", 1, 2040, NULL, 0 }, + { 160, 80, 109, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x07\xe8\xad\x50\xfc\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 110, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe5\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 111, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x04\xe8\xad\x50\xfc\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 112, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe6\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 113, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x86\xe8\xad\x50\xfc\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 114, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x64\x64\x16\x00\xce\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 115, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe9\xad\x50\xfc\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 116, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x65\x16\x00\xce\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 117, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xac\x50\xfc\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 118, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x17\x00\xce\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 119, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xaf\x50\xfc\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 120, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x14\x00\xce\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 121, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\x2d\x50\xfc\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 122, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x96\x00\xce\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 123, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\xd0\xfc\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 124, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x80\xce\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 125, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfd\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 126, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xcf\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 127, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfe\x10\x35\x82\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xcc\xdd\x7e\x12\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 129, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfc\x10\x35\x02\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 130, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x92\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 131, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfc\x10\x35\x82\x37\x61", 0, 0, NULL, 0 }, + { 160, 80, 132, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x07\x3d", 0, 128, NULL, 0 }, + { 160, 80, 133, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfc\x10\x35\x82\xb6\x61", 0, 0, NULL, 0 }, + { 160, 80, 134, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x86\x3d", 0, 128, NULL, 0 }, + { 160, 80, 135, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x60", 0, 0, NULL, 0 }, + { 160, 80, 136, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3c", 0, 128, NULL, 0 }, + { 160, 80, 137, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x63", 0, 0, NULL, 0 }, + { 160, 80, 138, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x3f", 0, 128, NULL, 0 }, + { 160, 80, 139, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x41", 0, 0, NULL, 0 }, + { 160, 80, 140, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x1d", 0, 128, NULL, 0 }, + { 160, 80, 141, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\x21", 0, 0, NULL, 0 }, + { 160, 80, 142, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\x7d", 0, 128, NULL, 0 }, + { 160, 80, 143, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\x50\xfc\x10\x35\x82\x36\xe1", 0, 0, NULL, 0 }, + { 160, 80, 144, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x00\xce\xdd\x7e\x12\x06\xbd", 0, 128, NULL, 0 }, + { 160, 80, 145, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x07\xe8\xad\x50\xfc\x10\x35\x82\x37\x61", 0, 0, NULL, 0 }, + { 160, 80, 146, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe5\x64\x16\x00\xce\xdd\x7e\x12\x07\x3d", 0, 128, NULL, 0 }, + { 160, 80, 147, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x06\xe8\xad\xd0\xfc\x10\x35\x02\x36\x61", 0, 0, NULL, 0 }, + { 160, 80, 148, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe4\x64\x16\x80\xce\xdd\x7e\x92\x06\x3d", 0, 128, NULL, 0 }, + { 160, 80, 149, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\xf9\x17\x52\xaf\x03\xef\xca\x7d\xc9\x9e", 0, 0, NULL, 0 }, + { 160, 80, 150, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x1b\x9b\xe9\xff\x31\x22\x81\xed\xf9\xc2", 0, 128, NULL, 0 }, + { 160, 80, 151, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, 0, NULL, 0 }, + { 160, 80, 152, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, 128, NULL, 0 }, + { 160, 80, 153, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, 0, NULL, 0 }, + { 160, 80, 154, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, 128, NULL, 0 }, + { 160, 80, 155, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x86\x68\x2d\xd0\x7c\x90\xb5\x02\xb6\xe1", 0, 0, NULL, 0 }, + { 160, 80, 156, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x64\xe4\x96\x80\x4e\x5d\xfe\x92\x86\xbd", 0, 128, NULL, 0 }, + { 160, 80, 157, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "", "\x07\xe9\xac\x51\xfd\x11\x34\x83\x37\x60", 0, 0, NULL, 0 }, + { 160, 80, 158, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xe5\x65\x17\x01\xcf\xdc\x7f\x13\x07\x3c", 0, 128, NULL, 0 }, + { 80, 160, 159, "\x1d\x95\x35\xa0\xda\xea\x9d\xfe\x44\x3a", "", + "\x74\x36\x08\x9f\xed\xe3\x29\x1c\x0c\x42\x1c\x9a\xd1\x3c\x35\x7e\xc8" + "\x66\x0b\xae", + 1, 0, NULL, 0 }, + { 80, 160, 160, "\xf5\xc2\xc4\x20\xc6\xf0\x56\x46\x7f\xca", + "\xbc\x8a\x29\xf5\x2e\x57\x58\x1c\xb8\x9a\x86\xe5\xd6\x44\xa1\x4d", + "\x1e\xb7\x6d\xf7\x23\x5c\x52\x37\x1d\x86\x11\x3f\x54\x23\x62\x8b\x2e" + "\xb7\xc3\xd5", + 1, 128, NULL, 0 }, + { 80, 160, 161, "\xe5\x9b\x02\x76\xe2\x7a\x0a\xbc\x75\xf1", + "\x06\x1c\xa1\xa1\xaf\x51\xc5\x13\x37\x28\xc4\x14\xf9\x64\x6b\x3f\x50" + "\x22\x3e\x9b\x20\x55\x70\x70\x32\xe7\x54\xdc\x1d\x31\x96\x4b", + "\x20\x81\x26\x0f\x65\x31\x6d\xf2\x95\x6a\xac\x72\x3a\x9b\xd7\xd2\x22" + "\x5a\x86\x69", + 1, 256, NULL, 0 }, + { 80, 80, 162, "\xb1\x8a\xba\x11\x71\xcc\x2f\xfc\x7d\x58", "", + "\xde\xeb\x3d\x6d\x81\xe3\x3d\x1c\xad\x21", 1, 0, NULL, 0 }, + { 80, 80, 163, "\x23\x08\x20\x66\xe8\xc4\x5d\xa8\x2f\xc6", + "\x06\xc1\x9c\x6e\xe4\xd2\xf0\x15\x76\x9f\x6d\x46\xeb\x46\xd6\xb4", + "\x74\x7c\xd9\x28\xe8\x83\x19\x17\xc8\x55", 1, 128, NULL, 0 }, + { 80, 80, 164, "\xa6\xfa\x1e\x04\xdf\x38\xa7\x86\x67\xeb", + "\x51\xa4\xea\x38\xe5\x56\x6d\x6f\xd8\x03\xae\xc5\xe0\x73\xe0\x87\xe9" + "\xae\x00\xd3\x7d\x4a\x98\xd5\x59\x07\x4e\xbf\xfc\x76\x58\xb7", + "\xec\x8c\x20\x0c\x1d\xda\xd6\xd3\xaa\xd0", 1, 256, NULL, 0 }, + { 520, 160, 165, + "\xab\x92\xe2\xcd\x40\xe0\x0b\x40\xc4\x44\x2d\xd7\x67\x1c\x06\x7c\x77" + "\x92\xaf\x28\xe6\x0f\x25\x85\xe8\x7f\x16\x3b\xf3\xbd\xfc\xa7\xf5\x53" + "\xce\xc7\x1b\x00\x65\x02\x55\x00\xc4\x8e\x20\x70\x98\x4a\xd9\xe2\x4e" + "\x73\x31\x07\xeb\xfd\xe2\x71\x64\xa4\x82\x89\x81\xac\x20", + "", + "\x7a\x29\xb4\x7f\xf6\xae\x90\xc9\x95\x73\xd8\xc9\x22\xa2\x3e\x83\xa6" + "\x2b\x66\xbc", + 1, 0, NULL, 0 }, + { 520, 160, 166, + "\x3b\x47\xa5\xd5\xb7\x2b\xab\xe1\x16\xe6\x19\x19\x60\x0c\xb9\x80\xc9" + "\x04\xc2\x98\xab\x91\xfa\xe3\xdb\x9c\x82\xb0\xf3\x8a\x18\x88\x8b\xc0" + "\x5a\x41\x8d\x65\xd6\x8f\x88\x50\x93\x75\x59\xbb\x37\x32\x5b\xce\x04" + "\xd0\xe5\xd1\x75\xa2\x4f\xea\x30\x98\x95\xf5\x70\x5a\xd7", + 
"\xd7\x18\x62\x02\x8f\xca\xf1\x34\x22\xbf\x32\xac\x0c\x5f\x07\x9b", + "\x80\x19\x23\x1e\x77\xac\xa6\x45\x18\x26\x70\xca\xdf\x88\x7a\xfd\x4b" + "\x41\x15\xa7", + 1, 128, NULL, 0 }, + { 520, 160, 167, + "\x6e\x6c\x43\xdf\x9b\xb6\xc6\xb8\xfe\x41\x4a\x18\x3e\x73\x85\x08\xf0" + "\xac\xa4\x1d\x5b\xee\xf6\xdf\x1b\x26\x0c\x39\xe1\x97\x9b\x54\x68\x36" + "\x22\xa4\xd5\x33\x54\x23\x1b\xef\x6c\x35\xe1\x29\xf8\x5f\x82\x2b\xa0" + "\x91\x98\xaa\x30\xc6\x5e\xe6\x0e\x42\x02\xde\x8c\xd1\x02", + "\x98\xf0\xa4\xb9\xa3\x6e\x17\x3d\x89\x73\x0a\x3b\x37\x07\x77\xc4\x99" + "\xb4\xcf\xf2\x84\x6f\x50\xbf\xb8\x8f\xbb\xbc\x54\x7c\xba\xe4", + "\xd1\x65\x3c\x90\xfc\x59\x1e\x3a\x3c\x28\x5a\x3b\xe8\xb1\x2c\xa9\xb2" + "\x12\x1e\x88", + 1, 256, NULL, 0 }, + { 520, 80, 168, + "\x4f\x00\xfd\x17\xae\x82\xa6\x25\x2a\xda\x98\x28\x0b\xbd\x89\x5d\x74" + "\x3f\xc4\xc2\x0b\xc9\xe6\x15\xd8\xa7\x86\xc7\x9e\x45\x4c\x2b\x13\x41" + "\xe2\x42\x54\xfa\x03\x71\xfa\xc8\x6e\x7c\x0e\xf1\xa7\xdf\x5c\x16\xf3" + "\xb3\x56\x9f\xda\x11\x2c\xca\x86\x85\xfa\xec\xbb\x89\x23", + "", "\x68\x02\xca\x52\xbe\x05\x6d\x66\xb9\xa0", 1, 0, NULL, 0 }, + { 520, 80, 169, + "\xd2\x2e\xc5\x68\x90\x99\x90\xc2\x13\x67\x9f\x70\x72\xea\xf1\x97\x63" + "\x50\x8e\xbd\xe6\x96\x2c\x75\xe7\x42\x9c\x5f\x24\x54\xd4\xb5\x47\x28" + "\x11\xee\xa8\xe0\x2f\xdc\x89\xec\x38\x6b\xc6\xf4\x1d\x2a\xd8\xa9\x1d" + "\x11\x6b\x2c\xbc\x52\xb8\x0d\x35\x71\x27\xd1\x55\x5a\x66", + "\xf7\x1b\x43\xe0\xcc\x64\xb5\x40\x9e\x65\x01\xca\x55\xa8\xd4\x50", + "\xd4\x92\xa2\x96\x86\x0c\xc5\xa8\x9c\x5f", 1, 128, NULL, 0 }, + { 520, 80, 170, + "\xbc\xf6\xad\x6e\x5c\x7e\x22\x00\x29\x9e\xa8\x60\x2e\xfb\x42\xb4\x09" + "\x29\x23\x46\xf7\x8a\x0e\x57\xa7\x89\xba\x17\xb1\x7e\xd6\x08\xe8\x84" + "\x97\xe2\xbb\x4e\xbb\xbb\x3c\xe7\x75\x0d\x22\x2b\x3b\xdf\x84\x8d\x4d" + "\xc8\xd4\x9b\x5b\x60\x37\x8f\xb9\x3c\xe3\xf6\x6a\xb4\xeb", + "\x58\x6f\x5d\xdb\xc3\x72\xc0\x71\x1b\x77\xe4\xb8\x7d\x34\x5d\x62\xb6" + "\xde\x55\xa1\xce\x6f\xa1\x8d\xe3\x34\x6c\x86\xbe\x5c\xec\x6e", + "\x1d\xe9\xae\xfc\xc5\x31\x30\x24\x5a\x6e", 1, 256, NULL, 0 }, + { 0, 0, 0, NULL, NULL, NULL, 0, 0, NULL, 0 } +}; diff --git a/test/wycheproof/hmac_sha224_test.json.c b/test/wycheproof/hmac_sha224_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..c8724fcebef15e0ead8852ab4dbc3e02bed214b6 --- /dev/null +++ b/test/wycheproof/hmac_sha224_test.json.c @@ -0,0 +1,1219 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* HMACSHA224, 0.8rc21 */ +#include "mac_test.h" +const struct mac_test hmac_sha224_test_json[] = { + { 224, 224, 1, + "\x7e\xef\x1e\x40\x25\x33\x50\xeb\x93\x07\xcc\x6b\xd8\xab\x8d\xf4\x34" + "\xbc\x2f\xaf\x70\x95\xe4\x5b\x50\xff\xdd\x64", + "", + "\x45\xb4\x66\x02\x12\x14\xd1\x92\x45\x50\x69\x00\x53\x2f\x52\x72\xf4" + "\x4b\x5a\xd9\xb3\xd8\x29\xf0\xf5\xc2\x10\x8c", + 1, 0, NULL, 0 }, + { 224, 224, 2, + "\x86\x48\xee\x93\x6c\x6e\xbc\x5a\xe4\xbb\x48\xc1\x13\x9a\x54\xe3\xac" + "\x5d\x89\x7b\xee\xc4\x92\xdc\x4d\x74\x07\x52", + "\x2e", + "\x5b\x72\xe3\x20\x86\x79\xe6\x3f\x92\x9e\x6e\xe1\x9a\x25\x7d\x05\x55" + "\xf2\x14\x84\xc7\xca\xac\x7c\x98\x61\xbe\x43", + 1, 8, NULL, 0 }, + { 224, 224, 3, + "\x22\x97\xd7\x8c\xc4\x5f\xaf\x9b\x88\x5b\x36\xac\x80\x20\x5c\xc0\x8e" + "\x1b\x73\x0f\x26\x4f\x23\xf4\xed\xbb\xb4\x06", + "\x32\x9f", + "\x2e\x7a\x81\xc4\xe2\x9a\x43\x5d\x91\xe9\x5f\x37\xfb\x0a\x62\xfb\xe9" + "\xa6\x9e\x06\x1f\x41\x6c\x1a\xd1\x7a\x7f\xca", + 1, 16, NULL, 0 }, + { 224, 224, 4, + "\x03\x61\xa9\x04\xf7\xcb\xd1\x07\xa6\x17\x61\x4a\xb6\x9d\x11\x20\x8e" + "\xe6\xd4\x23\xb3\xae\x90\xe2\xbb\x6d\x7e\x54", + "\xe6\xe7\x65", + "\xbb\xfa\x7f\xf9\x60\x93\x1e\x2f\x5e\xd8\xc9\x25\xcd\x74\x27\x29\x90" + "\xe7\x55\xf3\x14\x22\xe5\xc8\x58\x99\x5b\x73", + 1, 24, NULL, 0 }, + { 224, 224, 5, + "\x26\x4a\x8d\x21\x28\xe8\xfd\x09\x72\xd9\xac\xc6\x6d\xc2\x75\xb1\x28" + "\x6b\xee\xb0\xaf\xf7\xce\x8e\x97\xc7\xb9\x6c", + "\x25\x83\x8e\x50", + "\xb2\x5c\x33\xbb\xa1\xa9\x10\x24\xf4\x2c\xfb\x93\x23\x2a\xd6\x85\xd5" + "\x4b\xe2\xca\x31\x0b\x0f\xf9\xba\x51\x07\xb8", + 1, 32, NULL, 0 }, + { 224, 224, 6, + "\x6d\xde\x88\x28\xf0\x9b\x7a\xa9\x81\x08\x2a\xa1\x16\xfc\xa3\xb7\x34" + "\x17\x21\xc0\x44\x08\x03\xf5\x2c\xc9\x73\x2e", + "\xbe\x81\x60\x2d\xa7", + "\xe5\x10\xfb\xf1\x4b\xd7\x30\x1f\x75\x1c\xc0\xae\x89\xf8\x72\x5a\x76" + "\x54\xeb\xbb\xa6\xbb\x2f\x74\x16\x26\x47\x1d", + 1, 40, NULL, 0 }, + { 224, 224, 7, + "\x3b\xa1\x56\xff\xdc\x55\xd1\x55\xbd\x08\x51\x05\xac\xa6\x4d\x13\x04" + "\x4d\xb6\x0c\x82\xcf\x2c\xd9\xd6\x1d\x09\x8f", + "\x69\xc7\x6c\x89\x37\xa0", + "\xa9\xd3\x87\x40\x24\x50\x38\xd9\xc2\x3c\xbb\x59\xba\x65\x13\xf7\x03" + "\x4d\x80\x47\xa0\x7a\x90\x4a\x2a\x23\xd2\xfc", + 1, 48, NULL, 0 }, + { 224, 224, 8, + "\x9c\x27\x39\xba\xe2\xa8\x63\xfb\x02\x36\x46\x6b\xa3\x40\x8f\x4e\xec" + "\x8d\x43\x20\x6d\x56\xbb\x7a\xa2\xf8\xf7\x5e", + "\xaa\xf4\xc9\x14\x6d\xb9\x48", + "\x21\x10\x39\x3c\x6b\xa0\x1f\x53\xbe\x20\x35\x33\xfb\xc5\x47\x1f\xc8" + "\xf0\x49\x40\xfe\x91\x24\x11\x56\x4b\xa3\x6e", + 1, 56, NULL, 0 }, + { 224, 224, 9, + "\x31\xd9\xca\xe2\xc3\xdf\x06\x40\x18\x20\x9b\x12\x1f\x9e\x88\x39\x76" + "\xea\x75\x79\x42\xec\xda\x9d\x92\xfd\xad\xfd", + "\xb8\x44\x28\x95\x29\x20\x6f\x5a", + "\x1f\x1d\xdb\x86\x80\xb0\xd9\x98\x93\xc4\x98\xa7\x72\xa7\xbe\xa6\x3c" + "\x2e\x08\xc0\x25\x7a\x7f\x31\xe3\xdb\x2b\x88", + 1, 64, NULL, 0 }, + { 224, 224, 10, + 
"\x89\xa1\xb9\xe9\x00\x44\x44\xc1\xd4\xe9\x67\x57\x0c\x21\xa0\x55\x12" + "\xd3\xf6\x18\xec\x16\x8f\xc3\xe1\x3e\xa5\xa2", + "\x6b\x42\xeb\x6d\x84\xe9\x0c\x70\xc2", + "\x3b\x6f\x3b\x09\xe0\x34\x24\xc8\xad\xc2\x67\xfc\xce\xfa\xf6\x14\xdb" + "\x6d\x74\x97\x77\x54\xfc\xad\x8a\x8d\x1a\x9b", + 1, 72, NULL, 0 }, + { 224, 224, 11, + "\x43\x98\x73\x17\x52\xfd\x7a\xf1\xdb\x86\xeb\xcc\xbe\xe0\xad\x65\xeb" + "\x5f\xaf\x00\xac\xe6\xc9\xaa\x35\x44\x1f\xaa", + "\x1a\xe2\xe7\xd9\x17\xc4\x80\x26\x57\x0d", + "\x5f\x19\x48\x33\x69\x53\x33\x7c\x38\x1d\x44\x9c\x17\xab\x5c\x32\x7c" + "\x86\x12\x1a\x8b\x1e\x0d\xb1\x9f\x62\x4e\x3f", + 1, 80, NULL, 0 }, + { 224, 224, 12, + "\x33\x94\x60\xd6\xbb\x26\xca\x60\xeb\xce\xf1\x0c\x38\x58\x7b\x9e\x57" + "\x5c\x39\x84\x91\x78\x2c\xcf\x9e\x8f\x68\x03", + "\xca\x03\xeb\x4f\x37\x53\x6b\x23\x77\x73\x8e", + "\x51\xc5\x66\x1c\x31\xfc\x7e\xdd\x09\xde\x60\xc9\x19\x57\x03\x68\x24" + "\xa1\x97\x61\xbc\xc5\x4f\x1e\x93\xc4\x3c\x3c", + 1, 88, NULL, 0 }, + { 224, 224, 13, + "\x02\x5f\x83\x80\xd1\x0b\x82\x07\xb3\x62\x3e\x4a\x90\xf7\x9c\x3e\x75" + "\x3b\x1b\xe6\xa3\x5b\x88\xb6\x83\x30\xa4\x0c", + "\xe5\x7d\xae\xf9\xed\xe4\xe9\x15\xc3\xa9\xee\xce", + "\x8a\xfd\xb3\x71\x71\x4e\x9d\x60\x63\xec\x9e\x43\xc8\xcd\x55\xe1\xc0" + "\x32\xb2\xfd\xa5\x7f\x91\xe9\xec\x0f\x66\x01", + 1, 96, NULL, 0 }, + { 224, 224, 14, + "\x0b\xdc\x5f\x51\xf8\xa1\xa3\x5d\x75\x55\x4b\xe7\x0e\xfb\xcd\xf5\x1e" + "\x54\xf3\x0f\xa4\x69\x6f\x72\x74\x31\x94\x1f", + "\xcc\x3d\xd1\xeb\x06\x90\xf7\xaf\x09\xad\x40\x8f\x9c", + "\xc0\x91\x89\x51\xc3\x42\x2b\x48\x50\x26\x35\xb6\xe5\x8c\x5d\xce\xe9" + "\xfe\xa5\x1c\x9d\xce\x5c\x7c\x21\x5c\x9b\x93", + 1, 104, NULL, 0 }, + { 224, 224, 15, + "\x5a\xda\x97\xd9\x0a\x74\xa7\xd4\xa6\x8c\x54\x64\xff\xf2\x5a\x9b\x7f" + "\xa2\xe7\x5d\x6a\xcf\x0a\x59\xf1\x43\xa2\xe9", + "\x3f\xe4\xed\xe1\x58\xaf\x10\x8e\x09\xf5\x43\xe1\x4a\xb7", + "\x18\x0a\x6b\x88\x14\xae\x34\x22\x8a\xe9\xac\x76\xda\x83\x79\x37\x6a" + "\xae\x6f\x1a\xa0\x10\x2e\x8f\x06\xb0\x22\xdc", + 1, 112, NULL, 0 }, + { 224, 224, 16, + "\x00\x7a\xfe\x6b\x7c\x07\x01\xc3\x0c\xb7\x6b\x43\x1a\xfa\x35\x10\xc8" + "\xb3\x1d\x21\xcf\xe0\xbb\xaa\x52\x89\xcd\x08", + "\xc2\xcf\x80\x00\x5c\x59\x1c\x1f\x73\x73\x69\xfc\xc2\x12\xf0", + "\xfb\xfd\xb4\x50\xa4\x2f\x9a\x41\x54\x14\x6f\x73\xc5\x90\xa0\xee\x91" + "\x87\xaf\x85\x05\xd6\x07\x90\xa9\x61\x54\x47", + 1, 120, NULL, 0 }, + { 224, 224, 17, + "\x26\x49\x11\x68\xa3\x2c\xe8\xcb\xc4\xc0\xcd\x64\x10\x7e\x4f\xcc\x43" + "\x2f\x07\xd5\x9c\x99\x28\x62\xe1\xe5\x5b\x1e", + "\x15\xe5\x10\x91\xb4\xf4\x24\xba\x1f\xde\xcb\x5e\x2f\xba\x11\xf6", + "\x3f\xa9\x9e\xe1\x60\x32\x8f\xdd\xc4\x7a\x7c\x50\x43\xe9\xef\x64\x5b" + "\x8b\x07\x46\x2b\x71\xca\xd5\x8a\x02\x45\x17", + 1, 128, NULL, 0 }, + { 224, 224, 18, + "\x69\x78\xb6\xc1\x34\xdd\x69\x49\x83\x2d\x65\xe4\xcb\x9c\x1e\x1d\xc3" + "\x6b\xea\xe4\xa1\x34\x90\x7c\x80\xda\x0f\x44", + "\x66\x41\xd8\x34\xb3\xfb\xfd\xb5\xd1\x78\x00\x78\x01\xf7\xb4\xe7" + "\xb1", + "\x61\x38\x72\x30\x44\x6f\x31\xfd\xe8\x55\x2f\x22\xec\x52\xa7\xfe\xf8" + "\x2e\x16\xd0\xad\x39\x9d\xe9\x39\xd8\x22\x9b", + 1, 136, NULL, 0 }, + { 224, 224, 19, + "\x9f\x9f\xb2\x80\xad\xf1\x2e\x73\x95\x48\xb1\xd6\x76\xcb\x79\x4d\x68" + "\x5b\x91\x04\xe6\x3b\x61\x9b\x05\x5c\xb6\x0f", + "\x91\x51\x3d\xd6\xde\x40\xa1\xc2\x3f\x8d\x1e\xb0\xab\x8f\x5e\xa6\xf6" + "\x83\x55\x06\xec\x75\x08\x94", + "\xe6\xb9\x2f\x9c\x03\x02\x70\x89\x7c\x5d\x27\x16\x2a\x5d\x40\xf6\xd3" + "\x73\xff\x13\x61\x05\xd1\xa9\x0e\x0f\x9a\x60", + 1, 192, NULL, 0 }, + { 224, 224, 20, + "\x3b\x1b\x16\xe6\xdd\x2e\x69\x55\x9d\xbe\xb9\x64\xe1\x0f\xc9\x4c\x06" + 
"\x84\x71\xb2\x37\x4d\x3a\x2d\x24\xd2\xd4\x66", + "\x8e\xcd\x55\xb5\x6c\x66\x8d\xcb\x8e\x8b\x1e\xfd\x69\x9c\x0e\x4a\x46" + "\x42\x04\xd2\x9a\xf1\x40\xf8\x7d\x3f\x50\x75\x49\x53\x78\xa3", + "\x17\x58\x56\xb8\xf5\x6a\x8c\x6f\xbe\xbc\x36\x54\x17\x71\x54\x50\x46" + "\xbb\x41\x62\x54\xf0\x1f\xf1\x1a\x21\x8d\x2e", + 1, 256, NULL, 0 }, + { 224, 224, 21, + "\xfc\x29\x63\x98\x84\x50\x63\xe6\x61\xbd\xf3\x6f\xf3\x61\x59\x26\xea" + "\xcc\xbf\x06\x94\x7c\xd3\x1e\x66\x77\xf7\x10", + "\x62\xbd\x0a\xd7\x5d\x64\xc5\x54\xcb\x2c\xc1\x09\xc6\xe4\x01\x9f\xc6" + "\x01\xc6\x1c\xab\xdf\x99\xf8\xde\x87\x1e\xdc\x17\xa3\x01\xb4\xc1\xf5" + "\x5a\x15\xed\x66\xf9\x1e\xb4\x66\x6d\xd0\x8b\xc5\x9c", + "\xb7\xcf\x74\x1c\xf9\x6d\x6b\xf5\x7d\x21\x6c\x43\x61\x1c\x20\x86\x9c" + "\xa0\xd0\x08\xa4\x54\x2f\x5c\x85\x06\x05\xbc", + 1, 376, NULL, 0 }, + { 224, 224, 22, + "\x6c\x98\xd1\xfe\xaf\xff\x98\x61\x35\x19\x66\xbc\x6e\xd1\x9e\xd4\x67" + "\xf9\xdc\x76\x7f\xa0\xdf\x6b\x56\x95\x55\x54", + "\xe9\x9d\x51\xa1\xd9\xa2\x5c\x58\x42\x50\x1a\x53\x83\x13\x35\x78\xc8" + "\xde\xbe\x50\x15\x81\xb1\x61\x0f\x75\x75\x51\x9b\xbd\x26\xf0\x1a\xb7" + "\xcb\xe0\x69\xbf\xd5\xdf\x36\x99\xa2\xfe\xa5\xb4\x61\xa3", + "\x0f\xe6\x4f\xdd\x91\x29\x66\xa6\x54\x20\x69\xa2\x2b\xfd\x08\x4b\x48" + "\x4c\x01\x5c\xf4\x34\xd8\x6b\xca\x15\xcd\xb6", + 1, 384, NULL, 0 }, + { 224, 224, 23, + "\x42\xa1\x64\xf9\x4e\x33\xd5\x74\x11\x8e\x0f\x8c\x93\x8b\xbc\x28\x74" + "\xba\xb2\x19\xee\x7a\x17\x9f\x21\xe1\x3b\x02", + "\xe8\x95\x63\x96\x31\xf8\xb5\xd4\x8e\x3c\xe0\x0e\xb3\x10\xbf\x12\x99" + "\x76\xff\xce\xd9\x6a\x6f\x30\xa0\x9d\x6a\xc1\xc2\x91\xf7\x3e\x93\x69" + "\x05\x26\xd8\x6c\xc4\xd1\xa8\xe2\x1c\x11\xf5\xa8\x97\x93\x08", + "\x1e\xa9\x82\x22\x6e\x8d\x4c\xb7\xb0\x79\x22\x15\x8e\x53\x5a\xf2\x23" + "\x3b\x4c\x4d\x39\xd2\x6b\x06\x2d\x6d\x2a\xae", + 1, 392, NULL, 0 }, + { 224, 224, 24, + "\xc1\xb5\xb9\x12\x10\x66\x7e\x72\xaa\x51\x03\x46\xe1\x81\x13\x58\x81" + "\x5a\x33\x30\xc5\xed\x27\xa6\x95\xc3\x94\x51", + "\xbf\x10\x86\xc3\xea\x8b\x88\x40\x41\x8c\x69\x0c\x92\x15\x2c\x73\xa6" + "\x73\x0b\xd1\xa0\x21\x0c\x8b\x1d\x25\xc4\x3a\x21\x93\xe7\x39\x68\x4f" + "\x04\xa2\x5a\x52\xcc\x30\x55\x99\xf2\x2b\xa6\xf7\x0c\x8e\xd0\x0d\x10" + "\xb9\x14\xa9\x52\x2a\x25\xe0\x6c\x47\x1e\xbc\xa2\xff\x1b\xb4\xfa\x67" + "\x99\xb8\x51\x22\x02\x09\x78\xdf\xa6\x6e\xf1\x2e\xd2\x6a\xd3\x83\x31" + "\xb2\x6e\xaf\x59\x1a\xfc\xea\xc9\x6d\x8c\x77\x1e\xae\x50\xfb\x7f\x46" + "\x24\x23\x37\xdd\x00\x29\xf4\x81\x3b\x53", + "\x4f\x35\x5e\xdb\xe6\xa3\xc9\x3f\xa7\xad\xd3\x84\xbe\x89\x9b\xb4\xfb" + "\x55\x38\x5a\x78\x81\x2a\x26\xcb\x64\xe4\x4f", + 1, 896, NULL, 0 }, + { 224, 224, 25, + "\x4f\x09\xd1\x4d\x40\xe4\x75\xb6\x82\x88\xc0\x80\x66\x8e\xbb\x1b\xc8" + "\xc6\xbe\x31\x91\xf6\x66\x4d\x91\xa2\x3f\xcd", + "\xae\x8b\x6e\xcc\x21\x9b\x36\x8d\x22\xfb\x59\x6e\x42\x65\x2d\x0b\xff" + "\xee\x0b\x20\xd6\x9c\xfd\x08\x9c\xe3\xdc\x93\x03\xba\x2f\x05\x4c\xca" + "\xf5\xf5\x14\x7c\x79\x68\xa0\x28\xb1\x40\xf5\xe3\xc9\x27\x4e\xae\x2a" + "\xfc\x61\xc3\xbb\x62\x98\xdc\x59\x8d\xf7\x7d\xec\x1c\xd2\xdd\x84\x21" + "\x26\x93\xb0\x82\xb8\x13\x2a\xd0\xf0\xb1\x9f\x66\xdb\x69\xfa\x7f\x6b" + "\xf3\x52\xb4\xfe\xac\x72\x4c\xe0\x48\x44\x0d\x2a\x42\xb4\x4d\x53\xbb" + "\x62\xfe\x2a\xb2\x5f\x7f\x54\xbe\xdf\x9c\xe7\xdd\xaf\xd8\xe0\x93\x30" + "\xda\xcc\x6d\x52\xee\x9b\x65\xf5", + "\x29\xba\x26\x81\x03\x01\x9e\x15\x8a\x35\x61\x4c\x80\x78\x0f\xda\x3f" + "\x5e\xc3\xfc\x32\xc8\x0a\xaa\x27\xb4\x02\x5d", + 1, 1016, NULL, 0 }, + { 224, 224, 26, + "\x61\x3f\x41\x4c\xd9\x41\x30\xbb\x8a\x62\x43\xe1\x2e\xcc\xd9\x08\x36" + "\x80\x84\x28\xb4\xa7\x17\x78\x67\x93\x4d\xa0", 
+ "\xf6\x96\xb9\x06\x3b\x64\x81\x6a\x45\x06\x4f\x48\xca\x05\xff\xe4\xd5" + "\xcc\x3d\x0b\x3b\xeb\x0d\xd4\x05\x7b\x6a\xda\x99\x49\x69\xbf\x03\x9b" + "\xfb\xb7\x2c\xe1\x97\x10\x1c\xc4\xe4\xb3\x95\x9b\x37\x02\xf0\x45\xaf" + "\xb7\xfb\x31\x13\xc9\x97\x60\x6d\xca\xf2\xaa\xab\x31\xe0\x2a\xc6\xee" + "\x59\x7d\xfc\x0f\x91\x43\xd0\xef\xfe\xdc\x9a\xe7\xea\x10\xe7\xdd\xb1" + "\xdb\x86\x0a\x91\xaf\xec\x62\xc4\x8e\xd9\xc0\xa6\xc1\x0b\x4d\xa1\xde" + "\x74\x8c\xaf\x7f\x7a\x5e\x01\x79\x9a\xc5\x70\x90\xda\xf4\xe3\x35\x2f" + "\xe8\x59\xc5\x13\x1c\x20\x5d\x26\x2d", + "\x81\x29\xe2\x09\x30\x70\x16\x8a\x20\x89\x97\x93\xa0\x44\x47\xa7\xef" + "\x01\xae\x72\x34\x19\x25\x6a\x8c\xb4\x2f\x6d", + 1, 1024, NULL, 0 }, + { 224, 224, 27, + "\x5b\x88\x27\x53\x07\xaa\xf6\x91\xa0\xcf\x0c\x51\xf5\x05\x53\xdd\xa9" + "\x72\xd1\x4f\x8a\xff\xf9\x8e\x62\xc2\xd9\x72", + "\x57\xe4\xef\xbd\xe1\xce\x9f\xee\x2e\x29\xdb\x19\xdf\xc6\xba\x3b\xcb" + "\x17\xf3\x37\x65\xaf\x7f\x20\x13\x3b\xbd\x19\x10\xd5\x42\x14\x5c\x7d" + "\xef\x18\x7a\x30\x45\x17\xb8\xd8\x95\x44\x54\xa9\x0a\x71\x7f\x67\xf9" + "\xc8\xcc\x58\x79\x65\xfd\x9b\x43\xf4\x1e\xcc\x50\xb3\x45\x8d\x8c\xe9" + "\xf6\x6b\x47\x5f\x1e\xae\xf4\xa2\x9b\xa8\x9a\x3d\x58\xe5\x01\x1c\x92" + "\xac\xd1\x53\x6f\xcd\x18\xab\xec\x29\x41\x1b\x38\x9b\x64\xf7\xf3\x44" + "\x77\x7e\xd6\xde\xae\x32\x12\x7a\xba\xa6\x9a\x50\xba\x22\xa1\x1d\x6e" + "\x59\x35\x4f\x2f\xf0\xe3\xc3\xe3\x29\x3c\xdc\x33\x54\x11\xcf\x55\xb1" + "\x80\xba\xb5\x9d\xa3\x69\x03\xa6\xfb\xa9\x1d\xf3\x4d\x2a\xad\xf7\x01" + "\x7f\xf4\x9a\x4f\xbd\x73\xc9\xc7\x44\x69\xf2\x25\xda\xfc\x0a\x0c\x70" + "\x48\xc2\xb8\x24\xcc\x0c\xba\x8c\xad\x8a\xce\xd1\x1b\x8c\xda\xc3\x24" + "\x3c\xdb\x5b\x65\x4f\x7a\x15\xce\x20\x14\xe9\x2e\xe2\x87\xd0\x69\x04" + "\xd7\x78\x51\x2a\x1b\x1f\x5e\xc0\xc9\xb0\x90\xb9\xab\x43\x9c\x44\x26" + "\x6b\x6b\xe3\xd6\xa9\x89\x47\xd2\x6d\x07\x9e\x4f\x7e\x84\x9f\x3c\x6d" + "\x93\xde\x98\x62\x4e\x6c\x5f\x53\xec\x02\xdb\xd3\x68\xbc\x24\xa3" + "\x00", + "\x65\x7d\xd0\x4b\x97\x02\x19\xed\xd6\x3a\xbf\x9d\x4a\xa1\x08\x47\x4a" + "\xa3\x16\xb6\xbb\x66\xbc\xa7\x6e\xd8\x06\xc6", + 1, 2040, NULL, 0 }, + { 224, 224, 28, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6f\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 29, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0c\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 30, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6c\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 31, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0f\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 32, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + 
"\xee\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 33, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 34, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x98\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 35, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x20\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 36, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\xe2\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 37, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\x2e\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 38, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe4\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 39, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xde\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 40, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe7\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 41, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdd\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 42, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x13\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 43, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\x4d\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 44, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6c\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 45, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x50\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 46, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\xed\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 47, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\xd1\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 48, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x58\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 49, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xdc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 50, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb4\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 51, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xae\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 52, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x28\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 53, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x73\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 54, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x2b\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 55, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x70\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 56, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\xa9\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 57, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\xf2\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 58, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x13", + 0, 0, NULL, 0 }, + { 224, 224, 59, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8b", + 0, 128, NULL, 0 }, + { 224, 224, 60, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x10", + 0, 0, NULL, 0 }, + { 224, 224, 61, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x88", + 0, 128, NULL, 0 }, + { 224, 224, 62, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x52", + 0, 0, NULL, 0 }, + { 224, 224, 63, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\xca", + 0, 128, NULL, 0 }, + { 224, 224, 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x92", + 0, 0, NULL, 0 }, + { 224, 224, 65, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x0a", + 0, 128, NULL, 0 }, + { 224, 224, 66, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6f\x99\xe8\x62\xe5\x32\xe8\x93\x6c\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 67, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0c\x21\x6f\xae\xdf\x30\x53\xcd\x50\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 68, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\xe2\xe5\x32\xe8\x13\x6d\x78\xb5\xf0\x29\x09\xb1\x30\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 69, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\x2e\xdf\x30\x53\x4d\x51\xfc\xaf\x41\x72\x22\xc8\xf1\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 70, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6e\x99\xe8\x62\xe5\x32\xe8\x13\x6d\x78\xb5\xf0\x29\x09\xb1\xb0\xab" + "\x09\x80\x6b\x2a\xf0\x2f\x7c\xb9\xd3\x9d\x12", + 0, 0, NULL, 0 }, + { 224, 224, 71, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\x4d\x51\xfc\xaf\x41\x72\x22\xc8\x71\x44" + "\xab\xd5\xf2\xf7\xfa\x00\xab\x46\x67\xd8\x8a", + 0, 128, NULL, 0 }, + { 224, 224, 72, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x91\x66\x17\x9d\x1a\xcd\x17\x6c\x92\x87\x4a\x0f\xd6\xf6\x4e\xcf\x54" + "\xf6\x7f\x94\xd5\x0f\xd0\x83\x46\x2c\x62\xed", + 0, 0, NULL, 0 }, + { 224, 224, 73, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf2\xde\x90\x51\x20\xcf\xac\x32\xae\x03\x50\xbe\x8d\xdd\x37\x0e\xbb" + "\x54\x2a\x0d\x08\x05\xff\x54\xb9\x98\x27\x75", + 0, 128, NULL, 0 }, + { 224, 224, 74, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 0, NULL, 0 }, + { 224, 224, 75, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 128, NULL, 0 }, + { 224, 224, 76, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 0, NULL, 0 }, + { 224, 224, 77, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 128, NULL, 0 }, + { 224, 224, 78, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\xee\x19\x68\xe2\x65\xb2\x68\x13\xed\xf8\x35\x70\xa9\x89\x31\xb0\x2b" + "\x89\x00\xeb\xaa\x70\xaf\xfc\x39\x53\x1d\x92", + 0, 0, NULL, 0 }, + { 224, 224, 79, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8d\xa1\xef\x2e\x5f\xb0\xd3\x4d\xd1\x7c\x2f\xc1\xf2\xa2\x48\x71\xc4" + "\x2b\x55\x72\x77\x7a\x80\x2b\xc6\xe7\x58\x0a", + 0, 128, NULL, 0 }, + { 224, 224, 80, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", + "\x6f\x98\xe9\x63\xe4\x33\xe9\x92\x6c\x79\xb4\xf1\x28\x08\xb0\x31\xaa" + "\x08\x81\x6a\x2b\xf1\x2e\x7d\xb8\xd2\x9c\x13", + 0, 0, NULL, 0 }, + { 224, 224, 81, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0c\x20\x6e\xaf\xde\x31\x52\xcc\x50\xfd\xae\x40\x73\x23\xc9\xf0\x45" + "\xaa\xd4\xf3\xf6\xfb\x01\xaa\x47\x66\xd9\x8b", + 0, 128, NULL, 0 }, + { 224, 112, 82, + "\x26\xf3\x14\x17\x0b\x05\x4d\xae\xf5\x34\x98\x04\xda\x18\xf9\x69\xc9" + "\x41\x74\xba\xca\x2b\xee\xb0\x09\xd4\x7a\x23", + "", "\x81\x6d\x7a\xf2\x47\x5e\x94\x71\x3f\x2d\xc3\xaa\x30\x69", 1, 0, + NULL, 0 }, + { 224, 112, 83, + "\x17\x42\x9a\x62\x2d\xc1\x8d\x38\x71\x5b\x31\xf8\xf2\xb9\x63\x10\x8e" + "\x95\x2a\x67\x08\xf3\xe5\x2d\x5b\x25\x84\x8a", + "\xda", "\x26\x63\x07\x77\xd8\x5f\x77\x71\x87\x63\x0b\xb9\x46\x74", 1, + 8, NULL, 0 }, + { 224, 112, 84, + "\x0a\xcf\xe1\x2d\x89\xac\xd7\xd9\xca\x49\xba\xe6\x31\x8f\x35\xb2\xfb" + "\xbf\xc8\x4e\x5d\x2c\x9d\x49\x54\xbe\xde\xd7", + "\x03\xa8", + "\xaa\x4c\x4b\xb6\x3c\xad\x66\xac\x67\x51\x50\xf7\x18\xb2", 1, 16, + NULL, 0 }, + { 224, 112, 85, + "\x5a\x06\x80\xf1\x12\x35\x4b\xd4\x67\x86\x5b\x19\xae\x95\x6b\x27\x19" + "\xe2\x1e\xce\xe1\xa9\x13\xbd\xca\x29\x43\x39", + "\xa0\xfb\x73", + "\x36\xc7\xcd\x3f\x29\x0d\x1d\x7d\x33\x2b\x95\x1a\xa4\x71", 1, 24, + NULL, 0 }, + { 224, 112, 86, + "\x46\xfa\x59\xaa\x52\x4f\xe3\x0a\x0f\x4e\x39\x56\x1b\x56\x66\x85\x44" + "\x40\xdb\xd9\x70\xbb\x59\x92\x5c\xe0\xae\x1a", + "\xc8\xb2\xf5\x57", + "\xc1\xa8\xa7\xd4\x3d\xf3\x4d\x91\x7f\x0c\xb5\x12\xc5\x7d", 1, 32, + NULL, 0 }, + { 224, 112, 87, + "\x29\xef\xc5\xab\x5d\x30\xe5\x35\x35\x76\x03\xf2\x71\x1b\x6e\x0a\xa6" + "\xcf\x46\x13\x54\x6c\x23\x14\x44\x36\xd2\x13", + "\xc8\xd9\xf5\xb3\x73", + "\xa8\xcc\x7b\xeb\xef\x4c\xfc\xd5\xac\x2f\x40\x1a\x37\x2d", 1, 40, + NULL, 0 }, + { 224, 112, 88, + "\xfe\x60\xe0\x32\x20\x35\x53\x8f\x2b\x1d\xe9\xde\x38\x0c\xde\x35\xf2" + "\x91\xde\xeb\x6e\x02\x7b\x5d\x82\x9e\xcd\x1e", + "\x18\x5e\x4c\xad\xa4\xf4", + 
"\x32\xfa\xa1\x54\x39\x6b\x0b\x62\x43\x6e\x6b\xf9\x37\xee", 1, 48, + NULL, 0 }, + { 224, 112, 89, + "\x1b\xf7\xfc\xdf\x37\x42\xfa\x77\x99\x15\x28\xcc\x1c\x67\x8b\x98\xbe" + "\x98\x76\xa8\xc8\xc5\xb8\x09\xbe\xab\x7d\x9c", + "\x9c\x0f\x34\xa5\x65\x42\x79", + "\x08\x61\x70\xc4\x6d\x2b\x0a\x76\xc6\x15\x27\xc2\xd0\x52", 1, 56, + NULL, 0 }, + { 224, 112, 90, + "\x32\x53\x3c\x16\xf7\x92\xed\x0a\xcf\x8e\x9e\x60\xf5\x4a\xa1\x73\x93" + "\x7c\x71\x94\xb8\x82\xec\xc3\xe6\x71\x00\x9f", + "\xf9\x68\xdc\x7a\x19\xaf\xe3\x39", + "\x3f\xb4\xeb\x44\x50\xac\x4b\x26\xa7\x14\xbc\xfb\x22\x4c", 1, 64, + NULL, 0 }, + { 224, 112, 91, + "\x3c\xf2\x8a\x47\x6c\xe7\xea\xec\xfc\x3f\xbf\x1b\x08\x59\xa0\x42\xa5" + "\x68\x74\x0a\x58\x4c\x77\xcb\x8f\x96\x03\xac", + "\xdb\xca\x9e\x4b\xdd\x84\xb3\x89\x34", + "\x2c\xf1\x4e\xb8\xf4\xc7\x53\x7e\x98\x31\x98\x3b\xb5\xaf", 1, 72, + NULL, 0 }, + { 224, 112, 92, + "\xa2\xa8\x09\x0a\xef\x69\x27\x7f\x92\x83\x0e\xc7\x40\x4c\x03\x2f\x8f" + "\xde\xbf\xbc\xea\xbb\x9e\x59\x09\x68\xa7\x7f", + "\x6b\x79\x0a\x94\x6a\x83\x36\x4c\x79\xd7", + "\xd4\x67\x20\x9f\x63\xa9\xbd\x3d\x2c\x53\x98\xc3\x05\xda", 1, 80, + NULL, 0 }, + { 224, 112, 93, + "\x6f\x99\x99\x29\xe9\x16\x72\xba\xc3\x5e\xa7\x0f\x8f\xf8\xb9\xae\xef" + "\xa5\x48\x94\x93\xc9\x9b\x0d\x27\x79\x72\x07", + "\xb7\xda\xbb\x23\x7a\xea\xe2\xbe\x8b\x5e\x19", + "\x09\xb2\xbb\x6e\xae\xda\x5f\x02\x29\xb8\xc3\x5a\x2f\x54", 1, 88, + NULL, 0 }, + { 224, 112, 94, + "\x45\x25\xb9\x6c\x26\x3e\x4d\x2d\xab\x28\x90\xaa\x55\xf3\xcc\x50\x3d" + "\xc1\x20\x6d\x9f\x19\x15\xa6\xfb\xa5\xae\x61", + "\xef\x85\x8f\x49\x6f\xcb\x7c\x3f\xab\xbf\xb5\x2e", + "\x6f\x5c\xa7\xef\xcb\x9a\x70\xd0\xab\xf8\x42\x5f\x42\xca", 1, 96, + NULL, 0 }, + { 224, 112, 95, + "\xf8\x94\x01\xac\xb0\xa6\x0d\x07\xfd\x73\x3e\xd5\x63\xf2\xee\x24\x1f" + "\x4e\xcf\xea\x81\x14\x58\x7a\x44\xdf\xdb\x0c", + "\x7d\x3c\x09\x18\x08\x59\x84\xdf\x95\x09\x7a\xfa\x81", + "\xfc\x22\x7f\x29\xb5\x1f\x9c\x85\x53\x43\xdc\xd0\xea\x11", 1, 104, + NULL, 0 }, + { 224, 112, 96, + "\x58\xbc\xe8\xc0\xd1\x7f\xc7\x13\x1d\x2f\xa2\x26\x24\x09\xbb\x14\x66" + "\x3a\x6e\x68\x01\x9f\x88\x29\x99\x87\x89\x3e", + "\x1c\xa5\x0c\xd6\xc3\xf1\x22\x5e\xb6\xc4\xec\x4d\x6a\x90", + "\x53\xe1\x03\xbb\xde\xd7\xb8\x25\xaf\xfa\x24\x0f\x85\x78", 1, 112, + NULL, 0 }, + { 224, 112, 97, + "\x65\x8e\x51\x0f\xba\x4e\x22\x08\xaf\xac\x98\x33\x3f\x9e\x24\x2b\xc1" + "\x18\xf6\xe7\x9e\xf0\x66\x1d\x61\x9d\xd3\x2b", + "\x32\xc3\x85\xb7\x5a\xe8\x45\x58\xca\x30\x28\x81\xc5\x16\x39", + "\x48\x5f\x35\x1e\x2a\x9a\x82\x91\x0c\x3c\x94\x9e\x32\xb8", 1, 120, + NULL, 0 }, + { 224, 112, 98, + "\x6a\x41\xcc\x3c\xa7\x14\x2a\xe1\x4e\x6d\x97\x9a\x3f\x89\x0a\x33\x15" + "\x97\xe5\x92\xdd\x74\x52\x0c\xe4\xea\x66\x0f", + "\x78\xe3\xa7\x70\xa8\xaa\xaf\x03\x9f\xd4\xc9\xb6\xa1\x78\x04\x11", + "\x33\x1a\x58\xed\x96\xfc\x8b\x9e\x68\x4a\xb0\x5f\x63\x6c", 1, 128, + NULL, 0 }, + { 224, 112, 99, + "\xb8\x97\x2b\x93\xb6\x83\x02\xcb\xaa\x08\xd3\x29\x04\xea\xe6\x37\x5a" + "\x66\xf3\x50\x8e\xce\x3c\x9b\x22\x38\x2c\x7e", + "\x36\x87\xe6\x28\x7d\x73\xc9\xe3\xf6\x79\xa5\x0e\x76\x71\x24\x71" + "\x27", + "\x27\xd8\x11\x39\x55\x02\x6d\x4d\x31\x80\x70\xfb\xfd\x8f", 1, 136, + NULL, 0 }, + { 224, 112, 100, + "\xbc\x57\x09\x32\xab\xfa\x11\x05\x0a\xd4\xfc\x80\xa6\xd5\xaf\xe3\x27" + "\x1d\x86\xaa\x29\xdc\x62\x73\x8b\x20\x7d\x14", + "\xd5\x32\x02\xac\xd2\xec\x74\xd7\x46\x53\x1b\xd9\xad\x30\x16\xd0\x98" + "\x0e\x01\x66\xfb\x42\x7a\x08", + "\x02\x0e\x3e\x0c\x29\x40\xce\x15\xee\xb6\x73\x92\x57\x0f", 1, 192, + NULL, 0 }, + { 224, 112, 101, + 
"\xc9\x2a\x06\x65\xc1\x2e\x87\x02\x6e\x1b\x34\x4f\x97\x1f\xdb\x0e\x47" + "\x4d\x45\x0c\xba\x83\x4a\xae\x40\xe2\xd2\x1e", + "\x4a\x3a\x85\xac\x09\xf5\x19\x0a\xb9\x4f\x73\xfd\x91\xd9\x8f\x05\x60" + "\x15\x26\x3c\x89\xed\x5d\xa2\x23\xfc\x46\x75\xca\xb2\x5c\xdd", + "\x92\x28\x53\xf1\x59\xc4\x2b\x9e\x27\x4f\xce\xf7\xbd\xf3", 1, 256, + NULL, 0 }, + { 224, 112, 102, + "\x6f\xbe\xf6\x7c\xfb\xac\xc9\x8c\x63\x25\x2b\x1c\xa0\x09\xa6\x0e\x8e" + "\x34\x79\x76\x9a\x2d\x44\x9f\xb4\x63\x90\x64", + "\x00\x6e\x17\x9e\xac\xfa\x9e\x1e\x62\x8b\xb7\x82\x3e\xe9\x60\x9a\xe7" + "\x96\x8b\x6d\xf9\x0e\x17\x6f\x77\x2a\x79\x08\x8d\x37\xe9\xb1\x5c\xab" + "\x31\x29\x22\xaa\xf8\xfc\x65\x83\xa3\x41\x00\x2b\xda", + "\x0a\x27\xa1\x2a\xfb\xb9\xc3\x13\x62\x02\xe0\x2a\xe3\xb2", 1, 376, + NULL, 0 }, + { 224, 112, 103, + "\x70\x0b\x09\x90\x81\x74\xf1\x07\x2e\x31\xae\x8c\xcb\xda\x1c\x44\x60" + "\xfc\xf2\x1f\xdf\x14\x6a\x11\x48\x2b\x21\x0d", + "\xf7\x72\x56\x4e\xcb\x10\x9e\x80\xee\xfb\x1d\x5a\x7f\x1c\x95\xe2\x03" + "\xba\x4c\x98\x02\x33\xdd\x8d\x13\xde\x30\x46\x07\x9a\x6b\x2c\xa2\x6d" + "\xc3\x52\x1e\x5e\x0c\x80\x7e\xae\x7a\x79\x87\x7c\x73\xe9", + "\x04\xc7\x18\xa4\xcd\x8b\x58\x3d\x5f\xfb\x81\x70\x27\x6c", 1, 384, + NULL, 0 }, + { 224, 112, 104, + "\xe1\x8a\x20\x24\x6e\xbe\x1b\x57\x96\xdb\xfe\x35\x11\x0e\xfc\x76\x37" + "\xd7\x4a\x35\x5f\x0a\x67\x58\xd4\xa0\x0b\x7d", + "\x77\x72\x0d\xde\x53\x0e\x6e\xea\xa0\xe9\xaf\x33\x11\xf7\xe9\x91\x89" + "\xd6\xc4\xf7\xd7\x1d\x0a\x42\x07\xd6\x2c\x76\x6b\xee\x32\x02\x0c\x92" + "\xf5\xd5\xd2\x8d\x5d\xe4\xd0\xd9\xc9\x4b\x57\xec\x05\xf0\xc3", + "\x25\xeb\xc8\x61\x1f\x4b\x63\x6d\x89\x2f\x11\xdf\x2b\x29", 1, 392, + NULL, 0 }, + { 224, 112, 105, + "\x3c\x45\x85\xa7\x75\xbe\xc7\x6c\x7d\x8b\x27\xb8\x7e\x70\xa5\x86\x3a" + "\x85\xe6\x11\x1f\x31\x61\xb3\x81\x5f\x59\xb4", + "\x62\x8c\x0f\xf8\xc4\x32\xd7\x4f\x4c\xfb\x77\xba\x46\xb7\xce\xf6\x7a" + "\x48\xac\x05\x3c\xf0\xc1\x8b\xe4\x16\x48\x73\x6a\xbc\xc8\xc6\xfb\xe4" + "\x98\x15\x29\xba\xbd\x4b\x27\x86\x6e\x34\xce\xd1\x6d\x8b\x0b\xec\x45" + "\x6e\x14\x65\x3a\x14\x22\xf5\xa6\x25\x56\xd2\x0b\x0f\xe4\xe0\x37\x49" + "\xd5\xf6\xe9\x86\x37\x50\x62\xdb\xdd\x82\xf6\xe9\xe1\xd4\xad\x54\x7c" + "\x31\x53\x0c\x2a\x31\x38\x3c\x25\xff\x57\xe8\x79\xea\xe9\x9d\x9b\x3a" + "\x0d\xa1\xf3\xc1\xda\xcb\x97\x50\x67\xac", + "\xde\xb9\x4b\x2d\x43\xe9\x89\x26\xaf\x51\xfc\x0c\x88\xdc", 1, 896, + NULL, 0 }, + { 224, 112, 106, + "\xac\xaf\x94\xcb\x1a\x8f\xf4\x67\x7f\xc5\x86\xd2\xbd\xf9\x81\xac\x3a" + "\x65\x6b\x20\x82\x15\xe0\xa7\x64\x7b\x42\x0f", + "\x31\x4c\x2c\x25\x46\x5d\xe3\x42\x72\x79\xdb\xc8\x94\x36\x50\x5f\xee" + "\x6d\x37\xd5\x6f\xbd\xa0\xe5\xe2\xa4\x94\x49\xd9\xdb\xf0\x03\x02\x7f" + "\x2e\x4e\xf5\xc5\x2f\x7a\xf9\x3f\xd8\x01\x55\xa6\x6a\x1c\xd6\xb9\x88" + "\x5b\x56\xd8\x28\x05\x8a\x0d\xe7\xd2\x47\xe1\x95\x80\xb2\xe8\xdc\xbd" + "\xef\x2a\xe4\x68\x40\x56\x5f\xd8\xb2\x76\x56\x9c\x19\xd7\xe1\x85\x11" + "\x6e\xa1\x1a\xd6\x7d\x5f\xc2\x7f\x4a\x68\x16\xba\x45\xbe\x5d\x14\xf3" + "\xba\x43\x15\xc7\x4d\x1e\xdb\x20\xf2\x17\xb1\x16\xbe\x85\x2b\x62\xa7" + "\xf4\xe3\x2b\x3e\x70\x8f\xf9\xf7", + "\x5b\x0d\x7a\xec\x7f\xbd\x19\x6e\xe6\x9e\xd3\x73\xe1\x31", 1, 1016, + NULL, 0 }, + { 224, 112, 107, + "\xe4\x90\x34\x8a\xd7\x8f\xd2\xcd\x5b\x51\xf2\x79\x5b\x79\xe5\x80\x5c" + "\xe1\xd9\xba\xf1\x15\x1d\xbd\xf9\x95\xe1\xb0", + "\xf6\xff\x18\x45\x84\x2b\x9e\x46\xf7\x9a\xdb\x10\x79\xaf\xf4\x73\x97" + "\x39\x1d\xc2\x69\xbc\x0c\x89\x9b\xa4\x08\x7b\x58\xa6\x76\xf5\x40\x8c" + "\x3f\x76\x37\xff\xc4\x77\x2a\xf3\xe4\x1b\x5c\xea\x51\x05\x8b\xc5\x28" + 
"\xea\x09\xbb\x4b\xd7\x97\x59\x4c\x79\x8b\x0f\x0f\xf8\x81\x69\x5e\x98" + "\xc0\x8b\xbb\x04\x0c\x12\xc5\xcb\xdb\x22\x8d\x61\xcc\x99\xe3\x32\xe9" + "\x63\x12\x8d\x06\xe9\x7e\xd2\xee\xfd\xed\x2e\x1b\x5a\x03\x5f\x3b\xea" + "\x68\x27\x3e\xfa\xc0\x3a\x89\x4d\xcf\x2f\xcc\x79\xa5\x69\x62\x18\x59" + "\x54\x04\xb2\x75\x8d\xeb\x9a\x80\xee", + "\x59\x07\x27\xf3\x44\xd8\xa5\x40\xe5\xc5\xe0\xf4\xda\xe9", 1, 1024, + NULL, 0 }, + { 224, 112, 108, + "\xc8\xe0\x99\xdb\xb6\x0a\x8f\x19\xd8\xb8\x68\x56\xb2\x1c\x55\xf3\x43" + "\x7a\xe2\x7f\x77\xdf\xf9\x80\x8f\x12\xa1\xb5", + "\xed\xbc\x10\x9b\xf2\x8c\x8a\xb3\x2b\x12\x38\xef\xf1\xcd\x14\x30\x8c" + "\xdd\x84\x5f\xa9\x19\xbf\xd8\xa0\x0c\x99\x1c\xf9\xa8\xd6\xb0\x5d\xd8" + "\xcc\x7d\x23\x93\x78\x29\x49\xc8\x99\xde\x79\xe7\x71\xef\x7d\x85\x67" + "\xf3\x22\x87\x62\x39\x63\x04\x8e\x6c\x80\xd9\x1f\x07\x78\xdd\x63\x31" + "\x11\x06\xe9\xd0\x91\x3c\x08\xb7\xa0\xb7\x25\x3f\xa3\xce\x30\x7a\xc4" + "\x0e\xc5\x5a\x4c\x44\x5f\x54\x55\xa5\x70\xfe\xc0\x90\xe2\x51\xe8\x64" + "\x6b\xad\xa1\xa4\x86\xd4\x1c\x37\x94\xbc\xe5\x63\x97\x32\xf2\xc6\xcf" + "\xd5\x80\x81\xc4\x79\xa6\x8c\x51\x5f\x5d\x47\xbc\x5b\x2f\x16\x22\xa0" + "\x8d\x38\xa5\x96\xa8\x17\xf3\xd4\xef\xef\x80\x03\xae\x43\x0e\x6a\xe9" + "\x3b\x0a\x3a\xe8\xfa\x95\xa2\xac\xe3\xd2\x4d\x90\xa9\xef\x86\x1d\xc0" + "\x4c\x13\xe3\x8f\x6e\x52\x4b\x3a\xbd\xf9\xcc\xe4\xfa\x49\x07\x07\xc8" + "\x0c\x16\xe2\x54\xb7\xa7\x1a\xf0\x0a\x12\xdb\xf4\x73\xb5\x0b\x9f\xe4" + "\x09\x7e\xc0\x0a\xb2\x7e\x66\xb6\xf3\x02\x2b\x0f\x10\x1e\xe1\xa9\xf7" + "\xfa\x86\x52\xe9\xf0\x95\xca\x24\x0a\x44\x60\x67\x44\x68\x67\xf7\x8e" + "\x83\x52\xc4\x11\x07\x94\xc2\xe3\x38\x3d\xfe\xdf\xb3\x5e\x74\xa3" + "\x3e", + "\x33\xe7\xdc\xb0\xfd\xe3\xb1\xc5\xb9\x25\x06\xe6\x35\xeb", 1, 2040, + NULL, 0 }, + { 224, 112, 109, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6f\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 110, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0c\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 111, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6c\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 112, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0f\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 113, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\xee\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 114, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 115, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x98\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 116, 
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x20\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 117, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\xe2\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 118, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\x2e\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 119, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe4\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 120, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xde\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 121, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe7\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 122, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdd\x30\x53\xcd\x51\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 123, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x13\x6d\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 124, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\x4d\x51\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 125, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6c\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 126, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x50\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 127, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\xed\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\xd1\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 129, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + 
"", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x58\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 130, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xdc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 131, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb4\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 132, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xae\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 133, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x28\x09", 0, 0, + NULL, 0 }, + { 224, 112, 134, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x73\x22", 0, 128, + NULL, 0 }, + { 224, 112, 135, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x2b\x09", 0, 0, + NULL, 0 }, + { 224, 112, 136, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x70\x22", 0, 128, + NULL, 0 }, + { 224, 112, 137, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\xa9\x09", 0, 0, + NULL, 0 }, + { 224, 112, 138, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\xf2\x22", 0, 128, + NULL, 0 }, + { 224, 112, 139, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x08", 0, 0, + NULL, 0 }, + { 224, 112, 140, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x23", 0, 128, + NULL, 0 }, + { 224, 112, 141, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x0b", 0, 0, + NULL, 0 }, + { 224, 112, 142, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x20", 0, 128, + NULL, 0 }, + { 224, 112, 143, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x49", 0, 0, + NULL, 0 }, + { 224, 112, 144, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\x62", 0, 128, + NULL, 0 }, + { 224, 112, 145, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\x62\xe5\x32\xe8\x93\x6d\x78\xb5\xf0\x29\x89", 0, 0, + NULL, 0 }, + { 224, 112, 146, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\xae\xdf\x30\x53\xcd\x51\xfc\xaf\x41\x72\xa2", 0, 128, + NULL, 0 }, + { 224, 112, 147, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6f\x99\xe8\x62\xe5\x32\xe8\x93\x6c\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 148, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0c\x21\x6f\xae\xdf\x30\x53\xcd\x50\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 149, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6e\x99\xe8\xe2\xe5\x32\xe8\x13\x6d\x78\xb5\xf0\x29\x09", 0, 0, + NULL, 0 }, + { 224, 112, 150, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0d\x21\x6f\x2e\xdf\x30\x53\x4d\x51\xfc\xaf\x41\x72\x22", 0, 128, + NULL, 0 }, + { 224, 112, 151, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x91\x66\x17\x9d\x1a\xcd\x17\x6c\x92\x87\x4a\x0f\xd6\xf6", 0, 0, + NULL, 0 }, + { 224, 112, 152, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xf2\xde\x90\x51\x20\xcf\xac\x32\xae\x03\x50\xbe\x8d\xdd", 0, 128, + NULL, 0 }, + { 224, 112, 153, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, 0, + NULL, 0 }, + { 224, 112, 154, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, 128, + NULL, 0 }, + { 224, 112, 155, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, 0, + NULL, 0 }, + { 224, 112, 156, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, 128, + NULL, 0 }, + { 224, 112, 157, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\xee\x19\x68\xe2\x65\xb2\x68\x13\xed\xf8\x35\x70\xa9\x89", 0, 0, + NULL, 0 }, + { 224, 112, 158, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x8d\xa1\xef\x2e\x5f\xb0\xd3\x4d\xd1\x7c\x2f\xc1\xf2\xa2", 0, 128, + NULL, 0 }, + { 224, 112, 159, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "", "\x6f\x98\xe9\x63\xe4\x33\xe9\x92\x6c\x79\xb4\xf1\x28\x08", 0, 0, + NULL, 0 }, + { 224, 112, 160, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x0c\x20\x6e\xaf\xde\x31\x52\xcc\x50\xfd\xae\x40\x73\x23", 0, 128, + NULL, 0 }, + { 112, 224, 161, + "\x77\xb0\xde\x54\xe8\x93\x64\x2c\xae\xac\x34\xbf\xd1\xab", "", + "\x20\x14\xa9\xf2\x72\x37\x8f\xa1\xc9\xf6\x74\x4d\x4d\xb4\x86\x1b\x52" + "\xe6\x1a\x19\xeb\x28\x32\x0e\xbe\xe2\xd1\x74", + 1, 0, NULL, 0 }, + { 112, 224, 162, + "\x73\x46\xc7\xe4\xb1\x18\xb2\x4e\x51\xf4\x51\x2f\x90\x6a", + "\x50\x6d\x4f\xaf\x62\x4f\x92\x96\x5a\xa6\xb5\xc0\x1e\x0c\x80\xa8", + "\xc4\xe0\xad\x2f\x62\x27\x98\x98\xa7\xed\xe0\xf7\x09\xa1\xcc\xb8\xc1" + "\x00\x49\x41\xf3\xc5\x07\x43\x92\xe7\x95\x33", + 1, 128, NULL, 0 }, + { 112, 224, 163, + "\xca\xa8\x64\x17\x9f\x66\xe8\x26\xa0\xef\x3b\x5e\xdb\xe3", + "\x73\xf6\x42\x53\x70\x6c\xe6\xb5\x09\x4c\x24\xee\x01\x2e\xce\x9a\xc2" + "\x49\x52\x83\xdc\xd8\xc7\xf1\x11\x4e\x81\xe4\x58\x7d\x8e\xa4", + "\xa1\x22\x07\x45\xbb\x03\xd9\x82\x76\x3b\xfa\x7c\xe3\x52\xb8\xbc\x87" + "\x57\x6a\x0a\xd5\xd4\x6a\x0d\xa0\x8f\xf2\xd6", + 1, 256, NULL, 0 }, + { 112, 112, 164, + "\x66\x3a\x97\xd6\xb5\x49\x3d\xbf\xa6\x0c\x8d\xd0\x87\xed", "", + "\x0c\x6e\x21\xa8\x5e\x3c\xd2\xcd\x41\x3f\x36\x50\x7d\x6e", 1, 0, + NULL, 0 }, + { 112, 112, 165, + "\xb0\x8c\x34\x5a\x7c\x71\x66\xfd\xd3\x3c\xe7\x68\xc1\xdc", + "\x99\x64\xd8\x0e\xe2\x33\x8c\xff\xe2\x84\x83\xaa\x44\x6a\x6f\x76", + "\xaa\x00\x30\x15\x30\x9f\x2e\xd6\xfd\x77\x52\xe4\x9c\x31", 1, 128, + NULL, 0 }, + { 112, 112, 166, + "\xfc\x9d\x28\x83\xc6\x75\x34\xfe\xfb\xd6\xed\x4a\x97\x98", + "\xa4\x98\x20\xc1\x94\xa4\x3d\xee\xf1\x1f\x3a\x0f\x4e\xaa\x80\x42\x54" + "\x39\xfc\xa9\xd9\xf1\xd7\xc8\xe6\x65\xd6\xb1\x30\xe4\xe9\x08", + "\x1c\x2b\x96\x62\x3c\x91\xca\x9c\x50\x27\xf8\xf8\x1e\xde", 1, 256, + NULL, 0 }, + { 520, 224, 167, + "\xcf\xa6\x39\x65\x6c\xd4\x9f\x8d\x70\xf0\xb1\xa5\xa0\x56\xab\x4f\xc0" + "\xae\xee\xbc\x91\x33\x8d\x06\x7f\x36\xc4\x7b\x60\x12\xdc\x8d\x85\x6b" + "\x8a\xbc\xc4\xe1\xab\xff\xc9\x10\xae\xae\xe2\x1b\x4d\x36\x6e\x90\x74" + "\x88\xff\xd0\xca\x55\xb3\x6a\x62\x1a\xee\x0b\x2e\x9f\x0c", + "", + "\x0e\xf4\xfe\xda\xea\xab\x4a\xd5\x2c\x84\x36\x57\x04\x7b\x19\x78\x8a" + "\x9f\xa9\x10\x61\xb7\xa1\x4a\xdd\xa8\xc4\x90", + 1, 0, NULL, 0 }, + { 520, 224, 168, + "\xb3\x6d\x3d\x47\xa4\x58\x5b\x40\x1f\xc6\x4c\x98\xef\xf5\x62\x43\xd4" + "\xda\x78\x86\x30\x63\xd8\x14\xe8\x8f\x37\x0b\x92\x57\x64\x06\xd4\x47" + "\xfc\xf3\xd1\x29\xa1\xed\xe5\x7d\xdc\x56\xea\x3a\x0a\x1f\x10\x01\x05" + "\xa9\x5e\x83\x13\x8c\xdf\x45\xec\xf2\xa5\x99\x2a\xcf\x90", + "\x15\xc7\x5a\x64\xb0\x4d\x09\x7a\xf2\x37\x1a\xf3\x80\x07\x9e\xb8", + 
"\x4e\xcb\x2d\xaa\x5f\xb0\x8d\xbd\x83\x6e\x92\xa5\x1e\x20\x0b\xb2\x30" + "\xf5\x4a\xc2\xc9\x77\x8f\x52\x26\xb3\xab\xc9", + 1, 128, NULL, 0 }, + { 520, 224, 169, + "\xcf\x78\xb9\x91\x38\x2d\xb5\xe8\x66\x6c\xcb\x23\x33\xfb\x67\x21\x79" + "\xb1\x0a\x75\xcf\x9e\x5a\x76\x99\xae\x64\x00\x05\xe1\x97\x72\xef\x64" + "\x99\xa3\xbc\x97\xf1\x2e\x58\xe8\x35\xbb\x00\x17\xbb\x3b\x2e\x64\xc6" + "\xab\x44\xa0\xd6\x19\xdf\xa0\x36\x34\x84\xd1\xc9\x91\xe2", + "\xf6\x61\xe5\x98\xf1\x80\xf2\x5d\xc6\xdd\x76\xdb\x8a\x9e\x0e\x4c\x9c" + "\x27\x2b\x96\x65\xa6\xb1\x75\x65\x60\xc7\x23\xb8\xe0\x85\x95", + "\xcd\x55\xcd\xb0\xc4\xf0\x2b\x9f\x61\x48\x39\x29\x93\xb1\x8b\x4f\xf0" + "\x0a\x5e\x73\xb6\xf3\xfb\xf8\x3a\x85\x4a\xeb", + 1, 256, NULL, 0 }, + { 520, 112, 170, + "\x37\x72\xff\x6b\xb4\xe5\xb2\x81\x1c\xfd\x4d\x6a\x3d\x34\xdc\x74\xbc" + "\xa3\xdb\xf8\x9a\x58\x17\xb7\x9d\x84\x72\xa1\x38\x3b\x8c\x9a\xfb\x27" + "\xb3\x00\x61\x96\xce\x99\x66\x82\x9e\xae\x6a\x31\x3c\x2d\x72\x4d\x99" + "\x5f\x4d\xef\x17\x11\x7c\x09\xed\xcf\xc8\xc0\xcb\xbc\x93", + "", "\x40\xbe\xb1\xd3\xaa\xab\x25\xa4\x03\x22\x4e\x57\x77\x70", 1, 0, + NULL, 0 }, + { 520, 112, 171, + "\x2b\xa9\x10\xbc\x0b\xca\x90\x64\x4c\xb2\x1e\x96\x06\x3e\x2c\xd8\x5f" + "\x5d\xd0\x2f\xda\x75\xd3\x53\xc9\xb5\x1e\xaf\x45\xee\xe9\x4c\x16\x5c" + "\xa6\x59\x2d\x6c\xfd\xd9\x87\xbf\xdc\x1c\xba\x66\x36\x3d\x53\x5a\x14" + "\xb2\xf7\xea\xd8\x41\xb1\x7c\x4d\x76\xa5\x04\x91\x05\xf9", + "\x7b\xa4\x61\x04\x0d\xe9\xea\x3c\xef\xd4\x80\x91\x24\xf7\x8b\x39", + "\x4d\x28\xa9\x26\xdf\x1b\x18\x8e\x85\xd0\x92\xba\xcf\x11", 1, 128, + NULL, 0 }, + { 520, 112, 172, + "\x7f\xcf\x3c\xb1\xb1\xc5\xb5\x37\x49\x2a\xed\xe4\x68\x92\x84\xb5\x88" + "\x19\x35\xe3\x53\x7b\xb7\x30\x71\x98\xd6\x51\x8e\x7a\x6a\xab\xf7\x0b" + "\x50\xb4\x4e\x4a\x8d\xfe\xe3\x5e\x9f\x5c\xba\xda\x74\x47\xe5\x11\xa3" + "\x72\x09\x39\x0f\xcd\x17\x1c\x62\x07\x5c\x6a\x8b\xf1\xeb", + "\x83\xd2\x9c\x1c\x4d\x05\x9d\xdb\x0d\x2a\xca\x78\x7e\x5b\x70\x1b\xac" + "\x39\x53\xfb\x9b\xc7\x2d\xc8\x7b\x1e\xf9\x2a\x58\x2e\x97\x48", + "\x39\x2c\xe3\x8f\x78\x38\xb2\xf8\x71\x63\xee\xa0\x0b\x86", 1, 256, + NULL, 0 }, + { 0, 0, 0, NULL, NULL, NULL, 0, 0, NULL, 0 } +}; diff --git a/test/wycheproof/hmac_sha256_test.json.c b/test/wycheproof/hmac_sha256_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..68c7406e6f6cbaab783b0d433e5d837e3a06e0c9 --- /dev/null +++ b/test/wycheproof/hmac_sha256_test.json.c @@ -0,0 +1,1262 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* HMACSHA256, 0.8rc21 */ +#include "mac_test.h" +const struct mac_test hmac_sha256_test_json[] = { + { 256, 256, 1, + "\x1e\x22\x5c\xaf\xb9\x03\x39\xbb\xa1\xb2\x40\x76\xd4\x20\x6c\x3e\x79" + "\xc3\x55\x80\x5d\x85\x16\x82\xbc\x81\x8b\xaa\x4f\x5a\x77\x79", + "", + "\xb1\x75\xb5\x7d\x89\xea\x6c\xb6\x06\xfb\x33\x63\xf2\x53\x8a\xbd\x73" + "\xa4\xc0\x0b\x4a\x13\x86\x90\x5b\xac\x80\x90\x04\xcf\x19\x33", + 1, 0, NULL, 0 }, + { 256, 256, 2, + "\x81\x59\xfd\x15\x13\x3c\xd9\x64\xc9\xa6\x96\x4c\x94\xf0\xea\x26\x9a" + "\x80\x6f\xd9\xf4\x3f\x0d\xa5\x8b\x6c\xd1\xb3\x3d\x18\x9b\x2a", + "\x77", + "\xdf\xc5\x10\x5d\x5e\xec\xf7\xae\x7b\x8b\x8d\xe3\x93\x0e\x76\x59\xe8" + "\x4c\x41\x72\xf2\x55\x51\x42\xf1\xe5\x68\xfc\x18\x72\xad\x93", + 1, 8, NULL, 0 }, + { 256, 256, 3, + "\x85\xa7\xcb\xaa\xe8\x25\xbb\x82\xc9\xb6\xf6\xc5\xc2\xaf\x5a\xc0\x3d" + "\x1f\x6d\xaa\x63\xd2\xa9\x3c\x18\x99\x48\xec\x41\xb9\xde\xd9", + "\xa5\x9b", + "\x0f\xe2\xf1\x3b\xba\x21\x98\xf6\xdd\xa1\xa0\x84\xbe\x92\x8e\x30\x4e" + "\x9c\xb1\x6a\x56\xbc\x0b\x7b\x93\x9a\x07\x32\x80\x24\x43\x73", + 1, 16, NULL, 0 }, + { 256, 256, 4, + "\x48\xf3\x02\x93\x34\xe5\x5c\xfb\xd5\x74\xcc\xc7\x65\xfb\x2c\x36\x85" + "\xaa\xb1\xf4\x83\x7d\x23\x37\x08\x74\xa3\xe6\x34\xc3\xa7\x6d", + "\xc7\xb8\xb2", + "\x6c\x13\xf7\x9b\xb2\xd5\xb6\xf9\xa3\x15\xfe\x8f\xd6\xcb\xb5\xcb\x81" + "\x7a\x66\x06\x87\x00\x9d\xec\xcd\x88\xc3\x77\x42\x9e\x59\x6d", + 1, 24, NULL, 0 }, + { 256, 256, 5, + "\xde\x8b\x5b\x5b\x2f\x09\x64\x5b\xe4\x7e\xcb\x64\x07\xa4\xe1\xd9\xc6" + "\xb3\x3a\xe3\xc2\xd2\x25\x17\xd3\x35\x7d\xa0\x35\x7a\x31\x39", + "\xcc\x02\x1d\x65", + "\xe8\x75\x38\xeb\x16\x7e\x62\xd7\xcb\x23\x66\x90\xff\x3f\x03\x4a\x9c" + "\x12\xd4\x17\xaa\x8d\xfa\x69\x4d\x74\x05\xf9\xe1\xf8\x5f\xe8", + 1, 32, NULL, 0 }, + { 256, 256, 6, + "\xb7\x93\x89\x10\xf5\x18\xf1\x32\x05\xca\x14\x92\xc6\x69\x00\x1a\x14" + "\xff\x91\x3c\x8a\xb4\xa0\xdc\x35\x64\xe7\x41\x8e\x91\x29\x7c", + "\xa4\xa6\xef\x6e\xbd", + "\x01\xa9\x3f\x4e\xd2\x16\xd0\xb2\x80\x89\x63\x01\xe3\x66\xaa\x67\xb2" + "\x5e\x6b\x6a\x5a\x6e\x84\xf2\x91\xa1\x33\x91\xc6\xe4\x96\xc5", + 1, 40, NULL, 0 }, + { 256, 256, 7, + "\x1b\xb9\x97\xff\x4d\xe8\xa5\xa3\x91\xde\x5c\x08\xa3\x3b\xc2\xc7\xc2" + "\x89\x1e\x47\xad\x5b\x9c\x63\x11\x01\x92\xf7\x8b\x98\xfe\x78", + "\x66\x7e\x01\x5d\xf7\xfc", + "\x06\xb5\xd8\xc5\x39\x23\x23\xa8\x02\xbc\x5c\xdd\x0b\x3c\x52\x74\x54" + "\xa8\x73\xd9\x65\x1c\x36\x88\x36\xea\xa4\xad\x98\x2b\xa5\x46", + 1, 48, NULL, 0 }, + { 256, 256, 8, + "\x32\xfd\xed\xa3\x9f\x98\xb4\xf4\x42\x6c\x2d\x2a\xc0\x0a\xb5\xdd\x4b" + "\xfa\xbb\x68\xf3\x11\x44\x72\x56\xed\x6d\x3d\x3a\x51\xb1\x54", + "\x41\x63\xa9\xf7\x7e\x41\xf5", + "\x1b\x01\x03\x72\x9f\x48\xc2\x77\x2b\xb1\x32\xae\xf9\xeb\xd6\xdd\x6a" + "\xaf\xc9\x14\x5d\xf6\xd5\xc5\x14\xb2\x33\xee\x92\xef\x4a\x00", + 1, 56, NULL, 0 }, + { 256, 256, 9, + "\x23\x3e\x4f\xde\xe7\x0b\xcc\x20\x23\x5b\x69\x77\xdd\xfc\x05\xb0\xdf" + 
"\x66\xf5\x63\x5d\x82\x7c\x66\xe5\xa6\x3c\xdb\x16\xa2\x49\x38", + "\xfd\xb2\xee\x4b\x6d\x1a\x0a\xc2", + "\x12\x0b\x26\xee\x13\x55\xc1\x34\xc2\x62\x51\x3c\x79\x22\xde\xb6\xc4" + "\xfd\x90\x30\x3d\xe4\xcd\x61\xb9\xf9\xcd\x08\xf2\x2d\x6e\x18", + 1, 64, NULL, 0 }, + { 256, 256, 10, + "\xb9\x84\xc6\x73\x4e\x0b\xd1\x2b\x17\x37\xb2\xfc\x7a\x1b\x38\x03\xb4" + "\xdf\xec\x40\x21\x40\xa5\x7b\x9e\xcc\xc3\x54\x14\xae\x66\x1b", + "\xde\xa5\x84\xd0\xe2\xa1\x4a\xd5\xfd", + "\x88\xbc\x22\x82\xe5\xfc\xe4\x7e\xc6\xd9\x89\x53\x95\xcd\x47\xff\xf9" + "\x1a\x0c\xdc\x58\x9a\x8f\xd5\x6d\x8d\x34\x46\x16\x53\x3a\x3d", + 1, 72, NULL, 0 }, + { 256, 256, 11, + "\xd0\xca\xf1\x45\x6a\xc5\xe2\x55\xfa\x6a\xfd\x61\xa7\x9d\xc8\xc7\x16" + "\xf5\x35\x8a\x29\x8a\x50\x82\x71\x36\x3f\xe1\xff\x98\x35\x61", + "\x18\x26\x1d\xc8\x06\x91\x3c\x53\x46\x66", + "\xf6\x78\xf0\x81\xd8\x3c\xf1\x26\xad\x6b\xd5\x2c\x2d\xff\xd7\x86\x21" + "\x4f\x51\x9c\x47\x45\x2b\x85\xa9\x74\x58\xd0\xc1\x0c\x3e\xe5", + 1, 80, NULL, 0 }, + { 256, 256, 12, + "\x83\x5b\xc8\x24\x1e\xd8\x17\x73\x5e\xc9\xd3\xd0\xe2\xdf\x4c\x17\x3e" + "\xe4\xdd\xed\x4a\x8e\xf0\xc0\x4a\x96\xc4\x8f\x11\x82\x04\x63", + "\x26\xf8\x08\x3e\x94\x4b\xac\xf0\x4e\x9a\x4d", + "\xe0\xe4\x6c\xd7\xd1\xa7\x5b\x3d\x10\x28\x93\xda\x64\xde\xf4\x6e\x45" + "\x53\x08\x76\x1f\x1d\x90\x87\x86\x62\x8c\xa7\xee\x22\xa0\xeb", + 1, 88, NULL, 0 }, + { 256, 256, 13, + "\x05\x5f\x95\xc9\x46\x1b\x08\x09\x57\x5e\xcc\xdf\xa5\xcd\xd0\x62\x75" + "\xf2\x5d\x30\x91\x5c\x4e\xb8\xdb\x40\xe1\xac\xd3\xab\x75\x91", + "\xbf\xb7\xd6\xa0\x8d\xba\xa5\x22\x5f\x32\x08\x87", + "\xe7\x6d\x5c\x8c\x07\x0a\x6b\x3c\x48\x24\xe9\xf3\x42\xdc\x30\x56\xe6" + "\x38\x19\x50\x9e\x1d\xef\x98\xb5\x85\xae\xba\x0d\x63\x8a\x00", + 1, 96, NULL, 0 }, + { 256, 256, 14, + "\xe4\x0f\x7a\x3e\xb8\x8d\xde\xc4\xc6\x34\x7e\xa4\xd6\x76\x10\x75\x6c" + "\x82\xc8\xeb\xcc\x23\x76\x29\xbf\x87\x3c\xca\xbc\x32\x98\x4a", + "\x7f\xe4\x3f\xeb\xc7\x84\x74\x64\x9e\x45\xbf\x99\xb2", + "\xaa\x57\xd0\x20\xaa\x24\xad\x82\x34\x72\xc2\xb8\x0f\xf2\xd0\xcf\x47" + "\x5f\x7d\xe0\x06\x8f\x9a\x59\xe8\x11\x2f\xed\xe5\x3a\x35\x81", + 1, 104, NULL, 0 }, + { 256, 256, 15, + "\xb0\x20\xad\x1d\xe1\xc1\x41\xf7\xec\x61\x5e\xe5\x70\x15\x21\x77\x3f" + "\x9b\x23\x2e\x4d\x06\x37\x6c\x38\x28\x94\xce\x51\xa6\x1f\x48", + "\x81\xc7\x58\x1a\x19\x4b\x5e\x71\xb4\x11\x46\xa5\x82\xc1", + "\xf4\x5c\x72\x60\x3c\xc1\x60\xc0\x76\x2f\x70\x34\x07\x84\x4a\x77\x81" + "\xdf\xe0\xf1\xdd\xf0\xaa\xf4\xcc\xd8\x20\x5e\x94\x46\x9a\xed", + 1, 112, NULL, 0 }, + { 256, 256, 16, + "\x9f\x3f\xd6\x1a\x10\x52\x02\x64\x8e\xcf\xf6\x07\x4c\x95\xe5\x02\xc1" + "\xc5\x1a\xcd\x32\xec\x53\x8a\x5c\xce\x89\xef\x84\x1f\x79\x89", + "\x2a\x76\xf2\xac\xda\xce\x42\xe3\xb7\x79\x72\x49\x46\x91\x2c", + "\x02\x26\xee\x13\xcc\x05\xe2\x34\x01\x35\xb3\xf4\xb2\x7a\x9d\xa1\xa1" + "\x60\xf6\x17\x0f\xe8\x05\xda\xdd\x98\xa3\x71\x1e\xc9\xc4\x21", + 1, 120, NULL, 0 }, + { 256, 256, 17, + "\x6f\xa3\x53\x86\x8c\x82\xe5\xde\xee\xda\xc7\xf0\x94\x71\xa6\x1b\xf7" + "\x49\xab\x54\x98\x23\x9e\x94\x7e\x01\x2e\xee\x3c\x82\xd7\xc4", + "\xae\xed\x3e\x4d\x4c\xb9\xbb\xb6\x0d\x48\x2e\x98\xc1\x26\xc0\xf5", + "\x9e\xd7\xf0\xe7\x38\x12\xa2\x7a\x87\xa3\x80\x8e\xe0\xc8\x9a\x64\x56" + "\x49\x9e\x83\x59\x74\xba\x57\xc5\xaa\xb2\xa0\xd8\xc6\x9e\x93", + 1, 128, NULL, 0 }, + { 256, 256, 18, + "\x53\x00\x48\x94\x94\xca\x86\x22\x1c\x91\xd6\xd9\x53\x95\x2a\xe1\xa5" + "\xe0\x97\x13\x9d\xc9\xcf\x11\x79\xc2\xf5\x64\x33\x75\x38\x24", + "\x90\xfe\xa6\xcf\x2b\xd8\x11\xb4\x49\xf3\x33\xee\x92\x33\xe5\x76" + "\x97", + "\x5b\x69\x2c\xba\x13\xb5\x4f\xff\xc3\xad\xcb\xb0\xe0\x15\xcc\x01\x1f" + 
"\xbf\xd6\x12\x35\x30\x3f\xf0\xad\x2a\x49\x77\x50\x83\xbf\x22", + 1, 136, NULL, 0 }, + { 256, 256, 19, + "\x38\x3e\x7c\x5c\x13\x47\x6a\x62\x26\x84\x23\xef\x05\x00\x47\x9f\x9e" + "\x86\xe2\x36\xc5\xa0\x81\xc6\x44\x91\x89\xe6\xaf\xdf\x2a\xf5", + "\x32\x02\x70\x5a\xf8\x9f\x95\x55\xc5\x40\xb0\xe1\x27\x69\x11\xd0\x19" + "\x71\xab\xb2\xc3\x5c\x78\xb2", + "\x4e\x49\x01\x59\x2b\xa4\x64\x76\x40\x8d\x75\x84\x35\xc7\xd1\xb4\x89" + "\xd2\x68\x9a\xfd\x84\xce\xaa\xee\x78\xbf\xb9\x1f\xd9\x39\x1d", + 1, 192, NULL, 0 }, + { 256, 256, 20, + "\x18\x6e\x24\x8a\xd8\x24\xe1\xeb\x93\x32\x9a\x7f\xdc\xd5\x65\xb6\xcb" + "\x4e\xaf\x3f\x85\xb9\x0b\x91\x07\x77\x12\x8d\x8c\x53\x8d\x27", + "\x92\xef\x9f\xf5\x2f\x46\xec\xcc\x7e\x38\xb9\xee\x19\xfd\x2d\xe3\xb3" + "\x77\x26\xc8\xe6\xce\x9e\x1b\x96\xdb\x5d\xda\x4c\x31\x79\x02", + "\x3f\xc1\xd7\x3d\xd4\xa8\x85\x8c\x1f\xc3\xd8\xc4\xa3\xf3\x3e\xd5\xad" + "\x0c\x70\x21\x00\x38\x39\x4a\x59\x02\xcb\x26\xfe\x28\x73\x48", + 1, 256, NULL, 0 }, + { 256, 256, 21, + "\x28\x85\x5c\x7e\xfc\x85\x32\xd9\x25\x67\x30\x09\x33\xcc\x1c\xa2\xd0" + "\x58\x6f\x55\xdc\xc9\xf0\x54\xfc\xca\x2f\x05\x25\x4f\xbf\x7f", + "\x9c\x09\x20\x7f\xf0\xe6\xe5\x82\xcb\x37\x47\xdc\xa9\x54\xc9\x4d\x45" + "\xc0\x5e\x93\xf1\xe6\xf2\x11\x79\xcf\x0e\x25\xb4\xce\xde\x74\xb5\x47" + "\x9d\x32\xf5\x16\x69\x35\xc8\x6f\x04\x41\x90\x58\x65", + "\x78\x8c\x05\x89\x00\x0f\xb7\xf0\xb5\xd5\x1f\x15\x96\x47\x2b\xc9\xec" + "\x41\x34\x21\xa4\x3d\xf9\x6e\xe3\x2b\x02\xb5\xd2\x75\xff\xe3", + 1, 376, NULL, 0 }, + { 256, 256, 22, + "\x8e\x54\x0c\xb3\x0c\x94\x83\x6a\xe2\xa5\x95\x0f\x35\x5d\x48\x2a\x70" + "\x02\xe2\x55\x20\x7e\x94\xfd\xa3\xf7\xef\x1a\x09\x90\x13\xa0", + "\xd6\x50\x0f\x95\xe1\x12\x62\xe3\x08\xbf\x3d\xf4\xdf\x4b\x85\x5f\x33" + "\xe8\x57\x56\x3d\x45\x43\xf1\x95\x63\x9a\x0a\x17\xb4\x42\xeb\x9f\xdc" + "\xc1\x36\x7d\x2e\xee\x75\xc8\xf8\x05\x73\x0b\x89\x29\x0f", + "\x39\x69\x7e\x70\xce\x74\x1f\xeb\x33\xde\xdc\x06\x9f\x00\xb5\x62\x7f" + "\xd9\xb8\x37\xd1\x0c\xbd\xd5\xb6\xd1\x9c\xfb\xd5\x11\xdd\x2c", + 1, 384, NULL, 0 }, + { 256, 256, 23, + "\x69\xc5\x0d\x52\x74\x35\x81\x88\xcf\xf4\xc0\xfa\xe7\x42\x24\x3d\x4e" + "\x8a\x5e\x5b\xa5\x5d\x94\xff\x40\xed\xd9\x0f\x6a\x43\xdd\x10", + "\x1a\xc5\x25\x5a\xff\x05\x28\x28\xd8\xea\x21\xb3\x76\xf1\xeb\xdd\x4b" + "\xb8\x79\x94\x99\x13\x90\x04\x05\xae\xbc\xe8\x3e\x48\xfe\xb6\x81\x3b" + "\x5e\x9c\x89\xf9\x45\x01\xa8\xad\xe4\x1b\x26\xb8\x15\xc5\x21", + "\x4b\x0b\x4d\x04\x16\xfa\x2e\x11\x58\x6f\xbf\xa7\xfb\x11\x26\x1e\x69" + "\x99\x1d\xfa\x34\x01\x9b\x98\x93\xd6\x9a\x2b\xe8\xc1\xfc\x80", + 1, 392, NULL, 0 }, + { 256, 256, 24, + "\x23\x20\x9b\x7c\x5a\xad\xcb\xd1\x3f\x72\x79\xaf\x1a\x86\xd3\xc7\xae" + "\x8f\x17\x9d\x1b\xca\xaa\xd0\xdf\xf9\xa1\x53\x02\xe7\x8d\xbf", + "\x84\xbd\xac\x37\xe1\xaf\x35\xd9\x35\x64\x04\xe2\x78\x7d\x47\xec\xe5" + "\x83\x48\xde\xa7\x6a\x4a\x46\xe8\xaa\xde\x34\x63\xd4\xdb\x8c\x94\xa0" + "\x51\xbe\x37\x33\xb3\x8d\x75\x69\x84\x86\x5d\x56\xc6\x0e\x80\x25\xf1" + "\x5e\x3f\x96\x8f\x09\x3e\x7f\xb7\xeb\xc7\xe3\x11\x89\xc5\x69\x2d\x15" + "\xed\x42\x56\x73\x7b\x9b\x18\x94\xe5\x80\x95\x03\xaa\xa1\xc9\x98\x3f" + "\xb0\x96\xaa\x21\x91\x63\x61\xee\xb6\xef\x45\x5b\x12\x97\x23\xa1\xa1" + "\xdd\xf9\xde\xdd\xea\x20\x85\x29\xa6\x48", + "\x4a\x85\xc4\x79\xd1\x65\x0d\xbd\x73\xbc\x52\x48\x07\x4a\x55\xff\x50" + "\x21\x8b\xdd\xaa\x8d\x1f\xdd\xaa\xf4\x49\x46\xdc\x19\xae\xfb", + 1, 896, NULL, 0 }, + { 256, 256, 25, + "\x7c\x9c\xc6\x67\xca\xe1\x75\xf4\x48\xfa\xa9\x66\x47\x31\x96\x33\xb2" + "\xd4\x85\x31\x37\x3a\xe7\xd3\x16\xc4\x4d\xdd\x8b\x9f\x69\xcf", + 
"\x92\x33\xc1\xd7\x3b\x49\x8c\x51\x06\xff\x88\x95\x1e\x07\xb9\x65\x2c" + "\xb0\xdd\xae\x74\x07\x37\xec\x20\x5c\x98\x76\xd0\x94\x97\x8b\xfc\x94" + "\x7f\x7d\xc9\x37\x11\x9f\xd6\xa9\x39\x15\xb1\x9b\x62\x59\x58\xa7\xa2" + "\x23\x63\xaa\x2a\xc3\x3f\xb8\x69\xed\x16\xb3\x03\x33\x6a\xb7\x40\xa0" + "\x49\x8a\x2d\xf6\x6a\x65\x99\xda\x71\x00\x94\x48\x1a\x7b\x54\x4b\xd9" + "\x55\xb6\xf9\x71\x35\xba\x46\x73\x40\x1d\xb2\xdb\x14\x4a\x6e\x28\x70" + "\x41\xe4\x7a\x51\xed\x9b\x6b\xa9\x56\xc1\x35\x08\xc1\xc0\xc2\x53\x10" + "\x10\x52\x39\xab\x73\x62\x9e\x30", + "\xca\x1b\x80\x44\x1d\x33\x39\x09\xc2\xbb\x30\x76\x96\x50\x05\x50\x51" + "\xed\x20\xf1\x7d\xe8\xee\x95\x3c\xb9\x07\x0a\xf5\x6c\x70\x4f", + 1, 1016, NULL, 0 }, + { 256, 256, 26, + "\x82\x31\x45\x40\x56\x4e\xa3\xce\x30\x59\x1e\x97\xf6\x8b\x26\x02\xde" + "\x40\xfa\x29\xf7\x73\xc2\x50\x83\x27\x47\x1b\x83\x48\xe8\xc4", + "\x6a\x6d\x2f\x45\xce\xbf\x27\x57\xae\x16\xea\x33\xc6\x86\x17\x67\x1d" + "\x77\xf8\xfd\xf8\x0b\xed\x8f\xc5\xcd\xc5\xc8\xb7\x08\x6b\xd2\x8e\x7e" + "\xb3\xee\xcc\x71\x63\x49\x11\x04\xe5\x30\x94\x55\xe6\x7f\x83\x65\x79" + "\xb8\x2a\x1d\xa3\xbf\x59\x91\xa8\xe2\xb2\xf1\x89\xa4\x9e\x05\x70\x0e" + "\x46\xc4\x09\xed\x5d\xe7\x77\x80\xa5\xf3\x89\xe3\xf1\x3d\xad\x40\x6c" + "\x9d\x55\x67\x53\x29\xc5\xc9\x21\xf0\x70\x34\x18\x09\x37\xc0\xf6\xef" + "\x34\xa2\x30\x8b\x6f\xf3\xe1\xa0\xe9\xdc\x1e\xa6\x5f\x56\x32\x73\x0e" + "\x87\x44\xd1\xdb\x2c\x40\xa6\x59\x5b", + "\x09\x00\xb3\xe6\x53\x5d\x34\xf9\x0e\x2c\x33\x57\x75\xe8\x6b\xf3\x8e" + "\xe7\xe3\xd2\x6f\xb6\x0c\xd9\xcd\xf6\x39\xeb\x34\x96\xb9\x4c", + 1, 1024, NULL, 0 }, + { 256, 256, 27, + "\xd1\x15\xac\xc9\xa6\x36\x91\x52\x41\x79\x5f\x48\x85\x20\x52\xe0\x7b" + "\x51\x27\x3a\xe2\x44\x82\x51\xec\x1d\x0d\x0f\x98\x07\xf3\xdb", + "\x69\x6d\x24\x56\xde\x85\x3f\xa0\x28\xf4\x86\xfe\xf4\x37\xb6\xb6\xd1" + "\xb5\x30\xa8\x47\x5e\x29\x9d\xb3\xa9\x00\x5a\xe9\xce\xf8\x40\x19\x85" + "\xb7\xd3\x1e\x17\x2e\x8f\x43\x9c\xcd\x1a\xd1\xec\x44\xc9\xb8\x6b\x78" + "\xf3\xf2\x43\xc1\x30\x5b\x53\xbc\x21\xab\xad\x7a\x8f\xc5\x25\x63\x11" + "\xbf\xd3\x4c\x98\xe3\x7d\xfd\xc6\x49\xe7\xae\x4b\xda\x08\xcf\x29\x94" + "\xb0\x63\xc0\xc7\x10\x6e\xd0\xb0\x2a\x1f\x48\xaf\x91\x91\xcb\xfb\x0d" + "\x6a\x95\x3b\x7e\x04\x32\x7d\xfe\x8c\x93\x77\x9c\xb5\x74\xba\x9c\xba" + "\x57\x5d\x01\x67\x4e\x83\x62\x1a\xa0\xc5\xf4\x00\xd6\xe6\xcd\x24\xb3" + "\x01\xe3\x3c\x9f\x33\x03\xe7\x3b\xf3\x57\x40\x8c\x1b\xe8\x6c\x24\x89" + "\xc0\x9d\xe9\x98\xff\x2e\xf3\x2d\xf5\x54\xf1\x24\x7d\x93\x13\xce\x1a" + "\x71\x60\x11\x5d\x06\xf4\xc1\x8d\x65\x56\xff\x79\x86\xef\x8a\x55\xe2" + "\xad\xcf\xa2\x7e\x4c\x69\xc7\x1c\xc2\xff\x01\x63\x9e\x9d\x49\xbd\x9e" + "\xd0\x68\x7f\x53\x0f\xfe\xb0\x89\x01\x32\x45\x7d\xf2\x08\x80\x81\xbc" + "\x4a\x2f\x7f\x0a\x9f\x4d\xce\xa2\xc8\x0d\x99\x1d\xb7\xf3\x74\x7a\x18" + "\x03\xd7\x61\x9a\xaf\x3d\xd3\x82\xc6\x95\x36\xa0\xbc\xdb\x93\x1c" + "\xbe", + "\x82\xf9\x29\x77\xf0\xb6\x05\xea\xad\xa5\x10\xff\xce\xb5\x3a\xd7\x5f" + "\xde\x16\xa8\x02\x9f\x1b\x75\xb4\x06\xa8\x42\x70\xdb\xb8\xb7", + 1, 2040, NULL, 0 }, + { 256, 256, 28, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd2\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 29, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xd9\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 30, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd1\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 31, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xda\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 32, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x53\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 33, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x58\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 34, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8a\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 35, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb8\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 36, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x89\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 37, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\xa7\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 38, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6c\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 39, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xd8\xb9\x9f\x27\x08\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 40, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6f\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 41, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x0b\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 42, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\xdf\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 43, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\xf4\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 44, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x83\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 45, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x16\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 46, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x02\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 47, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x97\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 48, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x4b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 49, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x0c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 50, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x45\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 51, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbf\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 52, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd4\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 53, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x83\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 54, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd7\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 55, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x80\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 56, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\x55\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 57, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x02\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 58, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xca", + 0, 0, NULL, 0 }, + { 256, 256, 59, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4f", + 0, 128, NULL, 0 }, + { 256, 256, 60, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xc9", + 0, 0, NULL, 0 }, + { 256, 256, 61, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4c", + 0, 128, NULL, 0 }, + { 256, 256, 62, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\x8b", + 0, 0, NULL, 0 }, + { 256, 256, 63, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x0e", + 0, 128, NULL, 0 }, + { 256, 256, 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\x4b", + 0, 0, NULL, 0 }, + { 256, 256, 65, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\xce", + 0, 128, NULL, 0 }, + { 256, 256, 66, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd2\x8b\x42\x09\x6d\x80\xf4\x5f\x83\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 67, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd9\xb9\x9f\x27\x09\xa3\xca\x74\x16\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 68, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x89\x6d\x80\xf4\xdf\x82\x6b\x44\xa9\xd5\x60\x7d\xe7\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 69, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xd8\xb9\x9f\xa7\x09\xa3\xca\xf4\x17\x2c\xbe\x93\x82\x4c\x1f\x29\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 70, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\xdf\x82\x6b\x44\xa9\xd5\x60\x7d\x67\x24" + "\x96\xa4\x15\xd3\xf4\xa1\xa8\xc8\x8e\x3b\xb9\xda\x8d\xc1\xcb", + 0, 0, NULL, 0 }, + { 256, 256, 71, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\xf4\x17\x2c\xbe\x93\x82\x4c\x1f\xa9\xb2" + "\x3a\x0c\x1e\x9c\x21\xbd\x85\x1f\xf2\xd2\xc3\x9d\xbe\xf1\x4e", + 0, 128, NULL, 0 }, + { 256, 256, 72, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x2c\x74\xbd\xf6\x92\x7f\x0b\xa0\x7d\x94\xbb\x56\x2a\x9f\x82\x18\xdb" + "\x69\x5b\xea\x2c\x0b\x5e\x57\x37\x71\xc4\x46\x25\x72\x3e\x34", + 0, 0, NULL, 0 }, + { 256, 256, 73, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x27\x46\x60\xd8\xf6\x5c\x35\x8b\xe8\xd3\x41\x6c\x7d\xb3\xe0\xd6\x4d" + "\xc5\xf3\xe1\x63\xde\x42\x7a\xe0\x0d\x2d\x3c\x62\x41\x0e\xb1", + 0, 128, NULL, 0 }, + { 256, 256, 74, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 0, NULL, 0 }, + { 256, 256, 75, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 128, NULL, 0 }, + { 256, 256, 76, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 0, NULL, 0 }, + { 256, 256, 77, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 128, NULL, 0 }, + { 256, 256, 78, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x53\x0b\xc2\x89\xed\x00\x74\xdf\x02\xeb\xc4\x29\x55\xe0\xfd\x67\xa4" + "\x16\x24\x95\x53\x74\x21\x28\x48\x0e\xbb\x39\x5a\x0d\x41\x4b", + 0, 0, NULL, 0 }, + { 256, 256, 79, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\x58\x39\x1f\xa7\x89\x23\x4a\xf4\x97\xac\x3e\x13\x02\xcc\x9f\xa9\x32" + "\xba\x8c\x9e\x1c\xa1\x3d\x05\x9f\x72\x52\x43\x1d\x3e\x71\xce", + 0, 128, NULL, 0 }, + { 256, 256, 80, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd2\x8a\x43\x08\x6c\x81\xf5\x5e\x83\x6a\x45\xa8\xd4\x61\x7c\xe6\x25" + "\x97\xa5\x14\xd2\xf5\xa0\xa9\xc9\x8f\x3a\xb8\xdb\x8c\xc0\xca", + 0, 0, NULL, 0 }, + { 256, 256, 81, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd9\xb8\x9e\x26\x08\xa2\xcb\x75\x16\x2d\xbf\x92\x83\x4d\x1e\x28\xb3" + "\x3b\x0d\x1f\x9d\x20\xbc\x84\x1e\xf3\xd3\xc2\x9c\xbf\xf0\x4f", + 0, 128, NULL, 0 }, + { 256, 128, 82, + "\x7b\xf9\xe5\x36\xb6\x6a\x21\x5c\x22\x23\x3f\xe2\xda\xaa\x74\x3a\x89" + "\x8b\x9a\xcb\x9f\x78\x02\xde\x70\xb4\x0e\x3d\x6e\x43\xef\x97", + "", + "\xf4\x60\x55\x85\x94\x97\x47\xde\x26\xf3\xee\x98\xa7\x38\xb1\x72", 1, + 0, NULL, 0 }, + { 256, 128, 83, + "\xe7\x54\x07\x6c\xea\xb3\xfd\xaf\x4f\x9b\xca\xb7\xd4\xf0\xdf\x0c\xbb" + "\xaf\xbc\x87\x73\x1b\x8f\x9b\x7c\xd2\x16\x64\x72\xe8\xee\xbc", + "\x40", + "\x0d\xc0\x0d\x72\x17\xbb\xaf\xe8\xd7\x8b\xf9\x61\x18\x9b\x8f\xd2", 1, + 8, NULL, 0 }, + { 256, 128, 84, + "\xea\x3b\x01\x6b\xdd\x38\x7d\xd6\x4d\x83\x7c\x71\x68\x38\x08\xf3\x35" + "\xdb\xdc\x53\x59\x8a\x4e\xa8\xc5\xf9\x52\x47\x3f\xaf\xaf\x5f", + "\x66\x01", + "\xff\x29\x6b\x36\x8d\x3b\xf0\x59\xcc\x48\x68\x2f\x69\x49\xcc\xaa", 1, + 16, NULL, 0 }, + { 256, 128, 85, + "\x73\xd4\x70\x96\x37\x85\x7d\xaf\xab\x6a\xd8\xb2\xb0\xa5\x1b\x06\x52" + "\x47\x17\xfe\xdf\x10\x02\x96\x64\x4f\x7c\xfd\xaa\xe1\x80\x5b", + "\xf1\xd3\x00", + "\x2d\x02\xbd\x1c\x25\xb1\xfe\x52\xb1\xea\xd0\x73\x74\xd6\xe8\x83", 1, + 24, NULL, 0 }, + { 256, 128, 86, + "\xd5\xc8\x1b\x39\x9d\x4c\x0d\x15\x83\xa1\x3d\xa5\x6d\xe6\xd2\xdc\x45" + "\xa6\x6e\x7b\x47\xc2\x4a\xb1\x19\x2e\x24\x6d\xc9\x61\xdd\x77", + "\x2a\xe6\x3c\xbf", + "\x4d\x9e\x8b\xdd\xf9\xb7\xa1\x21\x83\x09\xd5\x98\x8a\xa1\xb0\xd9", 1, + 32, NULL, 0 }, + { 256, 128, 87, + "\x25\x21\x20\x3f\xa0\xdd\xdf\x59\xd8\x37\xb2\x83\x0f\x87\xb1\xaa\x61" + "\xf9\x58\x15\x5d\xf3\xca\x4d\x1d\xf2\x45\x7c\xb4\x28\x4d\xc8", + "\xaf\x3a\x01\x5e\xa1", + "\xcb\x8a\x4b\x41\x33\x50\xb4\x2f\x4a\xc3\x53\x3c\xc7\xf4\x78\x64", 1, + 40, NULL, 0 }, + { 256, 128, 88, + "\x66\x5a\x02\xbc\x26\x5a\x66\xd0\x17\x75\x09\x1d\xa5\x67\x26\xb6\x66" + "\x8b\xfd\x90\x3c\xb7\xaf\x66\xfb\x1b\x78\xa8\xa0\x62\xe4\x3c", + "\x3f\x56\x93\x5d\xef\x3f", + "\x1c\xfc\xe7\x45\xdb\x1c\xa7\xde\x9a\x1d\x44\x20\xe6\x12\xca\x55", 1, + 48, NULL, 0 }, + { 256, 128, 89, + "\xfa\xcd\x75\xb2\x22\x21\x38\x00\x47\x30\x5b\xc9\x81\xf5\x70\xe2\xa1" + "\xaf\x38\x92\x8e\xa7\xe2\x05\x9e\x3a\xf5\xfc\x6b\x82\xb4\x93", + "\x57\xbb\x86\xbe\xed\x15\x6f", + "\x0b\xde\x0d\x0c\x75\x6d\xf0\x9d\x4f\x6d\xa8\x1b\x29\x9a\x3a\xdf", 1, + 56, NULL, 0 }, + { 256, 128, 90, + "\x50\x5a\xa9\x88\x19\x80\x9e\xf6\x3b\x9a\x36\x8a\x1e\x8b\xc2\xe9\x22" + "\xda\x45\xb0\x3c\xe0\x2d\x9a\x79\x66\xb1\x50\x06\xdb\xa2\xd5", + "\x2e\x4e\x7e\xf7\x28\xfe\x11\xaf", + "\x40\x6a\x5c\x2b\xd3\xe6\xa9\x59\x5f\x9b\x7d\xff\x60\x8d\x59\xa7", 1, + 64, NULL, 0 }, + { 256, 128, 91, + "\xf9\x42\x09\x38\x42\x80\x8b\xa4\x7f\x64\xe4\x27\xf7\x35\x1d\xde\x6b" + "\x95\x46\xe6\x6d\xe4\xe7\xd6\x0a\xa6\xf3\x28\x18\x27\x12\xcf", + "\x85\x2a\x21\xd9\x28\x48\xe6\x27\xc7", + "\x0b\x1b\xf9\xe9\x8d\x0a\x79\x4f\xa5\x5c\x09\xb6\x3e\x25\x79\x9f", 
1, + 72, NULL, 0 }, + { 256, 128, 92, + "\x64\xbe\x16\x2b\x39\xc6\xe5\xf1\xfe\xd9\xc3\x2d\x9f\x67\x4d\x9a\x8c" + "\xde\x6e\xaa\x24\x43\x21\x4d\x86\xbd\x4a\x1f\xb5\x3b\x81\xb4", + "\x19\x5a\x3b\x29\x2f\x93\xba\xff\x0a\x2c", + "\x71\xf3\x3f\x60\x21\xd9\x08\x58\xca\xdb\x13\x53\xd7\xfb\xe8\xd7", 1, + 80, NULL, 0 }, + { 256, 128, 93, + "\xb2\x59\xa5\x55\xd4\x4b\x8a\x20\xc5\x48\x9e\x2f\x38\x39\x2d\xda\xa6" + "\xbe\x9e\x35\xb9\x83\x3b\x67\xe1\xb5\xfd\xf6\xcb\x3e\x4c\x6c", + "\xaf\xd7\x31\x17\x33\x0c\x6e\x85\x28\xa6\xe4", + "\x4b\x8d\x76\x37\x2e\xbe\x5e\x5c\xaa\x56\xca\x4e\x5c\x59\xcd\xd3", 1, + 88, NULL, 0 }, + { 256, 128, 94, + "\x2c\x6f\xc6\x2d\xaa\x77\xba\x8c\x68\x81\xb3\xdd\x69\x89\x89\x8f\xef" + "\x64\x66\x63\xcc\x7b\x0a\x3d\xb8\x22\x8a\x70\x7b\x85\xf2\xdc", + "\x0f\xf5\x4d\x6b\x67\x59\x12\x0c\x2e\x8a\x51\xe3", + "\xc5\x80\xc5\x42\x84\x6a\x96\xe8\x4e\xa7\x77\x01\x77\x84\x55\xbf", 1, + 96, NULL, 0 }, + { 256, 128, 95, + "\xab\xab\x81\x5d\x51\xdf\x29\xf7\x40\xe4\xe2\x07\x9f\xb7\x98\xe0\x15" + "\x28\x36\xe6\xab\x57\xd1\x53\x6a\xe8\x92\x9e\x52\xc0\x6e\xb8", + "\xf0\x05\x8d\x41\x2a\x10\x4e\x53\xd8\x20\xb9\x5a\x7f", + "\x13\xcd\xb0\x05\x05\x93\x38\xf0\xf2\x8e\x2d\x8c\xe1\xaf\x5d\x0a", 1, + 104, NULL, 0 }, + { 256, 128, 96, + "\x3d\x5d\xa1\xaf\x83\xf7\x28\x74\x58\xbf\xf7\xa7\x65\x1e\xa5\xd8\xdb" + "\x72\x25\x94\x01\x33\x3f\x6b\x82\x09\x69\x96\xdd\x7e\xaf\x19", + "\xaa\xcc\x36\x97\x2f\x18\x30\x57\x91\x9f\xf5\x7b\x49\xe1", + "\xbd\x99\x3e\x44\x28\xcb\xc0\xe2\x75\xe4\xd8\x0b\x6f\x52\x03\x63", 1, + 112, NULL, 0 }, + { 256, 128, 97, + "\xc1\x9b\xdf\x31\x4c\x6c\xf6\x43\x81\x42\x54\x67\xf4\x2a\xef\xa1\x7c" + "\x1c\xc9\x35\x8b\xe1\x6c\xe3\x1b\x1d\x21\x48\x59\xce\x86\xaa", + "\x5d\x06\x6a\x92\xc3\x00\xe9\xb6\xdd\xd6\x3a\x7c\x13\xae\x33", + "\x86\xc9\xf4\xdd\xe0\xb2\x57\xa7\x05\x3a\x7b\x03\xc7\x50\x44\x09", 1, + 120, NULL, 0 }, + { 256, 128, 98, + "\x61\x2e\x83\x78\x43\xce\xae\x7f\x61\xd4\x96\x25\xfa\xa7\xe7\x49\x4f" + "\x92\x53\xe2\x0c\xb3\xad\xce\xa6\x86\x51\x2b\x04\x39\x36\xcd", + "\xcc\x37\xfa\xe1\x5f\x74\x5a\x2f\x40\xe2\xc8\xb1\x92\xf2\xb3\x8d", + "\xb9\x6b\xca\xca\xfa\xc3\x00\x94\xf1\x8a\xc5\x03\x9e\x7b\x36\x56", 1, + 128, NULL, 0 }, + { 256, 128, 99, + "\x73\x21\x6f\xaf\xd0\x02\x2d\x0d\x6e\xe2\x71\x98\xb2\x27\x25\x78\xfa" + "\x8f\x04\xdd\x9f\x44\x46\x7f\xbb\x64\x37\xaa\x45\x64\x1b\xf7", + "\xd5\x24\x7b\x8f\x6c\x3e\xdc\xbf\xb1\xd5\x91\xd1\x3e\xce\x23\xd2" + "\xf5", + "\x6e\x59\x7c\x4c\x38\x61\xa3\x80\xc0\x68\x54\xb4\x46\xfc\x2a\x87", 1, + 136, NULL, 0 }, + { 256, 128, 100, + "\x04\x27\xa7\x0e\x25\x75\x28\xf3\xab\x70\x64\x0b\xba\x1a\x5d\xe1\x2c" + "\xf3\x88\x5d\xd4\xc8\xe2\x84\xfb\xbb\x55\xfe\xb3\x52\x94\xa5", + "\x13\x93\x7f\x85\x44\xf4\x42\x70\xd0\x11\x75\xa0\x11\xf7\x67\x0e\x93" + "\xfa\x6b\xa7\xef\x02\x33\x6e", + "\xf7\x31\xaa\xf2\xf0\x40\x23\xd6\x21\xf1\x04\x95\x34\x46\x79\xa0", 1, + 192, NULL, 0 }, + { 256, 128, 101, + "\x96\xe1\xe4\x89\x6f\xb2\xcd\x05\xf1\x33\xa6\xa1\x00\xbc\x56\x09\xa7" + "\xac\x3c\xa6\xd8\x17\x21\xe9\x22\xda\xdd\x69\xad\x07\xa8\x92", + "\x91\xa1\x7e\x4d\xfc\xc3\x16\x6a\x1a\xdd\x26\xff\x0e\x7c\x12\x05\x6e" + "\x8a\x65\x4f\x28\xa6\xde\x24\xf4\xba\x73\x9c\xeb\x5b\x5b\x18", + "\x95\x24\x3e\xb1\xa9\xd4\x48\x17\x4a\xe4\xfc\xcf\x4a\x53\xeb\xfe", 1, + 256, NULL, 0 }, + { 256, 128, 102, + "\x41\x20\x15\x67\xbe\x4e\x6e\xa0\x6d\xe2\x29\x5f\xd0\xe6\xe8\xa7\xd8" + "\x62\xbb\x57\x31\x18\x94\xf5\x25\xd8\xad\xea\xbb\xa4\xa3\xe4", + "\x58\xc8\xc7\x3b\xdd\x3f\x35\x0c\x97\x47\x78\x16\xea\xe4\xd0\x78\x9c" + "\x93\x69\xc0\xe9\x9c\x24\x89\x02\xc7\x00\xbc\x29\xed\x98\x64\x25\x98" + 
"\x5e\xb3\xfa\x55\x70\x9b\x73\xbf\x62\x0c\xd9\xb1\xcb", + "\x34\x33\x67\x20\x7f\x71\x42\x5d\x8f\x81\xf3\x11\x0b\x04\x05\xf6", 1, + 376, NULL, 0 }, + { 256, 128, 103, + "\x64\x9e\x37\x3e\x68\x1e\xf5\x2e\x3c\x10\xac\x26\x54\x84\x75\x09\x32" + "\xa9\x91\x8f\x28\xfb\x82\x4f\x7c\xb5\x0a\xda\xb3\x97\x81\xfe", + "\x39\xb4\x47\xbd\x3a\x01\x98\x3c\x1c\xb7\x61\xb4\x56\xd6\x90\x00\x94" + "\x8c\xeb\x87\x05\x62\xa5\x36\x12\x6a\x0d\x18\xa8\xe7\xe4\x9b\x16\xde" + "\x8f\xe6\x72\xf1\x3d\x08\x08\xd8\xb7\xd9\x57\x89\x99\x17", + "\x15\x16\x18\xee\xc4\xf5\x03\xf3\xb6\x3b\x53\x9d\xe0\xa5\x89\x66", 1, + 384, NULL, 0 }, + { 256, 128, 104, + "\x7b\x0d\x23\x7f\x7b\x53\x6e\x2c\x69\x50\x99\x0e\x61\xb3\x61\xb3\x84" + "\x33\x3d\xda\x69\x00\x45\xc5\x91\x32\x1a\x4e\x3f\x79\x74\x7f", + "\x3d\x62\x83\xd1\x1c\x02\x19\xb5\x25\x62\x0e\x9b\xf5\xb9\xfd\x88\x7d" + "\x3f\x0f\x70\x7a\xcb\x1f\xbd\xff\xab\x0d\x97\xa5\xc6\xd0\x7f\xc5\x47" + "\x76\x2e\x0e\x7d\xd7\xc4\x3a\xd3\x5f\xab\x1c\x79\x0f\x80\x47", + "\xce\x20\x1c\x0d\xcf\xdc\x3f\x2b\xef\x36\x06\x09\xa3\x1f\xb1\x9e", 1, + 392, NULL, 0 }, + { 256, 128, 105, + "\x17\xc9\x26\x63\x74\x1f\x01\x2e\x5b\xb6\x71\x4e\x61\x4c\x2d\x15\x59" + "\x48\x61\x7f\x10\x93\x62\x69\xd9\x54\xc5\x8a\xba\x2a\xe6\x2d", + "\x7f\xdd\x6a\x15\xc8\x61\xd0\x31\x3f\x66\x35\xd7\x7d\xc5\x5e\x11\x5f" + "\xf1\x8c\x8a\xb0\x63\xb5\xd0\x3e\xab\x47\x2e\xec\xa8\x7a\x37\x81\x88" + "\xf2\x58\x13\x51\x5c\xf9\x0b\x6c\xff\xa9\x4a\x8f\xf3\x6b\x29\xd6\x56" + "\x03\xea\xb3\xfb\xd2\xaa\x95\x00\xb2\x61\xe1\x84\x04\x98\x93\xdc\x6c" + "\xa2\x01\x0b\xec\xac\x16\x30\x53\xf2\x11\x07\x0b\xdd\xa6\x21\xb8\xbd" + "\x8a\xf7\x7e\x45\x02\x68\x60\x3b\x52\xdb\x34\xc9\x0b\xe8\x36\xdf\xeb" + "\xdd\xef\x42\x30\x3f\x72\x4e\x63\xbf\x0f", + "\x76\xe8\xdf\xd9\x4d\xb4\xaf\x9d\x79\xd9\x71\x8e\xec\x46\xcb\x2d", 1, + 896, NULL, 0 }, + { 256, 128, 106, + "\x42\x4c\x6b\x22\x60\x6f\xcc\x09\x4a\xe8\x2f\xc5\xd3\xcb\xe4\x84\x17" + "\x4c\x22\x11\xb3\xec\x77\x80\x91\xca\xc3\x4a\x8e\x38\xa1\x52", + "\xd9\x6f\xf0\x62\xe2\x49\x0e\x8e\x0c\x54\xc5\xa8\xb8\x9e\x85\xb2\x5a" + "\x66\xd9\x3d\x7c\x2b\x93\xbd\xfe\xf8\x46\xb7\x0d\x38\x67\x27\x46\xa4" + "\xb9\x88\xd0\x8f\x15\xa5\xc5\x27\xca\x4f\x2c\x80\xe5\x3f\x7c\x6a\xc0" + "\x52\x1b\xc5\x7e\xbe\x38\x20\x91\x80\xcb\xf9\x34\xe0\xbb\xeb\x58\xcf" + "\xb6\x3d\x75\xda\x64\xaf\x41\xd0\x9c\xe1\x74\xaf\x18\x96\xf4\x25\x22" + "\x91\x0f\xce\xd3\x5e\xa0\x00\x40\x2e\x95\xfd\x3a\xc7\xaa\x6d\x5e\x0a" + "\x6b\x53\x3b\x08\x79\xbc\x46\x60\x19\xb3\xa5\xe6\xb1\x6e\x4b\xd1\xea" + "\x6c\xdf\xc9\xcc\xc1\xd6\xf0\xf0", + "\xed\xa7\x09\xc7\x00\x97\x14\xc3\x72\xd0\xd6\xa6\x3d\xfd\xe4\x69", 1, + 1016, NULL, 0 }, + { 256, 128, 107, + "\x15\xd5\x53\xc8\xda\x43\x3d\x53\xcd\xc7\xf1\x50\x87\xa7\x03\x49\xca" + "\xab\x57\xb3\x79\xa4\x07\x89\x28\xce\x9b\x99\x30\x2e\x31\xa6", + "\xd6\xc0\xc5\x3b\x73\xf7\x4f\xb4\x26\xad\xfd\xc1\x43\xd7\x0d\xb7\xf7" + "\xa8\xf8\xed\x32\xa2\xfa\xef\x26\x3c\xf9\xab\x11\x75\x37\xb6\xb9\xd1" + "\x72\x8b\xd1\x00\x0c\x1f\x28\x90\x6c\x6c\xe6\xad\x21\x86\x2b\xfa\x4d" + "\x68\x9c\x1a\x8e\xbe\x38\x68\xb9\x92\x09\x8b\x7f\x98\x1b\x2a\xf5\x18" + "\x9a\x6a\xde\xdf\xf5\x3a\x6c\x70\xc8\x36\x93\xf5\xc8\xd6\x38\x5a\x9a" + "\x8a\x4d\xca\x01\x7c\x57\x16\xac\x4d\x5b\x97\x65\xc5\xca\x2a\xb5\xf9" + "\x86\x7e\x02\x79\x51\x98\xc0\xb9\x52\x7e\x07\xd0\x8a\xf5\x2d\xbc\xb9" + "\x1c\xeb\x3d\x8b\x41\x2a\x2b\x24\x02", + "\x8c\xa1\x40\x2b\xf8\xfc\x23\x44\x2a\xc2\x06\x7b\xe9\x25\xb8\x28", 1, + 1024, NULL, 0 }, + { 256, 128, 108, + "\xff\xe5\x59\x46\x8a\x10\x31\xdf\xb3\xce\xd2\xe3\x81\xe7\x4b\x58\x21" + 
"\xa3\x6d\x9a\xbf\x5f\x2e\x59\x89\x5a\x7f\xdc\xa0\xfa\x56\xa0", + "\x23\x88\x99\xa8\x4a\x3c\xf1\x52\x02\xa1\xfb\xef\x47\x41\xe1\x33\xfb" + "\x24\xc0\x09\xa0\xcd\x83\x85\x4c\x6d\x1d\x7c\x92\x66\xd4\xc3\xea\xfe" + "\x6d\x1d\xfc\x18\xf1\x38\x45\xcc\xda\xd7\xfe\x27\x76\x27\xb5\xfd\x5f" + "\xf2\x55\x5c\xe6\xdf\xde\x1e\xe0\x78\x54\x0a\x0a\x35\x90\xc6\xd9\xbf" + "\x2f\xb6\x3b\xa9\xaf\xbe\x93\x80\xe7\x97\xbe\x7c\xd0\x17\x64\x5c\x5a" + "\x36\x13\xee\xf3\x8e\xf8\x9e\x3b\x74\x61\xe6\xe7\x00\xff\x2b\x4d\xee" + "\xf5\x63\x6c\x9d\x21\x98\xb1\x43\xf7\x97\xca\x18\x20\xa3\xdc\xc5\xd4" + "\x62\xeb\xf4\xa8\xc4\xc0\x9e\xb2\x02\xa2\x35\x92\xeb\x95\x24\x08\x2c" + "\x79\xad\xda\x8f\xcd\x56\xd2\x56\x04\x1a\x26\xbf\x8f\x52\x39\x62\xba" + "\x91\x1c\xe5\xa5\x78\x65\x70\xd6\x5b\xe3\xc4\xdf\x72\x2e\xd8\x83\x03" + "\x02\x06\x5f\xeb\xdf\x94\x47\x15\x29\x8a\x1f\xbb\x7d\x10\xb6\x8d\x7d" + "\xa2\xbf\x88\x93\x24\x31\x4c\xe5\x1e\x81\x5c\x7f\xbf\x03\xaa\x0a\x83" + "\x58\xaf\xf3\xa8\x6e\xb7\xa3\x3f\x9a\x49\x23\x66\x0d\xb3\x04\x7e\x79" + "\x3b\xeb\xb0\xc6\x91\x8f\x43\x95\xd4\x00\x38\x17\x23\xfd\xae\x28\x32" + "\xc3\x6e\xfc\x8e\x36\x8a\x68\xf3\x0f\x63\x51\xc3\xbc\x94\x2c\xd5" + "\x60", + "\xa8\x30\xb3\x13\xf4\x93\x6d\xea\x56\xa3\xae\xfd\x6a\x3e\xbe\x7d", 1, + 2040, NULL, 0 }, + { 256, 128, 109, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd2\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 110, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd9\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 111, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd1\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 112, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xda\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 113, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x53\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 114, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x58\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 115, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8a\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 116, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb8\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 
128, 117, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x89\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 118, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\xa7\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 119, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6c\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 120, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x08\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 121, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6f\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 122, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x0b\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 123, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\xdf\x82\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 124, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\xf4\x17\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 125, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x83\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 126, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x16\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 127, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x02\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x97\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 129, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x4b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 130, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x0c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 131, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x45\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 132, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbf\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 133, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd4\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 134, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x83\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 135, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd7\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 136, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x80\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 137, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\x55\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 138, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x02\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 139, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe6", 0, + 0, NULL, 0 }, + { 256, 128, 140, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x28", 0, + 128, NULL, 0 }, + { 256, 128, 141, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + 
"\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xe5", 0, + 0, NULL, 0 }, + { 256, 128, 142, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x2b", 0, + 128, NULL, 0 }, + { 256, 128, 143, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\xa7", 0, + 0, NULL, 0 }, + { 256, 128, 144, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\x69", 0, + 128, NULL, 0 }, + { 256, 128, 145, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\x5f\x82\x6b\x44\xa9\xd5\x60\x7d\x67", 0, + 0, NULL, 0 }, + { 256, 128, 146, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\x74\x17\x2c\xbe\x93\x82\x4c\x1f\xa9", 0, + 128, NULL, 0 }, + { 256, 128, 147, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd2\x8b\x42\x09\x6d\x80\xf4\x5f\x83\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 148, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd9\xb9\x9f\x27\x09\xa3\xca\x74\x16\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 149, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x89\x6d\x80\xf4\xdf\x82\x6b\x44\xa9\xd5\x60\x7d\xe7", 0, + 0, NULL, 0 }, + { 256, 128, 150, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\xa7\x09\xa3\xca\xf4\x17\x2c\xbe\x93\x82\x4c\x1f\x29", 0, + 128, NULL, 0 }, + { 256, 128, 151, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd3\x8b\x42\x09\x6d\x80\xf4\xdf\x82\x6b\x44\xa9\xd5\x60\x7d\x67", 0, + 0, NULL, 0 }, + { 256, 128, 152, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd8\xb9\x9f\x27\x09\xa3\xca\xf4\x17\x2c\xbe\x93\x82\x4c\x1f\xa9", 0, + 128, NULL, 0 }, + { 256, 128, 153, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x2c\x74\xbd\xf6\x92\x7f\x0b\xa0\x7d\x94\xbb\x56\x2a\x9f\x82\x18", 0, + 0, NULL, 0 }, + { 256, 128, 154, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x27\x46\x60\xd8\xf6\x5c\x35\x8b\xe8\xd3\x41\x6c\x7d\xb3\xe0\xd6", 0, + 128, NULL, 0 }, + { 256, 128, 155, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 0, NULL, 0 }, + { 256, 128, 156, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", 0, + 128, NULL, 0 }, + { 256, 128, 157, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 0, NULL, 0 }, + { 256, 128, 158, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", 0, + 128, NULL, 0 }, + { 256, 128, 159, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\x53\x0b\xc2\x89\xed\x00\x74\xdf\x02\xeb\xc4\x29\x55\xe0\xfd\x67", 0, + 0, NULL, 0 }, + { 256, 128, 160, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x58\x39\x1f\xa7\x89\x23\x4a\xf4\x97\xac\x3e\x13\x02\xcc\x9f\xa9", 0, + 128, NULL, 0 }, + { 256, 128, 161, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "", + "\xd2\x8a\x43\x08\x6c\x81\xf5\x5e\x83\x6a\x45\xa8\xd4\x61\x7c\xe6", 0, + 0, NULL, 0 }, + { 256, 128, 162, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xd9\xb8\x9e\x26\x08\xa2\xcb\x75\x16\x2d\xbf\x92\x83\x4d\x1e\x28", 0, + 128, NULL, 0 }, + { 128, 256, 163, + "\xa3\x49\xac\x0a\x9f\x9f\x74\xe4\x8e\x09\x9c\xc3\xdb\xf9\xa9\xc9", + "", + "\x3a\x84\x37\xb8\x77\xb7\x5c\xc0\x8a\x4d\x8d\x75\x59\xa8\xfc\x68\x69" + "\xa5\x8c\x71\x3d\xa6\x3d\x1d\x4b\x35\x0d\x59\xb5\x97\xe3\x0c", + 1, 0, NULL, 0 }, + { 128, 256, 164, + "\xac\x68\x6b\xa0\xf1\xa5\x1b\x4e\xc4\xf0\xb3\x04\x92\xb7\xf5\x56", + "\x2f\xa4\x3a\x14\xae\x50\x05\x07\xde\xb9\x5a\xb5\xbd\x32\xb0\xfe", + "\x00\x85\x32\xa5\x3d\x0c\x0a\xb2\x20\x27\xae\x24\x90\x23\x37\x53\x74" + "\xe2\x23\x9b\x95\x96\x09\xe8\x33\x9b\x05\xa1\x57\x42\xa6\x75", + 1, 128, NULL, 0 }, + { 128, 256, 165, + "\x73\xef\x9e\xf1\xa4\x22\x5e\x51\xe3\xc1\xdb\x3a\xce\x1f\xa2\x4f", + "\xff\xad\x38\x0d\x9a\xab\xb0\xac\xed\xe5\xc1\xbf\x11\x29\x25\xcd\xfc" + "\x3d\x37\x9f\xc2\x37\x6a\x4f\xe2\x64\x44\x90\xd0\x43\x0a\xc3", + "\x9c\x7c\xb9\xf7\xc2\x07\xec\x46\xd1\xe3\xc5\x57\x64\x73\x1c\x4a\xb5" + "\xdd\xba\xe4\xe1\x40\x1e\x52\xa8\x95\xdf\x0c\xff\x47\x87\xc9", + 1, 256, NULL, 0 }, + { 128, 
128, 166, + "\xe3\x4f\x15\xc7\xbd\x81\x99\x30\xfe\x9d\x66\xe0\xc1\x66\xe6\x1c", + "", + "\x1d\x76\x5a\xb9\xe2\x98\x92\xf7\xbf\xec\x29\x75\xad\x4b\xc2\xdc", 1, + 0, NULL, 0 }, + { 128, 128, 167, + "\xe0\x9e\xaa\x5a\x3f\x5e\x56\xd2\x79\xd5\xe7\xa0\x33\x73\xf6\xea", + "\xef\x4e\xab\x37\x18\x1f\x98\x42\x3e\x53\xe9\x47\xe7\x05\x0f\xd0", + "\xcf\xc1\x9e\xc0\x79\x02\xec\x8b\xe4\x89\x60\x6d\x8f\x40\xd1\x72", 1, + 128, NULL, 0 }, + { 128, 128, 168, + "\x9b\xd3\x90\x2e\xd0\x99\x6c\x86\x9b\x57\x22\x72\xe7\x6f\x38\x89", + "\xa7\xba\x19\xd4\x9e\xe1\xea\x02\xf0\x98\xaa\x8e\x30\xc7\x40\xd8\x93" + "\xa4\x45\x6c\xcc\x29\x40\x40\x48\x4e\xd8\xa0\x0a\x55\xf9\x3e", + "\xac\x50\xad\xad\x97\x85\xa8\x9c\x72\x82\xd8\xab\x88\x1d\xc6\x15", 1, + 256, NULL, 0 }, + { 520, 256, 169, + "\x8a\x0c\x46\xeb\x8a\x29\x59\xe3\x98\x65\x33\x00\x79\x76\x33\x41\xe7" + "\x43\x9d\xab\x14\x96\x94\xee\x57\xe0\xd6\x1e\xc7\x3d\x94\x7e\x1d\x53" + "\x01\xcd\x97\x4e\x18\xa5\xe0\xd1\xcf\x0d\x2c\x37\xe8\xaa\xdd\x9f\xd5" + "\x89\xd5\x7e\xf3\x2e\x47\x02\x4a\x99\xbc\x3f\x70\xc0\x77", + "", + "\xf5\xbf\xb9\x40\x56\x1f\xb4\xdb\x73\xeb\xba\x49\xbf\x2e\x48\x93\xbb" + "\x0c\xca\x61\x8a\x71\xb7\xec\xf6\xac\xa3\x82\x31\xe1\x67\xea", + 1, 0, NULL, 0 }, + { 520, 256, 170, + "\x28\x77\xeb\xb8\x1f\x80\x33\x4f\xd0\x05\x16\x33\x74\x46\xc5\xcf\x5a" + "\xd4\xa3\xa2\xe1\x97\x26\x9e\x5b\x0a\xd1\x88\x9d\xfe\x2b\x4b\x0a\xaa" + "\x67\x6f\xac\x55\xb3\x6c\xe3\xaf\xfc\x7f\x10\x92\xab\x89\xc5\x32\x73" + "\xa8\x37\xbd\x5b\xc9\x4d\x1a\x9d\x9e\x5b\x02\xe9\x85\x6f", + "\xba\x44\x8d\xb8\x8f\x15\x4f\x77\x50\x28\xfd\xec\xf9\xe6\x75\x2d", + "\x16\x90\xed\x41\x80\x64\x28\x99\xe0\xde\xb9\xec\x22\x70\x37\x4e\x8b" + "\x0a\x48\x42\x17\xf5\xa6\x82\xc5\x24\x31\x6e\xca\x21\x9b\x64", + 1, 128, NULL, 0 }, + { 520, 256, 171, + "\x21\x17\x8e\x26\xbc\x28\xff\xc2\x7c\x06\xf7\x62\xba\x19\x0a\x62\x70" + "\x75\x85\x6d\x7c\xa6\xfe\xab\x79\xac\x63\x14\x9b\x17\x12\x6e\x34\xfd" + "\x9e\x55\x90\xe0\xe9\x0a\xac\x80\x1d\xf0\x95\x05\xd8\xaf\x2d\xd0\xa2" + "\x70\x3b\x35\x2c\x57\x3a\xc9\xd2\xcb\x06\x39\x27\xf2\xaf", + "\x7d\x5f\x1d\x6b\x99\x34\x52\xb1\xb5\x3a\x43\x75\x76\x0d\x10\xa2\x0d" + "\x46\xa0\xab\x9e\xc3\x94\x3f\xc4\xb0\x7a\x2c\xe7\x35\xe7\x31", + "\xe5\x42\xac\x8a\xc8\xf3\x64\xba\xe4\xb7\xda\x8b\x7a\x07\x77\xdf\x35" + "\x0f\x00\x1d\xe4\xe8\xcf\xa2\xd9\xef\x0b\x15\x01\x94\x96\xec", + 1, 256, NULL, 0 }, + { 520, 128, 172, + "\x81\x3e\x0c\x07\x8c\x22\x13\x75\xe8\x05\x90\xac\xe6\x77\x4e\xaf\xd2" + "\xd2\xc2\x42\x35\x09\x88\xd0\x2e\xfa\x55\x0e\x05\xae\xcb\xe1\x00\xc1" + "\xb8\xbf\x15\x4c\x93\x2c\xf9\xe5\x71\x77\x01\x5c\x81\x6c\x42\xbc\x7f" + "\xbc\x71\xce\xaa\x53\x28\xc7\x31\x6b\x7f\x0f\x30\x33\x0f", + "", + "\xbb\x6a\xb6\x6f\x51\xe5\x3f\xa0\x86\xc9\xc6\x1a\x26\xca\x27\xe0", 1, + 0, NULL, 0 }, + { 520, 128, 173, + "\x57\x13\x34\x30\x96\xb0\xaa\xf0\x56\x2a\x6b\x92\xc1\xa1\x55\x35\x92" + "\x41\x60\x47\x5a\x4e\x42\x33\x58\x91\x59\x72\x8c\x56\x2e\x3b\x2a\xd9" + "\x6f\x74\x0c\x6a\x4d\xa2\xbc\x3f\x76\x8c\xe9\x8c\x9b\xd6\x6b\xac\x28" + "\xd1\x64\x6f\xf5\x92\x02\x8c\x94\x0d\x45\x5f\x35\xee\xb4", + "\x71\x71\x2d\xe2\xfa\xc1\xfb\x85\x56\x73\xbf\xf7\x2a\xf6\x42\x57", + "\xc1\x81\x65\xb8\xb9\x7d\xb1\xca\x5e\x24\x86\xa3\x2b\x39\x73\x1e", 1, + 128, NULL, 0 }, + { 520, 128, 174, + "\x72\x08\xaf\xbe\xcf\x5f\x1f\x34\x82\x8f\x98\xb7\x19\x41\x4e\x28\x07" + "\x16\xde\x64\xf5\xed\xd1\xae\x1c\x77\x41\x53\xcd\x20\x22\x33\x7b\xb2" + "\x0f\xad\xe1\xb7\x85\x6f\x1d\xbf\xd4\x0e\x2b\x43\x07\xf1\x29\x3c\xef" + "\xf1\x69\x2e\xe9\x0d\x8c\x90\xb5\xfd\xf9\x53\xab\x01\xa5", + 
"\x43\xb5\x33\x02\xb6\x04\xd6\x13\xe6\x2d\xb0\x02\x04\x4a\x47\x82\xd5" + "\x72\xac\x8f\xbd\x3c\xd0\xec\xe9\x1b\x43\xbc\x52\xe1\x8e\x98", + "\x2f\xec\xfe\x45\xd7\x93\x39\xc5\x7d\xdd\xba\x68\xab\x34\xf5\xf1", 1, + 256, NULL, 0 }, + { 0, 0, 0, NULL, NULL, NULL, 0, 0, NULL, 0 } +}; diff --git a/test/wycheproof/hmac_sha384_test.json.c b/test/wycheproof/hmac_sha384_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..09bd57fc021e044f1aa87a9ff6537a9f41553753 --- /dev/null +++ b/test/wycheproof/hmac_sha384_test.json.c @@ -0,0 +1,1604 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* HMACSHA384, 0.8rc21 */ +#include "mac_test.h" +const struct mac_test hmac_sha384_test_json[] = { + { 384, 384, 1, + "\xee\x8d\xf0\x67\x85\x7d\xf2\x30\x0f\xa7\x1a\x10\xc3\x09\x97\x17\x8b" + "\xb3\x79\x61\x27\xb5\xec\xe5\xf2\xcc\xc1\x70\x93\x2b\xe0\xe7\x8e\xa9" + "\xb0\xa5\x93\x6c\x09\x15\x7e\x67\x1c\xe7\xec\x9f\xc5\x10", + "", + "\xa6\x55\x18\x4d\xaf\x33\x46\xff\xc6\x62\x9d\x49\x3c\x84\x42\x64\x4e" + "\x49\x96\xa2\x79\x9e\x42\xe3\x30\x6f\xa6\xf5\xb0\x96\x7b\x6c\xf3\xa6" + "\xf8\x19\xba\xb8\x9b\xce\x29\x7d\x1d\x1a\x59\x07\xb2\xd0", + 1, 0, NULL, 0 }, + { 384, 384, 2, + "\x97\x66\x96\xc0\xdc\x97\x18\x2c\xa7\x71\x97\x5c\x39\x28\xff\x91\x68" + "\xef\x89\xcd\x74\x0c\xd2\x29\x28\x58\xfd\x91\x60\x68\xa7\x02\xbc\x1d" + "\xf7\xc6\xcd\x8e\xe1\xf0\xd2\x5e\x61\xd4\xc5\x14\xcc\x5d", + "\x2b", + "\x36\x3e\x89\x73\xfe\xdc\xf7\x89\x20\x13\xdf\xae\x0b\x70\x65\xd6\x1d" + "\x80\xb9\x8c\x63\x5b\xc0\x9e\xd8\x60\xa0\x14\x73\xb9\xbc\xd0\xdc\x55" + "\x0d\xbf\x66\xcf\x0d\x60\x1f\xe9\xcb\xf3\xae\x59\x62\x0d", + 1, 8, NULL, 0 }, + { 384, 384, 3, + "\xc5\x5e\xa4\xc6\x4a\x0a\x63\xe2\xd1\x4a\xd4\x25\x59\xba\x7c\x81\x6b" + "\x88\x24\xd2\x63\xc2\xcc\x6a\x01\x57\x61\xb5\x3f\x68\x1e\x51\x43\x69" + "\xf0\xdf\xba\x5c\xde\x16\x53\x20\xee\x10\xa9\x6e\xb1\xfc", + "\x5a\xbd", + "\xcc\xc2\x92\x5f\x16\x4a\x7d\x96\x62\xf1\xe7\x6b\xca\xf6\x34\x54\x92" + "\xbb\x09\x1d\x4d\x2d\x77\x5a\xf2\x17\x8a\x4b\xcc\x1c\xa2\x1d\xcf\x8b" + "\x3b\xf8\xf0\x56\x82\x37\x70\x78\x2f\x25\xa4\x19\xbb\x3e", + 1, 16, NULL, 0 }, + { 384, 384, 4, + "\x29\x28\xd4\x65\xd9\x2f\xa4\x00\x72\xca\x9d\x67\x76\x1b\xe6\x6e\x49" + "\x17\x55\xe4\x34\x99\x00\x3c\x10\x57\xd3\xbe\xc8\x70\xf2\x55\x12\x6c" + "\x36\x58\xd0\xd8\xa0\xc7\xd2\x07\xdf\x87\x10\x03\x7c\xa7", + "\xc4\x05\xae", + "\xd9\xe1\x9c\x67\x2a\x46\x6e\x4c\x83\xa8\x49\x90\x57\x28\xc4\xbe\x1d" + "\xb9\x9b\xdd\x26\x09\x46\xd9\xff\x52\x93\x97\x79\x00\x2d\xcc\x46\x0c" + "\x57\x6f\x02\xb4\x0d\xda\x07\x17\x18\x2b\xe9\x6b\x54\x11", + 1, 24, NULL, 0 }, + { 384, 384, 5, + "\x68\x6a\x37\x30\x08\x5c\xc9\x44\xfc\xeb\x14\x16\x28\x41\x98\x18\xe6" + "\x62\xfe\x21\xe5\x2b\xea\x27\x48\xf3\xb7\x04\xf8\x0c\xe8\x01\x08\x6d" + "\xb1\xe3\x06\x89\x17\xb2\x42\xe6\x2b\x4d\x6e\x6e\xd6\x85", + "\x66\x01\xc6\x83", + "\x10\xdc\x39\x10\x39\x83\xb3\xa6\xbe\x37\x6a\x8e\xda\x7b\x6f\x36\x3c" + "\xb9\x1e\xfe\x11\xb0\x27\xa6\x24\x40\xae\x13\x6b\xd6\x6f\x98\xb0\xa1" + "\xd8\xb8\xf2\x39\x90\x99\x49\x20\x21\x07\x6a\xfa\x14\xa0", + 1, 32, NULL, 0 }, + { 384, 384, 6, + "\xf2\x2d\x86\x7b\x97\x2b\x23\x2e\x3f\x44\x4a\x48\x8d\xd7\x94\xd1\x70" + "\x80\x7c\x70\xeb\x65\x0f\x95\x2b\x61\x77\x59\x6f\x76\xc5\x58\xa5\xd8" + "\x60\xd6\xf7\xbe\x0b\xe9\xe6\x66\xf9\xbd\x53\x73\x2f\x8d", + "\x15\xb2\x93\x77\xe0", + "\xe0\x2e\x4e\x20\xb5\xf1\xe5\xf0\x69\x13\xbc\x97\x45\xc9\x06\x9c\x09" + "\xec\x13\x69\xf1\xa2\x96\xad\x1d\x07\xc0\x4c\xc4\xf9\xcb\x47\x41\x24" + "\x8d\x7b\xa0\x97\xcd\x3b\xa0\xe7\x5d\x24\x09\xd6\xa0\x1b", + 1, 40, NULL, 0 }, + { 384, 384, 7, + "\x3a\xc9\xab\xd5\x3d\xbd\x0f\xbb\x89\x1f\x9b\x5e\x16\xdd\x45\xdf\x99" + "\x4e\x52\x83\x52\x78\x32\x70\x71\x38\xfc\x27\x12\xba\xd9\xe3\x47\x61" + "\xe7\xd9\xc6\xd0\x5d\x46\xf2\xc8\x32\x3d\xdb\x0e\xfe\x99", + "\x5a\x34\x15\x5b\x11\x15", + "\x78\xc5\x3d\xd1\xa2\x43\x11\x74\x62\x8f\x5f\x48\x67\xfa\x77\x7a\xfa" + "\x6d\xf1\xb3\x62\x69\xbb\xa1\x14\xd0\x16\xd1\x06\x5f\xcb\x02\x11\x70" + "\xba\xad\x09\xb4\xa5\x28\xf4\x05\x73\x90\x3a\x65\xf5\x40", + 1, 48, NULL, 0 }, + { 384, 384, 8, + 
"\xae\x3a\xa9\x4f\xdd\x35\xe2\xbe\xf4\x04\x72\xd2\x9b\xda\xd3\xa4\x09" + "\x84\x0e\xa4\x41\xc3\xd7\x02\x5c\xd7\x2f\x3e\x81\xff\x56\xda\x60\x21" + "\x61\xd8\x4b\x23\xd1\x63\x40\x61\x38\x5b\xe3\x0c\x5b\xbd", + "\x8a\x14\x0d\x78\x1e\x71\x91", + "\xfd\x22\xba\x89\x6c\xb1\x14\x7b\xb8\x6f\x8a\xd5\x1c\x25\x3b\x79\x26" + "\x57\xc0\xbe\xcc\x91\x3e\x90\x10\x4d\xa0\xf1\x39\xf9\xb0\x8c\x91\x69" + "\x70\x6f\x15\x31\xa2\xc6\xc0\x3d\x6b\xd7\x2a\x77\xef\xf2", + 1, 56, NULL, 0 }, + { 384, 384, 9, + "\x44\xb7\x98\x52\xca\xbc\xf3\xfe\x93\xd2\xff\xf5\x5d\x2a\xfe\x6a\x46" + "\xc3\x5b\x7a\xd1\x95\x4c\xe0\x88\x8d\xe7\xb4\x59\xb9\x82\x72\x2f\xaf" + "\x8b\x49\x0e\x6b\x00\xe7\xbc\xab\xbd\x36\xf1\x84\x43\xf5", + "\x93\x98\xcd\x25\x1d\xea\xfe\x8b", + "\x56\x12\x8f\xb4\x38\xa9\x3f\x6f\x48\xf4\x7c\x0f\x4c\x75\x49\xf8\x00" + "\x8a\x8e\x69\xbb\xdb\xf0\x88\x6e\xc4\x0f\x86\xe7\x87\x00\x34\xef\x90" + "\x90\xd2\xb0\x40\x57\x39\x1f\x1d\xef\x5b\x25\xe8\xf0\xad", + 1, 64, NULL, 0 }, + { 384, 384, 10, + "\x03\xfe\xd2\xf5\x79\xa3\xeb\xde\xce\xcf\xb1\x84\xeb\xe2\x98\x48\x76" + "\x11\x33\x99\xc4\xa5\x93\xd9\x8b\x5f\x5e\x60\x6d\xd3\x30\xfb\x39\x4c" + "\x28\x5d\x9e\xad\x60\x17\x48\x25\x9b\x49\x33\x35\xf8\xe5", + "\x18\xd8\x79\xb1\xf6\x3d\xf3\xac\x7a", + "\xa0\xe3\xb5\x66\x0e\xeb\x5f\xc4\xa5\xdd\x48\xe7\x25\xb0\x9a\x0e\x28" + "\x2b\x22\xbb\xe2\x69\x3d\x8b\x89\x3d\xdf\x0f\x21\x16\x45\x0e\x08\x75" + "\x92\x54\x07\xe9\x09\xfd\xe0\xf1\xf7\x28\xf6\x08\xfb\xa9", + 1, 72, NULL, 0 }, + { 384, 384, 11, + "\xf4\xef\x48\xbf\x40\x56\xd3\x9d\xbb\xa4\x15\x40\x18\xc6\x3b\xdf\x29" + "\x42\x0b\x99\x91\xea\x59\x4f\xf0\x5e\x3c\xc1\xcb\x02\xe1\x76\xd5\x4b" + "\xa0\x38\xa6\xb7\x86\x92\x51\x9d\x67\x88\xe4\x95\xbb\xab", + "\x0a\x5d\xe1\x3c\xd9\xba\x31\xc9\x44\x86", + "\xe9\xa1\x21\x9e\x86\x98\x3d\x69\xe3\x36\x06\x8b\x28\x03\x09\xf9\x74" + "\xab\x61\xf2\x59\x68\xfc\x63\x52\x32\x4b\xa4\x9c\x36\xce\x42\xc5\x78" + "\x67\x6a\x3a\x31\xef\x11\xe9\x60\xd6\x77\x13\x86\x65\x0e", + 1, 80, NULL, 0 }, + { 384, 384, 12, + "\xfc\x77\x1f\x7c\xcd\x49\x9a\x1e\xd6\x33\xd8\x68\x76\xd7\x07\xb5\xf1" + "\xd5\x3c\x6b\xcd\xf2\x1a\xa2\x90\x77\x66\xab\x3c\xa7\xfa\x6c\xdd\x6a" + "\x9b\x98\x1b\x1a\x84\xa5\x28\xe8\x14\x44\x30\x3f\x10\x57", + "\x03\xba\x11\xf3\xf3\x17\x3b\x85\x22\x6b\x25", + "\xcf\xb4\x97\x1d\x54\x49\xdb\x36\x4e\x2c\x8d\x0d\x42\x9a\x07\x67\x05" + "\x0d\x48\x0a\x53\x97\xf0\xdc\xc7\x42\x94\xf5\x2e\xa9\x62\x60\xa5\x7f" + "\xe6\xca\xd1\x44\x09\xad\x67\xda\x6f\xbe\xbf\x2d\xa0\xd8", + 1, 88, NULL, 0 }, + { 384, 384, 13, + "\xb3\x99\x9d\xe6\x80\xb1\x15\x50\xe1\x86\x31\xc8\x19\x9f\x7e\xb8\xa7" + "\x4e\x21\xbd\xc9\xd9\x7f\x78\x12\x45\xc2\xaf\x19\xf8\x54\x97\xd9\xf3" + "\x8b\x25\x0a\x56\x4e\x48\x65\x0f\xd0\x0b\xe3\x65\xf1\x55", + "\x9c\x65\x8c\xb5\xe6\x01\xd8\x5d\xc3\x85\x78\x63", + "\xd5\x47\xe4\xcb\xd5\x6e\x82\xb4\x7d\x2e\xc9\x3e\xeb\x6b\x34\x92\x4e" + "\xbd\xa4\x61\xfb\x60\xe4\x75\xbf\x32\x8d\x23\x68\x61\x8f\x55\xfb\xf7" + "\xb0\xe2\xeb\x1f\xf5\x42\xc4\xeb\x7e\xef\xbf\xc8\xbd\x2b", + 1, 96, NULL, 0 }, + { 384, 384, 14, + "\x88\x00\x5a\x62\x86\x4e\xa6\x99\xe1\x50\x96\x16\xec\x48\x03\x3e\x84" + "\xd2\xe2\xa1\x3b\x8b\xc2\xe8\xa7\x6f\x2e\xcc\xbd\xb2\x07\xa9\x5a\xc8" + "\xe2\xf5\xb5\xa7\x03\xb2\x2a\x0b\x57\x1e\x8a\xcc\x59\x9a", + "\x5a\x94\xf8\x45\x41\xa7\x94\xbf\x23\xd7\x2d\xb1\x6d", + "\xd6\xb7\x3e\xe6\x7e\x88\xa2\x0f\xce\xb5\x52\x0b\xe9\x25\x94\xda\xf1" + "\xb3\x78\x6c\x71\x87\x53\x5c\xcb\x1f\x0b\x92\x6d\xae\x11\xad\xde\x6e" + "\x86\x97\xba\x80\x3b\x15\x90\x19\x84\x9d\xf3\xc9\xd2\xc7", + 1, 104, NULL, 0 }, + { 384, 384, 15, + 
"\xb1\xcb\xda\x2c\x9a\x12\xf9\x23\x15\xa5\x10\x1a\xef\x31\x1e\x99\xd6" + "\xdb\x00\x2b\x0e\x04\xfb\x53\xc5\x01\x06\xaa\x4d\x28\xe9\xa3\x46\x69" + "\x7b\xa9\x70\x84\x57\x2e\xea\x56\xcc\xfc\x4a\xd7\xe5\x72", + "\xce\x12\xc0\xc7\x8e\x3f\x6b\x27\x6a\xc5\x6e\xd7\x43\x5e", + "\x5c\x08\x02\xcd\x0e\xd8\x23\x80\xe4\xc2\xa6\x1d\x14\x6e\xd7\x27\x62" + "\x61\x3d\xe8\x9e\xb4\xab\x9f\xe7\x1d\xa9\xad\x3d\x79\xe1\xd2\x32\x1c" + "\xae\x18\x62\x92\xf7\xc5\x2a\xb6\x39\xd3\xba\x6a\xa8\x5a", + 1, 112, NULL, 0 }, + { 384, 384, 16, + "\x08\x51\x7e\x80\x14\xe0\x0d\xb5\xc3\x7f\x2a\x20\xf9\x87\xea\x2e\xc5" + "\x2e\x79\x38\xde\x01\x8a\xd6\xbe\x25\x6b\xa2\x23\x68\x04\x14\x4a\xd2" + "\xa1\xbc\xc2\x42\x73\x88\x62\xb4\x06\x47\x00\x7e\x0a\x2c", + "\x21\xe2\xa0\xa1\x67\x78\x9a\x6b\x72\x2d\x17\x37\xd9\x2f\x8b", + "\x22\x64\xd3\xc9\xb8\x35\xae\xdf\x69\x9d\x5f\xbf\xc0\x5d\x46\xf0\x85" + "\x59\x14\x41\xdf\x75\xaa\x2b\x28\x73\xf6\xc8\xa1\x1a\x08\x56\xa2\xb7" + "\x9a\xe1\x1e\xa0\xa9\x16\x09\xdb\xd5\x64\xa0\xbe\xd4\x56", + 1, 120, NULL, 0 }, + { 384, 384, 17, + "\x50\x3d\x74\x78\xa7\x73\xb6\x94\xd6\xe5\x52\xc9\x70\x3c\xc8\xbc\x56" + "\xfd\x49\xfa\xfc\x9a\x17\xca\xb8\xb0\x33\x2d\xca\x8d\x49\x33\x6f\xa7" + "\xe9\xec\x2b\xcb\x56\x25\x3f\xe5\xbb\x50\x4e\x3e\x7f\x7f", + "\xd9\x6e\x6f\xed\x89\x3a\xdd\xfd\x92\x37\xc8\x1c\x4f\x4e\x34\x1b", + "\x19\x38\x97\x66\x78\x99\x12\x26\x0f\x3f\x97\x57\xdf\x36\x51\x66\x38" + "\x29\xc3\x58\xbb\x48\xb2\x2c\x1c\x63\x13\x20\x70\xdf\x31\x89\x05\xbe" + "\xff\xd4\x5f\x51\xe4\xdf\xcb\x3e\x78\x5f\x44\xcf\x91\x06", + 1, 128, NULL, 0 }, + { 384, 384, 18, + "\x41\x34\x1b\xab\x90\x2e\x76\x7d\x4d\x19\x64\xc0\xac\xfe\xcf\x46\xef" + "\xf1\xb0\x2b\x64\x55\xbc\xb2\x09\x7d\xe9\xc1\x54\xbe\x1f\x66\x7f\x21" + "\xbe\x07\x6d\xe1\x8c\xd2\xc1\x5c\x00\x58\x96\xfc\xa8\x7f", + "\x4c\x43\xac\x7d\xe3\x63\x1c\xc8\x6f\x4d\xa7\x2f\xe6\xb6\xa5\x52" + "\xf1", + "\x3c\x31\x04\xf2\x4b\x70\x70\xcc\x32\x77\xd9\xae\x64\x0d\x41\x62\x98" + "\xfc\x91\x7a\x0c\x1c\xdc\x3c\x2e\x7b\x6d\xa7\x57\x06\xfd\x2a\xe2\x34" + "\xef\xd5\x51\xaf\x12\xae\x29\x14\x47\x04\x79\x3e\x2f\x6a", + 1, 136, NULL, 0 }, + { 384, 384, 19, + "\xc2\xf8\x3b\xe1\xac\xce\x7b\x89\xa5\xf9\xe9\xea\x7e\x4c\x4f\x8b\x0f" + "\x43\x19\x98\x6f\xbe\x47\x9f\xa3\xb4\xa3\xc2\x98\x16\x83\x62\x39\x3b" + "\x56\xea\x03\xb5\xce\xf7\x7f\x48\xe5\xa7\x2a\xbe\x6d\x08", + "\x8d\xd0\xcd\x78\x6c\xd8\x00\xff\xeb\xec\x09\x87\x28\x92\x3d\x69\x24" + "\x9d\x32\x23\xc4\xc5\x95\xcb", + "\x75\x1c\x6c\x7d\x00\xfe\xf5\xe4\xed\xc9\x93\x91\x5f\xba\x69\x49\x43" + "\xa7\xee\x3a\x2c\x8e\x5b\x70\x0d\x0e\xe5\x36\xbf\x85\xfb\x11\x7a\x9c" + "\xd6\xc4\x56\x48\x5c\xd6\x70\xf7\xa0\xb4\x90\xc8\x3e\x61", + 1, 192, NULL, 0 }, + { 384, 384, 20, + "\x6b\xd2\xae\xe9\xdd\x98\xd6\xb6\x60\x9f\xce\x82\x18\x1b\x10\xc2\x0b" + "\xba\x86\x1d\xa6\x8a\x15\x90\x58\x6f\xab\x08\xc5\xe9\xe9\x0f\xf5\x84" + "\x04\x7d\xb4\x76\x08\x28\x64\x3f\xea\x38\x08\x71\x60\xe4", + "\x33\x23\x6a\x9d\xe6\x03\xc1\xe4\xf5\xe1\x11\x64\x22\x47\x40\x62\x7d" + "\x10\xf6\x00\x8e\xb7\x3e\xc2\x64\x23\x21\xbf\x0b\x82\xd5\x79", + "\xe4\xcd\x8b\x88\x68\xbb\x07\x8e\xd5\xd6\x93\x8e\x40\xd9\xff\x4b\xf6" + "\x1a\x49\x94\xbe\x40\xa5\xf2\xb5\x44\x64\x63\xe5\xdb\x90\x51\x6b\xcc" + "\xdd\x19\xf1\x6c\x92\xe3\xf8\x39\xb9\xd6\xde\x68\xb2\xa9", + 1, 256, NULL, 0 }, + { 384, 384, 21, + "\x2f\x98\xba\x2c\xea\xad\xc5\xba\x08\x88\x0a\x35\xcb\x00\x80\xdc\x87" + "\x0a\x57\x34\xa7\x82\xeb\xe3\x1c\x4b\xab\x10\x0f\xf8\x78\x6d\xcc\x3b" + "\xe6\xde\x18\x48\x2e\xa5\xd1\xb3\xbf\x14\xae\xab\xb4\x70", + "\x2d\x74\xa6\x6d\xac\xf1\x2e\xdb\x85\xef\x30\x73\xfe\xaf\xd1\x22\x88" + 
"\x9c\xb6\x34\xad\xd0\x0f\xf0\x39\x5d\x22\x4b\x4f\xf8\xb5\xd5\xd6\x7c" + "\xa6\x41\x9b\x68\x26\xab\xff\xdb\x41\xba\xb4\x27\xd5", + "\xa8\xea\x72\x10\x08\x59\xf4\xb7\xb6\xf2\xfe\x59\x62\x48\xf1\x72\x9b" + "\xcd\xf0\x60\x6c\x90\x0a\xb5\x2e\x51\xea\xb5\x48\xd2\x6e\x1e\xb6\x34" + "\xa4\x2e\x5f\xc7\xcc\xc1\x83\x56\xc0\xd2\x83\x59\x7e\xe2", + 1, 376, NULL, 0 }, + { 384, 384, 22, + "\x5e\x5f\x60\xe4\x0d\x84\xc7\xca\x26\x08\xaf\x3b\xcc\x6e\x04\xab\xc5" + "\xf8\xb7\xca\x73\x0a\x78\xaf\x7f\x6f\x03\x2e\x5a\x15\x01\x69\x5b\xd9" + "\x1f\x3b\xeb\xb2\x85\x90\xaf\x1d\xb9\x0d\x83\x90\xca\x58", + "\x2e\xfe\x6a\x14\xea\x8d\x67\x9e\x62\xdb\xce\xdf\x35\xe6\x18\x52\x27" + "\x8c\x83\xc5\x4a\xdb\xe1\xf1\xc7\x2c\xb1\xa7\x46\xb1\x1c\xff\x8c\xb4" + "\xfc\x3a\x2c\x3a\xcd\x44\x25\x5d\x51\xc0\x20\xca\x6d\x47", + "\x6e\x8c\x95\xa4\x09\x7e\xa1\x3d\x06\x4e\xd1\x08\x09\xa3\x3b\x56\x9a" + "\x6a\x84\x20\x51\x58\xbd\x69\x2f\xf8\x2b\xc4\xb7\x0b\x47\xa6\x0e\xd3" + "\x32\xf2\xf5\xbc\xa5\x21\x1a\x1c\xc8\x9c\x06\xf9\xc5\x95", + 1, 384, NULL, 0 }, + { 384, 384, 23, + "\xbc\x31\x0b\xc3\x91\x3d\x9f\xe5\x9e\x20\x12\xa0\x58\xc9\xe1\x50\x53" + "\x4d\x25\x61\x1e\x36\x20\x6c\xf0\x7c\xca\xef\xe1\x53\xf3\x8e\xb0\xea" + "\xad\x99\x41\xb6\x88\x3d\xfb\xce\x01\xbc\xb5\x19\x60\x41", + "\x9f\x07\x47\xd7\x39\x6b\xfb\xe0\x1c\xf3\xe8\x53\x61\xe5\x00\x85\xe0" + "\xa9\x1a\x74\x90\xb9\x94\x03\x1d\x81\x85\x1b\x72\x50\x65\x99\x3f\x45" + "\xda\xd0\xd6\x0d\x79\x4a\xed\xec\x7b\xa5\xd9\xd6\xdb\xbe\xe4", + "\x3a\x86\x49\x8f\x78\xc3\xfb\x7e\xb3\xb7\xb3\xd8\x2f\x67\x7d\x2d\xfe" + "\x01\x16\x6f\xe7\x6e\x23\x20\x83\x33\x4d\x74\xf1\x15\x88\xfd\x08\x96" + "\x37\xc9\x47\x61\xe9\xcf\xe8\x36\x43\x60\x05\xde\xae\xf7", + 1, 392, NULL, 0 }, + { 384, 384, 24, + "\xdc\x77\x0c\x64\xd0\x0d\x15\x6e\x43\xcb\x74\x97\x0e\x3a\x1a\x2a\xd2" + "\x8b\x6d\x9e\xc6\xb2\xb6\xe5\xac\x3e\x35\x6a\x99\xf8\x79\xcb\x62\x0f" + "\x00\x34\x0c\x04\x4c\xc1\xf3\x1b\xdc\xcf\xa0\xdb\xd1\x77", + "\x40\x3f\xd8\xe3\xef\x51\xb6\x53\x9d\xb6\x58\xa8\x94\xbe\x85\xb5\x8f" + "\xbc\x84\x88\x1e\x61\xc5\xe0\xcb\x13\xae\x42\x1a\x09\xd3\x1d\x78\x06" + "\x03\x25\x6d\x39\x0e\xdd\x05\x6d\x19\x08\x56\xbe\x00\xad\x20\xa7\x04" + "\x8f\x0c\x67\x41\x6f\xe8\xe0\x28\x84\x08\x61\x55\xf4\x26\x32\x62\xe8" + "\xc1\x27\x55\x04\xd4\xf9\x1f\x27\x51\xd3\xc3\xdc\xcd\x44\x09\xff\x2b" + "\x45\xe4\x1d\xe9\x3f\x7b\x10\x4d\x58\xf6\xe1\x5b\xac\xb6\x2a\xce\x97" + "\x00\x61\x5e\xcc\x1b\x30\xa0\xcc\x1b\x35", + "\x1c\x4f\x64\x74\xf3\x9e\x6e\xab\xbe\x7a\x99\xfa\xa2\x34\xf4\x98\x33" + "\x44\x41\x30\xac\xf0\x1d\xae\x68\xd6\x82\x51\xa9\x30\x41\x99\x60\xb0" + "\xfb\x5f\x48\x36\x01\x49\xe0\x5d\x12\x09\x94\x1c\xc9\xec", + 1, 896, NULL, 0 }, + { 384, 384, 25, + "\xcc\xa9\x29\x9c\x7b\xdc\x26\xa4\xb5\x95\x05\x5c\x99\xca\x23\xbe\xc8" + "\xed\x11\xb5\xde\xed\xa9\x1f\x83\xe2\x36\x5e\x73\x40\x39\x5c\xee\xf4" + "\xe8\x6e\x5c\xd9\x1f\x25\x93\xbc\xfe\xc4\x98\xa6\x7f\xc9", + "\xa0\x5b\x40\xb8\xd3\xa7\xbc\x7b\x75\xb0\xe9\x73\x09\xc9\xbd\x1c\x9d" + "\x87\x55\xc1\xff\x52\x45\xef\x63\x08\xa6\xa5\xca\xd3\xec\xfb\xcb\x63" + "\x64\xb4\x1c\xa6\xf3\xd2\x4b\xbe\xe8\x44\xd6\x20\x4d\x10\x26\xab\xe3" + "\x45\xaf\x7b\xde\xc1\x14\xa3\x73\xb1\x09\xaa\x57\x24\xb7\x38\xd5\x0a" + "\xb7\xa8\x26\xc2\x68\xe8\x73\x70\x9f\x8b\x35\x13\x5a\x87\x00\x45\xd5" + "\xfb\x9d\xaa\x82\xd3\xc2\x45\xb5\x33\x89\x17\x35\x4e\x72\xb3\x05\x8c" + "\x9a\x4b\x80\x71\x17\x46\x52\x17\xd7\xd1\x4f\x36\xf8\xa8\xd4\xe9\x7b" + "\xc3\xb9\x35\x87\xc9\x26\x41\xe7", + "\x1b\x6b\x5b\xa8\x48\xbc\x13\xdd\x46\xc3\x51\x77\xae\x9f\xf9\xbd\x2d" + "\x6c\xa5\xf4\xc9\x37\x39\x64\xd3\x18\x24\x83\xd9\x80\xb4\x65\x45\x27" + 
"\xf3\x6d\x7c\xc5\x1b\x9e\x2e\xfe\x7e\xd9\x7a\x82\xe3\xbe", + 1, 1016, NULL, 0 }, + { 384, 384, 26, + "\xc7\x28\xe6\x5e\x08\xd9\x29\x6f\xe3\xcd\xf2\xde\xdb\x49\xc8\x1a\x30" + "\xb6\x03\xa6\x25\x69\xee\xce\x4e\xe5\xd0\x1e\x9a\x32\xae\x3b\xcb\x4e" + "\xc1\x63\xe4\x55\xe4\x52\x58\x24\x54\xce\xef\xef\xc0\x46", + "\xe6\xc6\xba\xc8\x7c\x17\xe2\x69\xa4\x71\x43\x4c\xa9\x56\x84\x01\x45" + "\x1d\x78\xc2\x44\x4a\x9d\x6e\xdc\xda\x3c\xda\xb5\x1c\x5b\xed\x1c\x19" + "\xea\xf3\x43\x26\x58\x0f\xd8\x5a\xe5\x23\x6a\xd5\x1b\xc5\xda\xe3\x86" + "\xb3\x61\x01\xf5\x46\x95\xc5\x95\xee\xed\xcd\xd0\x18\x2a\x4a\x11\x7f" + "\x80\x93\xf4\xf4\x81\x2e\x03\xdb\x39\x6e\xde\x98\x49\xd1\x93\xe7\x72" + "\x20\x81\xae\xec\x4b\xe6\xc4\xca\xf6\xc9\x79\xd3\x6e\xad\x56\x63\x4a" + "\x21\xbe\x21\x16\x2e\xa2\x32\xde\xc9\xcf\xfd\xbd\x24\x74\x24\x58\x78" + "\xdc\xa3\x69\xe8\x14\xfd\x02\x83\x03", + "\x53\x39\x20\xa0\x13\xcf\x00\x6a\xa2\x9b\x26\xf7\x4b\x6d\xd2\x93\x63" + "\x42\x93\x08\x99\x86\xaa\x24\x92\x71\xc4\x26\xb9\x42\xdc\x6b\xae\x32" + "\xb2\x64\x16\x16\x67\x2f\x3d\x75\x96\x88\x66\xe1\x82\xe5", + 1, 1024, NULL, 0 }, + { 384, 384, 27, + "\x90\xc4\x21\x5d\xc3\xf2\x37\x43\x50\x47\xfe\xfd\xd8\x63\x8d\x33\x9a" + "\x3f\xc6\x6f\xca\x06\xc5\x06\x3e\xac\xbd\xa0\x02\xab\x33\x5e\x62\x16" + "\x05\xf6\x72\xf3\xda\x9f\x64\x1f\xae\x11\x0a\xfc\x3e\x7b", + "\x1e\xbc\x22\xc3\x03\x1b\x64\x61\x5e\xb6\xf1\xa0\x69\x6e\x33\xb7\xdf" + "\x13\x9a\x4b\x89\x1d\x3e\x67\x21\xcc\x26\xc0\x5d\x55\xde\x79\x0d\xca" + "\x62\x36\x68\xc1\x03\x08\x48\x5d\x38\xe9\x5e\xc4\x76\x9f\xa4\x43\x0c" + "\xa3\xeb\xc2\x5d\xa9\xf5\xd3\x1c\x97\x26\x74\x51\x7d\x9a\x22\x22\xe6" + "\xb9\x7d\x8d\xef\x65\x12\xaf\x09\x6c\x6d\x14\x80\xd8\x3a\x22\x9c\x84" + "\xb7\xf2\x8c\x80\x18\x4b\x6b\xeb\xf3\xf4\xef\xf5\xfc\x4e\x5c\x6c\xfe" + "\xa4\xf8\xeb\xa9\xa9\x57\xf7\x91\x3b\x20\xa8\x8a\xd1\x73\x4f\x7c\x38" + "\x54\x7e\x93\x4d\x1d\xbf\x2d\x73\xdb\xd6\x1e\x31\xfb\x15\x83\xc7\xb6" + "\x57\x7a\x17\x1e\x7d\x02\xf1\x90\x45\x12\x6a\xc2\x97\x3d\x85\x5b\xc1" + "\x8d\x34\xd3\x23\x26\xd1\xe2\x16\xda\x58\x36\x6a\x60\x03\x34\x50\x09" + "\x11\x28\xae\x26\xa4\x79\x06\x9b\xba\x7b\x91\xb2\xab\x7f\x3c\x5f\xbc" + "\xde\x39\x1d\xe3\xca\x11\x4b\x95\x1d\x68\x52\xf9\x27\x95\xf8\x02\x3d" + "\x7a\x29\xa7\xf4\xce\x61\xe9\x24\x1b\x4f\x23\x5d\x21\xe8\x99\x08\x71" + "\x67\xab\x3f\x3a\x0e\x93\x21\xc7\x94\x2b\x16\x51\x78\x78\x8d\xf4\x8d" + "\x3b\x10\x6b\x20\x3e\xc1\xe0\x1d\x29\xbd\xa4\x1a\x99\xac\x0d\x2c" + "\x00", + "\xc5\x2b\x91\xda\xed\x6e\xe4\x64\x16\xf2\xdb\x78\x97\x82\x51\xcb\x33" + "\x4e\x5d\x8e\x00\xb3\x2a\xe0\x6e\x36\x5f\x45\x5d\x28\xde\x40\x6a\x9c" + "\xce\x2f\x9f\x29\x37\x8f\x22\x98\x22\xdb\xf2\x6b\xfd\xad", + 1, 2040, NULL, 0 }, + { 384, 384, 28, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x45\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 29, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa8\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + 
"\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 30, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x46\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 31, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xab\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 32, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\xc4\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 33, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x29\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 34, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbf\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 35, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4c\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 36, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\x44\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 37, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\x7f\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 38, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x14\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 39, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x91\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 40, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x17\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 41, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x92\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 42, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\x2b\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 43, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\x60\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 44, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7b\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 45, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x65\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 46, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\xfa\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 47, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\xe4\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 48, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x42\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 49, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\x90\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 50, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa5\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 51, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x37\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 52, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x89\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 53, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd6\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 54, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x8a\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 55, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd5\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 56, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x08\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 57, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\x57\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 58, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" 
+ "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x67", + 0, 0, NULL, 0 }, + { 384, 384, 59, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x17", + 0, 128, NULL, 0 }, + { 384, 384, 60, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x64", + 0, 0, NULL, 0 }, + { 384, 384, 61, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x14", + 0, 128, NULL, 0 }, + { 384, 384, 62, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x26", + 0, 0, NULL, 0 }, + { 384, 384, 63, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x56", + 0, 128, NULL, 0 }, + { 384, 384, 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\xe6", + 0, 0, NULL, 0 }, + { 384, 384, 65, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + 
"\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x96", + 0, 128, NULL, 0 }, + { 384, 384, 66, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x45\xbe\x81\xc4\x15\xd2\x83\xab\x7b\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 67, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa8\x4d\x07\xff\x90\xb3\x38\xe0\x65\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 68, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\x44\x15\xd2\x83\x2b\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 69, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\x7f\x90\xb3\x38\x60\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 70, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\x2b\x7a\x62\xa4\x51\x88\xe5\xda\x7b\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c\x92\xc1\xfc\x36\xf1\x98\xc0\xb3\xa7\x14" + "\x92\x18\x48\xd5\xe0\x3d\xf1\xc4\x84\x9b\xb8\x31\x0c\x66", + 0, 0, NULL, 0 }, + { 384, 384, 71, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\x60\x64\xb0\x36\x03\xd7\x6b\xcf\x82\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15\xdd\xe7\x66\x74\x89\x64\x00\xf9\x7b\x84" + "\x08\xbf\xef\xa6\xee\x86\xc7\x16\xbf\xa4\xa4\x60\xd2\x16", + 0, 128, NULL, 0 }, + { 384, 384, 72, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\xbb\x41\x7e\x3b\xea\x2d\x7c\x54\x85\x9d\x5b\xae\x77\x1a\x25\x04\x34" + "\x68\x25\x9f\x94\x2a\x4e\x93\x6d\x3e\x03\xc9\x0e\x67\x3f\x4c\x58\xeb" + "\x6d\xe7\xb7\x2a\x1f\xc2\x0e\x3b\x7b\x64\x47\xce\xf3\x99", + 0, 0, NULL, 0 }, + { 384, 384, 73, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x56\xb2\xf8\x00\x6f\x4c\xc7\x1f\x9b\x4f\xc9\xfc\x28\x94\x30\xfd\xeb" + "\x4e\x04\x77\x39\x94\x6b\xea\x22\x18\x99\x8b\x76\x9b\xff\x06\x84\x7b" + "\xf7\x40\x10\x59\x11\x79\x38\xe9\x40\x5b\x5b\x9f\x2d\xe9", + 0, 128, NULL, 0 }, + { 384, 384, 74, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 0, NULL, 0 }, + { 384, 384, 75, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 128, NULL, 0 }, + { 384, 384, 76, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 0, NULL, 0 }, + { 384, 384, 77, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 128, NULL, 0 }, + { 384, 384, 78, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\xc4\x3e\x01\x44\x95\x52\x03\x2b\xfa\xe2\x24\xd1\x08\x65\x5a\x7b\x4b" + "\x17\x5a\xe0\xeb\x55\x31\xec\x12\x41\x7c\xb6\x71\x18\x40\x33\x27\x94" + "\x12\x98\xc8\x55\x60\xbd\x71\x44\x04\x1b\x38\xb1\x8c\xe6", + 0, 0, NULL, 0 }, + { 384, 384, 79, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x29\xcd\x87\x7f\x10\x33\xb8\x60\xe4\x30\xb6\x83\x57\xeb\x4f\x82\x94" + "\x31\x7b\x08\x46\xeb\x14\x95\x5d\x67\xe6\xf4\x09\xe4\x80\x79\xfb\x04" + "\x88\x3f\x6f\x26\x6e\x06\x47\x96\x3f\x24\x24\xe0\x52\x96", + 0, 128, NULL, 0 }, + { 384, 384, 80, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x45\xbf\x80\xc5\x14\xd3\x82\xaa\x7b\x63\xa5\x50\x89\xe4\xdb\xfa\xca" + "\x96\xdb\x61\x6a\xd4\xb0\x6d\x93\xc0\xfd\x37\xf0\x99\xc1\xb2\xa6\x15" + "\x93\x19\x49\xd4\xe1\x3c\xf0\xc5\x85\x9a\xb9\x30\x0d\x67", + 0, 0, NULL, 0 }, + { 384, 384, 81, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa8\x4c\x06\xfe\x91\xb2\x39\xe1\x65\xb1\x37\x02\xd6\x6a\xce\x03\x15" + "\xb0\xfa\x89\xc7\x6a\x95\x14\xdc\xe6\x67\x75\x88\x65\x01\xf8\x7a\x85" + "\x09\xbe\xee\xa7\xef\x87\xc6\x17\xbe\xa5\xa5\x61\xd3\x17", + 0, 128, NULL, 0 }, + { 384, 192, 82, + "\x1c\x67\x82\x67\xbe\x13\xac\xb4\x64\x93\x9c\x28\x96\xc9\xe9\xce\x1d" + "\xeb\x5b\x30\x83\x3b\xdd\x9c\xa0\x03\x70\x88\x9b\x84\x41\x07\x82\xad" + "\x52\xaf\xe2\x5d\xc1\x0a\xb7\xec\x5c\xf5\xf3\x47\x93\xb7", + "", + "\x6d\xd5\x66\xbe\x67\x8c\x1e\x63\x59\xab\x31\xb6\x35\xcc\x16\x01\x60" + "\xa0\xc5\xa9\xc4\x9a\x0a\xc5", + 1, 0, NULL, 0 }, + { 384, 192, 83, + "\x00\xb1\x84\xc2\xc0\xa4\x91\xd7\x64\xa2\x6f\x8b\x2e\x56\xa9\x65\x22" + "\x2b\x36\x21\x3b\xdd\x10\x6a\xe7\x82\x30\x5c\x50\xf8\x92\x69\x90\x24" + "\x76\xe5\xdf\x3f\xa5\x8e\x0e\xcf\xae\x82\xa9\x60\x7c\x8e", + "\x9f", + "\x5a\xff\xf4\xb0\x09\xca\x9c\x9e\x5d\xcd\x84\xf0\x56\x07\xe7\xa7\xd4" + "\x3e\xe4\x3b\x42\x49\x89\x89", + 1, 8, NULL, 0 }, + { 384, 192, 84, + "\x05\x5b\x67\xed\xb6\x59\xe2\x9c\x10\xe3\xe9\xcd\x25\xaa\x1c\xd5\xab" + "\xf0\x88\x0e\x20\x26\xed\x84\x36\xe3\x9b\x06\x4b\x73\x15\x76\x0c\xd7" + "\xa9\x29\x4e\xe2\x3d\x47\x50\x96\x9c\xc8\xb5\xdb\xae\xd7", + "\x40\x47", + "\x4d\x08\xba\xef\x96\x9e\xed\x23\xb8\x14\x47\x2a\xcf\xf0\x8d\x08\xfd" + "\x34\x91\xa7\x28\x77\x8a\x1c", + 1, 16, NULL, 0 }, + { 384, 192, 85, + "\x9e\x3c\x19\x8e\x93\x93\x0f\x07\x6b\x03\x5c\x5f\xa8\xf1\x0d\x9a\x65" + "\xe9\x8c\x66\xcf\xb3\x66\x33\xe3\xcb\x33\x27\x9c\xdf\x57\x68\x8f\x10" + "\xb7\x47\x2d\x1f\xc9\xd9\x62\xce\x69\x54\x51\x9b\xfb\xf6", + "\x88\xcf\xab", + "\x1c\xde\x37\x65\xba\x5a\x15\xb1\xd0\x18\x21\x36\xa7\x2c\x60\x3a\xcd" + "\x3b\x90\x4c\xea\xc8\xf7\xad", + 1, 24, NULL, 0 }, + { 384, 192, 86, + "\xf5\xf5\x96\x2b\xda\x25\x7b\x38\xb2\xa2\x31\x89\x29\x12\x1b\x2e\xae" + "\xf7\x92\xd5\xc6\xa9\x58\x5e\x48\xb8\x0c\xf5\x35\x7b\x29\xc3\x95\x1b" + "\x78\x7e\xd3\xe0\x3e\x38\x5b\x05\xb8\xff\xe6\x86\x1d\xc3", + "\xd9\x39\x77\x53", + "\x46\x38\xe4\x42\x7e\x60\x84\xb7\x6c\x53\xed\x9d\x6e\x91\x61\x62\xfc" + "\xb8\xb9\x62\xc3\xd6\x16\xf1", + 1, 32, NULL, 0 }, + { 384, 192, 87, + "\xf6\x28\x20\xed\x5f\x98\x33\xfd\x22\xde\xe7\xbd\x49\xe2\xc9\xb1\x9f" + "\xc9\x66\x88\x97\xc2\xc3\x3e\x6c\x7c\x1f\xa5\xc2\x77\xc3\xb9\xf5\x81" + "\xfa\xef\x3d\xdc\x66\x4b\xa5\x37\x97\x5d\x8a\xfa\xa7\x07", + "\x9b\x6c\xc7\xca\xa4", + "\xf6\xe2\x72\xa7\xa6\x23\x5f\x60\xb7\x2b\x4c\x74\x24\xcf\x32\xa0\x7f" + "\x98\xea\x59\x26\x65\xba\xd8", + 1, 40, NULL, 0 }, + { 384, 192, 88, + "\xf2\x22\xa1\xda\xbf\x32\x2a\xff\x84\x63\xac\xee\x64\x44\x93\x93\x31" + "\x21\x2b\xe3\xe1\x9d\x31\xf4\xb7\x3f\xdc\xc9\x7e\x29\x25\x36\x5e\xa3" + "\x3c\x98\x52\x82\x80\x5c\x83\xdc\xd8\xfb\x42\xa0\xe2\x14", + "\xc8\x5a\xd7\x87\x2b\x76", + "\x93\x3f\x0f\xa6\x1d\x44\x66\xb5\xba\xf5\xa6\x01\xf6\xb9\x6d\x81\xa9" + "\x7e\x81\xc5\x12\xd8\x22\xe6", + 1, 48, NULL, 0 }, + { 384, 192, 89, + 
"\x56\xe8\x0f\x38\x99\xe9\x45\x31\x0a\x9d\x9b\xef\x3d\x32\x09\x1f\x29" + "\xc1\x57\xdd\x46\xb2\xd4\x39\xad\x89\xd6\x3e\x14\xb2\xc2\x43\x90\xf7" + "\x4d\xb4\xd9\x05\xf6\xbd\x03\xf7\x5c\x32\xe9\x12\x25\xfe", + "\x80\xba\x25\xf1\xc2\x76\x50", + "\xa1\xa6\xe2\x48\xb4\x08\x64\xdd\xf8\x3b\x00\xc5\x2a\xe2\xc3\x03\xb7" + "\xe7\x6f\xba\x05\x48\xd4\xd4", + 1, 56, NULL, 0 }, + { 384, 192, 90, + "\x6c\xb6\x26\x1a\x56\xa2\x1b\x2c\x3c\x13\x45\x3c\x15\x83\x64\xaa\xfa" + "\x78\xf5\x81\x72\xa9\xae\x3e\xeb\x32\x8a\xc3\x88\x08\xb5\xc6\x8c\x11" + "\x11\x97\xa3\x03\xec\x36\x84\x7c\x9a\x31\x5a\xc5\xeb\x5b", + "\x79\x43\x0d\xe5\x1d\x68\xcf\x34", + "\x33\x59\x3a\x80\xda\x45\x5e\x58\x0c\xcc\x5e\xe9\xb6\x0e\xdc\xd1\x46" + "\x84\x60\x53\x97\x88\xfc\x41", + 1, 64, NULL, 0 }, + { 384, 192, 91, + "\x44\xca\x1e\xcb\x49\x04\x70\xa8\x4c\x7e\x13\xe1\xf1\xc6\x9d\xa2\x1f" + "\x48\xc3\x3b\x6f\x05\x0f\x48\xf7\xf2\x44\xf0\xfd\xa8\xb3\xc8\x55\x90" + "\x4e\xd0\x61\x2e\x2d\xaf\xa5\x10\x5c\xbd\x7f\x64\x49\xeb", + "\x87\x0b\x98\x1c\x8a\xfd\x9f\xae\x1b", + "\x93\x0f\x2e\x40\x1e\x3a\xaf\xb4\x6a\x0c\x40\x29\x00\x2f\x4e\xf1\xab" + "\x9f\xe8\x38\xbc\x00\xc7\x9e", + 1, 72, NULL, 0 }, + { 384, 192, 92, + "\xaa\xca\x68\x88\x2c\xfa\x72\x50\x98\x8a\x24\x7b\x96\xcf\xb3\x23\x2d" + "\x65\x67\x37\x8f\x8f\xa7\xe7\xaa\xac\xa1\xc3\x86\xe1\xae\x15\xe5\x49" + "\x57\xd2\x2b\xff\xf1\xe5\x0a\xe7\xf2\x1b\xee\xa1\x97\xa5", + "\xa6\xf3\x1b\x82\x2e\xc2\x4d\xa1\xb1\xe9", + "\xa9\xc2\xd6\x8f\x0a\xd1\xba\x50\x08\x9b\x16\x9c\x86\xd9\x65\xf9\x7f" + "\x52\x38\x8a\x48\xac\xe7\x44", + 1, 80, NULL, 0 }, + { 384, 192, 93, + "\x1b\x32\xf9\xb6\x37\x89\x34\xa5\x02\xdd\x74\xd8\xb7\x4a\x46\x06\xd5" + "\xb2\xc9\xa8\x58\x7f\xab\x1c\xfa\x90\xd7\x50\x07\x73\x4d\x2b\x8b\xdf" + "\xe6\x34\x81\x52\x43\x52\x6e\xbc\x0f\x33\xc0\x4d\x0d\x05", + "\x55\x36\x7c\x65\x7c\x79\x26\x10\xef\xdc\xc0", + "\x93\x40\x83\xc8\x59\x45\x91\xda\x78\x3f\x0d\xa2\x8f\x4b\x58\xad\xb6" + "\x04\xe9\xcc\x76\xb9\x9e\xfe", + 1, 88, NULL, 0 }, + { 384, 192, 94, + "\x09\xd9\x1b\x2f\xa2\x2e\x68\xb5\x33\x5d\x47\x82\x35\xaa\x4e\x15\x74" + "\x35\xc9\xac\xfe\xd7\x72\x21\x9a\xdf\xa1\xe9\xdd\x72\xf3\x3e\x1a\x21" + "\x83\xa0\x20\x3a\x10\x4f\x80\xe6\x43\xcd\xf2\x9e\x5a\xff", + "\xb3\x1e\x25\x49\x57\xdb\x6b\x1b\x70\xa0\x6c\xe2", + "\x7d\x45\xf3\x89\x94\x55\x78\x7e\x71\x16\xb5\x70\xdf\x8f\x77\x87\xf6" + "\x72\xd5\x82\x1d\x6f\x75\xfe", + 1, 96, NULL, 0 }, + { 384, 192, 95, + "\xd3\x11\xa8\x0a\xc8\x01\xe3\x63\x9b\x91\x85\x60\x8a\xf4\xa8\x5e\x41" + "\x22\xe2\x9b\x5c\x23\xf0\x52\x34\xc3\x0d\x92\xd5\x9a\xd1\x3c\xb8\x03" + "\x90\xe5\xfa\x0e\xa4\xa5\x48\x53\x22\x8b\x35\x66\x89\xf5", + "\xe6\xb4\x43\xdb\xa0\xda\xb3\x5d\x43\xca\x5d\x6c\xe6", + "\x27\x29\x70\x96\xf5\x8f\x59\x83\x91\xc5\x77\x78\x12\x99\x49\xb9\x46" + "\x28\xbf\x17\xbb\x24\x22\xd1", + 1, 104, NULL, 0 }, + { 384, 192, 96, + "\x06\x29\x7e\x6c\x46\x55\x8b\x9b\x0f\xc3\x6c\x27\x2b\x4a\xe7\xe6\x5d" + "\xd5\x36\xcc\x1d\x13\xac\xbf\xa8\x31\xfa\x55\x74\xb3\x4f\x99\xe0\x9a" + "\xdf\xb7\xf2\x03\x21\xf2\x03\x07\x5f\xd2\x6e\xd2\xe2\x9d", + "\x30\x9b\x95\xe5\xf1\xec\x26\xf7\x07\x86\xe7\x4d\x80\x6d", + "\xaa\xbf\xf2\x6f\xc4\x4a\x40\xf0\xb8\x7a\x40\xc1\x75\xc1\x7e\xa7\x14" + "\x0f\x84\x67\xdc\xdb\x95\xcd", + 1, 112, NULL, 0 }, + { 384, 192, 97, + "\xe8\xb6\x3a\x25\xcd\x85\xad\x4f\x39\xe3\xc0\xe9\x58\x4e\xac\xb9\x4d" + "\x6a\xe3\x3f\x98\x4d\xa2\x59\xaa\x53\x3d\x4d\x28\xae\xb3\x41\xcf\x3f" + "\xfe\x49\xc0\x29\xe4\xaf\x6a\x48\x05\xf7\x60\xf3\x5f\x2c", + "\xd2\x25\xc2\x77\x95\xf8\x09\x45\x4b\xb2\xc5\x1d\x21\xf3\xac", + "\x0e\x12\xb7\x58\x01\x5a\xc8\x97\x97\xd5\x54\x70\xf3\x98\x2c\x13\xa5" 
+ "\xff\x14\x83\x27\x60\x83\xd2", + 1, 120, NULL, 0 }, + { 384, 192, 98, + "\xd8\x3a\x68\x5a\xce\x9f\xa0\xc0\xaa\x47\xf0\xc7\xb4\xf0\xf0\x07\x17" + "\x61\x9a\x82\xe2\xee\xff\x87\xf5\x1f\x67\xd8\x14\xd5\x1d\xd9\xe4\xca" + "\xd7\x57\x8a\x4e\x49\xb6\x72\xb5\xaf\x83\x94\x3c\x25\x83", + "\xab\xfa\x7f\x59\x78\xf7\x51\xe8\x7e\x8b\x5a\x15\xa6\xe8\x9f\x4f", + "\xe4\xe6\xba\x04\x1b\xbb\x7a\x47\xec\x84\x82\xb2\x04\x34\x55\xc1\x19" + "\xfb\xdb\x38\x9a\x39\x45\xa0", + 1, 128, NULL, 0 }, + { 384, 192, 99, + "\x5b\xea\xf4\x06\xa6\x62\x7e\xaa\xfc\xad\xb6\xde\xa4\xe2\x7b\xa4\xfd" + "\x87\x9f\xd3\xe5\xbf\xd8\x7e\xa3\xc8\xd5\xe0\xac\xfb\xbd\xa2\xc6\xbf" + "\x00\x6b\xea\xf5\xa3\x03\x12\xe6\x90\x72\x4c\x47\x44\xa3", + "\xbc\x57\xd4\x67\xa9\xa2\xaf\x64\xad\x5e\x14\xb7\xbc\x08\x98\xdc" + "\x63", + "\x3f\xab\x1a\x7a\x19\x23\x59\xb6\x33\x3a\x96\x99\xb7\x56\x12\x21\x1a" + "\x38\xb6\xdc\xca\xb4\x57\x2d", + 1, 136, NULL, 0 }, + { 384, 192, 100, + "\x76\xb3\x6c\xc3\xb8\xca\x97\x57\x08\xee\x4b\x32\xbd\xbe\x40\xca\x13" + "\xf9\xce\x38\x4c\x52\xc4\xb6\x60\x2b\x7f\xd9\x21\x64\xf1\xfd\x84\x32" + "\x70\x6c\x19\x66\xf6\x48\xbf\x48\x30\xf4\xde\xb3\x47\x95", + "\xb1\xd0\x22\xc6\x53\x6f\x40\x1d\x14\x7d\xfc\x0d\x7d\x4e\x60\x0b\xb7" + "\x53\xef\x0e\x9f\x24\x3b\xc3", + "\xc9\x1e\xb3\xf3\x62\x04\x9c\x53\x36\xc5\x07\x4c\xb8\x87\xed\xcb\x27" + "\xaa\xc1\xef\x65\x75\xa9\x2d", + 1, 192, NULL, 0 }, + { 384, 192, 101, + "\x20\x56\x9a\x16\xf4\x53\xdd\x3c\x34\xdf\x98\x15\x52\x86\xb1\xca\x8a" + "\x39\x2e\xa1\x64\xc9\x19\x31\x1f\x0d\xf9\xd3\x9d\x97\x60\x62\xf4\xf9" + "\x92\xb9\x6d\xef\x38\x51\x88\x6e\x62\x95\xf2\x61\x50\x64", + "\x54\x02\xc4\xe6\x83\xd1\xa4\x31\x86\x8a\xd5\x28\xaf\xbf\x41\x28\xb0" + "\xb1\x0c\xef\x94\x7d\x06\x3b\x34\xd3\x76\xd3\x44\xb7\x93\xb2", + "\x27\x72\x80\x59\x69\x6a\xed\x5b\xb0\x0a\x13\xc1\xdb\x10\x06\x91\xd4" + "\xa2\x1e\xbe\xa0\xa8\xe4\xc3", + 1, 256, NULL, 0 }, + { 384, 192, 102, + "\x9e\xf6\xa5\x5f\x8a\x9b\x6b\x9e\xf1\xf8\x29\x61\x67\x31\x90\x78\x16" + "\x37\x06\xae\x5b\x60\x89\x7c\x2d\xd6\xe3\x40\xb6\x7e\xd5\xd5\x77\xfb" + "\x54\xc5\x54\x7c\xd5\xf2\x48\xf0\x6e\x70\x82\xff\xb8\x26", + "\x6a\x0d\x16\x27\x69\x41\xd8\xf0\x4e\xac\x2e\xc7\x23\xfa\x53\xb9\xd6" + "\xb1\x6d\xa7\xe3\x0e\x7f\x2d\x9a\xd8\x98\xe7\xcb\xb7\x1b\xd3\xdd\x23" + "\x4e\xe2\x28\x36\xff\x4a\xc6\x01\x1b\x6f\x12\xbd\x3a", + "\xce\xf5\xd9\x00\xee\xf0\xab\xef\xc6\x25\xc1\xd2\x86\x2a\x3f\x42\x99" + "\x8c\xe8\xb1\xe0\x07\xd2\xb8", + 1, 376, NULL, 0 }, + { 384, 192, 103, + "\xfb\x56\xbb\xbc\x6d\x75\x1b\x74\x4d\x8c\x1b\x57\xcc\x27\xa1\xd2\xc2" + "\xf4\xe3\x8e\x34\x91\xf5\x44\x48\xcf\xcf\xb9\x38\x9b\x7f\x63\xfd\x0d" + "\x41\x92\x09\x68\xef\x61\x25\x10\x62\x5f\x26\x37\xd2\x8d", + "\xcf\x17\x91\x51\x7e\xf5\xa6\x1c\x0d\xb6\x5a\x66\x8b\xee\x26\xfd\xbc" + "\x97\x5d\x79\x9b\x26\x23\xcc\x0f\x3e\x45\x60\xe8\x0c\x70\x14\xfa\x9c" + "\x02\xd5\x68\xc9\x8c\x86\x38\x5e\x00\x0f\xe6\x77\x6b\xb7", + "\x88\xe9\x9a\xcc\xc9\xc2\x3c\x9c\x8c\x11\x10\xe7\x47\x0c\xad\xe0\x31" + "\x78\x17\x91\x6d\x85\x05\xf5", + 1, 384, NULL, 0 }, + { 384, 192, 104, + "\xd0\x41\xe2\x4e\x59\xb3\x4d\x7a\x18\x12\x8a\x42\xd8\xa7\xa5\x2d\xcb" + "\xa5\xd7\x9e\x5e\xd5\x85\xb5\x5c\x7c\x9e\x49\x46\xe5\xcc\xaf\x7e\x59" + "\xdf\x0f\x3d\xa9\x8c\x7d\x05\x23\xe4\xcc\x8f\x9d\x7d\xa4", + "\x52\x79\x61\x8f\x1b\x41\x53\x49\x10\x39\x5a\x78\xde\xd9\x68\xae\xe3" + "\x43\x10\x85\xb5\x99\xc4\xf5\x5e\xb5\xff\x8a\x2e\x87\x9b\xc4\x42\x91" + "\xd9\x23\xde\x31\x00\x9d\xb1\xb9\xf7\xf8\x10\x95\xaf\xb3\xea", + "\x85\x00\xf6\x03\xce\x85\xc0\x30\xcf\xa0\x57\x31\x75\x8b\x6b\xe3\x31" + "\x7b\x6f\xe8\xe9\x9b\x7d\x48", + 1, 392, NULL, 0 
}, + { 384, 192, 105, + "\xe1\xce\x48\x84\xfd\x74\xa0\xe1\x97\xc6\x8a\xce\x3b\x29\xb5\x52\x31" + "\x3a\xf8\xe4\x51\xe9\x8d\x9a\xb8\xd0\xe8\xf8\xee\x74\x14\x3e\x8f\xcb" + "\x64\x46\x21\x7c\x0f\x31\x23\xa4\x26\xb8\xab\x6f\x62\xcb", + "\x71\x15\x4b\x9a\x65\x7b\x90\x5f\x88\x4b\xa5\x14\x0d\x5e\x7b\x92\x43" + "\xfe\xc3\xe0\x3f\xbb\xdb\xb3\x60\xc8\x19\x49\x63\xae\x43\x17\x7b\x55" + "\x02\xcd\x20\xf5\x59\xee\xef\xf8\x63\x8d\x02\x8c\x50\x19\x26\xeb\xc7" + "\xed\xdd\x13\x2c\xce\xa2\x9e\xad\x7a\xd0\xc9\x5a\x30\xb9\xd3\x25\x95" + "\x2c\xaf\xb0\xea\x5e\xc9\xd9\xd6\xfd\xeb\x63\x95\x0d\x5d\x69\xc8\xbb" + "\xbe\xa7\x02\xae\xd1\xd4\x44\xda\x28\x68\x07\xff\xd6\xb3\x6c\xb4\x99" + "\x02\xcb\xa7\xab\xf9\xbd\xa1\xb5\x77\xc6", + "\xc7\xe9\xae\x2a\x81\xde\x32\x28\x0b\x51\x8d\x05\x5c\x2c\x9d\x7f\x0f" + "\x5d\xb6\xd0\x6a\xd0\xe4\xae", + 1, 896, NULL, 0 }, + { 384, 192, 106, + "\x8a\x24\x2c\x22\xd1\xb5\x4c\xe2\x16\xca\x03\xc8\x84\x55\xbe\xb1\x28" + "\x21\x1a\x9f\x35\xaf\x23\x43\x70\x9a\xf7\xc5\xf4\x3a\x68\x14\x51\xea" + "\x53\xa3\x6d\xe2\xe5\x04\x8e\xb4\x4a\x51\x68\x1c\x61\x20", + "\xab\x5e\xee\x6b\x83\x86\x91\x19\xf0\x0d\xd3\xcc\x66\xdd\xe7\x5c\xb5" + "\x70\x05\x35\xa9\x0e\x9b\x3e\x32\xb3\x14\x34\xc2\x97\xef\x53\xf9\x46" + "\x59\xd7\xd9\xb1\x13\x23\x16\x1b\x2e\x66\xc6\xb9\xc9\xad\x20\xe3\x13" + "\x30\x3f\x81\xe8\x8e\x47\x17\x86\xc8\xe9\x36\x01\x1f\x78\x12\x1e\x39" + "\x63\x0b\x2e\x08\x04\xfc\x97\xce\x5c\xb3\xa3\x4f\x26\x94\x94\x39\xfe" + "\x53\x0a\xdc\xea\x6e\x97\xc7\x8b\x04\x2e\x08\x17\x25\x3b\xf7\x5d\xd5" + "\x43\x35\x58\x41\x22\xf5\xed\xd2\x10\x34\x1b\x6d\x93\xf5\x8a\xa1\xb4" + "\xde\x2a\xad\x76\xfe\xce\xc4\x4f", + "\x77\x39\x2b\x18\x57\x7b\xa8\x81\x9f\xbd\x76\xfc\x73\xd4\x50\x29\xe5" + "\x5e\x7e\xbe\xcd\x58\xa3\x20", + 1, 1016, NULL, 0 }, + { 384, 192, 107, + "\x31\x1c\x4b\xee\x7c\xf2\x57\xb7\x80\x13\x5a\x2e\x4a\x64\x13\xe6\x8a" + "\x81\x6f\x5d\x84\x62\x51\x5d\xcb\x1c\x72\x49\x4b\x63\x35\x58\x1a\x9b" + "\x60\xa2\x17\xb9\xff\x1c\x75\xe7\x76\x81\x48\xf8\xdf\x46", + "\x63\xcc\xc3\x84\x9c\x4c\x32\x3c\xb6\xce\x92\x68\x77\x96\x90\x48\xb8" + "\x49\xee\x4a\xf1\x8e\x71\xee\xf5\x2f\xe9\xf2\x74\xa8\x67\x85\x60\xf9" + "\xa5\xd4\x75\x10\xc3\xc9\x8c\x8a\x08\xed\x4c\x01\xa0\x1e\x0a\x36\x63" + "\xef\x0c\xc6\xc3\xcd\xca\x62\x76\xd9\x1e\x99\xb0\xd4\x14\x26\x34\x98" + "\xfb\x64\xad\x74\xb8\x20\xab\x52\xb3\x7a\xde\xaf\x27\xcb\x44\x54\x5e" + "\xdb\x8f\x09\x09\x49\x92\x83\x7b\x8d\x3a\x0b\xaa\x2a\x10\x1a\x49\x59" + "\x2e\xb8\x89\xdc\x8b\xac\xe4\xc7\x1e\x3e\xfc\xb9\xd4\x14\x9b\xd6\x70" + "\xce\x2f\x77\x4d\x73\xc1\x2f\x2a\x45", + "\x94\x67\x4a\xae\xfc\x06\xee\xad\x22\xd1\x53\x17\x90\x0f\xa2\x6c\x8d" + "\xf8\xcd\xfb\x25\x2b\xca\xae", + 1, 1024, NULL, 0 }, + { 384, 192, 108, + "\xfb\x79\x28\x67\xc8\x92\x8f\x05\x03\xaa\x24\x47\x7c\xeb\xf4\x2e\x0b" + "\x01\x83\x46\xe3\x61\x97\x70\xb9\xe8\xf5\x09\x79\x45\xe2\xe2\x75\xad" + "\x06\xf0\xc1\x21\x52\x36\x6a\xc0\x6e\x27\x8c\x94\x09\x0a", + "\x0a\x63\xe6\xd9\x1d\x7a\x6a\x18\xdb\xad\x87\x9f\xb8\xe2\x3a\xe3\x51" + "\x92\x03\x91\xeb\x40\xfe\xad\x6c\xba\x84\x67\x68\xa2\xc6\x79\x7f\xf3" + "\x47\xb4\x30\x13\x27\xb0\x9a\xfc\x41\xf7\xb8\x03\xaf\x6b\x61\xf6\xd9" + "\xb8\x18\xe0\xdd\xcc\x02\x53\x6d\x05\x43\xdb\xf1\xa8\x7f\x2c\x5e\x02" + "\x0f\x64\x59\x09\x43\x44\xb7\x25\x96\xd5\x48\x43\x5c\x31\x35\x44\xe9" + "\x2c\x25\x4d\x54\xa7\x0a\x1d\x6f\x6e\xdd\x2f\x82\x54\x0a\x1e\xa2\xe8" + "\x21\x25\xb0\x71\x5f\xa0\xf8\x90\xbb\x2b\xe4\xba\x00\x65\xd2\xba\x01" + "\x44\x85\x46\x82\xae\xd0\x41\xc1\x03\x59\x96\x64\x8e\x2e\xd6\x71\xb7" + "\x25\x3b\xa5\x67\xff\xb9\x99\xd9\x1f\xd8\xe7\xff\xce\x5c\x6d\xc4\x79" + 
"\x07\x32\xad\xae\x44\x34\x35\xa4\x54\xfe\x6c\x2a\x7c\x67\x08\xd9\xd5" + "\xb2\xeb\x92\x92\xd6\xfb\xe5\xe0\x26\xd6\x53\x32\xb3\x8c\x79\x25\xef" + "\xf9\xbe\xb8\x90\x63\xca\xb6\x3f\xbe\xcb\x2a\xc0\xe1\xbb\x61\xa5\xb1" + "\xe5\x11\xf9\x49\xc4\x3a\x34\xee\x26\xf1\x15\x6e\x97\x79\x3d\xa9\x7b" + "\xcf\x5b\x5c\x67\x64\x13\x84\xf2\x68\x13\x1b\x29\x78\x57\xd7\x19\xee" + "\xb6\xca\xfa\x3d\xbe\x9b\x8d\x0d\xa5\x5c\x98\x65\x6f\x20\xe5\xb3" + "\x9b", + "\x1a\xaa\xff\x96\x6c\x0a\x84\xba\xc7\x91\xab\x9e\x0b\x9b\x50\x5d\x39" + "\x30\x73\x66\x57\x32\xa7\x4a", + 1, 2040, NULL, 0 }, + { 384, 192, 109, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x45\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 110, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa8\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 111, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x46\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 112, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xab\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 113, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\xc4\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 114, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x29\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 115, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbf\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 116, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4c\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 117, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\x44\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 118, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\x7f\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 119, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x14\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 120, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x91\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 121, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x17\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 122, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x92\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 123, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\x2b\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 124, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\x60\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 125, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + 
"\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7b\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 126, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x65\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 127, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\xfa\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\xe4\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 129, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x42\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 130, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\x90\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 131, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa5\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 132, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x37\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 133, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x89\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 134, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", 
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd6\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 135, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x8a\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 136, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd5\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 137, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x08\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 138, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\x57\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 139, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6d", + 0, 0, NULL, 0 }, + { 384, 192, 140, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x14", + 0, 128, NULL, 0 }, + { 384, 192, 141, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6e", + 0, 0, NULL, 0 }, + { 384, 192, 142, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x17", + 0, 128, NULL, 0 }, + { 384, 192, 143, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + 
"\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x2c", + 0, 0, NULL, 0 }, + { 384, 192, 144, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x55", + 0, 128, NULL, 0 }, + { 384, 192, 145, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\xab\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\xec", + 0, 0, NULL, 0 }, + { 384, 192, 146, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\xe0\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x95", + 0, 128, NULL, 0 }, + { 384, 192, 147, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x45\xbe\x81\xc4\x15\xd2\x83\xab\x7b\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 148, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa8\x4d\x07\xff\x90\xb3\x38\xe0\x65\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 149, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\x44\x15\xd2\x83\x2b\x7a\x62\xa4\x51\x88\xe5\xda\xfb\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 150, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\x7f\x90\xb3\x38\x60\x64\xb0\x36\x03\xd7\x6b\xcf\x02\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 151, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x44\xbe\x81\xc4\x15\xd2\x83\x2b\x7a\x62\xa4\x51\x88\xe5\xda\x7b\xcb" + "\x97\xda\x60\x6b\xd5\xb1\x6c", + 0, 0, NULL, 0 }, + { 384, 192, 152, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", 
+ "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa9\x4d\x07\xff\x90\xb3\x38\x60\x64\xb0\x36\x03\xd7\x6b\xcf\x82\x14" + "\xb1\xfb\x88\xc6\x6b\x94\x15", + 0, 128, NULL, 0 }, + { 384, 192, 153, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\xbb\x41\x7e\x3b\xea\x2d\x7c\x54\x85\x9d\x5b\xae\x77\x1a\x25\x04\x34" + "\x68\x25\x9f\x94\x2a\x4e\x93", + 0, 0, NULL, 0 }, + { 384, 192, 154, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x56\xb2\xf8\x00\x6f\x4c\xc7\x1f\x9b\x4f\xc9\xfc\x28\x94\x30\xfd\xeb" + "\x4e\x04\x77\x39\x94\x6b\xea", + 0, 128, NULL, 0 }, + { 384, 192, 155, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00", + 0, 0, NULL, 0 }, + { 384, 192, 156, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00", + 0, 128, NULL, 0 }, + { 384, 192, 157, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff", + 0, 0, NULL, 0 }, + { 384, 192, 158, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff", + 0, 128, NULL, 0 }, + { 384, 192, 159, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\xc4\x3e\x01\x44\x95\x52\x03\x2b\xfa\xe2\x24\xd1\x08\x65\x5a\x7b\x4b" + "\x17\x5a\xe0\xeb\x55\x31\xec", + 0, 0, NULL, 0 }, + { 384, 192, 160, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x29\xcd\x87\x7f\x10\x33\xb8\x60\xe4\x30\xb6\x83\x57\xeb\x4f\x82\x94" + "\x31\x7b\x08\x46\xeb\x14\x95", + 0, 128, NULL, 0 }, + { 384, 192, 161, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + 
"\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "", + "\x45\xbf\x80\xc5\x14\xd3\x82\xaa\x7b\x63\xa5\x50\x89\xe4\xdb\xfa\xca" + "\x96\xdb\x61\x6a\xd4\xb0\x6d", + 0, 0, NULL, 0 }, + { 384, 192, 162, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xa8\x4c\x06\xfe\x91\xb2\x39\xe1\x65\xb1\x37\x02\xd6\x6a\xce\x03\x15" + "\xb0\xfa\x89\xc7\x6a\x95\x14", + 0, 128, NULL, 0 }, + { 192, 384, 163, + "\x08\x47\x6e\x9d\x49\x49\x9c\x5f\x52\xe3\x7f\x80\xec\xe6\xf5\xa4\x54" + "\x59\x94\x88\x06\xb4\x82\x41", + "", + "\x1b\x6c\xfc\x87\x09\xaa\xb8\x07\x54\x65\xf3\x2e\x13\xb0\xb0\xf7\x96" + "\xcc\x34\xd9\x3d\x7b\xed\x09\x0f\x29\x7d\xcf\x9f\xb7\x5e\x0d\x8e\x28" + "\x5b\x15\x00\xb7\x32\xd5\x54\xac\x97\xba\x45\xf3\x3e\x47", + 1, 0, NULL, 0 }, + { 192, 384, 164, + "\x21\x3b\x44\xd8\xe1\xfa\xba\xff\x83\x7e\xf3\x0e\xe2\x54\x2f\x9a\xb8" + "\x2e\xd7\x04\x11\xda\xe7\x8f", + "\xee\x0b\xf4\x85\x85\xc1\x86\xff\x99\x1b\x4d\x86\x07\x81\x7c\x9c", + "\x54\xf4\x01\x0d\x50\xf8\x0b\xcd\xb4\xb8\x4d\x56\xbc\x4e\xf3\x0e\x4c" + "\x68\xf7\x51\x28\x21\x4c\xf4\x46\xb5\x14\x5f\x6f\xff\x13\x26\xa2\x09" + "\x94\x5f\xc2\x1a\xb5\xe1\xf5\xd9\x17\x55\x9e\xa9\xb8\x00", + 1, 128, NULL, 0 }, + { 192, 384, 165, + "\xb4\xaf\xa9\xda\xaa\x8c\x94\x4d\x73\xa3\x88\x1f\x32\x21\xe4\x2b\x34" + "\xef\x4e\x35\xf1\x84\xe8\x78", + "\xcf\x60\x7f\x6a\x0e\xb4\x4e\xcb\xca\x81\xb6\xd1\xfd\xb5\x95\xce\xe3" + "\x5f\x23\x53\xda\x02\xe8\x2e\x28\xe1\x33\xb9\xde\xcd\x8f\xbb", + "\xd0\x64\xa5\x1f\xb1\x09\xc3\xb1\xd4\x43\xf1\x3f\x41\xe9\x0e\x14\x19" + "\x8f\x84\x60\x80\x46\x45\x47\x80\x6d\x46\xa8\x15\x1c\x4e\x38\x55\xa8" + "\x1f\x4a\xf4\x09\x15\x60\x90\x95\xdd\x72\xf8\x69\xaa\x1b", + 1, 256, NULL, 0 }, + { 192, 192, 166, + "\x89\xe4\x6b\x66\x20\x95\x48\xc8\x0b\x0c\x83\x06\x62\x22\x3b\x49\xb0" + "\xe3\xb8\x95\xeb\x30\xe2\xfc", + "", + "\x4b\x01\x2c\x0c\x0d\xa4\x4e\xde\x2a\x42\x7e\x85\xac\xe8\xec\xc5\x4b" + "\x37\x9e\x9e\x24\xf0\x8d\x41", + 1, 0, NULL, 0 }, + { 192, 192, 167, + "\xf2\xc1\x0c\xe8\xcb\x1c\xf3\xb3\x63\x35\x44\x73\xb0\x27\xc1\xe5\x3d" + "\xec\xce\xf0\x32\x33\xbe\x0c", + "\xe1\xfa\x10\xb8\xe3\x01\xe0\x34\x84\x05\x77\x0b\xc3\xfa\xfc\xb1", + "\x2d\x08\x8a\xf2\x9c\xc7\x44\xe3\x47\x12\x4f\xbe\x41\x00\xcb\xcd\xeb" + "\xba\xe0\x37\xed\x9b\xf6\x9d", + 1, 128, NULL, 0 }, + { 192, 192, 168, + "\x92\xe0\x74\x44\x2c\xc4\xc5\x9e\x72\x26\x08\x08\xd8\x0d\x8e\x7b\x85" + "\xc6\x33\x50\x68\x91\x7b\x83", + "\x34\xea\xe2\x74\x25\xac\xe1\x77\x71\xe1\x64\xcb\xb6\x34\x30\x6f\x35" + "\x2e\xdc\x9c\x37\xbf\x60\x8b\xe8\xa7\x55\xfb\x94\x14\x81\x83", + "\xb7\xe6\xb7\xbb\x29\xc0\x2e\x46\x35\xdb\xdc\x50\xd8\xbe\x71\xe2\xdd" + "\xf0\xa5\x44\x47\x1d\xe2\x85", + 1, 256, NULL, 0 }, + { 520, 384, 169, + "\xdb\x6f\x99\x56\xc3\xf4\xca\x6e\x41\xf1\xf7\xf1\x46\x29\xd4\x4c\x79" + "\xe0\x35\x3e\xdb\xf3\xe3\x10\xe6\x85\x8b\xbc\x45\xa7\xcd\x57\x77\x8a" + "\x90\x53\xba\x22\xa1\x41\xbf\x58\xbf\xd4\x34\xad\x08\x64\x8c\x70\x41" + "\xa2\x24\xb9\x7a\x0d\x17\xe0\xed\xf9\x4f\xd4\x0b\x41\x0a", + "", + "\x0c\xb1\xb2\x96\x25\x5b\xb2\x59\xf3\xb6\x01\xb4\x9b\x35\x52\x4a\x5e" + "\xca\x6c\x52\x36\x07\x54\xd3\xd9\x6d\xd5\x21\xc9\x05\xb1\xc1\x82\x1d" + "\x74\x96\x59\x67\xd8\xe8\x6d\x50\xde\x95\x0f\xe4\xd6\x35", + 1, 0, NULL, 0 }, + { 520, 384, 170, + "\xf0\x34\x04\xbd\xb3\xe0\x8f\x53\x0d\x4c\x3a\x5f\x16\x5d\x23\x60\x12" + 
"\xa4\xc4\x5c\xd0\x63\xe3\xe4\x48\x3d\xa0\x88\xec\x0a\xfd\xb2\x4e\x96" + "\x39\xfc\xca\xbb\x91\xf9\x8a\x49\xdc\x29\x72\xe2\x98\x14\x26\x57\x3e" + "\xcf\xe6\x9c\x00\xc4\x3a\x2d\x99\xa3\x10\x7c\xef\x3a\x70", + "\x73\xed\x9f\xa2\xac\xf4\x9d\x6c\x98\xbf\xc7\xd6\xc5\xad\x9c\x56", + "\xb6\x13\x2e\x52\x16\xf7\x11\xee\xeb\x44\xda\x3d\x92\x98\x3f\xe5\xb6" + "\xde\x5c\xd9\x41\x0b\xe7\x1d\xb8\xd3\xb0\x72\x28\x34\x16\x86\xaa\x60" + "\xe7\x08\x1e\x95\xf2\xe4\xb6\x9b\xb7\xcd\x96\x48\xbc\x0b", + 1, 128, NULL, 0 }, + { 520, 384, 171, + "\xee\x79\x9e\x25\xed\xb1\xb1\x84\x52\xe5\xed\x17\x4b\xc6\xb2\x18\x5a" + "\x67\x54\x41\x7d\x6c\xc0\x5d\x73\x6d\x2b\xa9\xef\xc8\x36\x7e\x4b\x05" + "\xba\x0a\x2e\xe5\x25\xce\xea\xb7\x4f\x98\x04\xa8\x47\x91\x30\xc3\x28" + "\xd6\x71\xe3\x40\x70\xcf\x17\x4a\x00\x3a\x1d\xfb\x59\x94", + "\xac\x3e\x7d\xa7\xe5\x78\xb9\xb4\xdc\x24\x24\x03\x04\x46\xc7\xf6\xae" + "\xbc\xc4\x71\x44\x5a\x9e\x0e\x6e\x65\x09\x9c\xae\xec\x5b\x2f", + "\xc8\x60\x7f\xca\x18\x88\x41\x81\x66\xc5\x50\xdd\x58\xd7\xa3\x97\x6a" + "\x6e\xcd\x0e\x4c\xa9\x9b\x02\xfb\x18\x78\x00\xa9\xc9\xef\x90\x9a\x6c" + "\x14\x97\xc0\x65\x2d\x4d\xca\x82\x40\x5a\xb0\x7f\x5e\xed", + 1, 256, NULL, 0 }, + { 520, 192, 172, + "\x06\x3d\x6e\x12\xe6\x70\x09\x8a\xda\xbe\x68\x19\x20\x23\xb6\x37\xbb" + "\x6d\x8d\x71\x3f\xc8\x43\x61\x88\xc4\xec\x06\xfd\xd0\x84\xce\x6d\x19" + "\x3f\x26\xc8\x6a\x95\x60\xe1\xab\xc2\x7d\x81\x3f\xce\x2b\x3e\xac\x01" + "\x70\xfd\x1c\xb7\x2e\x19\x30\xa2\x77\x6b\xc8\x4d\x6c\x11", + "", + "\x9d\xc2\xac\xbf\xa2\x8a\x7a\xc5\xf2\xa5\xbd\xd4\xb1\xb2\xdb\xc8\x06" + "\xc4\x8f\x96\xce\x95\x0e\xb5", + 1, 0, NULL, 0 }, + { 520, 192, 173, + "\x35\x93\x18\xe6\xc6\x27\x9b\xa9\xeb\xcb\x16\x75\xf5\xa9\x81\x95\xbb" + "\xf5\xd8\x95\xda\x9c\x17\xb8\x32\x90\x38\xbe\x85\x7d\xc3\x95\xb1\x2a" + "\xe9\x1a\x55\x59\x88\x76\x59\x3c\x1c\x20\xbc\x01\x72\xcf\x15\x12\x6b" + "\x7a\x6b\xf0\xa2\x38\xed\xa3\x32\x5d\x6d\xd6\x06\x00\xef", + "\x7a\xd0\xc9\x09\x8e\xa1\x0e\x61\x5b\xb6\x72\xb5\x2c\x96\x54\x2d", + "\x41\x63\x73\x7c\x21\x9f\x7c\x5e\x74\x38\x43\xdc\x3d\x36\x01\x9c\x65" + "\x85\xea\x5d\x4e\x7c\xf2\x4f", + 1, 128, NULL, 0 }, + { 520, 192, 174, + "\xd0\x1c\xd8\x98\x08\x9d\x8a\x1e\xeb\x00\x35\xb0\xd3\x32\xda\x80\xfb" + "\xd3\x57\x1b\x91\x92\xdb\x10\xfa\x6f\x55\xf6\x65\xab\x19\x2d\x70\x50" + "\xca\xb6\x43\x99\x6e\x99\x25\x4d\x95\x73\xe0\xcf\x4e\xea\xa6\x3a\xfc" + "\xcd\xef\xd8\x16\x14\xfe\x7b\x83\xdf\xe3\x0e\x3b\xa1\x9f", + "\xd6\x7c\x77\xcd\xd0\xaf\x5d\x10\xe8\xca\xe8\x87\xe5\xa6\x09\xbb\x76" + "\xa9\xe5\x59\x76\x53\x77\x3c\x30\x3b\x82\xb9\x18\xfd\xc5\x9f", + "\xe7\xdf\x52\x7a\x98\x80\x80\x74\x9e\xe2\x15\xba\x0f\x82\x07\x83\x8d" + "\xf3\x8a\x37\x70\x7a\x63\x30", + 1, 256, NULL, 0 }, + { 0, 0, 0, NULL, NULL, NULL, 0, 0, NULL, 0 } +}; diff --git a/test/wycheproof/hmac_sha512_test.json.c b/test/wycheproof/hmac_sha512_test.json.c new file mode 100644 index 0000000000000000000000000000000000000000..4e7b79fb526e902b01508e1cb3c70e2a05a24b6d --- /dev/null +++ b/test/wycheproof/hmac_sha512_test.json.c @@ -0,0 +1,1853 @@ +/***************************************************************************** + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE + FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +*****************************************************************************/ + +/* Vectors from https://github.com/google/wycheproof */ +/* HMACSHA512, 0.8rc21 */ +#include "mac_test.h" +const struct mac_test hmac_sha512_test_json[] = { + { 512, 512, 1, + "\x53\x65\x24\x4b\xb4\x3f\x23\xf1\x8d\xfc\x86\xc0\x9d\x62\xdb\x47\x41" + "\x13\x8b\xec\x1f\xbd\xdc\x28\x2d\x29\x5e\x0a\x09\x8e\xb5\xc3\xe3\x7b" + "\xd6\xf4\xcc\x16\xd5\xce\x7d\x77\xb1\xd4\x74\xa1\xeb\x4d\xb3\x13\xcc" + "\x0c\x24\xe4\x89\x92\xac\x12\x51\x96\x54\x9d\xf9\xa8", + "", + "\xd0\xa5\x56\xbd\x1a\xfa\x8d\xf1\xeb\xf9\xe3\xee\x68\x3a\x8a\x24\x50" + "\xa7\xc8\x3e\xba\x2d\xaf\x2e\x2f\xf2\xf9\x53\xf0\xcd\x64\xda\x21\x6e" + "\x67\x13\x4c\xf5\x55\x78\xb2\x05\xc8\xa1\xe2\x41\xba\x13\x69\x51\x6a" + "\x5e\xf4\x29\x8b\x9c\x1d\x31\xe9\xd5\x9f\xc0\x4f\xe4", + 1, 0, NULL, 0 }, + { 512, 512, 2, + "\x00\x69\x89\x77\xf7\x10\x2c\x67\xb5\x94\x16\x69\x19\xaa\x99\xdc\x3e" + "\x58\xc7\xb6\x69\x7a\x64\x22\xe2\x38\xd0\x4d\x2f\x57\xb2\xc7\x4e\x4e" + "\x84\xf5\xc4\xc6\xb7\x92\x95\x2d\xf7\x2f\x1c\x09\x24\x48\x02\xf0\xbc" + "\xf8\x75\x2e\xfb\x90\xe8\x36\x11\x07\x03\xbf\xa2\x1c", + "\x01", + "\x4d\x16\x09\xcc\x2c\x2f\x1a\xb5\xdd\xc3\x58\x15\xae\x1b\x5d\xc0\x46" + "\xf2\x26\xbd\xe1\x7e\xc3\x7a\x4c\x89\xec\x46\xfb\xd3\x1a\xf2\xae\xb8" + "\x10\xb1\x96\xdf\xfd\xd1\x19\x24\xd3\x77\x2b\xef\x26\xa7\xa5\x42\xe0" + "\xa1\x67\x3b\x76\xb9\x15\xd4\x1c\xbd\x3d\xf0\xf6\xa6", + 1, 8, NULL, 0 }, + { 512, 512, 3, + "\xed\x6d\xc6\x5d\xbe\xaa\xdb\xda\xab\x53\x0a\x0d\x35\xf1\x9f\x78\xa7" + "\xbd\x93\xe6\x98\x54\x6c\x82\x75\x1b\xf6\x50\xc2\xa4\x4f\xc8\x52\x90" + "\x33\xd0\x88\xfe\xbe\xed\x28\x8f\xb4\xc8\x13\x2a\x59\xdf\x02\x07\x68" + "\x76\x40\xc7\x6d\xcd\xb2\x70\xac\x3a\xf5\xf0\x42\xf1", + "\xa7\x8f", + "\x07\x57\xb2\x7e\x12\x05\x59\xd6\x4c\xd3\xd6\xe3\xcb\x40\xd4\x97\x84" + "\x53\x75\x81\x51\x81\xbd\x9b\x4e\x74\xf2\x18\x9d\x09\xd0\x1a\x1b\x3e" + "\xad\x53\x70\x13\x80\xd9\x88\x95\x8e\xd2\x2b\xc3\x79\xac\xe9\xd4\x7c" + "\xbc\xac\x1d\x49\xbf\xa7\xe1\x4f\x1f\x44\x80\x4c\x30", + 1, 16, NULL, 0 }, + { 512, 512, 4, + "\x46\x3c\x5e\x69\x6d\xa0\xec\x0d\x78\x43\x88\xbe\x77\x5d\x1d\x91\xd9" + "\x47\x46\xaa\x8d\x3d\x2c\x20\x9f\x56\xac\x95\xea\x54\xe7\x28\x83\x29" + "\xf9\xfb\x40\xbe\x4e\xef\x35\x54\x7e\x64\xc6\x1d\xc5\x1a\x4a\x1f\x33" + "\x80\xa2\xb9\x64\x20\xf0\x88\x65\x5e\xa9\xd8\x5b\x97", + "\xe9\x56\xc1", + 
"\xac\x4b\x15\x09\x39\x18\x14\xae\x5c\xb5\xa1\x23\xe7\xa0\x60\x60\x15" + "\x75\xc1\x1d\x81\xb5\x63\xbd\xc5\x2f\xeb\xe6\xbb\x2c\x74\x7b\x85\xee" + "\xdd\xcb\x67\x48\xc9\x81\x47\xa4\x6a\x1c\xc9\xbe\x67\x76\xd1\xa8\xe8" + "\x2a\xe4\x89\x6b\x9c\x18\xda\x2f\xf3\x51\xc5\x67\x95", + 1, 24, NULL, 0 }, + { 512, 512, 5, + "\x4b\xc0\xd3\x2e\x94\x5c\xfd\xaf\xd2\x0d\x39\xbe\x38\x20\xf9\x64\x97" + "\x27\xcb\xda\x5a\xb5\x85\x99\x53\xa3\x22\xcb\xde\x1a\xb7\xa5\x14\xd7" + "\xdc\xd1\x4b\xa9\x09\x05\xe7\x09\x19\xbb\x86\xb8\x5c\xfe\xaa\x37\x5e" + "\xe2\xce\x27\x03\x71\x1b\x93\x8c\x8f\x4a\xb5\xf1\x78", + "\xb2\xaa\x48\xb3", + "\xc4\xec\xdb\xd2\xef\xb1\x76\x40\xce\x67\x07\xe2\xe9\xd0\xee\x5b\xfb" + "\x98\xb9\x15\x84\xbc\x86\xab\x38\x64\x37\xea\xa3\x7b\x0f\x2e\xb7\x05" + "\x00\x36\x11\x05\x41\x6c\x0d\xce\xcf\xf3\x89\xdc\x94\xc7\x23\xfc\xff" + "\x18\xcb\x80\x17\x40\x96\x23\x12\x00\x7a\x19\x5a\x23", + 1, 32, NULL, 0 }, + { 512, 512, 6, + "\xac\xa4\x7f\x63\x50\x94\x1a\x0e\xfd\x8c\x3b\xac\x90\x64\xa5\x54\xbe" + "\x33\x7c\xde\x7d\x19\x2f\x6f\xbf\x86\xd1\xb4\xdb\x09\xb3\x65\x31\x16" + "\x5c\xba\xe0\xa6\x34\x20\x6f\x71\xfa\x40\x0d\xf3\x33\x52\xff\xf6\x0e" + "\x1f\xba\x40\x09\xac\x66\x71\xcd\x37\x31\x2b\xdd\x98", + "\xbc\x99\x3b\x1d\xb0", + "\x89\xaf\x2f\x57\x46\xca\xb8\x9f\xda\x69\x93\xe0\x0f\x1b\xf0\xcc\x70" + "\xa7\x71\x88\x94\x5b\xb7\xb5\x40\x9b\x53\x6a\xec\x55\x33\xad\x50\x1d" + "\xb6\xec\xfa\x3e\x51\x6b\x58\x0b\x7d\xf9\xc8\xea\xdb\x3c\xf5\x56\xcc" + "\xc0\x16\x68\xbe\x98\x43\x35\xbd\x5a\x62\x55\xd5\x66", + 1, 40, NULL, 0 }, + { 512, 512, 7, + "\xb3\xec\xae\x6f\x25\xc2\xf6\x99\xf1\x58\xb3\xff\xcd\x0a\x7a\x57\x55" + "\x83\xe4\xc9\xcb\x56\xb5\xc2\x2e\xf4\x27\x3c\xde\x6c\x67\x34\xe8\x4d" + "\x74\x00\x74\x9c\x17\xe4\x7e\x8c\xfc\xca\xfa\xf8\xb5\x0c\x65\xeb\x47" + "\xdf\xeb\x27\x3d\x5d\x30\xa1\x18\x1e\x37\xb2\x7a\xd0", + "\xf0\x36\x1d\x58\x29\x1e", + "\x40\x37\xa5\x7a\xa2\x79\xb5\xa0\x7a\xbe\x93\x89\xdc\xf5\x08\xbe\x94" + "\x95\xa8\x25\x7d\xcb\x3f\xeb\xa3\xf0\x80\x1c\xd5\x75\x74\xc3\x0b\xfd" + "\xdc\x6d\xf5\xdf\x65\x67\xcd\x57\x2c\x4e\x82\x73\x5f\xd4\xe6\x7b\x65" + "\xe8\x5b\x03\x0f\x18\x3a\x7f\x44\x57\xfb\x7d\x2c\x3d", + 1, 48, NULL, 0 }, + { 512, 512, 8, + "\x70\xff\x24\xa2\x52\xd6\x51\x83\xbd\xc6\xb7\xc8\x87\x51\xf8\x50\x82" + "\x11\x41\xa6\x12\x46\x72\x7c\x32\x40\xb4\xf9\x60\x88\xae\x32\x78\x76" + "\x7a\x82\x2b\x65\x73\x5a\x28\xcc\xeb\xe4\xc8\x74\xbc\xb2\xc9\x42\x88" + "\x2c\xb2\x3f\x9d\xd8\x7f\xe0\x8f\xba\xad\x5a\xe7\x2f", + "\xe1\x8d\xa3\xeb\xf0\xff\xa4", + "\x87\x8d\x48\x87\x54\xbc\x79\x6c\x70\xe1\x1d\x5d\xb7\x7a\xcd\xa2\xe1" + "\x79\x6d\x86\x14\x6e\x27\xd8\x62\x58\x67\x40\xc4\xd4\x88\xed\x12\x23" + "\x9e\x6f\xb4\xab\x29\x25\xaf\xc8\x81\x68\x60\x9e\xdc\x04\x8f\x85\x72" + "\x53\x6f\xae\x96\xe1\x49\xd7\x3d\x23\x0b\x18\xdb\x66", + 1, 56, NULL, 0 }, + { 512, 512, 9, + "\xdd\x4e\x05\x93\x3d\x09\x71\x1e\xe8\x8c\xb4\xc1\xce\xb3\x60\x0b\x2b" + "\x33\x80\x8b\xc0\x8d\x49\x93\x87\xb3\x31\xd9\xc7\xaf\x49\xbc\x65\xb5" + "\x51\x72\xcf\x80\x83\x38\x5a\x94\x0e\x4b\x86\x4b\x7b\x4b\x73\xdd\xf3" + "\xbd\x51\x3a\x6c\xbc\xac\x73\x87\x8a\x87\x9b\x4d\x06", + "\x66\x94\x80\x29\x35\x14\x32\xc3", + "\x99\x68\xa1\x6e\xff\x2b\x4e\xee\xcb\x2f\x9d\x11\xfc\xb1\x05\xe8\xd8" + "\xca\x59\xed\x4e\x69\x13\x1c\x9d\xe5\x99\xcd\x81\x55\xfa\x4f\x33\xde" + "\xf1\x19\x5a\x6b\x45\x22\x63\xaa\xd9\x26\x5e\x16\xd4\x95\x18\x41\xd7" + "\xcd\x33\xc7\x4c\x47\x5d\xa0\x44\x97\xc0\x29\x22\xea", + 1, 64, NULL, 0 }, + { 512, 512, 10, + "\xfb\xd3\x2c\xaf\x89\x84\xfc\x43\x76\xd1\x0d\xaa\x72\x88\xdb\x8e\x6e" + 
"\x74\x46\x4b\xdd\x94\xb4\x48\xad\xab\x44\x97\xb3\x19\xe9\xa6\xdc\xce" + "\x54\x2f\x82\xa7\xff\x2e\x77\x5d\x12\x47\x7c\x88\x0e\x46\x0a\x9e\xab" + "\x8e\xfc\x49\xfc\xfc\x8c\x54\x76\xcb\x4b\x08\x95\x4a", + "\x38\xa2\x58\x6a\x28\x83\x95\x3c\xc4", + "\xe0\xc6\x9b\xd0\x34\xcd\xec\x5b\x48\x15\x0f\xdf\x3a\x43\x83\x45\x6a" + "\x76\x26\xd4\x40\x5d\xf5\x2d\xc6\xc2\xbc\x8f\xe9\x3b\xd8\x7e\x36\x9e" + "\x06\xa7\x81\xed\x80\xba\x8b\x1f\xe1\x14\x6c\x4d\xf8\x2b\x6a\x51\x44" + "\x12\x35\x8b\x31\xb7\x7b\x9b\x79\xc7\xa9\x1e\xc9\xe4", + 1, 72, NULL, 0 }, + { 512, 512, 11, + "\xfd\x4c\x3f\x6b\x21\x37\x51\x36\x16\xc2\x8e\xd4\xd8\x63\x8f\x86\x7a" + "\xd0\xb9\x71\x88\xb7\x3f\xc9\xb3\x6f\x3d\x52\xb8\x2d\x72\xa4\x9b\x9d" + "\xc1\xb8\xb2\x53\x97\xeb\x44\x80\x54\xa8\xd3\x8d\x83\x8e\x7a\x88\xb4" + "\xdf\x9c\x26\x3a\xea\x1b\x96\x87\x71\xd5\xac\x57\x56", + "\x86\xb4\xe6\x1b\x3b\x7d\x65\x00\x44\xad", + "\x29\x34\x5d\x7d\xa4\x4e\x2f\x22\x8e\x8d\x50\x2e\x29\xfb\x65\x5d\xa3" + "\x67\x6a\x48\x1f\x99\x47\xc8\x48\x25\x02\xce\x07\x0b\x3d\xa5\x06\x55" + "\x89\xd8\x4c\x02\xa0\x5c\xd7\x74\xb4\xbd\x5a\x15\xb6\x68\xc5\x9b\xaf" + "\xc1\x92\x69\x5a\xec\x43\xe5\xdf\x3a\x82\x30\x17\x45", + 1, 80, NULL, 0 }, + { 512, 512, 12, + "\xf9\x5b\xae\xa5\x35\xf4\x77\xd2\x2b\x40\x5c\x67\xd9\x27\xf5\x9a\x9e" + "\x04\x2c\x46\x29\x7a\x16\x81\xbc\xc1\x6f\xdb\xe1\xb2\xcd\x59\x67\x5a" + "\x22\x13\x51\xa7\x80\x75\x98\x1e\x7e\xb4\x99\x80\x66\x76\x88\x01\xcb" + "\xd7\xa8\x52\x31\x11\x4d\x7f\x27\xf9\xbd\xf2\x48\x99", + "\x5a\x34\xde\xe4\xe0\x98\x2d\x45\x8e\xff\xfb", + "\x63\x86\x7b\xb3\xe8\x2b\xd4\xa5\xf7\x15\xb3\xdd\x67\xba\x36\x25\x66" + "\x6e\x45\x8c\x5e\x3d\x75\x80\x47\x09\xf8\x0b\x6d\xde\x6f\x77\x4e\xa2" + "\x23\xba\x9e\x25\x36\xc6\x0a\xb6\x36\xdd\x12\xd0\x7b\x21\x72\x34\xa4" + "\x90\xea\x9c\xae\x4f\xe6\x73\x21\x5d\x33\xf8\xc5\x7a", + 1, 88, NULL, 0 }, + { 512, 512, 13, + "\x4d\x76\xae\x95\xa1\x23\x20\x7e\x01\xc6\xd2\x2d\x8b\x58\x7e\x63\xba" + "\x68\x29\x63\xe5\x09\x61\xaf\xff\x53\x11\x60\xa9\xb9\xaa\xc6\xc7\x72" + "\xc5\xe8\xbf\x91\x8d\xde\xcb\xeb\x56\x45\x5e\xa6\x47\x10\xe5\x1a\xc2" + "\x1e\x3b\xb9\xaf\x4b\x24\xea\xa8\x53\x5b\x3c\x29\x24", + "\x2c\x31\xf2\xd9\x86\xf6\x8a\x6d\x6a\x96\xc4\xb0", + "\x9d\x4f\x95\x49\xac\x13\x4a\x6f\x60\xf1\x7f\xd0\xfb\xc8\x0f\x55\x42" + "\x6a\xfa\x73\xcd\xaf\x84\xa8\x06\xd9\x8d\xff\xfc\x94\x26\x31\x78\x11" + "\x6f\x76\xaa\xdc\xa9\x5a\x92\x43\xa9\x12\x8f\x5f\x66\xd3\xe7\xf3\x3e" + "\x72\x60\x3d\x4b\x35\xab\x90\xab\x7d\x1e\x87\x0a\xd7", + 1, 96, NULL, 0 }, + { 512, 512, 14, + "\x0d\xa7\xfa\x1f\x5d\x21\x79\x51\xe3\xe3\x43\xcd\xa8\x1f\x23\x2d\xeb" + "\x71\x76\x4e\xb4\x9e\x85\x10\xbc\x28\xdb\xa8\xeb\x62\xaf\xa2\xa9\x8b" + "\x6f\x05\x36\xad\xb1\x02\x50\xc7\x48\x78\xfe\x64\x9f\x47\xbb\xaf\xdf" + "\x3f\x72\x2f\xa1\x50\xf6\x6e\x83\xf6\x5f\x60\x6a\xb0", + "\x83\x51\x1d\xe1\x90\x66\x3c\x9c\x42\x29\xac\xe9\x01", + "\x11\xbd\x76\xba\x2f\xd5\x68\x4e\x3f\xaa\xdd\x44\xab\xc0\x5d\x32\x66" + "\x14\x72\xae\x4c\x75\xfd\x69\xe6\x2e\x47\xa2\xd4\x62\xe4\x83\xab\x5f" + "\xd3\x74\x07\x0e\x64\x80\x17\x25\x09\x34\xd4\x86\xfe\xd5\x5e\x68\xf4" + "\x33\x85\x47\xfb\x5d\xc5\x4d\x4b\xed\x89\x4c\x1c\x2f", + 1, 104, NULL, 0 }, + { 512, 512, 15, + "\xce\xc9\xe9\xf2\x5e\xd9\xa0\x17\x00\x4a\x78\x82\xb1\xe4\x4e\x8b\xd8" + "\xfa\x32\x03\xc5\x0c\xb6\x05\x84\x55\xed\x4f\x2a\x03\x67\x88\xd4\x6f" + "\xcd\x32\x83\x27\xd0\xd8\x6b\x1a\xba\xe6\x9f\x7b\xbb\x96\xe3\xd6\x63" + "\x73\xec\x8b\xd4\x50\x75\x89\x08\x79\xa8\x3f\x4d\x33", + "\x80\xdc\xd8\xba\x66\xf9\x8b\x51\x09\x41\x44\xe9\xb8\xbd", + "\xc6\x9f\x17\x87\xbf\x78\x04\xbf\xff\xd9\xda\x7e\x62\xf5\x8c\x1c\x9f" + 
"\x59\x9c\xca\xe2\xed\x4f\xc6\xab\xda\x1b\xe4\x86\x20\xaf\xc7\x97\xd5" + "\x9d\x4a\xdb\x39\x6e\x1f\xa5\xd1\x8b\x8c\x1a\xa1\xc7\xc1\x52\x18\xa9" + "\xf9\xe3\xaa\xb2\x26\x11\x9a\xda\xd7\x42\x64\x10\x89", + 1, 112, NULL, 0 }, + { 512, 512, 16, + "\xbb\xe2\x56\x49\xec\xdf\x54\xae\x00\x28\xfb\x92\x3c\xc8\xc2\x8e\xc0" + "\x0e\x10\xe2\xd4\x42\x14\x59\x07\x81\x23\x8a\x14\x3b\x75\xd5\x4e\xfb" + "\x03\x7e\xb9\xf5\x30\x82\xa8\xab\x3d\x88\x76\xda\xf4\xdb\xdc\x24\x83" + "\xc4\xba\x22\x27\x97\xfe\x20\xda\x3b\x77\x30\x36\x8b", + "\x33\xf6\x30\x08\x8c\x0d\x24\xcd\xa9\x8c\xaf\xf1\xa3\xaf\xc7", + "\xc8\x03\xca\x83\x3e\x85\x14\x18\xa3\xd9\xed\x76\x4f\x8c\x83\xf4\x81" + "\x06\x01\x41\xeb\x1b\x2b\xf6\x4d\x7e\xe7\x99\x1b\x04\x1c\x48\xbf\xc7" + "\x47\xbc\xe1\x3d\x69\x72\x2f\x63\x94\x40\x85\xce\xf8\xe7\xa1\x66\x27" + "\x05\x30\xfe\x31\xa2\xa5\x25\xa9\x9b\x8a\x75\xf1\xb1", + 1, 120, NULL, 0 }, + { 512, 512, 17, + "\xf5\xe2\xb9\xe2\x31\x3f\x4f\x80\x7c\xb3\xa9\x24\xa7\xd4\x94\x3f\xc3" + "\xfb\x47\x5d\x8f\x1a\x1b\x40\xce\x09\xa3\x77\x70\xf6\x21\xaf\x89\x77" + "\x72\x9c\xad\xf9\x86\xc9\x8c\x75\xf0\x8a\x4f\xab\x42\x80\x53\x8e\x09" + "\xe7\xe5\x1e\x87\xa8\xd6\x2c\x03\x41\x1b\xdb\x8d\x24", + "\x74\xef\x62\x3c\x83\x27\x5a\xe9\x97\x45\xbf\xf7\xe6\x14\x2a\xfa", + "\x47\x10\x55\xf7\xa2\xd4\x47\x58\xe7\xd7\x83\x7d\xb8\x5c\x33\x62\x6b" + "\x83\x06\x76\x0e\xb4\x5e\x18\xd4\xba\x8d\xfb\xcd\x0d\x42\x79\xfc\xf8" + "\xb5\x39\xef\x7b\x16\x5e\xea\xbf\x54\x57\xee\x2c\x41\xe5\x2d\x07\xe9" + "\x12\x1d\xa0\x2c\x98\x8f\x08\x16\x2f\x86\xbd\xf2\x08", + 1, 128, NULL, 0 }, + { 512, 512, 18, + "\x8e\x32\x3d\x5f\xb4\x75\x2d\x92\xa6\xd9\x05\xc5\x12\xb2\x87\xd0\x7b" + "\x21\xae\x50\x00\x2d\x02\x6f\xf0\x38\x8e\x15\x93\xbd\xe9\x99\x8d\xd0" + "\x23\x21\xe2\x00\xd1\x48\xf5\xfa\x2e\x82\x4b\x37\xe9\xf5\xa7\x74\x41" + "\x79\x4b\x84\x0b\xed\xd5\x52\xd1\x05\x1c\x1d\xdd\x8c", + "\x4d\xaa\x22\x9b\x00\x9b\x89\x84\x35\x4c\x2e\xc3\xe7\x97\x3e\x00" + "\x42", + "\x93\xa2\x13\x7c\xc8\x4e\x2f\xa1\x43\x9d\x7c\x23\x97\x67\xb3\xce\x65" + "\x3d\x63\x4c\x58\xa4\x59\x0e\xb6\x1a\xf9\xd3\xef\x98\x64\x45\x22\x0a" + "\xff\x35\x54\xde\x45\xa1\xb0\x93\x3f\xa0\x6d\x3d\x64\x46\x04\x18\x91" + "\x09\x77\xd8\xd9\xdd\xb2\xeb\x04\x96\x3c\x81\x68\x41", + 1, 136, NULL, 0 }, + { 512, 512, 19, + "\x46\x5b\xc1\xab\x21\x25\xcc\xa2\x97\x29\xd0\x1d\xf0\x44\xe3\x93\xb0" + "\x67\x7d\xef\xdd\x93\x92\x80\xa3\xaa\x14\x12\x24\xef\xa0\x64\x57\xe6" + "\x23\x05\x6d\x02\xf6\xc3\x6e\xca\x3d\xfc\x4a\x74\x76\xdd\x36\xb9\x7d" + "\x0c\x2d\x60\xc7\x67\x21\x29\x18\x9e\x73\xb6\xaf\x8f", + "\xdd\x84\x59\x9b\x47\xba\x9a\xe9\xf2\xad\x0c\x8e\xac\x67\x84\x85\x43" + "\x3e\xb6\xb1\xdf\xb7\xc9\x98", + "\x9f\xff\x43\xa8\x3c\x71\x83\x32\x11\xf9\xd6\x0e\xee\xf4\x16\x69\x65" + "\xc4\x1a\x37\xc7\x66\x34\xb1\xbd\xf9\xc5\x29\x1d\xf7\x5d\xc8\x77\x66" + "\x8f\x22\x87\xbc\xf8\x10\x8e\xa9\xe0\x3d\x06\x1a\x70\x8d\xb2\xdb\x08" + "\x68\x7e\xda\x61\xfa\x97\xb1\xca\x92\xdc\xf2\x2b\x92", + 1, 192, NULL, 0 }, + { 512, 512, 20, + "\xb9\x02\x26\x79\x8d\xff\x2f\xfb\x91\xd1\xee\x41\x03\xf2\x63\x97\xd0" + "\xbf\x84\xc1\x3c\x1e\xc7\x17\x39\x2c\x5f\xe1\xd4\xd0\xf4\xdc\x79\x02" + "\x36\xd7\x59\xfa\x1b\xe8\x52\xe3\x05\xda\x58\x5a\x3d\xbd\xe0\xd3\x91" + "\x2b\xea\x60\xd6\xb1\x40\xc2\x56\x45\xeb\x00\x94\x3f", + "\xaa\x29\xc3\x72\xf1\x36\x99\x3c\x65\xac\xe5\xe1\xd6\x20\x78\x80\x6e" + "\xb7\x87\x91\x3b\xb3\x5a\xf3\x33\x71\x05\x63\x59\xd3\x54\xb2", + "\x49\x3a\x72\x75\x36\xb0\x7d\x43\x4a\x7f\xc8\xdf\x6b\x70\x98\x91\x48" + "\xa8\xd9\x4c\xad\xb9\x76\x1a\xd8\x45\xac\x5f\xde\x20\x68\xf9\x56\x5e" + 
"\x68\x60\x7b\x53\x1b\x0f\x30\x7d\x7c\x17\xce\x0a\x2b\xa6\x9f\xb1\xac" + "\x1b\x0c\x71\x6f\x93\x90\x4e\xec\x75\x66\x9e\x70\xb7", + 1, 256, NULL, 0 }, + { 512, 512, 21, + "\xaf\x1b\xb9\x17\x75\xcb\x40\xc7\x39\x83\xf1\x19\xc9\x27\xa2\xce\x8f" + "\x7b\x95\x4a\x62\x74\xec\xc1\xcd\x96\x01\x9e\x5c\x41\x7a\xf4\xb0\x94" + "\x37\x61\x94\xea\xe7\x1c\x7f\x68\xf3\x34\x56\x54\xd5\xd9\xf8\x19\x8a" + "\x69\x7b\x41\xae\x25\x1e\x82\x30\x8a\xcc\xd9\x35\xbd", + "\x75\xed\xed\xdf\xa7\xf1\xdf\x1d\xc1\x44\xfb\x19\x5b\x27\xe4\x54\x64" + "\x0e\x3f\x89\x7c\xb5\x64\x22\x2f\x05\xe8\xaa\xb0\xc6\x02\x4f\x90\x47" + "\x2a\xfe\xa6\xe7\x25\x4e\xd2\x51\x34\xea\x43\x45\x2a", + "\xb5\x3d\x56\x40\x86\xa7\x45\xb1\x0d\x88\xa4\x8b\x50\xed\x8b\x53\xf4" + "\xc8\x3f\xd1\x2b\xf5\x6a\x75\x10\x80\x74\xde\x9b\x34\x3c\xdf\x06\x68" + "\xce\x8b\x6a\x3d\x88\x4b\xa2\xda\x5f\x4c\x95\x7f\x13\x19\xe2\x6c\x08" + "\x13\xc9\x9a\x42\x69\xc1\x71\xad\x80\x98\x10\x13\xa2", + 1, 376, NULL, 0 }, + { 512, 512, 22, + "\x51\x3e\x0e\x76\x22\xea\xbc\xb6\xbf\xc8\x16\x69\xda\xc9\x03\xdf\x46" + "\xda\xea\x12\x40\xf3\x22\x48\xbb\xf4\xfc\x61\xf1\xf9\xb1\x3b\x2c\x3f" + "\xe1\xbc\xc9\x75\x40\xd3\x00\x65\xbe\x9e\xee\x41\xe5\x17\x48\xbc\x42" + "\xc1\x6a\x8c\x82\x69\xfb\xe2\xb6\xf6\x25\xc1\x92\x28", + "\x81\xd8\x65\x09\x37\xf5\x08\x71\xa6\x6a\xf7\x16\x05\xea\x4f\xa9\xd6" + "\xc5\xd7\xa3\x75\x77\x4c\x22\x80\xeb\x34\xae\xfc\xee\x8c\x0e\xf8\x33" + "\x45\xbc\x54\x7e\x4d\xe7\xcb\xea\x48\x23\x69\xb2\x5a\x93", + "\x9d\x94\x2e\x45\x85\x74\x2b\xa1\x18\xbd\xa6\xe1\x32\x51\x0a\xf3\xb9" + "\x29\x70\x47\xd3\x64\xf7\x6b\x2a\x0d\x1f\xc8\x03\x84\x9b\x06\xcc\xac" + "\x0e\xaa\x42\x79\x34\x05\x5c\x9d\x2e\x5a\x5d\xa1\x9c\xf1\x72\x99\xff" + "\xda\xb6\x50\x89\x58\x0d\x10\xff\x72\x07\xc9\xed\x03", + 1, 384, NULL, 0 }, + { 512, 512, 23, + "\x62\x7c\x9a\x72\x24\x7d\x07\xb0\xce\xc8\x34\x62\x77\x46\x83\x11\xc7" + "\x40\x1f\xc4\xce\xca\xea\x8e\x22\xe1\x3e\xce\x4b\x35\x2c\x8f\x7a\x7e" + "\xb1\xba\x81\xce\x34\x8a\x08\x67\x04\x38\xc9\x7b\x8d\x9e\x88\x36\x14" + "\xd5\x50\xf1\xff\x16\xd6\x36\x97\x5c\x59\x98\x8c\x2d", + "\x11\x8e\x04\x68\xcb\xb5\x2f\x93\xa3\x39\x6e\xbf\xaa\x11\x48\x81\xa9" + "\x8a\x41\x01\xf4\xff\x91\x2c\xed\x47\xec\xfc\x73\xb2\x7f\x52\x20\x5b" + "\x7a\x5d\x4f\x38\x99\x50\x6f\x9e\x34\xeb\xf9\x94\x60\xda\x7a", + "\xa1\x86\xe0\x8c\x77\x31\xd4\xbb\xb1\xd5\x34\x2a\x10\x5e\xf4\x8f\x53" + "\x53\xc5\xc5\x42\x27\x7d\xe6\x07\x83\x1f\xcb\xbc\x8d\x0b\x9f\xd5\x09" + "\xc7\x4b\xf9\xe3\x52\xee\x73\x97\x92\xee\x3c\xd6\x38\x2f\x96\xe7\x0a" + "\xdb\x58\x9f\xdf\x1f\xb0\x31\xd4\x3e\xef\x1a\x59\x5f", + 1, 392, NULL, 0 }, + { 512, 512, 24, + "\x1e\x98\x1d\x0c\xbb\xad\x5b\xea\x94\x80\xd8\x36\xb4\x70\x4b\xf3\x14" + "\x76\x63\xb6\xea\x59\xe1\xe0\xa2\x80\xfb\x45\xd9\xb8\x5d\x44\x5d\xc9" + "\x72\x15\x9d\xde\x30\x1c\x6f\x1e\x66\x68\x1f\x95\x64\x2d\xbb\x9a\x92" + "\x18\xc0\x0d\x0c\xd7\x24\xcb\x02\xf3\xbc\xae\xa2\xea", + "\x44\x0d\xff\x39\x06\x88\xc9\xfd\xe3\x1c\x17\xfd\xb6\x1c\x1d\x13\x89" + "\x9f\x95\x44\xa9\x86\x32\x4c\x34\xd5\xeb\x07\xbe\xf9\xa4\x43\x62\x97" + "\xf4\xa7\xfe\x16\xde\x5d\xd7\xb2\x4e\x0c\x7c\x12\x90\x51\xef\xe6\xf2" + "\xdd\x0a\x21\xae\xc0\x5c\x3e\x3c\x8f\x6f\xa3\x0d\x9c\x0c\xbd\x60\xd8" + "\x40\xd1\x4f\x0b\x2a\x92\x8b\xc7\x18\x9b\x9d\xe4\xa6\xa7\x31\x15\x1d" + "\x6b\x31\xe6\xa0\xec\xae\x75\x09\x54\x34\x73\x7b\xe8\xc3\xdb\x11\xa6" + "\xa6\x97\xd0\x61\x6c\x78\xb9\x70\x41\xde", + "\xc5\x2e\xb5\xd1\x8e\x90\x68\x72\x48\x34\x2a\x84\xdc\x02\x41\xc6\x80" + "\xe9\x92\xb8\x8b\x14\x09\x27\x5d\xf7\xe3\x47\xc9\x91\x69\xa5\x0c\xd7" + "\x80\xeb\x47\x26\xad\x75\x9e\x2a\x02\x7f\xb0\x91\x35\x4e\x3d\x7c\x7a" + 
"\xba\x8a\x21\xf8\xac\xd1\xd0\xe2\x12\x36\xaf\x5f\x98", + 1, 896, NULL, 0 }, + { 512, 512, 25, + "\xee\x8a\xea\x2a\x52\xeb\x7e\x0c\x11\x20\xab\x73\x6b\x1a\x82\x5b\x12" + "\x61\x00\x63\xde\x96\x42\xc5\x94\x76\x6c\x02\x0c\xb8\x73\x14\xd8\xac" + "\x94\xb1\x30\x72\xbf\xbf\x3c\x01\x9b\x4a\xac\xb1\xd2\x69\x5c\xdd\x75" + "\x63\xa2\x6f\x57\x4e\x12\x55\x99\x06\x78\x4d\x85\x3c", + "\xa3\x95\x1f\x1d\x18\x13\x56\x02\xfd\xad\xce\xee\xf5\x74\x1c\x24\xad" + "\x22\x75\x61\x60\xd0\xc5\x5e\x51\xb7\x88\xaf\x95\x2a\xda\xeb\x13\xe1" + "\x8c\x24\xc6\xb0\x96\x72\xf4\x05\xd7\xec\x3d\x49\xb0\xbd\x86\xc7\xf8" + "\x69\x1b\x6f\x69\xaf\x49\x17\x54\x23\x21\x5c\xf5\x7d\x7c\x08\xa5\x4a" + "\xb0\xb0\x29\x3e\x68\x5c\x9a\xa2\x50\xf1\x59\x9d\x78\x19\x3a\x00\xaf" + "\x82\x2d\xec\x4b\x56\xfd\xb4\x1f\x03\x43\xab\x2c\xf8\x5e\xa2\x7b\xb2" + "\xe6\x50\x93\x0f\x5e\x8c\xa8\x36\x83\x39\x03\xb0\x53\xb3\xe0\x68\x99" + "\xb4\x01\x2a\x65\x32\x97\x8d\x90", + "\xd3\x67\x8c\xa7\xc5\xc1\xaa\x21\xf1\x2e\xcc\xc2\x1a\x1a\xdd\x0b\x3e" + "\xb1\x2c\xcd\x13\x40\x33\x57\x04\x68\x19\x1e\x51\xb0\x58\xc6\x1f\x2a" + "\x7d\x88\xf2\xca\x6c\x65\x2c\x29\xc6\x5c\x49\x1b\xf1\xf0\x25\x2b\xc1" + "\x57\xbd\xd7\x74\x36\xff\x55\x20\x4e\xac\x6d\xfb\x0d", + 1, 1016, NULL, 0 }, + { 512, 512, 26, + "\xec\xd1\x86\x1a\x12\xea\xee\x48\xae\xf1\xd7\xed\x27\x82\x23\xb5\x0d" + "\x34\x16\xdb\xff\x81\xe9\x76\xc5\x6e\xcd\x4b\x1a\x1b\xc8\x89\x2b\x58" + "\x4c\xbc\xc7\x23\x70\xff\x5e\x97\x6a\x6a\xf1\x79\x0c\xaa\x32\xf9\xea" + "\x91\x28\x55\x91\x4c\x03\x15\x97\x95\x78\xfb\xf1\x65", + "\x57\x79\xc5\x63\x73\xa8\xe5\xdb\x43\xbd\x65\xc0\x45\x3c\xe2\x31\x44" + "\x23\x0d\x43\x66\x6d\x71\x7a\x3b\x59\xd2\xe9\x0f\x0e\x10\x73\x23\x76" + "\x83\x1d\x72\x81\xcb\x23\xdd\x55\x66\xe5\xf8\xc6\x27\xd0\x0d\x39\x65" + "\x01\x39\xce\xb8\x7c\xd4\x7e\x92\x1d\x65\xd6\xc1\xcc\x77\x12\xac\x4b" + "\xd7\x5b\xda\x88\x28\xe6\x8a\xbc\x96\x8f\x41\x60\xed\x91\xb2\x89\x46" + "\xc9\xd7\x06\xb0\x36\x0b\xbb\xdd\x65\xf4\x7e\xf9\x98\x3c\x50\xf2\xd0" + "\x9d\x05\xc3\x67\x4c\x09\x43\xea\x4a\xf5\x4c\x38\x10\x89\xf9\xb8\x46" + "\xdd\x69\xce\x90\x8e\x0f\x6e\xaa\xaf", + "\xd3\x77\xe4\xef\xc3\x9f\x25\xca\x75\x14\x52\xe7\x9d\xcb\x56\x61\xf8" + "\xad\xcc\x06\x57\x0b\xd3\xf7\x10\xe0\x38\x54\xe0\x32\x28\x6c\xa4\x77" + "\xe6\xa6\x20\x64\x79\x58\xfd\x31\x70\x64\x63\xb5\x42\xdd\xf6\x17\x75" + "\x78\x75\xf3\x49\xc6\x11\x09\x35\x8d\x04\xf6\xdc\x58", + 1, 1024, NULL, 0 }, + { 512, 512, 27, + "\x71\xaa\xdb\xf3\x30\xea\x13\x3b\x46\xc9\x39\xd1\x2e\x60\x38\x96\x90" + "\x2e\x8d\xf6\x38\x59\x7c\x98\x87\x2d\xfb\x5a\xec\xd5\x16\x1b\xc8\x40" + "\x95\x22\x1d\xe3\x22\x23\x67\x01\x2f\x45\xc6\xd7\x07\x01\xe8\x62\xab" + "\x00\x0e\x78\x2e\x91\xb5\x05\xb2\x1b\x4e\x21\x2c\x38", + "\xe6\xd7\xb0\x28\x0d\x2f\x7d\xf8\x3f\xd2\x65\x62\xfc\xde\xa2\x59\x7c" + "\xf6\x87\xa9\xc9\xfa\x19\x4f\x65\x5c\x44\xd3\x27\x1b\x88\x1f\x28\xad" + "\xc4\x36\xdb\x8e\x04\x37\xff\x4d\xc5\xd3\x83\x56\x27\x1c\x33\x88\x29" + "\xc3\xe2\xd9\xba\x4a\xc1\x77\x7c\x94\x88\x69\x83\xd4\xb7\x2c\x27\x5b" + "\xc0\x0e\x4f\x7b\x06\xc5\xce\x38\xa2\xfe\x54\x9f\xe5\x37\x61\x85\x7f" + "\x23\x6d\xa7\x05\xfd\x03\x79\x0b\x41\xcc\x6f\x75\x9f\x41\xaa\x20\x6f" + "\xec\xa7\xba\x54\x86\xf4\xfc\x9d\x09\xf3\x5c\x8e\x08\x87\x24\x12\x91" + "\x88\x20\x10\x41\x4a\xe4\x1b\x8b\x38\x4a\x71\x5a\x40\x9b\xe1\x3d\xa1" + "\x7b\xfd\x60\xd3\xfb\xd4\xb8\xcb\x3c\xc7\xc2\x60\x43\x80\x72\x64\xa2" + "\x0b\x9a\x5c\x02\x72\x5e\x74\x2f\xff\x03\xe1\x80\x6b\x38\xaf\x35\x7e" + "\xbf\x8c\x79\xfc\x4c\x38\xb0\x07\xbf\x06\x13\x28\x6c\xf0\x63\xe4\x54" + "\x82\x37\x54\x75\xe6\xc4\x26\xd4\xf7\x00\x57\xcd\x92\xef\xcb\x2d\xfe" + 
"\x86\xe4\x5b\xde\xa3\x99\x27\x3a\x5e\x0f\x14\x22\x21\xfa\xe2\x06\x80" + "\x05\x55\xc0\x1b\x18\x53\x32\x95\xf5\x77\xe2\x3a\x9a\x7a\x0a\xa0\x72" + "\x82\x30\x02\xb9\x09\x65\x01\x17\x4d\x3b\xc4\xaa\xc3\x3e\x0d\xc6" + "\x00", + "\x0c\x1c\xbb\x2f\x19\x6d\x3d\x1a\xf5\xf9\x82\xa3\x30\xbf\x1d\x9a\xcc" + "\xaa\xda\x72\xcf\x6c\x25\x46\x58\xcb\x32\xbf\xd8\x70\x54\x81\xab\xd2" + "\xe1\x63\xa7\x33\x38\x70\x0f\x0d\x96\x1c\xa0\x2a\x31\xb6\x00\xdf\x04" + "\xfa\xf3\x11\xcd\x06\x49\x85\x57\x83\x11\x02\xf8\x0f", + 1, 2040, NULL, 0 }, + { 512, 512, 28, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd3\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 29, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbe\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 30, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd0\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 31, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbd\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 32, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x52\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + 
"\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 33, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x3f\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 34, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9a\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 35, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x31\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 36, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\xbf\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 37, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\x3f\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 38, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + 
"\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x86\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 39, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb4\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 40, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x85\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 41, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb7\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 42, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x06\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 43, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x8e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + 
"\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 44, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf2\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 45, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x22\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 46, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\x73\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 47, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\xa3\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 48, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x61\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 49, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xd1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 50, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x08\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 51, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x67\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 52, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc6\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 53, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x49\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 54, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc5\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 55, + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x4a\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 56, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\x47\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 57, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\xc8\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 58, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8d", + 0, 0, NULL, 0 }, + { 512, 512, 59, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x53", + 0, 128, NULL, 0 }, + { 512, 512, 60, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + 
"\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8e", + 0, 0, NULL, 0 }, + { 512, 512, 61, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x50", + 0, 128, NULL, 0 }, + { 512, 512, 62, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\xcc", + 0, 0, NULL, 0 }, + { 512, 512, 63, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x12", + 0, 128, NULL, 0 }, + { 512, 512, 64, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x0c", + 0, 0, NULL, 0 }, + { 512, 512, 65, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\xd2", + 0, 128, NULL, 0 }, + { 512, 512, 66, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd3\x9b\x9e\x3f\x87\x80\x96\x86\xf2\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 67, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbe\x30\x1c\xbf\xb5\x66\x72\x0e\x22\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 68, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\xbf\x87\x80\x96\x06\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 69, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\x3f\xb5\x66\x72\x8e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + "\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 70, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x06\xf3\x41\x09\xfb\xc7\x18\xd6\x2b\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62\x12\x2e" + "\x58\x27\x2a\x31\x67\x97\x20\xb2\x54\xcb\xd6\x3a\x7c\x6d\x69\x6b\xf9" + "\x28\x3f\x9c\x68\x97\xe7\xd7\x92\x48\x3b\xb0\x38\x8c", + 0, 0, NULL, 0 }, + { 512, 512, 71, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x8e\x23\xf1\x66\xe2\x48\x65\xc3\x16\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3\xe7\xe2" + 
"\xdc\xa7\xb0\x11\xbf\x4c\xec\x4c\x7e\x7d\x6c\xc4\x1b\xc1\x0c\x3b\xe3" + "\x6e\x83\x20\xc5\x0a\xaf\x6c\x35\xf0\x4a\xc8\xca\x52", + 0, 128, NULL, 0 }, + { 512, 512, 72, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x2d\x64\x61\xc0\x78\x7f\x69\x79\x0c\xbe\xf6\x04\x38\xe7\x29\x54\x44" + "\xf6\x3d\x87\x30\xfa\x5d\xf9\x52\x0d\xeb\x9c\x1e\xe8\xfc\x9d\xed\xd1" + "\xa7\xd8\xd5\xce\x98\x68\xdf\x4d\xab\x34\x29\xc5\x83\x92\x96\x94\x06" + "\xd7\xc0\x63\x97\x68\x18\x28\x6d\xb7\xc4\x4f\xc7\x73", + 0, 0, NULL, 0 }, + { 512, 512, 73, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x40\xcf\xe3\x40\x4a\x99\x8d\xf1\xdc\x0e\x99\x1d\xb7\x9a\x3c\x69\x0d" + "\xe9\xe6\x38\x3e\xaf\xcc\x33\x91\x71\x44\x33\x73\xa3\xa4\x5c\x18\x1d" + "\x23\x58\x4f\xee\x40\xb3\x13\xb3\x81\x82\x93\x3b\xe4\x3e\xf3\xc4\x1c" + "\x91\x7c\xdf\x3a\xf5\x50\x93\xca\x0f\xb5\x37\x35\xad", + 0, 128, NULL, 0 }, + { 512, 512, 74, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 0, NULL, 0 }, + { 512, 512, 75, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 128, NULL, 0 }, + { 512, 512, 76, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 0, NULL, 0 }, + { 512, 512, 77, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + 
"\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 128, NULL, 0 }, + { 512, 512, 78, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x52\x1b\x1e\xbf\x07\x00\x16\x06\x73\xc1\x89\x7b\x47\x98\x56\x2b\x3b" + "\x89\x42\xf8\x4f\x85\x22\x86\x2d\x72\x94\xe3\x61\x97\x83\xe2\x92\xae" + "\xd8\xa7\xaa\xb1\xe7\x17\xa0\x32\xd4\x4b\x56\xba\xfc\xed\xe9\xeb\x79" + "\xa8\xbf\x1c\xe8\x17\x67\x57\x12\xc8\xbb\x30\xb8\x0c", + 0, 0, NULL, 0 }, + { 512, 512, 79, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x3f\xb0\x9c\x3f\x35\xe6\xf2\x8e\xa3\x71\xe6\x62\xc8\xe5\x43\x16\x72" + "\x96\x99\x47\x41\xd0\xb3\x4c\xee\x0e\x3b\x4c\x0c\xdc\xdb\x23\x67\x62" + "\x5c\x27\x30\x91\x3f\xcc\x6c\xcc\xfe\xfd\xec\x44\x9b\x41\x8c\xbb\x63" + "\xee\x03\xa0\x45\x8a\x2f\xec\xb5\x70\xca\x48\x4a\xd2", + 0, 128, NULL, 0 }, + { 512, 512, 80, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd3\x9a\x9f\x3e\x86\x81\x97\x87\xf2\x40\x08\xfa\xc6\x19\xd7\xaa\xba" + "\x08\xc3\x79\xce\x04\xa3\x07\xac\xf3\x15\x62\xe0\x16\x02\x63\x13\x2f" + "\x59\x26\x2b\x30\x66\x96\x21\xb3\x55\xca\xd7\x3b\x7d\x6c\x68\x6a\xf8" + "\x29\x3e\x9d\x69\x96\xe6\xd6\x93\x49\x3a\xb1\x39\x8d", + 0, 0, NULL, 0 }, + { 512, 512, 81, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbe\x31\x1d\xbe\xb4\x67\x73\x0f\x22\xf0\x67\xe3\x49\x64\xc2\x97\xf3" + "\x17\x18\xc6\xc0\x51\x32\xcd\x6f\x8f\xba\xcd\x8d\x5d\x5a\xa2\xe6\xe3" + "\xdd\xa6\xb1\x10\xbe\x4d\xed\x4d\x7f\x7c\x6d\xc5\x1a\xc0\x0d\x3a\xe2" + "\x6f\x82\x21\xc4\x0b\xae\x6d\x34\xf1\x4b\xc9\xcb\x53", + 0, 128, NULL, 0 }, + { 512, 256, 82, + "\xee\xf6\xbc\xf1\x6e\xf7\xae\x17\x32\x6a\x33\xf2\x2d\x14\x06\xec\x1b" + "\xd3\xf8\x66\x50\x5f\x4b\x2e\x4f\xe8\xb4\x5b\xd6\x2c\xcb\xd8\x50\x32" + "\xa9\x89\x9f\xac\xf2\xdb\x0c\x93\xa2\x34\x5c\xb8\x89\x2a\xfb\x74\xdb" + "\x54\x97\x81\x21\x1d\xd8\x88\x1a\x8c\x8e\x25\xc1\x71", + "", + "\x75\xf6\x97\x5e\x35\x00\xbe\x4f\xbf\xee\x1b\xc9\x56\x44\x74\x5c\xe9" + "\xf8\xd4\x7b\x6f\x38\x18\xa4\x8f\xf3\x4e\x8c\x2b\x18\x6b\xa6", + 1, 0, NULL, 0 }, + { 512, 256, 83, + "\x83\x86\x96\xe6\x19\x0c\x87\x4c\x37\x17\xb8\xbe\x0c\xf0\x63\xca\x6d" + 
"\x60\x76\x09\x87\xd1\xa3\x37\x03\xe7\xe3\x5e\xb1\x73\xe5\xae\x95\x4e" + "\x74\x1a\x37\x93\x51\x39\xd6\x12\x14\x9e\x76\xf6\xab\x2a\x37\x06\x04" + "\xf5\xb4\xa6\x8b\xee\x87\xe3\x09\x24\x0a\x9b\xa3\xd6", + "\xd8", + "\x5b\xcf\x44\x53\x9d\x87\x83\xbb\x70\x8e\x7f\x5d\xaf\xc4\xd6\x83\xbc" + "\xaa\x0d\x24\x0c\x90\x26\x75\xbd\xde\x05\x9f\x94\x4d\xac\xde", + 1, 8, NULL, 0 }, + { 512, 256, 84, + "\xfa\x99\x9e\xd1\xcf\xbc\x5c\x43\xaf\xb1\x6f\x22\xd0\x24\xe3\xce\x64" + "\x5e\x00\xb0\x67\x12\xc9\x3b\x94\x61\x67\xc9\xc2\xc0\x37\xd1\x92\xf0" + "\xf3\x00\x3f\x87\xc4\x3a\x71\x16\x6f\xe1\xa3\xc5\x82\x4c\x34\x86\x73" + "\xa2\xf0\xf3\xc4\x75\x70\x69\x85\x94\x0f\x6b\x02\xa2", + "\xca\xd2", + "\x9a\x93\xf4\x72\x8a\xa5\x94\x1d\xa1\x60\xec\x70\x7f\x14\xb7\xe9\xee" + "\x1e\x76\x8c\x7f\x62\x72\x69\x54\x34\x30\xd2\xfc\x68\x1e\x90", + 1, 16, NULL, 0 }, + { 512, 256, 85, + "\xb5\x3f\xce\xd8\xb7\xb4\xaa\x59\xd3\xb5\x6d\x91\xe1\x38\x67\x63\xc3" + "\x9e\x35\x1c\x2f\x5a\xd6\xa4\x88\x5e\x44\x2a\xd8\x94\xd5\x18\x1c\x5b" + "\xfe\x5c\x05\x28\x0a\x84\xad\x19\xd7\x58\xe3\x59\xbf\x81\x71\xfe\x65" + "\x29\x88\xfc\xf9\xd1\x45\x8e\xa1\x73\x64\xca\x8f\xa9", + "\xd3\x39\x3c", + "\x02\xc4\x96\x8e\x86\xd1\xc6\x28\x37\xa4\x16\x50\xd3\x19\x9c\xa6\xb3" + "\xc5\x9b\x82\x27\xf5\x5e\x0b\xa4\x0e\x5e\x39\x04\xab\x51\x2a", + 1, 24, NULL, 0 }, + { 512, 256, 86, + "\x3f\x0c\xbe\xeb\xe3\x91\xc6\x49\x1e\x77\xc5\x7a\x05\xe8\x5a\x16\xf0" + "\xb5\x29\x4d\x19\xf9\xa7\xf3\x39\x0b\xaf\x7a\x20\x51\xfb\xf9\x80\xe0" + "\x41\xee\x45\xc9\x10\x4a\x91\x26\xa6\xa7\xec\x18\x2e\xae\xc2\x7a\x99" + "\xc1\xa7\xa3\xf5\xa1\xe8\xcd\x8f\xfd\xe6\x06\x41\xdc", + "\xbe\xbb\x03\x92", + "\x8d\x68\xdd\x7d\x67\x76\x3e\x8e\xaf\xee\x00\x29\xd0\x1e\x96\xe6\xa0" + "\x9c\x4b\xa0\x9e\x57\xa2\xe5\xbd\xe3\xba\xcc\xa2\x13\xc6\x95", + 1, 32, NULL, 0 }, + { 512, 256, 87, + "\x4a\x7f\x11\x0b\x92\x24\x1b\xad\xc9\x07\xba\x3c\x61\x47\x7b\xab\x09" + "\x53\xa8\x51\xbf\x32\x74\x25\xe8\x58\xfc\x72\x4b\xd3\x3e\xde\x2a\x4a" + "\x50\x18\xfe\x71\xaa\xb4\x34\xbd\xa8\xeb\x24\x64\xa4\x15\x77\xc8\xd5" + "\x70\x53\x0c\x46\x0f\x7c\x8b\xc0\x17\x2f\x1e\xe0\xf1", + "\x40\xa3\x33\xf4\xcf", + "\x5f\xed\xa3\x36\x6c\xe9\xf5\xac\x34\x02\xc9\x77\xa4\x06\x2d\x33\xc0" + "\x9e\x2c\x9a\x3d\x0c\x2d\xcd\xa0\xc8\xd6\x7b\x4b\xbd\x1a\x37", + 1, 40, NULL, 0 }, + { 512, 256, 88, + "\xc6\xc9\x5b\xf9\xfa\xca\xb2\x95\xce\xbf\xa4\xb9\x07\x85\x58\x74\xf5" + "\x7a\x5c\x15\x48\x57\x6a\xd8\xba\xe6\x94\xa3\x64\xf5\xe5\x8d\xbb\x8c" + "\x8d\xd4\x93\x30\xb2\xfd\xd1\xb7\x16\x57\xd2\x11\xdb\x2a\x65\x69\xa9" + "\xf3\xa3\x56\xc0\xc5\xb3\xc4\xef\xbd\x5b\x67\x77\xcb", + "\x4c\xf9\x26\xaf\x47\x5a", + "\x0a\x74\x8a\xa3\x37\x62\xd3\x74\xaa\x04\xb6\x17\xb5\x8d\x12\x9a\xd1" + "\xaa\xf2\x52\xa4\x04\x63\xfd\x62\xce\x92\x4e\x21\xe6\xd5\x2c", + 1, 48, NULL, 0 }, + { 512, 256, 89, + "\x0d\x33\x87\xfe\x59\xe8\xe7\xc4\x3c\x92\x8d\xac\x79\x13\x82\x6d\xec" + "\x4d\x63\xac\x3c\x8e\xe3\x46\xba\x7c\xae\xd9\x50\x5b\x9e\x63\xff\x89" + "\x42\xcd\xe9\x09\x97\xe8\xdb\xea\xf6\xe1\x7e\xe1\x87\xc0\xa8\x4a\x18" + "\x53\x95\x2d\x86\x6c\x15\xf9\xa3\x2f\xce\xe6\xa8\x2a", + "\x2b\xa3\xbc\x3c\xd6\x4b\xc6", + "\xcc\x7e\x97\x63\x08\x84\xbd\x8a\xd5\x6f\x6d\x96\xfd\x34\x69\x0e\xab" + "\x6c\x8a\xd5\x55\x6b\x51\x9d\xb3\xbc\x3c\x60\x83\xc8\x2d\x3c", + 1, 56, NULL, 0 }, + { 512, 256, 90, + "\x3c\xb9\xce\x56\x53\x88\xa6\xf0\x45\x4a\x80\xad\xd8\x6c\x7e\x10\x7e" + "\xa5\x37\xd7\xf4\x68\xa0\x64\x89\x30\xfc\x37\x17\x2c\xf7\xb4\xca\x90" + "\x58\x03\x30\x71\xc3\x54\xa2\x0a\x60\x8e\x2d\x46\xe9\x8a\xfe\x46\x43" + "\x5a\x34\x43\x62\x98\x9c\xba\xaf\xac\x18\x85\x9b\xba", + 
"\x33\xce\x49\x8e\x1f\x94\xf4\x12", + "\x76\x33\x5e\xe2\x3e\xe1\xdc\x25\x88\x12\xbe\x37\x3b\xb5\xf9\x59\x18" + "\xba\xbe\xd0\xb6\x9e\x56\x5e\x00\xee\x3a\xf7\x76\xc5\xa5\xfc", + 1, 64, NULL, 0 }, + { 512, 256, 91, + "\xc2\x5f\x45\xce\xb2\xa5\x59\x7f\x36\x14\x45\xfa\x41\xa9\x01\x9f\x41" + "\xa6\xe6\xd7\xf1\x44\x20\x3f\x29\xc0\xb9\xfc\xea\x36\x2d\x60\x89\x4c" + "\x3c\xad\xc1\xce\x25\xd5\x3d\xa3\x62\xe4\x64\xc1\x1f\xc6\xe1\x69\xe3" + "\xdb\x2e\xa1\xcf\x40\xfe\x08\xff\xfb\x42\x9b\x1a\x5b", + "\x81\x97\x8a\xf4\x79\x5c\x50\xf8\x9c", + "\xbf\x73\x21\x85\x44\xd8\x45\x8e\x6a\xd0\x07\x27\xb2\x36\xf8\x33\xd2" + "\x81\x72\x3d\x7d\xca\xe4\xd1\x01\x9b\x70\xb9\xd6\xe8\xbc\x4d", + 1, 72, NULL, 0 }, + { 512, 256, 92, + "\xb4\x58\x76\x4e\xe2\x73\xf3\x91\xcb\x71\x8f\x64\xa1\xbc\xa6\x4c\x96" + "\xa8\x70\xd9\x42\x6d\x62\x54\xee\x37\xe5\xc9\x38\x98\xd6\xa5\xef\x68" + "\xe9\xd3\xb0\xe0\x57\xa3\xc3\x96\xfa\xa8\x34\xa2\x99\x26\xa9\x68\x0c" + "\xfa\x90\x3d\x2a\x60\x5b\x85\x40\x7b\xb2\x4c\x8c\xee", + "\xd8\x04\xdc\x2a\x1e\x14\x6f\x62\xb6\x21", + "\x5b\x25\x84\x34\x16\x46\x7b\x9e\x0a\x24\xcf\xab\x67\xd8\xfc\x27\xe0" + "\x62\x3f\xf9\xe0\x1b\x22\x04\xb5\xaf\xdf\x3e\x9c\xc0\x5d\x1b", + 1, 80, NULL, 0 }, + { 512, 256, 93, + "\x7c\xbc\x57\x78\xf7\x05\x95\xfc\x21\x1c\xee\x3a\x93\xe1\x7f\xf7\xf2" + "\x5a\x1d\x9f\x37\x66\xf8\xeb\x70\xeb\x1e\x08\xc9\x42\x0a\x62\xbd\x89" + "\xe8\xb7\xd8\x34\xcc\x85\x4d\x05\x9a\xfd\x41\x3e\x4d\x9c\x06\x2a\x53" + "\x2e\x01\x59\x28\x52\x8c\x7f\x58\x12\xdf\xeb\xec\xac", + "\x40\x17\xe0\xff\xca\xac\x4c\x48\x5c\xe7\xec", + "\x66\xb4\x19\x1b\x37\xa6\xf7\x88\x09\xc4\x34\x73\x6e\xd6\xac\x22\x73" + "\xc0\x4a\x11\x21\x96\x36\xe9\x26\x71\xea\x05\xbf\x6d\xc2\x99", + 1, 88, NULL, 0 }, + { 512, 256, 94, + "\x93\x7b\x97\x11\xd6\x70\xda\xa2\x35\x99\x20\xe4\x7d\xd6\xd0\xfb\x75" + "\x27\x51\x05\xb3\xef\x07\xbb\x4a\x31\xd3\xc4\xb9\x9b\xaa\x8f\xf7\x9e" + "\xe4\xb4\xa1\xb4\xa5\xb2\x50\xd0\xfd\x7b\x47\x21\xe0\x4a\x7b\x06\x03" + "\x5b\x1d\x0c\x9d\x73\x95\x97\x70\x78\x39\x01\x8b\xce", + "\x69\x91\x81\x0e\x97\x88\xaf\x7a\xab\xff\x8e\xb4", + "\x23\x52\x2d\xe8\x0f\xf6\xa3\xa6\xd8\xfc\x1b\xf9\xb6\x32\xe1\x60\x0d" + "\xf5\x3c\x59\xf3\x85\x89\xa6\xf2\xae\x9b\x95\xd9\x40\xa3\x40", + 1, 96, NULL, 0 }, + { 512, 256, 95, + "\xad\x37\x63\x0a\x28\x0b\x1a\x75\xeb\xed\x19\x84\x21\x71\x50\xa4\x00" + "\xa5\x5d\xca\x23\x62\xa8\xea\xf3\xc9\x07\x85\x8d\x0b\x45\xdb\x3e\x20" + "\x8c\x31\x6d\x03\x38\x54\xeb\x42\x84\xf7\x11\x17\xd3\x38\x76\xe0\xe2" + "\x03\xca\x92\x2d\x26\xf9\xb7\x6d\x94\xca\xb0\xd4\xf3", + "\x7c\x6f\x2f\xd8\x3e\x56\x91\x82\x7b\xe3\x8e\x49\xa0", + "\xb6\x09\xc0\xb5\xd3\x59\x06\x1a\xc0\x66\xbd\x3b\xcc\xab\xc9\x84\x93" + "\xfd\x33\xba\xc8\xfe\x0e\x3f\x2e\x2b\x47\x58\xcb\x65\x78\xb0", + 1, 104, NULL, 0 }, + { 512, 256, 96, + "\x52\x6a\xa2\x64\x5a\xb7\x12\x29\xe2\xc6\x0f\x68\xbd\x5b\xbc\xf0\xce" + "\xa0\x97\x8a\x0a\x9c\x60\xcf\x69\x5e\x81\xfe\xca\xed\xec\xfa\x03\x62" + "\xc2\x17\x47\xea\xa9\x95\x97\x52\x08\xca\x35\xcf\xa6\xbc\x2b\x95\xd1" + "\xc2\xaf\xce\xe1\x12\x75\xf4\x3a\xdd\x6f\x02\x6d\x6c", + "\x68\x1b\xca\x55\x0f\xe3\x01\xf2\xdc\xc1\xe3\x8b\x53\xc1", + "\x1f\x1f\x50\xda\xcb\x3d\xc3\x5b\x90\x42\x9c\x0f\x9b\x31\xed\xd2\x39" + "\xa6\xaf\x4c\x09\xd5\x10\x95\xcb\x39\xff\x11\xc7\xc2\x65\x98", + 1, 112, NULL, 0 }, + { 512, 256, 97, + "\x60\x26\xa9\x04\x7a\x07\xcc\xf1\x97\xfd\xe0\x9c\x8b\x9f\x15\xf3\x4b" + "\xc5\x24\x72\xb7\xd1\xea\x46\x73\xb4\x91\x4a\x0e\x1c\x3a\xa4\xcf\xe8" + "\xd6\xef\x12\xd4\xd4\x01\x9f\x5f\xf3\xac\x0a\xdc\xad\x73\x58\x49\x08" + "\x74\x15\x5d\xaf\x41\xda\x2f\x74\xac\xa1\xcd\x89\x4a", + 
"\xce\x1d\x86\x9c\x53\x04\x1c\x5f\x6a\xcd\xda\x7a\x05\xaf\x15", + "\xbd\x2d\x67\x8c\xea\xdd\x71\x68\x0f\x99\x87\xc8\x8d\x24\xc4\x93\x35" + "\xcb\x98\x5a\xf0\xbc\xf8\xe2\x3f\xe8\x10\xe8\x3a\x92\x0f\x13", + 1, 120, NULL, 0 }, + { 512, 256, 98, + "\xa0\x3d\x2c\x54\x3c\x30\x29\x49\xc5\x1b\x66\x2f\x43\x11\x4c\x13\x05" + "\xa8\xf6\x96\x1a\xe4\x83\x42\x80\x3d\x36\x90\xdc\x18\x25\x5f\xab\x92" + "\x49\x65\x53\x6a\x79\xbc\x38\x56\x4c\x7c\x97\xcb\x8c\xc0\x20\x97\x86" + "\xe9\xf7\x63\x75\xbf\x18\x15\x29\xcf\x7f\x93\xd9\x54", + "\x69\x76\x17\xae\x31\xf1\x9b\x8a\x6a\xd4\xb8\x48\x9b\xfc\x3d\xb1", + "\x57\x7d\x62\xd7\x27\x9b\x39\xa0\xd7\x1e\x2f\x80\x83\x34\x25\xed\x43" + "\xe4\xa1\x62\x33\xee\xb5\xd2\x51\xf7\x66\xdb\x0b\xf7\xa5\x8c", + 1, 128, NULL, 0 }, + { 512, 256, 99, + "\x25\x84\x36\x65\xd3\x9c\x3a\xe9\xa7\xb3\xe4\x42\x7e\x2b\xf7\x78\x52" + "\x81\xfd\x25\x94\xbd\xde\x67\x86\x0c\xa9\xb8\xfa\x11\x64\x64\x69\xd1" + "\x64\x5a\xe8\xca\x38\x25\xb8\xc5\x51\xf9\xea\xc3\xda\x06\x60\xd8\xc2" + "\xe2\xe3\xbd\x23\xd3\x43\x95\xc6\x77\x5d\xcd\xfd\x2e", + "\x74\x08\x2c\xc5\xd1\x4d\xb1\x96\x74\x42\xd6\x6a\xac\x60\x92\xbd" + "\x23", + "\xb3\x73\xac\x5f\xb1\x98\x2b\x9d\x47\xd2\x88\x44\xe9\x69\xd5\x16\x80" + "\xdc\x81\xd2\x1d\x55\x6c\x26\x71\xc2\x9c\x11\xdf\xa6\xe3\x40", + 1, 136, NULL, 0 }, + { 512, 256, 100, + "\x48\x31\xab\x49\x62\xa2\xd2\x43\x60\x91\xcb\xbf\x38\x8d\x2b\xa0\x42" + "\xb4\x72\xa2\x62\xed\x83\x73\xc8\x5d\x04\x7c\x70\x2a\xdf\x73\xa8\x7e" + "\xb0\x97\xe7\x2d\x91\xd0\x89\xb7\xd1\x50\x4a\x7f\x7d\x8a\xbb\x3b\xc2" + "\xc4\x4c\x13\x40\xd6\xc1\x6c\x84\xea\x92\x69\xe6\x4b", + "\x28\x03\xc2\xf4\xe5\xb4\xbf\xcc\xd2\xb4\x07\x46\x9a\x6c\xb5\xef\x21" + "\xfd\x14\xa6\x82\x63\x63\x97", + "\x64\x1f\x29\x92\x5c\x06\xaa\x01\x08\x6c\x8b\xce\x89\xd9\x9e\x14\x56" + "\xdc\x2a\xd4\xb1\xd3\x36\x44\x42\x18\x7c\xe4\x39\x28\x38\xee", + 1, 192, NULL, 0 }, + { 512, 256, 101, + "\x44\x9b\xb5\x7d\x04\x65\x51\xe1\x81\x9b\x3c\x99\x41\x22\xc3\x60\x59" + "\x54\x31\x7d\x0d\x76\x21\x22\x84\xa3\x32\x8c\x22\x67\x32\xcb\xf4\xec" + "\xb4\x42\xa5\x82\xc8\x42\x38\x88\xaa\xed\x94\x6e\x5e\xec\x2b\xe6\x6e" + "\x12\x7f\x1e\x2e\x29\xb6\x6e\x68\xb9\xb4\xbe\xc4\xd1", + "\x79\x00\x4e\x64\x43\x89\xa1\x1b\x70\x9b\xc0\xa2\x3c\xb8\x59\x2f\x9f" + "\xc7\x96\x0b\xfa\x46\x13\x2c\xc1\xff\xb9\x74\x7d\xf3\x7d\xec", + "\xc0\x54\x66\x7d\x99\x2c\xc1\xe8\x4f\xa5\xb1\x3f\x64\x02\x12\x5b\x4b" + "\xb6\xfd\x29\x00\xdb\xcd\xaf\x8b\x86\x44\xc8\x2e\xda\xdc\x2b", + 1, 256, NULL, 0 }, + { 512, 256, 102, + "\xff\x97\xb7\x70\x20\x86\x1a\x0c\xe0\x0e\xff\x8d\xe9\xe7\x01\xae\xa8" + "\xc6\xde\x0f\xff\xd9\xb4\xc1\x34\x2a\x41\x6d\x35\x7f\xce\x35\xa7\x01" + "\x6c\x64\xcc\xd3\x8e\x2b\xdf\x67\x48\x02\x28\x1c\x22\x34\x15\x3d\xd8" + "\x3a\xac\xb9\x48\x96\x6d\xc8\x7e\xa4\x71\x8f\xff\x75", + "\xd7\xe2\xd2\x43\x7b\x70\x67\xa3\x0f\x04\x52\x90\x41\x96\x0e\x04\x1e" + "\x28\x17\x37\xd1\xe0\xda\xa8\xcd\x50\xcc\x0b\x26\x47\x16\xe1\x17\xaa" + "\x2f\xe9\xa7\xe3\x9c\x2f\x17\x8c\x60\x7f\xaa\x50\xc2", + "\x06\x2e\x9c\x46\x09\xa3\xb1\xd5\xdf\x27\x7a\x33\xac\x1c\x75\x01\xdf" + "\x81\xec\x1d\xdf\x46\x0b\x08\x50\xa2\xaa\x33\x2d\x07\xbf\xcb", + 1, 376, NULL, 0 }, + { 512, 256, 103, + "\xf4\xda\xf3\x70\xc4\xf4\x37\xd8\xa8\x53\x91\xec\x45\x5e\x34\x54\x0b" + "\xe8\xd3\x2d\xf8\xdf\xac\x05\xb1\x66\xae\x72\xec\x1c\xc6\x08\xf8\xc1" + "\x77\xb3\x0d\xde\x8b\xf0\x7c\x9d\x43\x47\x32\xc2\x6a\x6e\x53\x0b\x18" + "\x2c\x7a\xb2\x10\x93\xeb\x79\xd0\xbb\x5b\xe8\x5e\x53", + "\xf6\x04\x18\xf6\xc5\x4a\x25\xfa\xbf\x51\x82\x73\x08\x86\x19\xc0\x74" + "\x1c\x1c\x71\x87\xde\x93\xa0\xcf\x6a\x03\xf4\x56\x5f\x17\x65\xde\x65" + 
"\x67\x54\x54\x1b\x86\x01\x37\xf3\xf8\x45\x5d\xe7\xc4\x03", + "\x4d\x4a\x56\x8d\xd2\x6d\x27\x26\x7d\x07\x72\x54\x0e\xbb\x0b\x94\xd4" + "\x4f\xbb\xb1\x5a\x1b\xb7\x49\xa5\x70\xed\x63\x13\xee\x69\x5b", + 1, 384, NULL, 0 }, + { 512, 256, 104, + "\x3b\xe9\x5e\x87\x94\x21\xed\x78\x56\x26\x9e\xef\x39\xa2\x07\x0f\xae" + "\x40\x6c\xec\x5e\x30\xb5\x0d\x92\x79\x2d\xf5\xa3\x7d\xe9\x85\x95\x68" + "\x4e\xaa\x92\x05\x58\x7c\xa6\x07\xee\xac\x8f\x96\x59\x2f\x45\x8f\x63" + "\x43\x4b\x7d\xc8\x25\x96\xd3\xe4\xa1\xa1\x6c\x3d\x59", + "\xc6\x4f\x7c\x97\x0a\x44\x1c\x6c\x50\x38\x38\xa4\x91\x30\x8c\x78\x30" + "\x99\xea\xc5\x2b\xd3\x5a\x21\x79\x78\xa6\x4d\xce\xc8\x4d\x34\x18\x6a" + "\xb3\xb7\x4f\x20\x28\x5d\x6f\xea\x21\x65\xea\xb4\xda\x3d\x2c", + "\x4e\xa9\xe2\xf0\x88\x70\xc3\x0c\xb6\x8d\x41\x98\x16\xde\xb6\x81\xcf" + "\xd6\xba\x62\x90\x6e\x6b\x4a\x80\x88\xef\xcd\x9f\xa4\xed\xf9", + 1, 392, NULL, 0 }, + { 512, 256, 105, + "\xb5\xeb\x53\x58\x6b\x94\x85\x87\xdb\x3d\xd4\x6c\x43\xad\x65\x49\x8a" + "\x50\x79\x15\x75\x62\xe4\x07\x4c\x9c\x20\xd0\x97\xd0\xc9\x7f\x19\xdb" + "\x46\x61\xfb\x2d\xd1\x1b\x87\xa5\xcc\xba\x2c\x34\x56\x42\x61\x8f\x56" + "\x1d\x00\xbf\x87\xdf\xfc\x66\x76\x2e\x45\xe0\x15\x6f", + "\x9e\x8e\xbf\x96\x42\x99\x55\xd6\x0b\x92\x5a\x41\x11\x74\x5e\xc7\x02" + "\x8d\xe2\x4e\x69\x4a\x6d\x2e\xee\x1d\xbd\x5e\x82\x0a\xb9\xf0\x0b\xea" + "\xfd\xe0\x9f\x95\x09\x59\x33\xa0\x2f\x25\x12\x97\x28\x2b\x0c\xf6\x7c" + "\x51\x83\x97\x65\x58\x41\xf2\x30\xe1\xd9\xae\x5b\xa9\x31\x50\xd4\x37" + "\x5d\xc7\xc0\x73\x8b\x99\x85\x0b\x07\xd5\xa4\x42\x99\x4e\x68\xdc\x81" + "\x3d\x55\xed\xef\xa6\xcd\x06\x3c\xcb\x20\x27\x11\xd9\x7b\xa6\x74\xef" + "\xa0\x2d\xdb\xdc\x69\x23\x41\xe7\x7c\xfa", + "\x6a\x5b\x62\x72\xdf\x7f\xb6\x31\x52\x93\xce\x22\x48\x35\x11\xd7\x00" + "\xa8\x3c\x34\xdb\x6b\x0e\xe4\x39\x8b\x4a\x77\x02\x41\xfc\xf2", + 1, 896, NULL, 0 }, + { 512, 256, 106, + "\xfb\xc6\x78\x23\x5d\x59\x69\x80\x26\x87\x30\xa7\xaa\x8a\x3c\x25\x09" + "\x5f\x1d\xbc\xf7\xf8\x30\x99\x07\x16\xa5\x10\xc4\x4e\x62\xb1\xae\x8c" + "\xe0\x95\xc6\x5e\x18\x52\xbd\x8a\x09\xf2\x5a\xb9\x34\x15\xae\x73\x6f" + "\x22\xb2\xc6\x8e\xcd\xf3\xc0\xf7\x1e\x15\xa0\x57\xdc", + "\xef\x39\x57\x38\xf2\xb3\xbd\x7a\x06\x49\xee\xa7\x5e\x73\x4e\x5c\x79" + "\xba\xf2\x13\x58\xc7\xae\x96\xc6\x3a\x58\xe2\x60\x26\x6c\x7b\xfa\x86" + "\x96\x64\xc5\xd1\x0e\x87\xc2\x6d\x0f\x5e\xdb\x3b\x5f\x73\x90\x0c\x1d" + "\x9a\x96\xa5\xa2\xc2\x91\x25\x06\xc1\x9d\xff\x04\xe9\x00\xb8\xd5\xd6" + "\x3e\x1b\xb6\x06\xfe\x3d\x5a\x22\x9b\x64\x2b\x1a\xc5\x9e\x08\xa4\x68" + "\x7a\x7a\xad\xe3\xde\x16\xd1\x83\x13\x1d\xdd\x02\xda\x98\x8f\xf8\x26" + "\xe8\x8b\x74\xba\x0c\x5c\x41\xcf\xcd\x85\x70\xcc\xa5\x9f\xb3\x30\x4d" + "\x58\x8f\x7f\x7e\x63\xda\xd4\x7a", + "\x84\xdf\x33\x38\xf5\x02\xe6\x58\x66\xcf\x20\x77\xf6\x77\x63\x41\xa8" + "\x9d\xd2\x23\x49\x61\xb4\x74\xd5\xb7\x43\x8d\x18\x36\xfd\x6b", + 1, 1016, NULL, 0 }, + { 512, 256, 107, + "\xd8\x43\x7b\x09\x3e\x17\x0a\xfc\x30\xfb\x68\xd8\x73\xdb\x6d\xd6\x7e" + "\xe0\x37\x2b\x6c\x5f\xeb\x12\x4d\x54\x8a\xbb\xd9\x33\x04\xa0\x82\x23" + "\x5a\x17\x0a\x9d\x32\x62\x68\xd0\xcf\xc3\x42\x64\xd8\xeb\xcf\xcb\xcc" + "\x0d\xc0\xea\x77\x67\xb6\x50\xdb\xcf\xc7\x84\x8f\x91", + "\x0f\xb9\x1b\xb0\x43\xcf\x3d\x49\x34\x14\x82\xea\xcb\xab\x92\xda\x92" + "\x11\x7b\xb6\xec\x03\xb5\x18\xa9\x3c\x9d\x59\xe5\x47\x02\xc6\xa2\x1a" + "\xde\x4f\x25\x58\x70\xcb\x52\xda\x4a\x24\xb3\x68\x56\xb0\xcb\xf8\xb1" + "\x22\xd2\xfd\x5b\x02\x19\xae\xac\xb6\xe2\x92\xc9\x58\x63\x92\x17\x89" + "\x09\x2d\x5d\x65\xd5\xa9\x38\x92\x31\x67\x0e\x38\xbe\x31\xd8\xb7\x66" + 
"\x30\x65\x0c\x77\xed\xc2\x37\x73\xd5\xec\x91\x89\x91\x55\x60\xef\x6f" + "\x45\xa4\x27\x5f\x85\x95\x7d\x87\x62\x91\x6d\x81\x47\xef\x43\xea\x6f" + "\x43\x8d\x06\x62\x27\x35\x4d\xf2\x6c", + "\x4b\x9b\x51\xf2\xc7\xd7\x25\x26\x9a\x5b\xd7\x35\xd4\x25\x1c\xbf\xda" + "\x71\xea\x5c\xf5\xc0\x46\x37\x9b\x43\x9d\xdf\xf7\x66\xd2\x1a", + 1, 1024, NULL, 0 }, + { 512, 256, 108, + "\xd2\x92\x25\xe3\x04\x2b\x43\xd4\xeb\x7a\x93\x99\xf2\x24\x42\x4b\x5b" + "\x4d\xd9\x90\x31\xc8\xab\xf6\x09\xd3\xa6\xe3\x17\x58\x97\x13\x4c\xc7" + "\xba\x8a\x6b\xe2\x5d\x43\x6d\x41\xa7\x57\xa2\xda\xa4\xe1\xb0\x3f\x7c" + "\x30\x53\xee\x8c\xad\xa1\x95\x31\xd4\x8d\xab\x93\xe0", + "\x1f\xda\xd8\xcd\x37\xe1\x15\xef\x81\x31\xd4\x61\x9b\x5d\x61\xa9\x82" + "\x7a\x11\xb9\xc2\x5c\x5b\x82\x20\xb0\x02\xd5\xb7\xf6\xe2\xbc\x30\xa7" + "\xe2\xf3\xd1\x03\xdb\xca\xf8\x49\xa2\x55\xc2\x0e\xc5\xd1\xb4\x0a\x62" + "\x3e\x39\x8d\x76\xc5\xb7\xf0\x7f\xe3\x05\x18\x1c\xd5\xbe\xe2\x9d\xe3" + "\x36\x0f\xed\x55\xf6\x90\x63\xe6\xa8\xfd\x2e\xf3\x12\x91\xe4\xd7\xc3" + "\x90\x84\x49\x46\x6b\x4f\xf1\x16\x69\x59\xff\x1f\x2e\x86\xeb\x48\xa7" + "\xed\x11\xbd\xce\xc2\xfc\x2d\xfb\x66\x84\xb3\x6b\x8d\x0b\x68\xe0\xf1" + "\xb2\x3e\xf1\x1b\xf5\x90\x7d\x4a\x2b\x16\x2c\xec\x26\xf3\x1b\x6d\x5d" + "\x98\x92\xa2\x7b\x3f\xad\x7a\x55\x49\x85\x8d\xad\x53\x0b\xae\x19\x3d" + "\x9b\x60\xd4\x2d\x7a\xd2\xa6\x64\x76\xa3\xba\x4b\xf7\xa2\x7f\xf9\xd0" + "\xf8\x85\xa5\x40\xbf\x18\x1c\xaa\xd7\x1a\x23\x5e\xb3\x48\xa2\x3d\x05" + "\x3b\xa2\xdb\x7a\xed\x8d\x7d\x01\xd9\x6d\xc9\xf7\x80\xe2\xe4\x26\xc7" + "\x2b\xb6\x3f\xcb\x3f\xff\x44\xc1\x4b\xb7\xb0\xf8\xaf\x35\x52\xd6\x7e" + "\xe6\x76\x10\x92\xc7\x57\x62\x7d\x19\xc0\x80\x49\x9c\x24\x7d\x13\xb4" + "\x31\x69\x93\x97\x15\x9b\x1b\x71\xc6\x27\x45\x84\x95\x9d\x5c\x30" + "\xf0", + "\xf9\xad\x00\x42\x0c\xe9\x09\xcd\x05\x0f\xdb\x84\xdc\xc7\x0f\x00\xdf" + "\x97\x92\x89\x68\xd0\xac\xa5\x1d\xb2\x78\x4f\x03\x94\x89\x8d", + 1, 2040, NULL, 0 }, + { 512, 256, 109, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd3\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 110, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbe\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 111, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd0\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 112, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + 
"\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbd\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 113, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x52\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 114, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x3f\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 115, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9a\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 116, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x31\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 117, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\xbf\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 118, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\x3f\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 119, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" 
+ "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x86\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 120, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb4\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 121, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x85\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 122, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb7\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 123, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x06\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 124, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x8e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 125, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf2\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 126, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + 
"\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x22\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 127, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\x73\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 128, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\xa3\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 129, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x61\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 130, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xd1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 131, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x08\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 132, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x67\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 133, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc6\x18\xd6\xab\xbb" + 
"\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 134, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x49\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 135, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc5\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 136, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x4a\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 137, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\x47\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 138, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\xc8\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 139, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x63", + 0, 0, NULL, 0 }, + { 512, 256, 140, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa2", + 0, 
128, NULL, 0 }, + { 512, 256, 141, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x60", + 0, 0, NULL, 0 }, + { 512, 256, 142, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa1", + 0, 128, NULL, 0 }, + { 512, 256, 143, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x22", + 0, 0, NULL, 0 }, + { 512, 256, 144, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xe3", + 0, 128, NULL, 0 }, + { 512, 256, 145, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x86\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\xe2", + 0, 0, NULL, 0 }, + { 512, 256, 146, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x0e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\x23", + 0, 128, NULL, 0 }, + { 512, 256, 147, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd3\x9b\x9e\x3f\x87\x80\x96\x86\xf2\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 148, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + 
"\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbe\x30\x1c\xbf\xb5\x66\x72\x0e\x22\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 149, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\xbf\x87\x80\x96\x06\xf3\x41\x09\xfb\xc7\x18\xd6\xab\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 150, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\x3f\xb5\x66\x72\x8e\x23\xf1\x66\xe2\x48\x65\xc3\x96\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 151, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd2\x9b\x9e\x3f\x87\x80\x96\x06\xf3\x41\x09\xfb\xc7\x18\xd6\x2b\xbb" + "\x09\xc2\x78\xcf\x05\xa2\x06\xad\xf2\x14\x63\xe1\x17\x03\x62", + 0, 0, NULL, 0 }, + { 512, 256, 152, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbf\x30\x1c\xbf\xb5\x66\x72\x8e\x23\xf1\x66\xe2\x48\x65\xc3\x16\xf2" + "\x16\x19\xc7\xc1\x50\x33\xcc\x6e\x8e\xbb\xcc\x8c\x5c\x5b\xa3", + 0, 128, NULL, 0 }, + { 512, 256, 153, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x2d\x64\x61\xc0\x78\x7f\x69\x79\x0c\xbe\xf6\x04\x38\xe7\x29\x54\x44" + "\xf6\x3d\x87\x30\xfa\x5d\xf9\x52\x0d\xeb\x9c\x1e\xe8\xfc\x9d", + 0, 0, NULL, 0 }, + { 512, 256, 154, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x40\xcf\xe3\x40\x4a\x99\x8d\xf1\xdc\x0e\x99\x1d\xb7\x9a\x3c\x69\x0d" + "\xe9\xe6\x38\x3e\xaf\xcc\x33\x91\x71\x44\x33\x73\xa3\xa4\x5c", + 0, 128, NULL, 0 }, + { 512, 256, 155, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" 
+ "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 0, NULL, 0 }, + { 512, 256, 156, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 0, 128, NULL, 0 }, + { 512, 256, 157, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 0, NULL, 0 }, + { 512, 256, 158, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff" + "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff", + 0, 128, NULL, 0 }, + { 512, 256, 159, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\x52\x1b\x1e\xbf\x07\x00\x16\x06\x73\xc1\x89\x7b\x47\x98\x56\x2b\x3b" + "\x89\x42\xf8\x4f\x85\x22\x86\x2d\x72\x94\xe3\x61\x97\x83\xe2", + 0, 0, NULL, 0 }, + { 512, 256, 160, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\x3f\xb0\x9c\x3f\x35\xe6\xf2\x8e\xa3\x71\xe6\x62\xc8\xe5\x43\x16\x72" + "\x96\x99\x47\x41\xd0\xb3\x4c\xee\x0e\x3b\x4c\x0c\xdc\xdb\x23", + 0, 128, NULL, 0 }, + { 512, 256, 161, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + "", + "\xd3\x9a\x9f\x3e\x86\x81\x97\x87\xf2\x40\x08\xfa\xc6\x19\xd7\xaa\xba" + "\x08\xc3\x79\xce\x04\xa3\x07\xac\xf3\x15\x62\xe0\x16\x02\x63", + 0, 0, NULL, 0 }, + { 512, 256, 162, + "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f\x10" + "\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x20\x21" + "\x22\x23\x24\x25\x26\x27\x28\x29\x2a\x2b\x2c\x2d\x2e\x2f\x30\x31\x32" + "\x33\x34\x35\x36\x37\x38\x39\x3a\x3b\x3c\x3d\x3e\x3f", + 
"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f", + "\xbe\x31\x1d\xbe\xb4\x67\x73\x0f\x22\xf0\x67\xe3\x49\x64\xc2\x97\xf3" + "\x17\x18\xc6\xc0\x51\x32\xcd\x6f\x8f\xba\xcd\x8d\x5d\x5a\xa2", + 0, 128, NULL, 0 }, + { 256, 512, 163, + "\x14\xd9\x37\x59\xfc\x28\xf3\x31\x9a\xb7\x4b\x81\x67\xc9\x74\xe8\x00" + "\xf0\x32\x34\x4d\xc2\x74\x7e\xc0\xf4\x94\x50\x61\xa4\x78\x27", + "", + "\x68\x93\x4d\xbe\x94\x8d\x9a\x77\xa5\xe0\xa9\x2e\xd9\x82\x54\xfa\x3b" + "\x6c\x93\xc8\xbf\x5e\xea\xa9\x12\xb7\xdf\xdf\x76\x2b\x37\x19\x2c\x5d" + "\x85\x23\xbc\xab\x9a\xd7\x1b\x09\xbf\x96\xd8\x45\x41\x88\xd0\x01\xc7" + "\xf2\x07\x7e\xb6\x41\x19\x9f\x57\x31\xb9\xf9\x46\x69", + 1, 0, NULL, 0 }, + { 256, 512, 164, + "\x9f\xa3\x71\xf3\x6f\xb2\x73\xd5\x14\xfd\x62\x8c\xb9\x38\x06\x7a\x4b" + "\xae\x32\xa1\x9a\x1e\x04\x5a\x7d\x6d\x7f\x6d\xe3\x75\x1c\xbf", + "\x31\x1b\xbf\x72\x2d\x32\x2c\xd7\xa0\x71\x0f\x48\x0f\xc6\x65\x18", + "\x16\x34\x5f\x6a\x6c\xa6\xe7\x8d\x4c\xca\xc3\x0b\x48\xd7\x66\x91\xd6" + "\x44\x24\x20\xef\xa1\x13\xc1\x5e\xf1\x27\xb5\x38\xb5\xb0\x24\x01\x8b" + "\x7d\x2d\xb4\xbc\x3e\xd3\x42\x42\x51\xab\x6b\x8b\x6c\x3c\xb1\x08\xb0" + "\xbe\xda\x84\x2d\xc3\xe6\x8e\x63\x40\x02\x87\xe5\xcd", + 1, 128, NULL, 0 }, + { 256, 512, 165, + "\x63\x13\xf1\x52\x6b\xc2\x20\xf2\x0d\xde\x1e\x64\xce\xd8\x59\x72\x79" + "\x58\x6d\x1e\x15\xaa\xd0\x5a\xd5\x91\xd8\x41\xb3\x69\x28\x4f", + "\xf7\x44\xfa\x39\x33\xe1\x6d\x8b\xf5\x24\xaf\xae\xb3\x4c\x71\x56\x53" + "\xa9\xcf\xb0\x1f\xa4\x5f\xe1\xfb\x68\xe7\x01\xfe\x14\x87\xca", + "\xb8\x8d\x1b\xa0\x3e\x27\x99\x20\x0a\x44\x75\x50\xd1\x8e\x31\x06\x97" + "\xa5\x79\x74\xf5\x13\xdf\x77\xeb\x07\xbb\xe3\x15\xba\x5f\xef\x39\x7e" + "\xeb\x81\xad\x90\x71\x68\x0b\xcc\x6c\x70\xf6\xb2\x52\xad\xe3\x5b\x4a" + "\x40\x40\x27\x9e\xc0\x1b\x86\xe4\x0b\x98\x77\x0e\x39", + 1, 256, NULL, 0 }, + { 256, 256, 166, + "\x1e\x22\x5c\xaf\xb9\x03\x39\xbb\xa1\xb2\x40\x76\xd4\x20\x6c\x3e\x79" + "\xc3\x55\x80\x5d\x85\x16\x82\xbc\x81\x8b\xaa\x4f\x5a\x77\x79", + "", + "\x23\xd4\x82\xa0\x5c\x90\x7e\xeb\x34\x6b\xa9\x8f\x83\xdb\x0f\x63\xc2" + "\xad\xfb\xd5\xb2\x94\x0f\x33\xc7\x96\x4c\x7f\x17\x99\xf1\x80", + 1, 0, NULL, 0 }, + { 256, 256, 167, + "\x6f\xa3\x53\x86\x8c\x82\xe5\xde\xee\xda\xc7\xf0\x94\x71\xa6\x1b\xf7" + "\x49\xab\x54\x98\x23\x9e\x94\x7e\x01\x2e\xee\x3c\x82\xd7\xc4", + "\xae\xed\x3e\x4d\x4c\xb9\xbb\xb6\x0d\x48\x2e\x98\xc1\x26\xc0\xf5", + "\x1c\xf9\xd2\xc9\xc1\xb5\x5a\x45\x19\x0b\x5b\xeb\x59\x0c\xd4\xcc\x95" + "\xe3\x85\x3d\xf8\xaa\xf9\xf4\xfe\xf9\xbb\xbb\xd7\x24\x35\xff", + 1, 128, NULL, 0 }, + { 256, 256, 168, + "\x18\x6e\x24\x8a\xd8\x24\xe1\xeb\x93\x32\x9a\x7f\xdc\xd5\x65\xb6\xcb" + "\x4e\xaf\x3f\x85\xb9\x0b\x91\x07\x77\x12\x8d\x8c\x53\x8d\x27", + "\x92\xef\x9f\xf5\x2f\x46\xec\xcc\x7e\x38\xb9\xee\x19\xfd\x2d\xe3\xb3" + "\x77\x26\xc8\xe6\xce\x9e\x1b\x96\xdb\x5d\xda\x4c\x31\x79\x02", + "\xd1\x27\xb7\x38\x5b\xad\xf0\xc7\x6f\x2b\x3d\x8a\xa9\xc7\x22\x33\x35" + "\x92\xe0\x1f\x46\x2f\xed\xd3\x5e\xc6\x64\xa6\xf6\xd5\x2d\x74", + 1, 256, NULL, 0 }, + { 520, 512, 169, + "\xdd\x1e\x0b\xdb\xb6\xb6\x08\x62\x17\x64\x84\xf3\x66\x9d\xa5\x31\x45" + "\x5f\x1c\xd7\x14\xf9\x99\xc2\x9f\x08\xb8\x51\x05\x5f\xee\x8d\x72\x18" + "\x6d\x37\x6c\x23\x6f\x4e\x16\xcb\xa7\xa2\x5c\xba\x87\x9f\xb2\x75\x3d" + "\xec\xa4\x45\x9a\xae\xbc\x6f\x6d\xe6\x25\xd9\x9a\xf3\x30", + "", + "\x7e\x4f\x7d\x84\x4b\x3b\xa0\xe0\x25\xb6\x6d\xe7\xcc\x62\x27\xbc\x50" + "\xd4\xe1\x74\x93\x02\x51\xbf\xff\x3d\xf3\x6c\x39\x00\xb5\xb7\x6b\x00" + "\x09\x5a\x89\x6d\x0f\x96\x84\x2e\x37\xb6\x13\x4d\xf4\x07\x60\x30\x76" + 
"\x99\x53\x4d\x66\x70\xf1\x38\x97\x4e\xe1\xc5\x8d\x94", + 1, 0, NULL, 0 }, + { 520, 512, 170, + "\x43\x2b\x31\x1e\xbc\xfd\x46\xec\xfc\xd3\xcc\x70\x6e\xbd\x05\xc7\x87" + "\xdf\xbe\x18\x55\xfd\xcf\xce\x8d\x50\xc9\xa0\x0f\x72\xb6\x5a\x8d\x42" + "\xac\xec\x33\x5b\x4e\x07\xd5\x44\xc9\x2f\xd7\xb1\xd3\x85\x43\xac\x6e" + "\x0f\xc0\x4c\x26\xd8\x8d\xe8\xdd\x97\x4a\xf6\x9e\x24\xd7", + "\x36\xb1\xfb\xe8\xf1\x33\x5e\x7c\x03\x99\xc2\x47\x30\x90\x64\x20", + "\x2c\xfb\x68\x8f\x30\xb1\x05\x34\xda\x93\x77\xa4\xb3\xfb\xee\x1d\xec" + "\x16\x1c\xb2\x88\xac\x8b\x75\x87\x93\x83\x8b\x45\xab\x95\x39\x79\xda" + "\xdf\x27\x81\x7f\x47\x7c\x9e\xbf\x23\xcf\xdc\xba\xcb\x60\xb8\x10\x38" + "\xe0\x8b\xc4\xfc\x31\x80\xbd\x2a\x1e\xe8\x05\x97\x6a", + 1, 128, NULL, 0 }, + { 520, 512, 171, + "\x17\xf7\x20\xf0\x9d\xf5\x97\x2a\xf9\xb9\xc6\x3e\x10\x04\x32\x84\x60" + "\x89\x00\xd5\x0b\x79\x55\xdb\x3b\x4e\x26\x79\xcb\x41\x20\xbe\x2c\x9b" + "\x9e\x2a\xa1\xa5\x74\x3e\xb5\x19\x79\x28\x22\xc3\x26\xb4\xd8\x90\xb5" + "\x55\x4d\x1c\xb0\xeb\x71\x08\x1b\x75\x69\xa2\xf0\x4d\xf7", + "\x57\x16\x7c\x25\x24\xa5\x52\x89\x68\x7b\x83\xa4\x0d\x3a\x69\xbc\x90" + "\xad\xc5\x3a\xd2\x47\x02\x0b\x88\x89\x7f\x9b\x95\xd1\x51\x6d", + "\x4f\x70\x26\x7b\x98\xfc\xeb\x4f\x66\x29\x01\xbd\x18\xfb\x4c\x81\xac" + "\x16\x42\x81\xdd\x0e\xce\x43\x02\x8a\x3c\x2a\x65\xca\x21\x3a\xed\xf1" + "\xbd\x20\x7f\x09\x39\xbd\x87\x9b\xbe\x20\xfd\x09\xcd\xeb\x20\x24\x6e" + "\x65\x39\x76\x6a\xdd\x08\xb3\xad\xc5\x14\x3d\x2b\xd9", + 1, 256, NULL, 0 }, + { 520, 256, 172, + "\x8a\x0c\x46\xeb\x8a\x29\x59\xe3\x98\x65\x33\x00\x79\x76\x33\x41\xe7" + "\x43\x9d\xab\x14\x96\x94\xee\x57\xe0\xd6\x1e\xc7\x3d\x94\x7e\x1d\x53" + "\x01\xcd\x97\x4e\x18\xa5\xe0\xd1\xcf\x0d\x2c\x37\xe8\xaa\xdd\x9f\xd5" + "\x89\xd5\x7e\xf3\x2e\x47\x02\x4a\x99\xbc\x3f\x70\xc0\x77", + "", + "\xe1\x65\x7f\x44\xbf\x84\x89\x5e\x6d\xb0\x81\x0a\x2c\xca\x61\xa6\xe1" + "\x05\xe1\x2e\xc0\x06\xf0\xb5\x96\x10\x20\x30\x1b\x57\x74\x4e", + 1, 0, NULL, 0 }, + { 520, 256, 173, + "\x28\x77\xeb\xb8\x1f\x80\x33\x4f\xd0\x05\x16\x33\x74\x46\xc5\xcf\x5a" + "\xd4\xa3\xa2\xe1\x97\x26\x9e\x5b\x0a\xd1\x88\x9d\xfe\x2b\x4b\x0a\xaa" + "\x67\x6f\xac\x55\xb3\x6c\xe3\xaf\xfc\x7f\x10\x92\xab\x89\xc5\x32\x73" + "\xa8\x37\xbd\x5b\xc9\x4d\x1a\x9d\x9e\x5b\x02\xe9\x85\x6f", + "\xba\x44\x8d\xb8\x8f\x15\x4f\x77\x50\x28\xfd\xec\xf9\xe6\x75\x2d", + "\x33\xd5\xa2\xd1\x99\x8a\x58\x68\x49\xee\xbf\x81\x34\x72\x84\x85\xfc" + "\xfc\x71\x24\x8f\x4a\x98\xe6\x22\xf8\x3b\x96\x78\x44\xc4\x0e", + 1, 128, NULL, 0 }, + { 520, 256, 174, + "\x21\x17\x8e\x26\xbc\x28\xff\xc2\x7c\x06\xf7\x62\xba\x19\x0a\x62\x70" + "\x75\x85\x6d\x7c\xa6\xfe\xab\x79\xac\x63\x14\x9b\x17\x12\x6e\x34\xfd" + "\x9e\x55\x90\xe0\xe9\x0a\xac\x80\x1d\xf0\x95\x05\xd8\xaf\x2d\xd0\xa2" + "\x70\x3b\x35\x2c\x57\x3a\xc9\xd2\xcb\x06\x39\x27\xf2\xaf", + "\x7d\x5f\x1d\x6b\x99\x34\x52\xb1\xb5\x3a\x43\x75\x76\x0d\x10\xa2\x0d" + "\x46\xa0\xab\x9e\xc3\x94\x3f\xc4\xb0\x7a\x2c\xe7\x35\xe7\x31", + "\x88\xd5\x79\xc2\x80\x19\x05\xb8\x18\x07\x0c\xce\xbd\x2c\x71\x92\xf9" + "\x7b\xb3\xe7\xac\xdc\xaf\x61\x3c\xec\xc7\x4d\x0e\x41\x12\x32", + 1, 256, NULL, 0 }, + { 0, 0, 0, NULL, NULL, NULL, 0, 0, NULL, 0 } +}; diff --git a/test/wycheproof/mac_test.h b/test/wycheproof/mac_test.h new file mode 100644 index 0000000000000000000000000000000000000000..89b71f7e60f2dad1e6a8886794268c048919493e --- /dev/null +++ b/test/wycheproof/mac_test.h @@ -0,0 +1,47 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or 
without
+ modification, are permitted provided that the following conditions are met:
+
+ * Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of Intel Corporation nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*******************************************************************************/
+
+#ifndef MAC_TEST_H
+#define MAC_TEST_H
+
+#include <stdint.h>
+#include <stddef.h>
+
+struct mac_test {
+ size_t keySize; /* bits */
+ size_t tagSize; /* bits */
+ size_t tcId;
+ const char *key;
+ const char *msg;
+ const char *tag;
+ int resultValid;
+ size_t msgSize; /* bits */
+ const char *iv;
+ size_t ivSize; /* bits */
+};
+
+#endif /* MAC_TEST_H */
diff --git a/test/wycheproof/win_x64.mak b/test/wycheproof/win_x64.mak
new file mode 100644
index 0000000000000000000000000000000000000000..12d3caacbecf389aeb1353846138d5949a37b22e
--- /dev/null
+++ b/test/wycheproof/win_x64.mak
@@ -0,0 +1,98 @@
+#
+# Copyright (c) 2022, Intel Corporation
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice,
+# this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+# * Neither the name of Intel Corporation nor the names of its contributors
+# may be used to endorse or promote products derived from this software
+# without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# + +APP = wycheproof +INSTNAME = intel-ipsec-mb + +!if !defined(PREFIX) +PREFIX = C:\Program Files +!endif + +!if exist("$(PREFIX)\$(INSTNAME)\libIPSec_MB.lib") +IPSECLIB = "$(PREFIX)\$(INSTNAME)\libIPSec_MB.lib" +INCDIR = -I"$(PREFIX)\$(INSTNAME)" +!else +!if !defined(LIB_DIR) +LIB_DIR = ..\..\lib +!endif +IPSECLIB = "$(LIB_DIR)\libIPSec_MB.lib" +INCDIR = -I$(LIB_DIR) -I.\ +!endif + +!if !defined(DEBUG_OPT) +DEBUG_OPT = /Od +!endif + +!ifdef DEBUG +DCFLAGS = $(DEBUG_OPT) /DDEBUG /Z7 +DLFLAGS = /debug +!else +DCFLAGS = /O2 /Oi +DLFLAGS = +!endif + +# compiler +CC = cl + +# _CRT_SECURE_NO_WARNINGS disables warning C4996 about insecure snprintf() being used +CFLAGS = /nologo /DNO_COMPAT_IMB_API_053 /D_CRT_SECURE_NO_WARNINGS $(DCFLAGS) /Y- /W3 /WX- /Gm- /fp:precise /EHsc $(EXTRA_CFLAGS) $(INCDIR) + +#linker +LNK = link +LFLAGS = /out:$(APP).exe $(DLFLAGS) + +# dependency +!ifndef DEPTOOL +DEPTOOL = ..\..\mkdep.bat +!endif +DEPFLAGS = $(INCDIR) + +OBJS = aes_gcm_test.json.obj aes_ccm_test.json.obj \ + chacha20_poly1305_test.json.obj \ + aes_cmac_test.json.obj gmac_test.json.obj \ + hmac_sha1_test.json.obj hmac_sha224_test.json.obj \ + hmac_sha256_test.json.obj hmac_sha384_test.json.obj \ + hmac_sha512_test.json.obj wycheproof.obj + +all: $(APP).exe + +$(APP).exe: $(OBJS) $(IPSECLIB) + $(LNK) $(LFLAGS) $(OBJS) $(IPSECLIB) + +tests.dep: $(OBJS) + @type *.obj.dep > $@ 2> nul + +.c.obj: + $(CC) /c $(CFLAGS) $< + $(DEPTOOL) $< $@ "$(DEPFLAGS)" > $@.dep + +clean: + del /q $(OBJS) tests.dep *.obj.dep $(APP).exe + +!if exist(tests.dep) +!include tests.dep +!endif diff --git a/test/wycheproof/wycheproof.c b/test/wycheproof/wycheproof.c new file mode 100644 index 0000000000000000000000000000000000000000..ebf1ad48699dfb48bce49b6cf0815173253f1a98 --- /dev/null +++ b/test/wycheproof/wycheproof.c @@ -0,0 +1,1480 @@ +/******************************************************************************* + Copyright (c) 2022, Intel Corporation + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + * Neither the name of Intel Corporation nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*******************************************************************************/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <stdint.h>
+
+#include <intel-ipsec-mb.h>
+
+#include "mac_test.h"
+#include "aead_test.h"
+
+static unsigned run_vectors = 0;
+static unsigned skip_vectors = 0;
+
+static unsigned total_run_vectors = 0;
+static unsigned total_skip_vectors = 0;
+
+static int process_job(IMB_MGR *p_mgr)
+{
+ IMB_JOB *job = IMB_SUBMIT_JOB(p_mgr);
+
+ if (!job) {
+ const int err = imb_get_errno(p_mgr);
+
+ /* check for error */
+ if (err != 0)
+ return 0;
+
+ /* flush to get the job processed */
+ job = IMB_FLUSH_JOB(p_mgr);
+
+ /* if flush returns nothing then it's an error */
+ if (!job)
+ return 0;
+ }
+
+ /* if returned job is not complete then it's an error */
+ if (job->status != IMB_STATUS_COMPLETED)
+ return 0;
+
+ return 1;
+}
+
+static void
+prep_iopad(const size_t scratch_size, void *scratch,
+ const size_t key_size, const void *key,
+ const int pattern)
+{
+ uint8_t *cb = (uint8_t *) scratch;
+ const uint8_t *kp = (const uint8_t *) key;
+ const size_t max_j =
+ (key_size > scratch_size) ? scratch_size : key_size;
+ size_t j;
+
+ memset(scratch, pattern, scratch_size);
+ for (j = 0; j < max_j; j++)
+ cb[j] ^= kp[j];
+}
+
+#define PUTS_ONCE(_s) { \
+ static int _ran_already = 0; \
+ \
+ if (!_ran_already) { \
+ _ran_already = 1; \
+ printf("\t@note %s\n", _s); \
+ } \
+ }
+
+/*
+ * =============================================================================
+ * MAC TESTS
+ * =============================================================================
+ */
+static void print_mac_test(const struct mac_test *v)
+{
+ if (v->iv != NULL) {
+ printf("MAC vector details:\n"
+ " tcId = %u\n"
+ " keySize = %u [bits]\n"
+ " tagSize = %u [bits]\n"
+ " msgSize = %u [bits]\n"
+ " ivSize = %u [bits]\n"
+ " resultValid = %d\n",
+ (unsigned)v->tcId, (unsigned)v->keySize,
+ (unsigned)v->tagSize, (unsigned)v->msgSize,
+ (unsigned)v->ivSize, (int)v->resultValid);
+ } else {
+ printf("MAC vector details:\n"
+ " tcId = %u\n"
+ " keySize = %u [bits]\n"
+ " tagSize = %u [bits]\n"
+ " msgSize = %u [bits]\n"
+ " resultValid = %d\n",
+ (unsigned)v->tcId, (unsigned)v->keySize,
+ (unsigned)v->tagSize, (unsigned)v->msgSize,
+ (int)v->resultValid);
+ }
+}
+
+static int err_code = 0;
+
+static int
+mac_submit_and_check(IMB_MGR *p_mgr,
+ const struct mac_test *v,
+ const void *res_tag,
+ const int job_api)
+{
+ if (job_api) {
+ /* submit job and get it processed */
+ if (!process_job(p_mgr)) {
+ if (v->resultValid) {
+ print_mac_test(v);
+ printf("JOB-API submit/flush error!\n");
+ printf("ERROR: %s\n",
+ imb_get_strerror(imb_get_errno(p_mgr)));
+ return 0;
+ } else {
+ /* error was expected */
+ return 1;
+ }
+ }
+ } else {
+ if (err_code != 0) {
+ if (v->resultValid) {
+ print_mac_test(v);
+ printf("DIRECT-API error!\n");
+ printf("ERROR: %s\n",
+ imb_get_strerror(err_code));
+ return 0;
+ } else {
+ /* error was expected */
+ err_code = 0;
+ return 1;
+ }
+ }
+ }
+
+ const int tag_mismatch = memcmp(res_tag, v->tag,
v->tagSize / 8); + + /* was mismatch expected? */ + if (v->resultValid == 0 && tag_mismatch) + return 1; + + /* check for TAG mismatch */ + if (tag_mismatch) { + printf("%s: TAG mismatch!\n", + job_api ? "JOB-API" : "DIRECT-API"); + print_mac_test(v); + return 0; + } + + return 1; +} + +static void errno_update(IMB_MGR *p_mgr) +{ + const int new_code = imb_get_errno(p_mgr); + + if (err_code == 0 && new_code != 0) + err_code = new_code; +} + +static void errno_reset(void) +{ + errno = 0; +} + +extern const struct mac_test aes_cmac_test_json[]; + +static int test_cmac(IMB_MGR *p_mgr) +{ + const struct mac_test *v = aes_cmac_test_json; + DECLARE_ALIGNED(uint32_t expkey[4*15], 16); + DECLARE_ALIGNED(uint32_t dust[4*15], 16); + uint32_t skey1[4], skey2[4]; + uint8_t scratch[IMB_SHA_512_BLOCK_SIZE]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for ( ; v->msg != NULL; v++, run_vectors++) { + IMB_ASSERT((v->tagSize / 8) <= sizeof(scratch)); + + /* tag too long */ + if (v->tagSize > (sizeof(scratch) * 8)) { + print_mac_test(v); + return 0; + } + + if ((v->keySize / 8) == IMB_KEY_192_BYTES) { + /* unsupported - skip it*/ + PUTS_ONCE("AES-CMAC-192 not supported"); + skip_vectors++; + run_vectors--; + continue; + } + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + job->cipher_mode = IMB_CIPHER_NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = (const void *) v->msg; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->auth_tag_output = scratch; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + + if ((v->keySize / 8) == IMB_KEY_128_BYTES) { + job->hash_alg = IMB_AUTH_AES_CMAC; + IMB_AES_KEYEXP_128(p_mgr, v->key, expkey, dust); + IMB_AES_CMAC_SUBKEY_GEN_128(p_mgr, expkey, skey1, + skey2); + job->u.CMAC._key_expanded = expkey; + job->u.CMAC._skey1 = skey1; + job->u.CMAC._skey2 = skey2; + } else if ((v->keySize / 8) == IMB_KEY_256_BYTES) { + job->hash_alg = IMB_AUTH_AES_CMAC_256; + IMB_AES_KEYEXP_256(p_mgr, v->key, expkey, dust); + IMB_AES_CMAC_SUBKEY_GEN_256(p_mgr, expkey, skey1, + skey2); + job->u.CMAC._key_expanded = expkey; + job->u.CMAC._skey1 = skey1; + job->u.CMAC._skey2 = skey2; + } + + /* clear space where computed TAG is put into */ + memset(scratch, 0, sizeof(scratch)); + + if (!mac_submit_and_check(p_mgr, v, scratch, 1)) + return 0; + } + + return 1; +} + +extern const struct mac_test gmac_test_json[]; + +static int test_gmac(IMB_MGR *p_mgr) +{ + const struct mac_test *v = gmac_test_json; + struct gcm_key_data gmac_key; + uint8_t scratch[IMB_SHA_512_BLOCK_SIZE]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for ( ; v->msg != NULL; v++, run_vectors++) { + IMB_ASSERT((v->tagSize / 8) <= sizeof(scratch)); + + /* tag too long */ + if (v->tagSize > (sizeof(scratch) * 8)) { + print_mac_test(v); + return 0; + } + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + job->cipher_mode = IMB_CIPHER_NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = (const void *) v->msg; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->auth_tag_output = scratch; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + + if ((v->keySize / 8) == IMB_KEY_128_BYTES) { + job->hash_alg = IMB_AUTH_AES_GMAC_128; + IMB_AES128_GCM_PRE(p_mgr, v->key, &gmac_key); + job->u.GMAC._key = &gmac_key; + job->u.GMAC._iv = (const void *) v->iv; + job->u.GMAC.iv_len_in_bytes = v->ivSize / 8; + 
} else if ((v->keySize / 8) == IMB_KEY_192_BYTES) { + job->hash_alg = IMB_AUTH_AES_GMAC_192; + IMB_AES192_GCM_PRE(p_mgr, v->key, &gmac_key); + job->u.GMAC._key = &gmac_key; + job->u.GMAC._iv = (const void *) v->iv; + job->u.GMAC.iv_len_in_bytes = v->ivSize / 8; + } else if ((v->keySize / 8) == IMB_KEY_256_BYTES) { + job->hash_alg = IMB_AUTH_AES_GMAC_256; + IMB_AES256_GCM_PRE(p_mgr, v->key, &gmac_key); + job->u.GMAC._key = &gmac_key; + job->u.GMAC._iv = (const void *) v->iv; + job->u.GMAC.iv_len_in_bytes = v->ivSize / 8; + } + /* clear space where computed TAG is put into */ + memset(scratch, 0, sizeof(scratch)); + + if (!mac_submit_and_check(p_mgr, v, scratch, 1)) + return 0; + + /* exercise direct API test if available */ + memset(scratch, 0, sizeof(scratch)); + errno_reset(); + + if ((v->keySize / 8) == IMB_KEY_128_BYTES) { + struct gcm_context_data ctx; + + IMB_AES128_GCM_PRE(p_mgr, v->key, &gmac_key); + errno_update(p_mgr); + IMB_AES128_GMAC_INIT(p_mgr, &gmac_key, &ctx, + (const void *) v->iv, + v->ivSize / 8); + errno_update(p_mgr); + IMB_AES128_GMAC_UPDATE(p_mgr, &gmac_key, &ctx, + (const void *) v->msg, + v->msgSize / 8); + errno_update(p_mgr); + IMB_AES128_GMAC_FINALIZE(p_mgr, &gmac_key, &ctx, + scratch, v->tagSize / 8); + errno_update(p_mgr); + } + if ((v->keySize / 8) == IMB_KEY_192_BYTES) { + struct gcm_context_data ctx; + + IMB_AES192_GCM_PRE(p_mgr, v->key, &gmac_key); + errno_update(p_mgr); + IMB_AES192_GMAC_INIT(p_mgr, &gmac_key, &ctx, + (const void *) v->iv, + v->ivSize / 8); + errno_update(p_mgr); + IMB_AES192_GMAC_UPDATE(p_mgr, &gmac_key, &ctx, + (const void *) v->msg, + v->msgSize / 8); + errno_update(p_mgr); + IMB_AES192_GMAC_FINALIZE(p_mgr, &gmac_key, &ctx, + scratch, v->tagSize / 8); + errno_update(p_mgr); + } + if ((v->keySize / 8) == IMB_KEY_256_BYTES) { + struct gcm_context_data ctx; + + IMB_AES256_GCM_PRE(p_mgr, v->key, &gmac_key); + errno_update(p_mgr); + IMB_AES256_GMAC_INIT(p_mgr, &gmac_key, &ctx, + (const void *) v->iv, + v->ivSize / 8); + errno_update(p_mgr); + IMB_AES256_GMAC_UPDATE(p_mgr, &gmac_key, &ctx, + (const void *) v->msg, + v->msgSize / 8); + errno_update(p_mgr); + IMB_AES256_GMAC_FINALIZE(p_mgr, &gmac_key, &ctx, + scratch, v->tagSize / 8); + errno_update(p_mgr); + } + + if (!mac_submit_and_check(p_mgr, v, scratch, 0)) + return 0; + } + + return 1; +} + +extern const struct mac_test hmac_sha1_test_json[]; + +static int test_hmac_sha1(IMB_MGR *p_mgr) +{ + const struct mac_test *v = hmac_sha1_test_json; + DECLARE_ALIGNED(uint8_t hmac_ipad[IMB_SHA1_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t hmac_opad[IMB_SHA1_DIGEST_SIZE_IN_BYTES], 16); + uint8_t scratch[IMB_SHA1_BLOCK_SIZE]; + uint8_t key[IMB_SHA1_DIGEST_SIZE_IN_BYTES]; + uint8_t tag[IMB_SHA1_DIGEST_SIZE_IN_BYTES]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for ( ; v->msg != NULL; v++, run_vectors++) { + const void *key_ptr = NULL; + size_t key_size = 0; + + IMB_ASSERT((v->tagSize / 8) <= sizeof(tag)); + + /* tag too long */ + if (v->tagSize > (sizeof(tag) * 8)) { + print_mac_test(v); + return 0; + } + + if (v->msgSize == 0) { + /* @todo skip */ + PUTS_ONCE("HMAC-SHA1 msgSize=0 not supported"); + run_vectors--; + skip_vectors++; + continue; + } + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + job->hash_alg = IMB_AUTH_HMAC_SHA_1; + job->cipher_mode = IMB_CIPHER_NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = (const void *) v->msg; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = 
v->msgSize / 8; + job->auth_tag_output = tag; + + /* @note smaller tags sizes can be rejected */ + if ((v->tagSize / 8) > 0 && + (v->tagSize / 8) <= IMB_SHA1_DIGEST_SIZE_IN_BYTES) + job->auth_tag_output_len_in_bytes = + IMB_SHA1_DIGEST_SIZE_IN_BYTES; + else + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + + /* prepare key */ + if ((v->keySize / 8) <= IMB_SHA1_BLOCK_SIZE) { + key_ptr = v->key; + key_size = v->keySize / 8; + } else { + IMB_SHA1(p_mgr, v->key, v->keySize / 8, key); + key_ptr = key; + key_size = IMB_SHA1_DIGEST_SIZE_IN_BYTES; + } + + /* compute IPAD and OPAD */ + prep_iopad(sizeof(scratch), scratch, key_size, key_ptr, 0x36); + IMB_SHA1_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + + prep_iopad(sizeof(scratch), scratch, key_size, key_ptr, 0x5c); + IMB_SHA1_ONE_BLOCK(p_mgr, scratch, hmac_opad); + + job->u.HMAC._hashed_auth_key_xor_ipad = hmac_ipad; + job->u.HMAC._hashed_auth_key_xor_opad = hmac_opad; + + /* clear space where computed TAG is put into */ + memset(tag, 0, sizeof(tag)); + + if (!mac_submit_and_check(p_mgr, v, tag, 1)) + return 0; + } + + return 1; +} + +extern const struct mac_test hmac_sha224_test_json[]; + +static int test_hmac_sha224(IMB_MGR *p_mgr) +{ + const struct mac_test *v = hmac_sha224_test_json; + DECLARE_ALIGNED(uint8_t hmac_ipad[IMB_SHA256_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t hmac_opad[IMB_SHA256_DIGEST_SIZE_IN_BYTES], 16); + uint8_t scratch[IMB_SHA_256_BLOCK_SIZE]; + uint8_t key[IMB_SHA256_DIGEST_SIZE_IN_BYTES]; + uint8_t tag[IMB_SHA256_DIGEST_SIZE_IN_BYTES]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for ( ; v->msg != NULL; v++, run_vectors++) { + const void *key_ptr = NULL; + size_t key_size = 0; + + IMB_ASSERT((v->tagSize / 8) <= sizeof(tag)); + + /* tag too long */ + if (v->tagSize > (sizeof(tag) * 8)) { + print_mac_test(v); + return 0; + } + + if (v->msgSize == 0) { + /* @todo skip */ + PUTS_ONCE("HMAC-SHA224 msgSize=0 not supported"); + run_vectors--; + skip_vectors++; + continue; + } + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + job->hash_alg = IMB_AUTH_HMAC_SHA_224; + job->cipher_mode = IMB_CIPHER_NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = (const void *) v->msg; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + + /* prepare key */ + if ((v->keySize / 8) <= IMB_SHA_256_BLOCK_SIZE) { + key_ptr = v->key; + key_size = v->keySize / 8; + } else { + IMB_SHA224(p_mgr, v->key, v->keySize / 8, key); + key_ptr = key; + key_size = IMB_SHA224_DIGEST_SIZE_IN_BYTES; + } + /* compute IPAD and OPAD */ + prep_iopad(sizeof(scratch), scratch, key_size, key_ptr, 0x36); + IMB_SHA224_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + + prep_iopad(sizeof(scratch), scratch, key_size, key_ptr, 0x5c); + IMB_SHA224_ONE_BLOCK(p_mgr, scratch, hmac_opad); + + job->u.HMAC._hashed_auth_key_xor_ipad = hmac_ipad; + job->u.HMAC._hashed_auth_key_xor_opad = hmac_opad; + + /* clear space where computed TAG is put into */ + memset(tag, 0, sizeof(tag)); + + if (!mac_submit_and_check(p_mgr, v, tag, 1)) + return 0; + } + + return 1; +} + +extern const struct mac_test hmac_sha256_test_json[]; + +static int test_hmac_sha256(IMB_MGR *p_mgr) +{ + const struct mac_test *v = hmac_sha256_test_json; + DECLARE_ALIGNED(uint8_t hmac_ipad[IMB_SHA256_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t hmac_opad[IMB_SHA256_DIGEST_SIZE_IN_BYTES], 16); + uint8_t 
scratch[IMB_SHA_256_BLOCK_SIZE]; + uint8_t key[IMB_SHA256_DIGEST_SIZE_IN_BYTES]; + uint8_t tag[IMB_SHA256_DIGEST_SIZE_IN_BYTES]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for ( ; v->msg != NULL; v++, run_vectors++) { + const void *key_ptr = NULL; + size_t key_size = 0; + + IMB_ASSERT((v->tagSize / 8) <= sizeof(tag)); + + /* tag too long */ + if (v->tagSize > (sizeof(tag) * 8)) { + print_mac_test(v); + return 0; + } + + if (v->msgSize == 0) { + /* @todo skip */ + PUTS_ONCE("HMAC-SHA256 msgSize=0 not supported"); + run_vectors--; + skip_vectors++; + continue; + } + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + job->hash_alg = IMB_AUTH_HMAC_SHA_256; + job->cipher_mode = IMB_CIPHER_NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = (const void *) v->msg; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + + /* prepare key */ + if ((v->keySize / 8) <= IMB_SHA_256_BLOCK_SIZE) { + key_ptr = v->key; + key_size = v->keySize / 8; + } else { + IMB_SHA256(p_mgr, v->key, v->keySize / 8, key); + key_ptr = key; + key_size = IMB_SHA256_DIGEST_SIZE_IN_BYTES; + } + + /* compute IPAD and OPAD */ + prep_iopad(sizeof(scratch), scratch, key_size, key_ptr, 0x36); + IMB_SHA256_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + + prep_iopad(sizeof(scratch), scratch, key_size, key_ptr, 0x5c); + IMB_SHA256_ONE_BLOCK(p_mgr, scratch, hmac_opad); + + job->u.HMAC._hashed_auth_key_xor_ipad = hmac_ipad; + job->u.HMAC._hashed_auth_key_xor_opad = hmac_opad; + + /* clear space where computed TAG is put into */ + memset(tag, 0, sizeof(tag)); + + if (!mac_submit_and_check(p_mgr, v, tag, 1)) + return 0; + } + + return 1; +} + +extern const struct mac_test hmac_sha384_test_json[]; + +static int test_hmac_sha384(IMB_MGR *p_mgr) +{ + const struct mac_test *v = hmac_sha384_test_json; + DECLARE_ALIGNED(uint8_t hmac_ipad[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t hmac_opad[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + uint8_t scratch[IMB_SHA_512_BLOCK_SIZE]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for ( ; v->msg != NULL; v++, run_vectors++) { + IMB_ASSERT((v->tagSize / 8) <= sizeof(scratch)); + + /* tag too long */ + if (v->tagSize > (sizeof(scratch) * 8)) { + print_mac_test(v); + return 0; + } + + if (v->msgSize == 0) { + /* @todo skip */ + PUTS_ONCE("HMAC-SHA384 msgSize=0 not supported"); + run_vectors--; + skip_vectors++; + continue; + } + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + job->hash_alg = IMB_AUTH_HMAC_SHA_384; + job->cipher_mode = IMB_CIPHER_NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = (const void *) v->msg; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->auth_tag_output = scratch; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + + /* compute IPAD and OPAD */ + prep_iopad(sizeof(scratch), scratch, + v->keySize / 8, v->key, 0x36); + IMB_SHA384_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + + prep_iopad(sizeof(scratch), scratch, + v->keySize / 8, v->key, 0x5c); + IMB_SHA384_ONE_BLOCK(p_mgr, scratch, hmac_opad); + + job->u.HMAC._hashed_auth_key_xor_ipad = hmac_ipad; + job->u.HMAC._hashed_auth_key_xor_opad = hmac_opad; + + /* clear space where computed TAG is put into */ + memset(scratch, 0, sizeof(scratch)); + + if (!mac_submit_and_check(p_mgr, v, scratch, 1)) + return 
0; + } + + return 1; +} + +extern const struct mac_test hmac_sha512_test_json[]; + +static int test_hmac_sha512(IMB_MGR *p_mgr) +{ + const struct mac_test *v = hmac_sha512_test_json; + DECLARE_ALIGNED(uint8_t hmac_ipad[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + DECLARE_ALIGNED(uint8_t hmac_opad[IMB_SHA512_DIGEST_SIZE_IN_BYTES], 16); + uint8_t scratch[IMB_SHA_512_BLOCK_SIZE]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for ( ; v->msg != NULL; v++, run_vectors++) { + IMB_ASSERT((v->tagSize / 8) <= sizeof(scratch)); + + /* tag too long */ + if (v->tagSize > (sizeof(scratch) * 8)) { + print_mac_test(v); + return 0; + } + + if (v->msgSize == 0) { + /* @todo skip */ + PUTS_ONCE("HMAC-SHA512 msgSize=0 not supported"); + run_vectors--; + skip_vectors++; + continue; + } + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + job->hash_alg = IMB_AUTH_HMAC_SHA_512; + job->cipher_mode = IMB_CIPHER_NULL; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->src = (const void *) v->msg; + job->hash_start_src_offset_in_bytes = 0; + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->auth_tag_output = scratch; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + + /* compute IPAD and OPAD */ + prep_iopad(sizeof(scratch), scratch, + v->keySize / 8, v->key, 0x36); + IMB_SHA512_ONE_BLOCK(p_mgr, scratch, hmac_ipad); + + prep_iopad(sizeof(scratch), scratch, + v->keySize / 8, v->key, 0x5c); + IMB_SHA512_ONE_BLOCK(p_mgr, scratch, hmac_opad); + + job->u.HMAC._hashed_auth_key_xor_ipad = hmac_ipad; + job->u.HMAC._hashed_auth_key_xor_opad = hmac_opad; + + /* clear space where computed TAG is put into */ + memset(scratch, 0, sizeof(scratch)); + + if (!mac_submit_and_check(p_mgr, v, scratch, 1)) + return 0; + } + + return 1; +} + +/* + * ============================================================================= + * AEAD TESTS + * ============================================================================= + */ + +static void print_aead_test(const struct aead_test *v) +{ + printf("AEAD vector details:\n" + " tcId = %u\n" + " ivSize = %u [bits]\n" + " keySize = %u [bits]\n" + " tagSize = %u [bits]\n" + " aadSize = %u [bits]\n" + " msgSize = %u [bits]\n" + " resultValid = %d\n", + (unsigned)v->tcId, (unsigned)v->ivSize, + (unsigned)v->keySize, (unsigned)v->tagSize, + (unsigned)v->aadSize, (unsigned)v->msgSize, + (int)v->resultValid); +} + +static int +aead_submit_and_check(IMB_MGR *p_mgr, + const struct aead_test *v, + const void *res_tag, + const void *res_text, + const int job_api, + const int is_encrypt) +{ + if (job_api) { + /* submit job and get it processed */ + if (!process_job(p_mgr)) { + if (v->resultValid) { + print_aead_test(v); + printf("JOB-API submit/flush error!\n"); + return 0; + } else { + /* error was expected */ + return 1; + } + } + } else { + if (err_code != 0) { + if (v->resultValid) { + print_aead_test(v); + printf("DIRECT-API error!\n"); + printf("ERROR: %s\n", + imb_get_strerror(err_code)); + return 0; + } else { + /* error was expected */ + err_code = 0; + return 1; + } + } + } + + const int tag_mismatch = memcmp(res_tag, v->tag, v->tagSize / 8); + const int text_mismatch = is_encrypt ? + memcmp(res_text, v->ct, v->msgSize / 8) : + memcmp(res_text, v->msg, v->msgSize / 8); + + if (v->resultValid == 0 && (tag_mismatch || text_mismatch)) + return 1; + + /* check for TAG mismatch */ + if (tag_mismatch) { + printf("%s %s: TAG mismatch!\n", + job_api ? "JOB-API" : "DIRECT-API", + is_encrypt ? 
"encrypt" : "decrypt"); + print_aead_test(v); + return 0; + } + + /* check for text mismatch */ + if (text_mismatch) { + printf("%s %s mismatch!\n", + job_api ? "JOB-API" : "DIRECT-API", + is_encrypt ? "encrypt: cipher-text" : + "decrypt: plain-text"); + print_aead_test(v); + return 0; + } + + return 1; +} + +extern const struct aead_test aes_gcm_test_json[]; + +static int test_aead_gcm(IMB_MGR *p_mgr) +{ + const struct aead_test *v = NULL; + struct gcm_key_data gcm_key; + struct gcm_context_data ctx; + uint8_t text[512], tag[16]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for (v = aes_gcm_test_json; v->msg != NULL; v++, run_vectors++) { + IMB_ASSERT(v->tagSize <= (sizeof(tag) * 8)); + IMB_ASSERT(v->msgSize <= (sizeof(text) * 8)); + + /* tag too long */ + if (v->tagSize > (sizeof(tag) * 8)) { + print_aead_test(v); + return 0; + } + /* message too long */ + if (v->msgSize > (sizeof(text) * 8)) { + print_aead_test(v); + return 0; + } + + switch (v->keySize / 8) { + case IMB_KEY_128_BYTES: + IMB_AES128_GCM_PRE(p_mgr, v->key, &gcm_key); + break; + case IMB_KEY_192_BYTES: + IMB_AES192_GCM_PRE(p_mgr, v->key, &gcm_key); + break; + case IMB_KEY_256_BYTES: + IMB_AES256_GCM_PRE(p_mgr, v->key, &gcm_key); + break; + default: + printf("Invalid key size: %u bytes!\n", + (unsigned)v->keySize / 8); + print_aead_test(v); + return 0; + } + + /* test JOB API */ + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + /* encrypt test */ + job->cipher_mode = IMB_CIPHER_GCM; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_CIPHER_HASH; + job->key_len_in_bytes = v->keySize / 8; + job->src = (const void *) v->msg; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->msgSize / 8; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->iv = (const void *) v->iv; + job->iv_len_in_bytes = v->ivSize / 8; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + job->hash_alg = IMB_AUTH_AES_GMAC; + job->enc_keys = &gcm_key; + job->dec_keys = &gcm_key; + job->u.GCM.aad = v->aad; + job->u.GCM.aad_len_in_bytes = v->aadSize / 8; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 1, 1)) + return 0; + + /* decrypt test */ + job = IMB_GET_NEXT_JOB(p_mgr); + + job->cipher_mode = IMB_CIPHER_GCM; + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->key_len_in_bytes = v->keySize / 8; + job->src = (const void *) v->ct; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->msgSize / 8; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->iv = (const void *) v->iv; + job->iv_len_in_bytes = v->ivSize / 8; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + job->hash_alg = IMB_AUTH_AES_GMAC; + job->enc_keys = &gcm_key; + job->dec_keys = &gcm_key; + job->u.GCM.aad = v->aad; + job->u.GCM.aad_len_in_bytes = v->aadSize / 8; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 1, 0)) + return 0; + + /* test direct API */ + + /* encrypt direction */ + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + errno_reset(); + + switch (v->keySize / 8) { + case IMB_KEY_128_BYTES: + IMB_AES128_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + (const void *) v->iv, + v->ivSize / 8, + (const void *) v->aad, + v->aadSize / 8); + errno_update(p_mgr); + IMB_AES128_GCM_ENC_UPDATE(p_mgr, &gcm_key, &ctx, text, + (const void 
*) v->msg, + v->msgSize / 8); + errno_update(p_mgr); + IMB_AES128_GCM_ENC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tagSize / 8); + errno_update(p_mgr); + break; + case IMB_KEY_192_BYTES: + IMB_AES192_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + (const void *) v->iv, + v->ivSize / 8, + (const void *) v->aad, + v->aadSize / 8); + errno_update(p_mgr); + IMB_AES192_GCM_ENC_UPDATE(p_mgr, &gcm_key, &ctx, text, + (const void *) v->msg, + v->msgSize / 8); + errno_update(p_mgr); + IMB_AES192_GCM_ENC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tagSize / 8); + errno_update(p_mgr); + break; + case IMB_KEY_256_BYTES: + IMB_AES256_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + (const void *) v->iv, + v->ivSize / 8, + (const void *) v->aad, + v->aadSize / 8); + errno_update(p_mgr); + IMB_AES256_GCM_ENC_UPDATE(p_mgr, &gcm_key, &ctx, text, + (const void *) v->msg, + v->msgSize / 8); + errno_update(p_mgr); + IMB_AES256_GCM_ENC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tagSize / 8); + errno_update(p_mgr); + break; + default: + printf("Invalid key size: %u bytes!\n", + (unsigned)v->keySize / 8); + print_aead_test(v); + return 0; + } + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 0, 1)) + return 0; + + /* decrypt direction */ + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + errno_reset(); + switch (v->keySize / 8) { + case IMB_KEY_128_BYTES: + IMB_AES128_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + (const void *) v->iv, + v->ivSize / 8, + (const void *) v->aad, + v->aadSize / 8); + errno_update(p_mgr); + IMB_AES128_GCM_DEC_UPDATE(p_mgr, &gcm_key, &ctx, text, + (const void *) v->ct, + v->msgSize / 8); + errno_update(p_mgr); + IMB_AES128_GCM_DEC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tagSize / 8); + errno_update(p_mgr); + break; + case IMB_KEY_192_BYTES: + IMB_AES192_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + (const void *) v->iv, + v->ivSize / 8, + (const void *) v->aad, + v->aadSize / 8); + errno_update(p_mgr); + IMB_AES192_GCM_DEC_UPDATE(p_mgr, &gcm_key, &ctx, text, + (const void *) v->ct, + v->msgSize / 8); + errno_update(p_mgr); + IMB_AES192_GCM_DEC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tagSize / 8); + errno_update(p_mgr); + break; + case IMB_KEY_256_BYTES: + IMB_AES256_GCM_INIT_VAR_IV(p_mgr, &gcm_key, &ctx, + (const void *) v->iv, + v->ivSize / 8, + (const void *) v->aad, + v->aadSize / 8); + errno_update(p_mgr); + IMB_AES256_GCM_DEC_UPDATE(p_mgr, &gcm_key, &ctx, text, + (const void *) v->ct, + v->msgSize / 8); + errno_update(p_mgr); + IMB_AES256_GCM_DEC_FINALIZE(p_mgr, &gcm_key, &ctx, tag, + v->tagSize / 8); + errno_update(p_mgr); + break; + default: + printf("Invalid key size: %u bytes!\n", + (unsigned)v->keySize / 8); + print_aead_test(v); + return 0; + } + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 0, 0)) + return 0; + } + + return 1; +} + +extern const struct aead_test chacha20_poly1305_test_json[]; + +static int test_aead_chacha20_poly1305(IMB_MGR *p_mgr) +{ + const struct aead_test *v = NULL; + struct chacha20_poly1305_context_data ctx; + uint8_t text[512], tag[16]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for (v = chacha20_poly1305_test_json; v->msg != NULL; + v++, run_vectors++) { + IMB_ASSERT(v->tagSize <= (sizeof(tag) * 8)); + IMB_ASSERT(v->msgSize <= (sizeof(text) * 8)); + + /* tag too long */ + if (v->tagSize > (sizeof(tag) * 8)) { + print_aead_test(v); + return 0; + } + /* message too long */ + if (v->msgSize > (sizeof(text) * 8)) { + print_aead_test(v); + return 0; + } + + /* test JOB API */ + IMB_JOB *job = 
IMB_GET_NEXT_JOB(p_mgr); + + /* encrypt test */ + job->cipher_mode = IMB_CIPHER_CHACHA20_POLY1305; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->key_len_in_bytes = v->keySize / 8; + job->src = (const void *) v->msg; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->msgSize / 8; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->iv = (const void *) v->iv; + job->iv_len_in_bytes = v->ivSize / 8; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + job->hash_alg = IMB_AUTH_CHACHA20_POLY1305; + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->hash_start_src_offset_in_bytes = 0; + job->enc_keys = (const void *) v->key; + job->dec_keys = (const void *) v->key; + job->u.CHACHA20_POLY1305.aad = (const void *) v->aad; + job->u.CHACHA20_POLY1305.aad_len_in_bytes = v->aadSize / 8; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 1, 1)) + return 0; + + /* decrypt test */ + job = IMB_GET_NEXT_JOB(p_mgr); + + job->cipher_mode = IMB_CIPHER_CHACHA20_POLY1305; + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->key_len_in_bytes = v->keySize / 8; + job->src = (const void *) v->ct; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->msgSize / 8; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->iv = (const void *) v->iv; + job->iv_len_in_bytes = v->ivSize / 8; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + job->hash_alg = IMB_AUTH_CHACHA20_POLY1305; + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->hash_start_src_offset_in_bytes = 0; + job->enc_keys = (const void *) v->key; + job->dec_keys = (const void *) v->key; + job->u.CHACHA20_POLY1305.aad = (const void *) v->aad; + job->u.CHACHA20_POLY1305.aad_len_in_bytes = v->aadSize / 8; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 1, 0)) + return 0; + + /* test direct API */ + + /* encrypt direction */ + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + errno_reset(); + + IMB_CHACHA20_POLY1305_INIT(p_mgr, (const void *) v->key, &ctx, + (const void *) v->iv, + (const void *) v->aad, + v->aadSize / 8); + errno_update(p_mgr); + IMB_CHACHA20_POLY1305_ENC_UPDATE(p_mgr, (const void *) v->key, + &ctx, text, + (const void *) v->msg, + v->msgSize / 8); + errno_update(p_mgr); + IMB_CHACHA20_POLY1305_ENC_FINALIZE(p_mgr, &ctx, tag, + v->tagSize / 8); + errno_update(p_mgr); + + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 0, 1)) + return 0; + + /* decrypt direction */ + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + errno_reset(); + + IMB_CHACHA20_POLY1305_INIT(p_mgr, (const void *) v->key, &ctx, + (const void *) v->iv, + (const void *) v->aad, + v->aadSize / 8); + errno_update(p_mgr); + IMB_CHACHA20_POLY1305_DEC_UPDATE(p_mgr, (const void *) v->key, + &ctx, text, + (const void *) v->ct, + v->msgSize / 8); + errno_update(p_mgr); + IMB_CHACHA20_POLY1305_DEC_FINALIZE(p_mgr, &ctx, tag, + v->tagSize / 8); + errno_update(p_mgr); + + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 0, 0)) + return 0; + } + + return 1; +} + +extern const struct aead_test aes_ccm_test_json[]; + +static int test_aead_ccm(IMB_MGR *p_mgr) +{ + const struct aead_test *v = NULL; + DECLARE_ALIGNED(uint32_t 
expkey[4*15], 16); + DECLARE_ALIGNED(uint32_t dust[4*15], 16); + uint8_t text[512], tag[16]; + + while (IMB_FLUSH_JOB(p_mgr) != NULL) + ; + + for (v = aes_ccm_test_json; v->msg != NULL; v++, run_vectors++) { + IMB_ASSERT(v->tagSize <= (sizeof(tag) * 8)); + IMB_ASSERT(v->msgSize <= (sizeof(text) * 8)); + + /* tag too long */ + if (v->tagSize > (sizeof(tag) * 8)) { + print_aead_test(v); + return 0; + } + /* message too long */ + if (v->msgSize > (sizeof(text) * 8)) { + print_aead_test(v); + return 0; + } + + if ((v->aadSize / 8) > 46) { + /* unsupported AAD sizes - skip it */ + PUTS_ONCE("AES-CCM AAD > 46 bytes not supported"); + run_vectors--; + skip_vectors++; + continue; + } + + switch (v->keySize / 8) { + case IMB_KEY_128_BYTES: + IMB_AES_KEYEXP_128(p_mgr, v->key, expkey, dust); + break; + case IMB_KEY_256_BYTES: + IMB_AES_KEYEXP_256(p_mgr, v->key, expkey, dust); + break; + case IMB_KEY_192_BYTES: + /* unsupported key size - skip it */ + PUTS_ONCE("AES-CCM-192 not supported"); + run_vectors--; + skip_vectors++; + continue; + default: + printf("Invalid key size: %u bytes!\n", + (unsigned)v->keySize / 8); + print_aead_test(v); + return 0; + } + + IMB_JOB *job = IMB_GET_NEXT_JOB(p_mgr); + + /* encrypt test */ + job->cipher_mode = IMB_CIPHER_CCM; + job->cipher_direction = IMB_DIR_ENCRYPT; + job->chain_order = IMB_ORDER_HASH_CIPHER; + job->key_len_in_bytes = v->keySize / 8; + job->src = (const void *) v->msg; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->msgSize / 8; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->hash_start_src_offset_in_bytes = UINT64_C(0); + job->iv = (const void *) v->iv; + job->iv_len_in_bytes = v->ivSize / 8; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + job->hash_alg = IMB_AUTH_AES_CCM; + job->enc_keys = expkey; + job->dec_keys = expkey; + job->u.CCM.aad_len_in_bytes = v->aadSize / 8; + job->u.CCM.aad = v->aad; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 1, 1)) + return 0; + + /* decrypt test */ + job = IMB_GET_NEXT_JOB(p_mgr); + + job->cipher_mode = IMB_CIPHER_CCM; + job->cipher_direction = IMB_DIR_DECRYPT; + job->chain_order = IMB_ORDER_CIPHER_HASH; + job->key_len_in_bytes = v->keySize / 8; + job->src = (const void *) v->ct; + job->dst = text; + job->msg_len_to_cipher_in_bytes = v->msgSize / 8; + job->cipher_start_src_offset_in_bytes = UINT64_C(0); + job->msg_len_to_hash_in_bytes = v->msgSize / 8; + job->hash_start_src_offset_in_bytes = UINT64_C(0); + job->iv = (const void *) v->iv; + job->iv_len_in_bytes = v->ivSize / 8; + job->auth_tag_output = tag; + job->auth_tag_output_len_in_bytes = v->tagSize / 8; + job->hash_alg = IMB_AUTH_AES_CCM; + job->enc_keys = expkey; + job->dec_keys = expkey; + job->u.CCM.aad_len_in_bytes = v->aadSize / 8; + job->u.CCM.aad = v->aad; + + memset(text, 0, sizeof(text)); + memset(tag, 0, sizeof(tag)); + + /* submit job and check */ + if (!aead_submit_and_check(p_mgr, v, tag, text, 1, 0)) + return 0; + } /* for(ccm_vectors) */ + + return 1; +} + +static int test_all(IMB_MGR *p_mgr) +{ + const struct { + int (*fn)(IMB_MGR *); + const char *name; + } test_tab[] = { + { test_aead_gcm, "AEAD AES-GCM" }, + { test_aead_ccm, "AEAD AES-CCM" }, + { test_aead_chacha20_poly1305, "AEAD CHACHA20-POLY1305" }, + { test_cmac, "AES-CMAC" }, + { test_gmac, "GMAC" }, + { test_hmac_sha1, "HMAC-SHA1" }, + { test_hmac_sha224, "HMAC-SHA224" }, + { 
test_hmac_sha256, "HMAC-SHA256" }, + { test_hmac_sha384, "HMAC-SHA384" }, + { test_hmac_sha512, "HMAC-SHA512" } + }; + unsigned i; + int ret = 1; + + for (i = 0; i < IMB_DIM(test_tab); i++) { + run_vectors = 0; + skip_vectors = 0; + if (test_tab[i].fn(p_mgr) == 0) { + printf("Testing %s: FAILED\n", test_tab[i].name); + ret = 0; + } else { + printf("Testing %s: PASSED (run: %u, skipped: %u)\n", + test_tab[i].name, run_vectors, skip_vectors); + } + total_run_vectors += run_vectors; + total_skip_vectors += skip_vectors; + } + return ret; +} + + +/* + * ============================================================================= + * MAIN + * ============================================================================= + */ + +static void +usage(const char *name) +{ + printf("Usage: %s [args], where args are zero or more\n" + "--aesni-emu test AESNI emulation interface\n" + "--avx512 test AVX512 interface\n" + "--avx2 test AVX2 interface\n" + "--avx test AVX interface\n" + "--sse test SSE interface\n" + "--shani-off don't use SHA extensions " + "(auto-detect by default)\n" + "--gfni-off don't use GFNI extensions " + "(auto-detect by default)\n", name); +} + +int main(int argc, const char **argv) +{ + IMB_ARCH arch_to_run = IMB_ARCH_NUM; + uint64_t flags = 0; + const uint64_t feat_flags = imb_get_feature_flags(); + IMB_MGR *p_mgr = NULL; + int i; + + if (imb_get_version() < IMB_VERSION(0, 50, 0)) { + printf("Library version detection unsupported!\n"); + } else { + printf("Tool version: %s\n", IMB_VERSION_STR); + printf("Library version: %s\n", imb_get_version_str()); + } + + for (i = 1; i < argc; i++) { + if ((strcmp(argv[i], "-h") == 0) || + (strcmp(argv[i], "--help") == 0)) { + usage(argv[0]); + return EXIT_SUCCESS; + } else if (strcmp(argv[i], "--aesni-emu") == 0) { + flags |= IMB_FLAG_AESNI_OFF; + arch_to_run = IMB_ARCH_NOAESNI; + } else if (strcmp(argv[i], "--sse") == 0) { + arch_to_run = IMB_ARCH_SSE; + } else if (strcmp(argv[i], "--avx") == 0) { + arch_to_run = IMB_ARCH_AVX; + } else if (strcmp(argv[i], "--avx2") == 0) { + arch_to_run = IMB_ARCH_AVX2; + } else if (strcmp(argv[i], "--avx512") == 0) { + arch_to_run = IMB_ARCH_AVX512; + } else if (strcmp(argv[i], "--shani-off") == 0) { + flags |= IMB_FLAG_SHANI_OFF; + } else if (strcmp(argv[i], "--gfni-off") == 0) { + flags |= IMB_FLAG_GFNI_OFF; + } + } + + p_mgr = alloc_mb_mgr(flags); + if (p_mgr == NULL) { + printf("Error allocating MB_MGR structure: %s\n", + imb_get_strerror(imb_get_errno(p_mgr))); + return EXIT_FAILURE; + } + + switch (arch_to_run) { + case IMB_ARCH_NOAESNI: + if (((feat_flags & IMB_FEATURE_AESNI_EMU) == 0) && + (imb_get_errno(p_mgr) == IMB_ERR_NO_AESNI_EMU)) { + printf("AESNI Emulation is not enabled!\n"); + free_mb_mgr(p_mgr); + return EXIT_FAILURE; + } + init_mb_mgr_sse(p_mgr); + break; + case IMB_ARCH_SSE: + init_mb_mgr_sse(p_mgr); + break; + case IMB_ARCH_AVX: + init_mb_mgr_avx(p_mgr); + break; + case IMB_ARCH_AVX2: + init_mb_mgr_avx2(p_mgr); + break; + case IMB_ARCH_AVX512: + init_mb_mgr_avx512(p_mgr); + break; + default: + /* auto-detect */ + init_mb_mgr_auto(p_mgr, &arch_to_run); + break; + } + + if (p_mgr->features & IMB_FEATURE_SELF_TEST) + printf("SELF-TEST: %s\n", + (p_mgr->features & IMB_FEATURE_SELF_TEST_PASS) ? + "PASS" : "FAIL"); + else + printf("SELF-TEST: N/A (requires library >= v1.3)\n"); + + if (imb_get_errno(p_mgr) != 0) { + printf("Error initializing MB_MGR structure! 
%s\n", + imb_get_strerror(imb_get_errno(p_mgr))); + free_mb_mgr(p_mgr); + return EXIT_FAILURE; + } + + if (!test_all(p_mgr)) { + printf("Wycheproof test complete: FAILED\n"); + free_mb_mgr(p_mgr); + return EXIT_FAILURE; + } + + printf("Test complete: PASSED (run: %u, skipped: %u)\n", + total_run_vectors, total_skip_vectors); + + free_mb_mgr(p_mgr); + return EXIT_SUCCESS; +} diff --git a/test/zuc_test.c b/test/zuc_test.c index 3f1fe8919686a84e0d25584d998af7f3c2bdbedd..f83b3864af5946e5399150c4dc0f0bddf6619390 100644 --- a/test/zuc_test.c +++ b/test/zuc_test.c @@ -334,7 +334,6 @@ int zuc_test(struct IMB_MGR *mb_mgr) test_suite_update(&eia3_256_ctx, 1, 0); } - exit_zuc_test: freePtrArray(pKeys, MAXBUFS); /*Free the key buffers*/ freePtrArray(pIV, MAXBUFS); /*Free the vector buffers*/ @@ -1017,7 +1016,8 @@ int validate_zuc_EIA_n_block(struct IMB_MGR *mb_mgr, uint8_t **pSrcData, static int verify_tag_256(void *mac, const struct test256EIA3_vectors_t *vector, - const unsigned tag_sz, const uint32_t i, const uint32_t j) + const unsigned tag_sz, const uint32_t test_idx, + const uint32_t vector_idx, const int multi_vector) { const void *ref_mac = NULL; int ret; @@ -1031,19 +1031,34 @@ verify_tag_256(void *mac, const struct test256EIA3_vectors_t *vector, ret = memcmp(mac, ref_mac, tag_sz); if (ret) { - printf("Validate ZUC-256 n block test %u, " - "index %u (Int - %u bytes): FAIL\n", - i + 1, j, tag_sz); + if (multi_vector) { + printf("Validate ZUC-256 n block multi-vector test " + "# jobs = %u, index %u (Int - %u bytes): FAIL\n", + test_idx, vector_idx, tag_sz); + + } else { + printf("Validate ZUC-256 n block test %u, " + "index %u (Int - %u bytes): FAIL\n", + test_idx + 1, vector_idx, tag_sz); + } byte_hexdump("Expected", (const uint8_t *)ref_mac, tag_sz); byte_hexdump("Found", mac, tag_sz); } #ifdef DEBUG - else - printf("Validate ZUC-256 n block test %u, " - "index %u (Int - %u bytes): PASS\n", - i + 1, j, tag_sz); + else { + if (multi_vector) { + printf("Validate ZUC-256 n block multi-vector test " + "# jobs = %u, index %u (Int - %u bytes): PASS\n", + test_idx, vector_idx, tag_sz); + + } else { + printf("Validate ZUC-256 n block test %u, " + "index %u (Int - %u bytes): PASS\n", + test_idx + 1, vector_idx, tag_sz); + } + } #endif fflush(stdout); @@ -1082,7 +1097,7 @@ int validate_zuc256_EIA3(struct IMB_MGR *mb_mgr, uint8_t **pSrcData, for (j = 0; j < numBuffs; j++) { retTmp = verify_tag_256(pDstData[j], vector, - tag_sz, i, j); + tag_sz, i, j, 0); if (retTmp) ret = retTmp; } @@ -1102,34 +1117,23 @@ int validate_zuc256_EIA3(struct IMB_MGR *mb_mgr, uint8_t **pSrcData, iv_lens[i] = vector->iv_length; } - submit_eia3_jobs(mb_mgr, pKeys, pIV, - pSrcData, pDstData, - bitLength, numBuffs, - ZUC256_KEY_LEN_IN_BYTES, ZUC_DIGEST_LEN, - iv_lens); + // Todo: tag_sz can be 8 and 16, so far only 4 bytes mac is supported + for (tag_sz = 4; tag_sz <= 4; tag_sz *= 2) { + submit_eia3_jobs(mb_mgr, pKeys, pIV, + pSrcData, pDstData, + bitLength, numBuffs, + ZUC256_KEY_LEN_IN_BYTES, tag_sz, + iv_lens); - for (i = 0; i < numBuffs; i++) { - vector = &test256EIA3_vectors[i % NUM_ZUC_256_EIA3_TESTS]; - retTmp = - memcmp(pDstData[i], &vector->mac4, - sizeof(((struct test256EIA3_vectors_t *)0)->mac4)); - if (retTmp) { - printf("Validate ZUC-256 n block multi-vector test " - "# jobs = %u, index %u (Int): FAIL\n", - numBuffs, i); - byte_hexdump("Expected", - (const uint8_t *)&vector->mac4, - ZUC_DIGEST_LEN); - byte_hexdump("Found", pDstData[i], ZUC_DIGEST_LEN); - ret = retTmp; + for (i = 0; i < numBuffs; i++) { + const 
uint32_t vector_idx = i % NUM_ZUC_256_EIA3_TESTS; + + vector = &test256EIA3_vectors[vector_idx]; + retTmp = verify_tag_256(pDstData[i], vector, + tag_sz, numBuffs, i, 1); + if (retTmp) + ret = retTmp; } -#ifdef DEBUG - else - printf("Validate ZUC-256 n block multi-vector test, " - "# jobs = %u, index %u (Int): PASS\n", - numBuffs, i); -#endif - fflush(stdout); } return ret; }; diff --git a/test/zuc_test_vectors.h b/test/zuc_test_vectors.h index 74fad44c83bdee2854e00d389fa1d639e319fdf0..8ab1b343043aab8814caf69ecaa83011e1d7fe1d 100644 --- a/test/zuc_test_vectors.h +++ b/test/zuc_test_vectors.h @@ -32,8 +32,8 @@ #define MAX_BUFFER_LENGTH_IN_BYTES ((MAX_BUFFER_LENGTH_IN_BITS) + 7)/8 #define NUM_ZUC_ALG_TESTS 3 #define NUM_ZUC_EEA3_TESTS 5 -#define NUM_ZUC_256_EEA3_TESTS 6 -#define NUM_ZUC_256_EIA3_TESTS 8 +#define NUM_ZUC_256_EEA3_TESTS 10 +#define NUM_ZUC_256_EIA3_TESTS 12 #define NUM_ZUC_EIA3_TESTS 10 #define ZUC_KEY_LEN_IN_BYTES 16 #define ZUC_IV_LEN_IN_BYTES 16 @@ -519,7 +519,191 @@ const struct test256EEA3_vectors_t test256EEA3_vectors[] = { 0x49, 0x43, 0xC6, 0xBB, 0xE8, 0xAD, 0x8A, 0xFD } }, - /* TestSet 2 */ + /* TestSet 2 */ + { + /* Key */ + {0x8f, 0x8e, 0xf9, 0xd8, 0xfb, 0x0a, 0xce, 0x2b, + 0x23, 0x19, 0x48, 0x42, 0xcb, 0x5c, 0x6d, 0x98, + 0x1e, 0x71, 0x68, 0x74, 0xe1, 0xdf, 0xeb, 0xe0, + 0xf2, 0x46, 0x02, 0x71, 0xbb, 0x69, 0x0d, 0x9e + }, + /* IV */ + {0x2c, 0xe8, 0x87, 0x0f, 0x8c, 0x7f, 0x47, 0x2a, + 0x02, 0x2d, 0x24, 0xcd, 0x23, 0x3f, 0x4d, 0x0a, + 0x40, 0x0d, 0x12, 0xdd, 0xc4, 0x16, 0x26 + }, + /* Message length */ + 1536, + /* IV length */ + 23, + /* Plaintext */ + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 + }, + /* Ciphertext */ + {0xc1, 0xce, 0x46, 0xd2, 0x4e, 0x78, 0x6f, 0x97, + 0xcf, 0xc0, 0xa5, 0x3c, 0xec, 0x50, 0x6e, 0x17, + 0xe0, 0x8c, 0x7e, 0x33, 0x84, 0x98, 0x2b, 0xc1, + 0x68, 0x97, 0x24, 0x83, 0x03, 0x7c, 0x0d, 0xc5, + 0x19, 0xa1, 0xe8, 0xb1, 0xb7, 0x53, 0x4f, 0x3b, + 0x8a, 0xa3, 0xce, 0x9b, 0x3d, 0xd0, 0x1a, 0xf7, + 0x7d, 0xae, 0x4c, 0x6b, 0xe4, 0xe3, 0x12, 0x70, + 0x63, 0xc9, 0x4c, 0xcf, 0x1f, 0xf7, 0x18, 0xdf, + 0xf1, 0x7d, 0x96, 0xe4, 0x60, 0xa8, 0x3b, 0xf5, + 0x71, 0x7d, 0x2a, 0x87, 0x1d, 0x82, 0xed, 0x92, + 0xc5, 0xe7, 0x6e, 0xd9, 0x3c, 0x01, 0x0d, 0x87, + 0x13, 0x3b, 0x1a, 0x92, 0xa2, 0x83, 0x1a, 0x5b, + 0x9a, 0xfb, 0x81, 0x1d, 0xdb, 0xbd, 0x82, 0x01, + 0x3b, 0x32, 0x0e, 0x2c, 0x67, 0x3c, 0x14, 0x13, + 0x9d, 0x58, 0xf1, 0x88, 
0x9d, 0xe5, 0xd6, 0xe3, + 0x48, 0xaa, 0x43, 0xc2, 0x08, 0xa6, 0x64, 0xa8, + 0xad, 0x71, 0x26, 0x7e, 0xe7, 0xed, 0x0c, 0x58, + 0xd3, 0x27, 0x42, 0x5e, 0x10, 0xb0, 0x03, 0x62, + 0x18, 0x30, 0xde, 0xdb, 0x45, 0xcd, 0x78, 0xdd, + 0xee, 0x4f, 0xa1, 0x45, 0xa6, 0xbf, 0xc1, 0x37, + 0x3e, 0x47, 0x5c, 0x1b, 0xb6, 0x8b, 0x63, 0x87, + 0x49, 0xc4, 0x1b, 0x9e, 0xea, 0x01, 0x62, 0x2a, + 0x44, 0x65, 0x17, 0x0f, 0xee, 0xcc, 0x7f, 0xe6, + 0xab, 0x05, 0x46, 0x25, 0x7d, 0xdc, 0x40, 0x1e + } + }, + /* TestSet 3 */ + { + /* Key */ + {0x92, 0xf9, 0x27, 0xe8, 0xab, 0x48, 0x46, 0xdb, + 0x2f, 0xa3, 0x61, 0x36, 0x7e, 0x89, 0xe1, 0x17, + 0xc9, 0x99, 0x57, 0x63, 0xe0, 0xe4, 0x4c, 0xce, + 0x20, 0x03, 0x8a, 0x9c, 0x9a, 0x44, 0xca, 0x64 + }, + /* IV */ + {0x7d, 0x51, 0xfb, 0x42, 0xf8, 0x7e, 0x62, 0xfa, + 0x60, 0x25, 0xb9, 0x2b, 0x4e, 0xd6, 0x1c, 0x2e, + 0xcc, 0x6c, 0x65, 0x18, 0x1e, 0x9d, 0x04 + }, + /* Message length */ + 768, + /* IV length */ + 23, + /* Plaintext */ + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }, + /* Ciphertext */ + {0xe4, 0x14, 0xf9, 0x26, 0x45, 0xc6, 0x2e, 0x12, + 0xb0, 0xe1, 0x33, 0xf6, 0xa7, 0x96, 0x22, 0xfb, + 0x0f, 0xe0, 0x07, 0x5c, 0x6e, 0xbe, 0x10, 0x1b, + 0x37, 0xe4, 0xf7, 0x1c, 0x94, 0xcf, 0xf1, 0x73, + 0x02, 0x64, 0x61, 0x40, 0xb4, 0xf1, 0xfb, 0xcf, + 0x8c, 0xc6, 0xa2, 0xda, 0xd7, 0x3f, 0xb4, 0xcc, + 0xa8, 0x7b, 0x13, 0xaa, 0xd2, 0x6e, 0x2a, 0x1b, + 0x0a, 0x07, 0xef, 0x88, 0x41, 0xfb, 0x6c, 0x10, + 0x3f, 0x41, 0x60, 0xb3, 0x4c, 0x7d, 0x00, 0x9c, + 0x72, 0x2f, 0x4a, 0xa2, 0xc1, 0x0c, 0xf4, 0x6f, + 0xbf, 0xed, 0xec, 0xaf, 0xeb, 0xbc, 0xc8, 0x2a, + 0x54, 0x60, 0x48, 0x7d, 0xfe, 0x20, 0x38, 0x6a + } + }, + /* TestSet 4 */ + { + /* Key */ + {0xa7, 0x72, 0xf5, 0xfe, 0x9d, 0x81, 0xd1, 0xcf, + 0x22, 0x8e, 0x45, 0x53, 0x67, 0x75, 0xac, 0xc9, + 0x04, 0x19, 0x57, 0x55, 0x0f, 0x6c, 0x39, 0xf9, + 0xc5, 0x1b, 0x1e, 0x9e, 0xbb, 0x22, 0xa2, 0xf5 + }, + /* IV */ + {0xce, 0x51, 0x11, 0x83, 0x9b, 0x64, 0x4d, 0x20, + 0x51, 0x92, 0x71, 0x3b, 0x43, 0x47, 0xf9, 0x38, + 0x79, 0x0f, 0xd2, 0x59, 0xbc, 0x35, 0xd3 + }, + /* Message length */ + 384, + /* IV length */ + 23, + /* Plaintext */ + {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + }, + /* Ciphertext */ + {0x89, 0x63, 0x69, 0xca, 0x77, 0xd3, 0x05, 0xf7, + 0xa3, 0xcb, 0xf6, 0xba, 0xb3, 0x8c, 0x14, 0x4f, + 0xc3, 0x73, 0xf4, 0xf0, 0xf5, 0x0c, 0xf1, 0xad, + 0x0f, 0x41, 0x65, 0x48, 0x40, 0x47, 0x5e, 0xef, + 0xad, 0xea, 0x1c, 0x3c, 0x15, 0xa0, 0xd2, 0x72, + 0x25, 0x14, 0x1d, 0x6f, 0xa0, 0x0f, 0xe8, 0x9a + } + }, + /* TestSet 5 */ + { + /* Key */ + {0xf8, 0xa0, 0x45, 0x4f, 0x6d, 0xea, 0x74, 0x6e, + 0x4c, 0xd1, 0x6e, 0xb0, 0xc3, 0xa2, 0x1f, 0x57, + 0xeb, 0x6f, 0x35, 0x2d, 0x6a, 0x02, 0x5b, 0x35, + 0x32, 0xba, 0x47, 0x3f, 0x1f, 0x0e, 0xdd, 0xc9 + }, + /* IV */ + {0x07, 
0x90, 0xeb, 0x7d, 0x09, 0x6d, 0xc1, 0xf1,
+                 0x86, 0x47, 0xea, 0x57, 0xe4, 0xb8, 0x92, 0xb1,
+                 0x4e, 0x3b, 0x2d, 0x62, 0xaa, 0x53, 0x6f
+                },
+                /* Message length */
+                384,
+                /* IV length */
+                23,
+                /* Plaintext */
+                {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+                },
+                /* Ciphertext */
+                {0xc6, 0x02, 0x3c, 0x58, 0x53, 0xc9, 0xae, 0xba,
+                 0x0a, 0x47, 0x10, 0xdc, 0x85, 0x76, 0x13, 0x82,
+                 0x08, 0x66, 0xbc, 0x3e, 0x9c, 0x2d, 0xb2, 0x42,
+                 0x20, 0x3a, 0x0a, 0x0c, 0x49, 0x1d, 0xe9, 0x44,
+                 0x73, 0x87, 0xe5, 0x60, 0x9d, 0x98, 0xff, 0xf1,
+                 0x33, 0xc3, 0xd0, 0x3d, 0x49, 0xfc, 0x77, 0x07
+                }
+        },
+        /* TestSet 6 */
         {
                 /* Key */
                 {0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
@@ -562,7 +746,7 @@ const struct test256EEA3_vectors_t test256EEA3_vectors[] = {
                  0x68, 0x98, 0x94, 0x16, 0xb8, 0xfa, 0xc8, 0xc2
                 }
         },
-        /* TestSet 3 */
+        /* TestSet 7 */
         {
                 /* Key */
                 {0x67, 0xc6, 0x69, 0x73, 0x51, 0xff, 0x4a, 0xec,
@@ -605,7 +789,7 @@ const struct test256EEA3_vectors_t test256EEA3_vectors[] = {
                  0x34, 0xFF, 0x9F, 0xF1, 0x3D, 0x80, 0xE8, 0x54
                 }
         },
-        /* TestSet 4 */
+        /* TestSet 8 */
         {
                 /* Key */
                 {0xC2, 0x54, 0xF8, 0x1B, 0xE8, 0xE7, 0x8D, 0x76,
@@ -648,7 +832,7 @@ const struct test256EEA3_vectors_t test256EEA3_vectors[] = {
                  0x49, 0x37, 0xF9, 0x8A, 0xC6, 0x90, 0x7C, 0x36
                 }
         },
-        /* TestSet 5 */
+        /* TestSet 9 */
         {
                 /* Key */
                 {0x8D, 0x76, 0x5A, 0x2E, 0x63, 0x33, 0x9F, 0xC9,
@@ -691,7 +875,7 @@ const struct test256EEA3_vectors_t test256EEA3_vectors[] = {
                  0xB4, 0x34, 0x5C, 0x5C, 0x0B, 0x48, 0x67, 0x37
                 }
         },
-        /* TestSet 6 */
+        /* TestSet 10 */
         {
                 /* Key */
                 {0x67, 0xc6, 0x69, 0x73, 0x51, 0xff, 0x4a, 0xec,
@@ -995,7 +1179,7 @@ const struct test128EIA3_vectors_t testEIA3_vectors[] = {
 
 const struct test256EIA3_vectors_t test256EIA3_vectors[] = {
         {
-                /*Test 1*/
+                /* Test 1 */
                 /* Key */
                 {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@@ -1025,8 +1209,151 @@ const struct test256EIA3_vectors_t test256EIA3_vectors[] = {
                  0x70, 0x84, 0xc9, 0x52, 0xa1, 0x65, 0x4b, 0x26
                 },
         },
+        /* Test 2 */
+        {
+                /* Key */
+                {0x8f, 0x8e, 0xf9, 0xd8, 0xfb, 0x0a, 0xce, 0x2b,
+                 0x23, 0x19, 0x48, 0x42, 0xcb, 0x5c, 0x6d, 0x98,
+                 0x1e, 0x71, 0x68, 0x74, 0xe1, 0xdf, 0xeb, 0xe0,
+                 0xf2, 0x46, 0x02, 0x71, 0xbb, 0x69, 0x0d, 0x9e
+                },
+                /* IV */
+                {0x2c, 0xe8, 0x87, 0x0f, 0x8c, 0x7f, 0x47, 0x2a,
+                 0x02, 0x2d, 0x24, 0xcd, 0x23, 0x3f, 0x4d, 0x0a,
+                 0x40, 0x0d, 0x12, 0xdd, 0xc4, 0x16, 0x26
+                },
+                1600,
+                /* IV length */
+                23,
+                {0x05, 0xa8, 0xc3, 0x4b, 0x70, 0x9c, 0x97, 0x71,
+                 0x67, 0x70, 0xa5, 0xa3, 0x08, 0x60, 0xca, 0x25,
+                 0x0a, 0x8b, 0xb5, 0xc1, 0xc9, 0xd5, 0x8c, 0x7d,
+                 0xfb, 0x00, 0x3b, 0xc0, 0x9d, 0xe1, 0x09, 0x9f,
+                 0xcc, 0x22, 0x8c, 0xf6, 0x12, 0x6f, 0xb9, 0x1e,
+                 0xc9, 0x45, 0x43, 0x43, 0x25, 0x7a, 0x2b, 0xba,
+                 0x64, 0x4b, 0x8c, 0x91, 0x77, 0xc8, 0xfd, 0xce,
+                 0x01, 0xcf, 0xab, 0x6b, 0xe6, 0xc2, 0x48, 0x80,
+                 0x82, 0x77, 0xad, 0xb8, 0xb9, 0x8d, 0x1f, 0xd7,
+                 0x48, 0x0b, 0x73, 0x4d, 0x98, 0x96, 0x12, 0xd5,
+                 0xf1, 0x86, 0xfd, 0xa1, 0x12, 0x50, 0x9a, 0x38,
+                 0x07, 0x37, 0xd5, 0xa3, 0xd0, 0x21, 0xfe, 0x55,
+                 0x7a, 0x8f, 0xff, 0xe0, 0x4f, 0x25, 0x9c, 0x73,
+                 0x01, 0x06, 0x66, 0xff, 0x10, 0xa4, 0xdd, 0xd4,
+                 0x2a, 0xbf, 0x0f, 0x5a, 0xa2, 0x29, 0x64, 0xd9,
+                 0x99, 0xc8, 0x46, 0xe6, 0x46, 0x48, 0x4d, 0x56,
+                 0xe9, 0x02, 0x17, 0xa8, 0x14, 0x28, 0x13, 0x22,
+                 0xf0, 0xd4, 0x43, 0xbe, 0xa0, 0x64, 0xd5, 0x28,
+                 0x99, 0x27, 0x24, 0x5d, 0x7c, 0x25, 0x46, 0xd6,
+                 0xdf, 0x2c, 0x05, 0x70, 0x5a, 0x55, 0xcd, 0xf6,
+                 0xe7, 0xdb, 0x3d, 0x94, 0x67, 0xfa, 0x67, 0x15,
+                 0xe3, 0x84, 0x96, 0x26, 0xee, 0xf4, 0x22, 0xaf,
+                 0x2f, 0xa4, 0x6e, 0xda, 0x2f, 0x4a, 0xa0, 0xcd,
+                 0x10, 0x72, 0x85, 0xb6, 0x45, 0x3b, 0x22, 0xb8,
+                 0x1f, 0xe0, 0x3c, 0xf9, 0x64, 0x29, 0xb4, 0x46
+                },
+                {0x8d, 0x74, 0x8b, 0x71},
+                {0xe5, 0x1d, 0xf2, 0x0a,
+                 0x9e, 0x74, 0x06, 0xac},
+                {0x4d, 0x40, 0x5d, 0x6e,
+                 0xf7, 0xf8, 0xaf, 0xad,
+                 0xd6, 0x71, 0x71, 0x03,
+                 0xdf, 0x92, 0x28, 0x20},
+        },
+        {
+                /* Test 3 */
+                /* Key */
+                {0x92, 0xf9, 0x27, 0xe8, 0xab, 0x48, 0x46, 0xdb,
+                 0x2f, 0xa3, 0x61, 0x36, 0x7e, 0x89, 0xe1, 0x17,
+                 0xc9, 0x99, 0x57, 0x63, 0xe0, 0xe4, 0x4c, 0xce,
+                 0x20, 0x03, 0x8a, 0x9c, 0x9a, 0x44, 0xca, 0x64
+                },
+                /* IV */
+                {0x7d, 0x51, 0xfb, 0x42, 0xf8, 0x7e, 0x62, 0xfa,
+                 0x60, 0x25, 0xb9, 0x2b, 0x4e, 0xd6, 0x1c, 0x2e,
+                 0xcc, 0x6c, 0x65, 0x18, 0x1e, 0x9d, 0x04
+                },
+                800,
+                /* IV length */
+                23,
+                {0xf6, 0x6e, 0x21, 0x54, 0xb9, 0x60, 0xb5, 0x90,
+                 0xdc, 0x35, 0xaf, 0xb9, 0x9d, 0x03, 0xf9, 0xbe,
+                 0x58, 0xf8, 0x7c, 0x5c, 0x03, 0xdb, 0x72, 0x2e,
+                 0xa6, 0x34, 0xff, 0x43, 0x8d, 0xcf, 0xd4, 0xa7,
+                 0x2a, 0x52, 0xae, 0x3a, 0xb8, 0xc7, 0x11, 0xd8,
+                 0x19, 0xd3, 0x94, 0x66, 0x84, 0x10, 0xf0, 0x81,
+                 0x45, 0xc5, 0x0b, 0x05, 0xe6, 0x89, 0xc6, 0xc9,
+                 0xb4, 0x25, 0x7b, 0xb7, 0x89, 0x42, 0xd4, 0x6c,
+                 0x1a, 0xfd, 0x00, 0x42, 0x80, 0x9d, 0x10, 0x5e,
+                 0x68, 0xd6, 0x02, 0x13, 0x07, 0x55, 0x08, 0x24,
+                 0xe5, 0x9e, 0x6b, 0xf3, 0xea, 0x04, 0xd7, 0xd7,
+                 0x8f, 0x0a, 0x48, 0x02, 0x8c, 0x98, 0xd5, 0x68,
+                 0xee, 0x11, 0x93, 0x22
+                },
+                {0xf2, 0xef, 0xfb, 0xf7},
+                {0x5a, 0x99, 0xe9, 0x23, 0xfa, 0xf1, 0xec, 0xbb},
+                {0x0b, 0xfb, 0x8f, 0xff, 0x65, 0xaf, 0x6a, 0x69,
+                 0xea, 0xde, 0xbd, 0x94, 0x79, 0x7b, 0x08, 0xa5},
+        },
         {
-                /*Test 2*/
+                /* Test 4 */
+                /* Key */
+                {0xa7, 0x72, 0xf5, 0xfe, 0x9d, 0x81, 0xd1, 0xcf,
+                 0x22, 0x8e, 0x45, 0x53, 0x67, 0x75, 0xac, 0xc9,
+                 0x04, 0x19, 0x57, 0x55, 0x0f, 0x6c, 0x39, 0xf9,
+                 0xc5, 0x1b, 0x1e, 0x9e, 0xbb, 0x22, 0xa2, 0xf5
+                },
+                /* IV */
+                {0xce, 0x51, 0x11, 0x83, 0x9b, 0x64, 0x4d, 0x20,
+                 0x51, 0x92, 0x71, 0x3b, 0x43, 0x47, 0xf9, 0x38,
+                 0x79, 0x0f, 0xd2, 0x59, 0xbc, 0x35, 0xd3
+                },
+                400,
+                /* IV length */
+                23,
+                {0xad, 0x2a, 0x9a, 0x9a, 0x7e, 0xb8, 0xb9, 0x72,
+                 0xc5, 0x50, 0xe8, 0x28, 0x5e, 0x17, 0xda, 0xa2,
+                 0x4c, 0x9a, 0xeb, 0x61, 0x72, 0xc6, 0xa7, 0x06,
+                 0x54, 0x32, 0xa6, 0x5b, 0xc5, 0x8b, 0xd7, 0xa2,
+                 0xe0, 0x5e, 0x18, 0xf5, 0x41, 0x8e, 0xa8, 0x6e,
+                 0x50, 0xd9, 0xde, 0x67, 0x28, 0x04, 0xee, 0x22,
+                 0xa5, 0x72
+                },
+                {0x18, 0x3d, 0xf5, 0xf7},
+                {0x50, 0xa0, 0xc4, 0x24, 0xa9, 0x25, 0xf4, 0x5b},
+                {0x4b, 0xde, 0xd3, 0x15, 0xa4, 0x80, 0xa7, 0xe4,
+                 0x02, 0xe4, 0xc4, 0x48, 0x90, 0xfe, 0x62, 0xf5},
+        },
+        {
+                /* Test 5 */
+                /* Key */
+                {0xf8, 0xa0, 0x45, 0x4f, 0x6d, 0xea, 0x74, 0x6e,
+                 0x4c, 0xd1, 0x6e, 0xb0, 0xc3, 0xa2, 0x1f, 0x57,
+                 0xeb, 0x6f, 0x35, 0x2d, 0x6a, 0x02, 0x5b, 0x35,
+                 0x32, 0xba, 0x47, 0x3f, 0x1f, 0x0e, 0xdd, 0xc9
+                },
+                /* IV */
+                {0x07, 0x90, 0xeb, 0x7d, 0x09, 0x6d, 0xc1, 0xf1,
+                 0x86, 0x47, 0xea, 0x57, 0xe4, 0xb8, 0x92, 0xb1,
+                 0x4e, 0x3b, 0x2d, 0x62, 0xaa, 0x53, 0x6f
+                },
+                400,
+                /* IV length */
+                23,
+                {0xf7, 0xc3, 0xc4, 0x82, 0xe7, 0x20, 0x76, 0xa2,
+                 0x78, 0x5d, 0xe1, 0xcb, 0xa5, 0x3f, 0x7d, 0x7c,
+                 0xa3, 0x84, 0x0b, 0x69, 0xff, 0x3b, 0x19, 0xb5,
+                 0x6b, 0x9f, 0x25, 0x04, 0x35, 0xad, 0x89, 0x3b,
+                 0xad, 0xba, 0xa5, 0xe1, 0xe8, 0x4e, 0xa4, 0xf5,
+                 0x49, 0x84, 0x9a, 0x2c, 0x71, 0xb1, 0xd6, 0xc1,
+                 0x1b, 0xdc
+                },
+                {0xb8, 0xbf, 0x02, 0x13},
+                {0x2c, 0xfc, 0xa3, 0x59, 0x33, 0x50, 0xd8, 0xee},
+                {0x9c, 0x6e, 0xd5, 0xf9, 0x63, 0x95, 0xa7, 0x28,
+                 0xe1, 0x6b, 0xb2, 0x8a, 0x0e, 0x5b, 0x90, 0x72}
+        },
+        {
+                /* Test 6 */
                 /* Key */
                 {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@@ -1113,7 +1440,7 @@ const struct test256EIA3_vectors_t test256EIA3_vectors[] = {
                 },
         },
         {
-                /*Test 3*/
+                /* Test 7 */
                 /* Key */
                 {0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
@@ -1144,7 +1471,7 @@ const struct test256EIA3_vectors_t test256EIA3_vectors[] = {
                 },
         },
         {
-                /*Test 4*/
+                /* Test 8 */
                 /* Key */
                 {0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
@@ -1231,7 +1558,7 @@ const struct test256EIA3_vectors_t test256EIA3_vectors[] = {
                 },
         },
         {
-                /*Test 5*/
+                /* Test 9 */
                 /* Key */
                 {0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
                  0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
@@ -1261,7 +1588,7 @@ const struct test256EIA3_vectors_t test256EIA3_vectors[] = {
                 },
         },
         {
-                /*Test 6*/
+                /* Test 10 */
                 /* Key */
                 {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@@ -1290,7 +1617,7 @@ const struct test256EIA3_vectors_t test256EIA3_vectors[] = {
                 }
         },
         {
-                /*Test 7*/
+                /* Test 11 */
                 /* Key */
                 {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
@@ -1319,7 +1646,7 @@ const struct test256EIA3_vectors_t test256EIA3_vectors[] = {
                 }
         },
         {
-                /*Test 8*/
+                /* Test 12 */
                 /* Key */
                 {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
                  0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
diff --git a/win_x64.mak b/win_x64.mak
index 6c097cb91e0fe42a7906f102e96aafae384de3fd..8fac25413477fe29a1ec1b814dd6ab0b401fd281 100644
--- a/win_x64.mak
+++ b/win_x64.mak
@@ -28,11 +28,13 @@
 all:
        cd lib & $(MAKE) /f win_x64.mak
        cd test & $(MAKE) /f win_x64.mak
+       cd test\wycheproof & $(MAKE) /f win_x64.mak
        cd perf & $(MAKE) /f win_x64.mak
 
 clean:
        cd lib & $(MAKE) /f win_x64.mak clean
        cd test & $(MAKE) /f win_x64.mak clean
+       cd test\wycheproof & $(MAKE) /f win_x64.mak clean
        cd perf & $(MAKE) /f win_x64.mak clean
 
 install:
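
Reviewer aid (not part of the patch): the ZUC-256 vector tables above interleave keys, IVs, bit lengths, messages and three MAC sizes per entry, which is hard to follow in diff form. The sketch below shows one possible C layout for an EIA3-256 vector entry and a loop over the three tag sizes. All names here (`zuc256_eia3_vector`, `tag4`, `expected_tag`, the size macros) are hypothetical illustrations inferred from the data above; this is not the `test256EIA3_vectors_t` definition used by the patch, and no intel-ipsec-mb API is called.

```
/*
 * Hypothetical sketch only -- inferred from the vector data above,
 * not the structure used by test256EIA3_vectors_t in this patch.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define ZUC256_KEY_LEN 32   /* 256-bit key */
#define ZUC256_IV_MAX  25   /* ZUC-256 IV is up to 25 bytes; 23 used above */
#define MSG_MAX_BYTES  256

struct zuc256_eia3_vector {
        uint8_t key[ZUC256_KEY_LEN];
        uint8_t iv[ZUC256_IV_MAX];
        uint32_t msg_len_bits;      /* e.g. 1600, 800, 400 in the vectors above */
        uint32_t iv_len;            /* IV length in bytes */
        uint8_t msg[MSG_MAX_BYTES]; /* message to be authenticated */
        uint8_t tag4[4];            /* expected 32-bit MAC */
        uint8_t tag8[8];            /* expected 64-bit MAC */
        uint8_t tag16[16];          /* expected 128-bit MAC */
};

/* Pick the expected tag for a MAC length of 4, 8 or 16 bytes. */
static const uint8_t *expected_tag(const struct zuc256_eia3_vector *v,
                                   size_t tag_len)
{
        switch (tag_len) {
        case 4:
                return v->tag4;
        case 8:
                return v->tag8;
        case 16:
                return v->tag16;
        default:
                return NULL;
        }
}

int main(void)
{
        /* One dummy entry; a real table would carry the bytes shown above. */
        static const struct zuc256_eia3_vector vectors[] = {
                { .msg_len_bits = 400, .iv_len = 23 },
        };
        const size_t tag_lens[] = { 4, 8, 16 };
        size_t i, t;

        for (i = 0; i < sizeof(vectors) / sizeof(vectors[0]); i++) {
                for (t = 0; t < 3; t++) {
                        const uint8_t *tag = expected_tag(&vectors[i],
                                                          tag_lens[t]);

                        printf("vector %zu: %u-bit msg, %zu-byte tag, "
                               "tag[0]=0x%02x\n",
                               i, (unsigned) vectors[i].msg_len_bits,
                               tag_lens[t], (unsigned) tag[0]);
                }
        }
        return 0;
}
```

Keeping the 4-, 8- and 16-byte tags in a single entry lets one loop exercise all three MAC configurations against the same key/IV/message, which mirrors how the vectors above carry three digest arrays per test.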