x86, hash: Simplify switch, add __init annotation
arch/x86/lib/hash.c
/*
 * Some portions derived from code covered by the following notice:
 *
 * Copyright (c) 2010-2013 Intel Corporation. All rights reserved.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in
 *     the documentation and/or other materials provided with the
 *     distribution.
 *   * Neither the name of Intel Corporation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <linux/hash.h>
#include <linux/init.h>

#include <asm/processor.h>
#include <asm/cpufeature.h>
#include <asm/hash.h>

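/*
 * Accumulate one 32-bit word into the running CRC using the SSE4.2
 * CRC32 instruction.  When the assembler cannot emit the crc32l
 * mnemonic (!CONFIG_AS_CRC32), the raw opcode bytes are used instead;
 * they encode "crc32l %ecx,%eax", hence the fixed "a"/"c" register
 * constraints in that variant.
 */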
static inline u32 crc32_u32(u32 crc, u32 val)
{
#ifdef CONFIG_AS_CRC32
	asm ("crc32l %1,%0\n" : "+r" (crc) : "rm" (val));
#else
	asm (".byte 0xf2, 0x0f, 0x38, 0xf1, 0xc1" : "+a" (crc) : "c" (val));
#endif
	return crc;
}

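/*
 * Hash an arbitrary byte buffer: consume the input one 32-bit word at
 * a time, then fold the one to three trailing bytes, if any, into a
 * single final crc32 step.
 */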
static u32 intel_crc4_2_hash(const void *data, u32 len, u32 seed)
{
	const u32 *p32 = (const u32 *) data;
	u32 i, tmp = 0;

	for (i = 0; i < len / 4; i++)
		seed = crc32_u32(seed, *p32++);

	switch (len & 3) {
	case 3:
		tmp |= *((const u8 *) p32 + 2) << 16;
		/* fallthrough */
	case 2:
		tmp |= *((const u8 *) p32 + 1) << 8;
		/* fallthrough */
	case 1:
		tmp |= *((const u8 *) p32);
		seed = crc32_u32(seed, tmp);
		break;
	}

	return seed;
}

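/*
 * Same idea as above, but for buffers that are already a whole number
 * of 32-bit words; here len counts words, not bytes.
 */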
static u32 intel_crc4_2_hash2(const u32 *data, u32 len, u32 seed)
{
	const u32 *p32 = (const u32 *) data;
	u32 i;

	for (i = 0; i < len; i++)
		seed = crc32_u32(seed, *p32++);

	return seed;
}

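/*
 * Install the CRC32-based routines as the architecture's fast-hash
 * implementation when the CPU supports SSE4.2; otherwise *ops is left
 * untouched and whatever defaults the caller installed stay in effect.
 */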
void __init setup_arch_fast_hash(struct fast_hash_ops *ops)
{
	if (cpu_has_xmm4_2) {
		ops->hash = intel_crc4_2_hash;
		ops->hash2 = intel_crc4_2_hash2;
	}
}
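For context, a minimal sketch of how these ops might be consumed by generic code. It assumes a fast_hash_ops table pre-populated with the generic jhash/jhash2 fallbacks and a caller that dispatches through it; the names default_hash_ops, example_fast_hash and example_init are illustrative and do not appear in this file.

#include <linux/hash.h>
#include <linux/jhash.h>

/* Illustrative only: a fallback ops table and a dispatching caller. */
static struct fast_hash_ops default_hash_ops = {
	.hash  = jhash,		/* generic byte-oriented fallback */
	.hash2 = jhash2,	/* generic u32-word fallback */
};

static u32 example_fast_hash(const void *data, u32 len, u32 seed)
{
	/* setup_arch_fast_hash() may have swapped in the CRC32 routines. */
	return default_hash_ops.hash(data, len, seed);
}

static void __init example_init(void)
{
	/* Let the architecture override the defaults if it can. */
	setup_arch_fast_hash(&default_hash_ops);
}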