2 * Author......: Jens Steube <jens.steube@gmail.com>
10 #include "include/constants.h"
11 #include "include/kernel_vendor.h"
18 #include "include/kernel_functions.c"
19 #include "OpenCL/types_ocl.c"
20 #include "OpenCL/common.c"
21 #include "OpenCL/simd.c"
/**
 * Inner kernel body for hash-mode 4900: SHA-1 over salt || password || salt
 * (see the salt_pw_salt_len computation below and the "put the password after
 * the first salt but before the second salt" note inside the loop).
 * Multi-hash ("m") variant: every candidate digest is pushed through the
 * bitmap/bloom comparison via COMPARE_M_SIMD.
 *
 * w0..w3    : base password words for this work-item (little-endian packed)
 * pw_len    : password length in bytes
 * bfs_buf   : brute-force candidate words mixed in per il_pos iteration
 * salt_bufs : salt data + length, indexed by salt_pos
 * Remaining parameters are the standard hashcat kernel plumbing (bitmaps,
 * digests, result buffers) consumed by the COMPARE_M_SIMD macro.
 *
 * NOTE(review): several declarations (salt_buf0..3, the _t copies, t0..t3,
 * w0l, SHA-1 state a..e, we_t) are not visible in this excerpt — they live on
 * the elided lines of the original file.
 */
23 void m04900m (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 pw_len, __global pw_t *pws, __global kernel_rule_t *rules_buf, __global comb_t *combs_buf, __global bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global u32 *bitmaps_buf_s1_a, __global u32 *bitmaps_buf_s1_b, __global u32 *bitmaps_buf_s1_c, __global u32 *bitmaps_buf_s1_d, __global u32 *bitmaps_buf_s2_a, __global u32 *bitmaps_buf_s2_b, __global u32 *bitmaps_buf_s2_c, __global u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global digest_t *digests_buf, __global u32 *hashes_shown, __global salt_t *salt_bufs, __global void *esalt_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset)
29 const u32 gid = get_global_id (0);
30 const u32 lid = get_local_id (0);
/* First copy of the salt: used verbatim as the leading salt of the message. */
41 salt_buf0[0] = salt_bufs[salt_pos].salt_buf[ 0];
42 salt_buf0[1] = salt_bufs[salt_pos].salt_buf[ 1];
43 salt_buf0[2] = salt_bufs[salt_pos].salt_buf[ 2];
44 salt_buf0[3] = salt_bufs[salt_pos].salt_buf[ 3];
45 salt_buf1[0] = salt_bufs[salt_pos].salt_buf[ 4];
46 salt_buf1[1] = salt_bufs[salt_pos].salt_buf[ 5];
47 salt_buf1[2] = salt_bufs[salt_pos].salt_buf[ 6];
48 salt_buf1[3] = salt_bufs[salt_pos].salt_buf[ 7];
49 salt_buf2[0] = salt_bufs[salt_pos].salt_buf[ 8];
50 salt_buf2[1] = salt_bufs[salt_pos].salt_buf[ 9];
51 salt_buf2[2] = salt_bufs[salt_pos].salt_buf[10];
52 salt_buf2[3] = salt_bufs[salt_pos].salt_buf[11];
53 salt_buf3[0] = salt_bufs[salt_pos].salt_buf[12];
54 salt_buf3[1] = salt_bufs[salt_pos].salt_buf[13];
55 salt_buf3[2] = salt_bufs[salt_pos].salt_buf[14];
56 salt_buf3[3] = salt_bufs[salt_pos].salt_buf[15];
/* Second copy of the salt: will be byte-shifted to sit after pw_salt_len
   (i.e. after salt+password) and then OR-merged into the first copy. */
63 salt_buf0_t[0] = salt_bufs[salt_pos].salt_buf[ 0];
64 salt_buf0_t[1] = salt_bufs[salt_pos].salt_buf[ 1];
65 salt_buf0_t[2] = salt_bufs[salt_pos].salt_buf[ 2];
66 salt_buf0_t[3] = salt_bufs[salt_pos].salt_buf[ 3];
67 salt_buf1_t[0] = salt_bufs[salt_pos].salt_buf[ 4];
68 salt_buf1_t[1] = salt_bufs[salt_pos].salt_buf[ 5];
69 salt_buf1_t[2] = salt_bufs[salt_pos].salt_buf[ 6];
70 salt_buf1_t[3] = salt_bufs[salt_pos].salt_buf[ 7];
71 salt_buf2_t[0] = salt_bufs[salt_pos].salt_buf[ 8];
72 salt_buf2_t[1] = salt_bufs[salt_pos].salt_buf[ 9];
73 salt_buf2_t[2] = salt_bufs[salt_pos].salt_buf[10];
74 salt_buf2_t[3] = salt_bufs[salt_pos].salt_buf[11];
75 salt_buf3_t[0] = salt_bufs[salt_pos].salt_buf[12];
76 salt_buf3_t[1] = salt_bufs[salt_pos].salt_buf[13];
77 salt_buf3_t[2] = salt_bufs[salt_pos].salt_buf[14];
78 salt_buf3_t[3] = salt_bufs[salt_pos].salt_buf[15];
80 const u32 salt_len = salt_bufs[salt_pos].salt_len;
/* Byte offset of the trailing salt = salt + password. */
82 const u32 pw_salt_len = pw_len + salt_len;
/* Total message length: salt || password || salt. */
84 const u32 salt_pw_salt_len = salt_len + pw_len + salt_len;
/* Shift the second salt copy right by pw_salt_len bytes so it lands where
   the trailing salt belongs (scalar variant, salt is loop-invariant). */
86 switch_buffer_by_offset_le_S (salt_buf0_t, salt_buf1_t, salt_buf2_t, salt_buf3_t, pw_salt_len);
/* Merge leading and (shifted) trailing salt into a single template buffer;
   the gap in the middle (zeros) is filled with the password inside the loop. */
88 salt_buf0[0] |= salt_buf0_t[0];
89 salt_buf0[1] |= salt_buf0_t[1];
90 salt_buf0[2] |= salt_buf0_t[2];
91 salt_buf0[3] |= salt_buf0_t[3];
92 salt_buf1[0] |= salt_buf1_t[0];
93 salt_buf1[1] |= salt_buf1_t[1];
94 salt_buf1[2] |= salt_buf1_t[2];
95 salt_buf1[3] |= salt_buf1_t[3];
96 salt_buf2[0] |= salt_buf2_t[0];
97 salt_buf2[1] |= salt_buf2_t[1];
98 salt_buf2[2] |= salt_buf2_t[2];
99 salt_buf2[3] |= salt_buf2_t[3];
100 salt_buf3[0] |= salt_buf3_t[0];
101 salt_buf3[1] |= salt_buf3_t[1];
102 salt_buf3[2] |= salt_buf3_t[2];
103 salt_buf3[3] |= salt_buf3_t[3];
/* Append the 0x80 SHA-1 padding byte right after the full message. */
105 append_0x80_4x4_S (salt_buf0, salt_buf1, salt_buf2, salt_buf3, salt_pw_salt_len);
/* Brute-force inner loop: one candidate batch per VECT_SIZE lanes. */
113 for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
115 const u32x w0r = ix_create_bft (bfs_buf, il_pos);
/* Combine the fixed left part of word 0 with the per-iteration right part. */
117 const u32x w0lr = w0l | w0r;
142 * put the password after the first salt but before the second salt
/* Shift the password words right by salt_len bytes, then OR in the merged
   salt template to form the complete message block t0..t3. */
145 switch_buffer_by_offset_le (t0, t1, t2, t3, salt_len);
147 t0[0] |= salt_buf0[0];
148 t0[1] |= salt_buf0[1];
149 t0[2] |= salt_buf0[2];
150 t0[3] |= salt_buf0[3];
151 t1[0] |= salt_buf1[0];
152 t1[1] |= salt_buf1[1];
153 t1[2] |= salt_buf1[2];
154 t1[3] |= salt_buf1[3];
155 t2[0] |= salt_buf2[0];
156 t2[1] |= salt_buf2[1];
157 t2[2] |= salt_buf2[2];
158 t2[3] |= salt_buf2[3];
159 t3[0] |= salt_buf3[0];
160 t3[1] |= salt_buf3[1];
161 t3[2] |= salt_buf3[2];
/* Byte-swap the little-endian message words into SHA-1's big-endian order. */
167 u32x w0_t = swap32 (t0[0]);
168 u32x w1_t = swap32 (t0[1]);
169 u32x w2_t = swap32 (t0[2]);
170 u32x w3_t = swap32 (t0[3]);
171 u32x w4_t = swap32 (t1[0]);
172 u32x w5_t = swap32 (t1[1]);
173 u32x w6_t = swap32 (t1[2]);
174 u32x w7_t = swap32 (t1[3]);
175 u32x w8_t = swap32 (t2[0]);
176 u32x w9_t = swap32 (t2[1]);
177 u32x wa_t = swap32 (t2[2]);
178 u32x wb_t = swap32 (t2[3]);
179 u32x wc_t = swap32 (t3[0]);
180 u32x wd_t = swap32 (t3[1]);
/* Final schedule word: SHA-1 message length in bits. */
182 u32x wf_t = salt_pw_salt_len * 8;
/* SHA-1 rounds 0..19 (F0o = Ch). */
193 SHA1_STEP (SHA1_F0o, a, b, c, d, e, w0_t);
194 SHA1_STEP (SHA1_F0o, e, a, b, c, d, w1_t);
195 SHA1_STEP (SHA1_F0o, d, e, a, b, c, w2_t);
196 SHA1_STEP (SHA1_F0o, c, d, e, a, b, w3_t);
197 SHA1_STEP (SHA1_F0o, b, c, d, e, a, w4_t);
198 SHA1_STEP (SHA1_F0o, a, b, c, d, e, w5_t);
199 SHA1_STEP (SHA1_F0o, e, a, b, c, d, w6_t);
200 SHA1_STEP (SHA1_F0o, d, e, a, b, c, w7_t);
201 SHA1_STEP (SHA1_F0o, c, d, e, a, b, w8_t);
202 SHA1_STEP (SHA1_F0o, b, c, d, e, a, w9_t);
203 SHA1_STEP (SHA1_F0o, a, b, c, d, e, wa_t);
204 SHA1_STEP (SHA1_F0o, e, a, b, c, d, wb_t);
205 SHA1_STEP (SHA1_F0o, d, e, a, b, c, wc_t);
206 SHA1_STEP (SHA1_F0o, c, d, e, a, b, wd_t);
207 SHA1_STEP (SHA1_F0o, b, c, d, e, a, we_t);
208 SHA1_STEP (SHA1_F0o, a, b, c, d, e, wf_t);
/* Rounds 16..19 with on-the-fly message expansion: w[t] = rotl1(w[t-3]^w[t-8]^w[t-14]^w[t-16]). */
209 w0_t = rotl32 ((wd_t ^ w8_t ^ w2_t ^ w0_t), 1u); SHA1_STEP (SHA1_F0o, e, a, b, c, d, w0_t);
210 w1_t = rotl32 ((we_t ^ w9_t ^ w3_t ^ w1_t), 1u); SHA1_STEP (SHA1_F0o, d, e, a, b, c, w1_t);
211 w2_t = rotl32 ((wf_t ^ wa_t ^ w4_t ^ w2_t), 1u); SHA1_STEP (SHA1_F0o, c, d, e, a, b, w2_t);
212 w3_t = rotl32 ((w0_t ^ wb_t ^ w5_t ^ w3_t), 1u); SHA1_STEP (SHA1_F0o, b, c, d, e, a, w3_t);
/* Rounds 20..39 (F1 = Parity). */
217 w4_t = rotl32 ((w1_t ^ wc_t ^ w6_t ^ w4_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w4_t);
218 w5_t = rotl32 ((w2_t ^ wd_t ^ w7_t ^ w5_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, w5_t);
219 w6_t = rotl32 ((w3_t ^ we_t ^ w8_t ^ w6_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w6_t);
220 w7_t = rotl32 ((w4_t ^ wf_t ^ w9_t ^ w7_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w7_t);
221 w8_t = rotl32 ((w5_t ^ w0_t ^ wa_t ^ w8_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w8_t);
222 w9_t = rotl32 ((w6_t ^ w1_t ^ wb_t ^ w9_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w9_t);
223 wa_t = rotl32 ((w7_t ^ w2_t ^ wc_t ^ wa_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, wa_t);
224 wb_t = rotl32 ((w8_t ^ w3_t ^ wd_t ^ wb_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, wb_t);
225 wc_t = rotl32 ((w9_t ^ w4_t ^ we_t ^ wc_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, wc_t);
226 wd_t = rotl32 ((wa_t ^ w5_t ^ wf_t ^ wd_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, wd_t);
227 we_t = rotl32 ((wb_t ^ w6_t ^ w0_t ^ we_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, we_t);
228 wf_t = rotl32 ((wc_t ^ w7_t ^ w1_t ^ wf_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, wf_t);
229 w0_t = rotl32 ((wd_t ^ w8_t ^ w2_t ^ w0_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w0_t);
230 w1_t = rotl32 ((we_t ^ w9_t ^ w3_t ^ w1_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w1_t);
231 w2_t = rotl32 ((wf_t ^ wa_t ^ w4_t ^ w2_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w2_t);
232 w3_t = rotl32 ((w0_t ^ wb_t ^ w5_t ^ w3_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w3_t);
233 w4_t = rotl32 ((w1_t ^ wc_t ^ w6_t ^ w4_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, w4_t);
234 w5_t = rotl32 ((w2_t ^ wd_t ^ w7_t ^ w5_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w5_t);
235 w6_t = rotl32 ((w3_t ^ we_t ^ w8_t ^ w6_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w6_t);
236 w7_t = rotl32 ((w4_t ^ wf_t ^ w9_t ^ w7_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w7_t);
/* Rounds 40..59 (F2o = Maj). */
241 w8_t = rotl32 ((w5_t ^ w0_t ^ wa_t ^ w8_t), 1u); SHA1_STEP (SHA1_F2o, a, b, c, d, e, w8_t);
242 w9_t = rotl32 ((w6_t ^ w1_t ^ wb_t ^ w9_t), 1u); SHA1_STEP (SHA1_F2o, e, a, b, c, d, w9_t);
243 wa_t = rotl32 ((w7_t ^ w2_t ^ wc_t ^ wa_t), 1u); SHA1_STEP (SHA1_F2o, d, e, a, b, c, wa_t);
244 wb_t = rotl32 ((w8_t ^ w3_t ^ wd_t ^ wb_t), 1u); SHA1_STEP (SHA1_F2o, c, d, e, a, b, wb_t);
245 wc_t = rotl32 ((w9_t ^ w4_t ^ we_t ^ wc_t), 1u); SHA1_STEP (SHA1_F2o, b, c, d, e, a, wc_t);
246 wd_t = rotl32 ((wa_t ^ w5_t ^ wf_t ^ wd_t), 1u); SHA1_STEP (SHA1_F2o, a, b, c, d, e, wd_t);
247 we_t = rotl32 ((wb_t ^ w6_t ^ w0_t ^ we_t), 1u); SHA1_STEP (SHA1_F2o, e, a, b, c, d, we_t);
248 wf_t = rotl32 ((wc_t ^ w7_t ^ w1_t ^ wf_t), 1u); SHA1_STEP (SHA1_F2o, d, e, a, b, c, wf_t);
249 w0_t = rotl32 ((wd_t ^ w8_t ^ w2_t ^ w0_t), 1u); SHA1_STEP (SHA1_F2o, c, d, e, a, b, w0_t);
250 w1_t = rotl32 ((we_t ^ w9_t ^ w3_t ^ w1_t), 1u); SHA1_STEP (SHA1_F2o, b, c, d, e, a, w1_t);
251 w2_t = rotl32 ((wf_t ^ wa_t ^ w4_t ^ w2_t), 1u); SHA1_STEP (SHA1_F2o, a, b, c, d, e, w2_t);
252 w3_t = rotl32 ((w0_t ^ wb_t ^ w5_t ^ w3_t), 1u); SHA1_STEP (SHA1_F2o, e, a, b, c, d, w3_t);
253 w4_t = rotl32 ((w1_t ^ wc_t ^ w6_t ^ w4_t), 1u); SHA1_STEP (SHA1_F2o, d, e, a, b, c, w4_t);
254 w5_t = rotl32 ((w2_t ^ wd_t ^ w7_t ^ w5_t), 1u); SHA1_STEP (SHA1_F2o, c, d, e, a, b, w5_t);
255 w6_t = rotl32 ((w3_t ^ we_t ^ w8_t ^ w6_t), 1u); SHA1_STEP (SHA1_F2o, b, c, d, e, a, w6_t);
256 w7_t = rotl32 ((w4_t ^ wf_t ^ w9_t ^ w7_t), 1u); SHA1_STEP (SHA1_F2o, a, b, c, d, e, w7_t);
257 w8_t = rotl32 ((w5_t ^ w0_t ^ wa_t ^ w8_t), 1u); SHA1_STEP (SHA1_F2o, e, a, b, c, d, w8_t);
258 w9_t = rotl32 ((w6_t ^ w1_t ^ wb_t ^ w9_t), 1u); SHA1_STEP (SHA1_F2o, d, e, a, b, c, w9_t);
259 wa_t = rotl32 ((w7_t ^ w2_t ^ wc_t ^ wa_t), 1u); SHA1_STEP (SHA1_F2o, c, d, e, a, b, wa_t);
260 wb_t = rotl32 ((w8_t ^ w3_t ^ wd_t ^ wb_t), 1u); SHA1_STEP (SHA1_F2o, b, c, d, e, a, wb_t);
/* Rounds 60..79 (F1 = Parity again). */
265 wc_t = rotl32 ((w9_t ^ w4_t ^ we_t ^ wc_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, wc_t);
266 wd_t = rotl32 ((wa_t ^ w5_t ^ wf_t ^ wd_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, wd_t);
267 we_t = rotl32 ((wb_t ^ w6_t ^ w0_t ^ we_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, we_t);
268 wf_t = rotl32 ((wc_t ^ w7_t ^ w1_t ^ wf_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, wf_t);
269 w0_t = rotl32 ((wd_t ^ w8_t ^ w2_t ^ w0_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w0_t);
270 w1_t = rotl32 ((we_t ^ w9_t ^ w3_t ^ w1_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w1_t);
271 w2_t = rotl32 ((wf_t ^ wa_t ^ w4_t ^ w2_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, w2_t);
272 w3_t = rotl32 ((w0_t ^ wb_t ^ w5_t ^ w3_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w3_t);
273 w4_t = rotl32 ((w1_t ^ wc_t ^ w6_t ^ w4_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w4_t);
274 w5_t = rotl32 ((w2_t ^ wd_t ^ w7_t ^ w5_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w5_t);
275 w6_t = rotl32 ((w3_t ^ we_t ^ w8_t ^ w6_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w6_t);
276 w7_t = rotl32 ((w4_t ^ wf_t ^ w9_t ^ w7_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, w7_t);
277 w8_t = rotl32 ((w5_t ^ w0_t ^ wa_t ^ w8_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w8_t);
278 w9_t = rotl32 ((w6_t ^ w1_t ^ wb_t ^ w9_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w9_t);
279 wa_t = rotl32 ((w7_t ^ w2_t ^ wc_t ^ wa_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, wa_t);
280 wb_t = rotl32 ((w8_t ^ w3_t ^ wd_t ^ wb_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, wb_t);
281 wc_t = rotl32 ((w9_t ^ w4_t ^ we_t ^ wc_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, wc_t);
282 wd_t = rotl32 ((wa_t ^ w5_t ^ wf_t ^ wd_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, wd_t);
283 we_t = rotl32 ((wb_t ^ w6_t ^ w0_t ^ we_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, we_t);
284 wf_t = rotl32 ((wc_t ^ w7_t ^ w1_t ^ wf_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, wf_t);
/* Multi-hash comparison against the bitmap filters / digest list. */
286 COMPARE_M_SIMD (d, e, c, b);
/**
 * Inner kernel body for hash-mode 4900, single-hash ("s") variant: same
 * SHA-1(salt || password || salt) computation as m04900m, but it loads the
 * one target digest up front and uses a reversed 'e' value (e_rev) to reject
 * non-matching candidates four rounds early (see MATCHES_NONE_VS below),
 * finishing with COMPARE_S_SIMD instead of the bitmap path.
 *
 * Parameters are identical to m04900m.
 *
 * NOTE(review): local declarations (salt buffers, t0..t3, w0l, a..e, we_t)
 * are on lines elided from this excerpt.
 */
290 void m04900s (u32 w0[4], u32 w1[4], u32 w2[4], u32 w3[4], const u32 pw_len, __global pw_t *pws, __global kernel_rule_t *rules_buf, __global comb_t *combs_buf, __global bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global u32 *bitmaps_buf_s1_a, __global u32 *bitmaps_buf_s1_b, __global u32 *bitmaps_buf_s1_c, __global u32 *bitmaps_buf_s1_d, __global u32 *bitmaps_buf_s2_a, __global u32 *bitmaps_buf_s2_b, __global u32 *bitmaps_buf_s2_c, __global u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global digest_t *digests_buf, __global u32 *hashes_shown, __global salt_t *salt_bufs, __global void *esalt_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset)
296 const u32 gid = get_global_id (0);
297 const u32 lid = get_local_id (0);
/* First copy of the salt: leading salt of the message. */
308 salt_buf0[0] = salt_bufs[salt_pos].salt_buf[ 0];
309 salt_buf0[1] = salt_bufs[salt_pos].salt_buf[ 1];
310 salt_buf0[2] = salt_bufs[salt_pos].salt_buf[ 2];
311 salt_buf0[3] = salt_bufs[salt_pos].salt_buf[ 3];
312 salt_buf1[0] = salt_bufs[salt_pos].salt_buf[ 4];
313 salt_buf1[1] = salt_bufs[salt_pos].salt_buf[ 5];
314 salt_buf1[2] = salt_bufs[salt_pos].salt_buf[ 6];
315 salt_buf1[3] = salt_bufs[salt_pos].salt_buf[ 7];
316 salt_buf2[0] = salt_bufs[salt_pos].salt_buf[ 8];
317 salt_buf2[1] = salt_bufs[salt_pos].salt_buf[ 9];
318 salt_buf2[2] = salt_bufs[salt_pos].salt_buf[10];
319 salt_buf2[3] = salt_bufs[salt_pos].salt_buf[11];
320 salt_buf3[0] = salt_bufs[salt_pos].salt_buf[12];
321 salt_buf3[1] = salt_bufs[salt_pos].salt_buf[13];
322 salt_buf3[2] = salt_bufs[salt_pos].salt_buf[14];
323 salt_buf3[3] = salt_bufs[salt_pos].salt_buf[15];
/* Second copy of the salt: shifted to the trailing-salt position below. */
330 salt_buf0_t[0] = salt_bufs[salt_pos].salt_buf[ 0];
331 salt_buf0_t[1] = salt_bufs[salt_pos].salt_buf[ 1];
332 salt_buf0_t[2] = salt_bufs[salt_pos].salt_buf[ 2];
333 salt_buf0_t[3] = salt_bufs[salt_pos].salt_buf[ 3];
334 salt_buf1_t[0] = salt_bufs[salt_pos].salt_buf[ 4];
335 salt_buf1_t[1] = salt_bufs[salt_pos].salt_buf[ 5];
336 salt_buf1_t[2] = salt_bufs[salt_pos].salt_buf[ 6];
337 salt_buf1_t[3] = salt_bufs[salt_pos].salt_buf[ 7];
338 salt_buf2_t[0] = salt_bufs[salt_pos].salt_buf[ 8];
339 salt_buf2_t[1] = salt_bufs[salt_pos].salt_buf[ 9];
340 salt_buf2_t[2] = salt_bufs[salt_pos].salt_buf[10];
341 salt_buf2_t[3] = salt_bufs[salt_pos].salt_buf[11];
342 salt_buf3_t[0] = salt_bufs[salt_pos].salt_buf[12];
343 salt_buf3_t[1] = salt_bufs[salt_pos].salt_buf[13];
344 salt_buf3_t[2] = salt_bufs[salt_pos].salt_buf[14];
345 salt_buf3_t[3] = salt_bufs[salt_pos].salt_buf[15];
347 const u32 salt_len = salt_bufs[salt_pos].salt_len;
/* Byte offset of the trailing salt = salt + password. */
349 const u32 pw_salt_len = pw_len + salt_len;
/* Total message length: salt || password || salt. */
351 const u32 salt_pw_salt_len = salt_len + pw_len + salt_len;
/* Place the second salt copy after salt+password (scalar, loop-invariant). */
353 switch_buffer_by_offset_le_S (salt_buf0_t, salt_buf1_t, salt_buf2_t, salt_buf3_t, pw_salt_len);
/* Merge both salt copies into one template; the password fills the gap later. */
355 salt_buf0[0] |= salt_buf0_t[0];
356 salt_buf0[1] |= salt_buf0_t[1];
357 salt_buf0[2] |= salt_buf0_t[2];
358 salt_buf0[3] |= salt_buf0_t[3];
359 salt_buf1[0] |= salt_buf1_t[0];
360 salt_buf1[1] |= salt_buf1_t[1];
361 salt_buf1[2] |= salt_buf1_t[2];
362 salt_buf1[3] |= salt_buf1_t[3];
363 salt_buf2[0] |= salt_buf2_t[0];
364 salt_buf2[1] |= salt_buf2_t[1];
365 salt_buf2[2] |= salt_buf2_t[2];
366 salt_buf2[3] |= salt_buf2_t[3];
367 salt_buf3[0] |= salt_buf3_t[0];
368 salt_buf3[1] |= salt_buf3_t[1];
369 salt_buf3[2] |= salt_buf3_t[2];
370 salt_buf3[3] |= salt_buf3_t[3];
/* Append the 0x80 SHA-1 padding byte right after the full message. */
372 append_0x80_4x4_S (salt_buf0, salt_buf1, salt_buf2, salt_buf3, salt_pw_salt_len);
/* The single target digest this kernel searches for. */
378 const u32 search[4] =
380 digests_buf[digests_offset].digest_buf[DGST_R0],
381 digests_buf[digests_offset].digest_buf[DGST_R1],
382 digests_buf[digests_offset].digest_buf[DGST_R2],
383 digests_buf[digests_offset].digest_buf[DGST_R3]
/* Pre-rotated target 'e' value for the early-reject test at round 76. */
390 const u32 e_rev = rotl32_S (search[1], 2u);
/* Brute-force inner loop: one candidate batch per VECT_SIZE lanes. */
398 for (u32 il_pos = 0; il_pos < il_cnt; il_pos += VECT_SIZE)
400 const u32x w0r = ix_create_bft (bfs_buf, il_pos);
/* Combine the fixed left part of word 0 with the per-iteration right part. */
402 const u32x w0lr = w0l | w0r;
427 * put the password after the first salt but before the second salt
/* Shift password words right by salt_len bytes, then OR in the salt template. */
430 switch_buffer_by_offset_le (t0, t1, t2, t3, salt_len);
432 t0[0] |= salt_buf0[0];
433 t0[1] |= salt_buf0[1];
434 t0[2] |= salt_buf0[2];
435 t0[3] |= salt_buf0[3];
436 t1[0] |= salt_buf1[0];
437 t1[1] |= salt_buf1[1];
438 t1[2] |= salt_buf1[2];
439 t1[3] |= salt_buf1[3];
440 t2[0] |= salt_buf2[0];
441 t2[1] |= salt_buf2[1];
442 t2[2] |= salt_buf2[2];
443 t2[3] |= salt_buf2[3];
444 t3[0] |= salt_buf3[0];
445 t3[1] |= salt_buf3[1];
446 t3[2] |= salt_buf3[2];
/* Byte-swap little-endian message words into SHA-1's big-endian order. */
452 u32x w0_t = swap32 (t0[0]);
453 u32x w1_t = swap32 (t0[1]);
454 u32x w2_t = swap32 (t0[2]);
455 u32x w3_t = swap32 (t0[3]);
456 u32x w4_t = swap32 (t1[0]);
457 u32x w5_t = swap32 (t1[1]);
458 u32x w6_t = swap32 (t1[2]);
459 u32x w7_t = swap32 (t1[3]);
460 u32x w8_t = swap32 (t2[0]);
461 u32x w9_t = swap32 (t2[1]);
462 u32x wa_t = swap32 (t2[2]);
463 u32x wb_t = swap32 (t2[3]);
464 u32x wc_t = swap32 (t3[0]);
465 u32x wd_t = swap32 (t3[1]);
/* Final schedule word: SHA-1 message length in bits. */
467 u32x wf_t = salt_pw_salt_len * 8;
/* SHA-1 rounds 0..19 (F0o = Ch). */
478 SHA1_STEP (SHA1_F0o, a, b, c, d, e, w0_t);
479 SHA1_STEP (SHA1_F0o, e, a, b, c, d, w1_t);
480 SHA1_STEP (SHA1_F0o, d, e, a, b, c, w2_t);
481 SHA1_STEP (SHA1_F0o, c, d, e, a, b, w3_t);
482 SHA1_STEP (SHA1_F0o, b, c, d, e, a, w4_t);
483 SHA1_STEP (SHA1_F0o, a, b, c, d, e, w5_t);
484 SHA1_STEP (SHA1_F0o, e, a, b, c, d, w6_t);
485 SHA1_STEP (SHA1_F0o, d, e, a, b, c, w7_t);
486 SHA1_STEP (SHA1_F0o, c, d, e, a, b, w8_t);
487 SHA1_STEP (SHA1_F0o, b, c, d, e, a, w9_t);
488 SHA1_STEP (SHA1_F0o, a, b, c, d, e, wa_t);
489 SHA1_STEP (SHA1_F0o, e, a, b, c, d, wb_t);
490 SHA1_STEP (SHA1_F0o, d, e, a, b, c, wc_t);
491 SHA1_STEP (SHA1_F0o, c, d, e, a, b, wd_t);
492 SHA1_STEP (SHA1_F0o, b, c, d, e, a, we_t);
493 SHA1_STEP (SHA1_F0o, a, b, c, d, e, wf_t);
/* Rounds 16..19 with on-the-fly expansion: w[t] = rotl1(w[t-3]^w[t-8]^w[t-14]^w[t-16]). */
494 w0_t = rotl32 ((wd_t ^ w8_t ^ w2_t ^ w0_t), 1u); SHA1_STEP (SHA1_F0o, e, a, b, c, d, w0_t);
495 w1_t = rotl32 ((we_t ^ w9_t ^ w3_t ^ w1_t), 1u); SHA1_STEP (SHA1_F0o, d, e, a, b, c, w1_t);
496 w2_t = rotl32 ((wf_t ^ wa_t ^ w4_t ^ w2_t), 1u); SHA1_STEP (SHA1_F0o, c, d, e, a, b, w2_t);
497 w3_t = rotl32 ((w0_t ^ wb_t ^ w5_t ^ w3_t), 1u); SHA1_STEP (SHA1_F0o, b, c, d, e, a, w3_t);
/* Rounds 20..39 (F1 = Parity). */
502 w4_t = rotl32 ((w1_t ^ wc_t ^ w6_t ^ w4_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w4_t);
503 w5_t = rotl32 ((w2_t ^ wd_t ^ w7_t ^ w5_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, w5_t);
504 w6_t = rotl32 ((w3_t ^ we_t ^ w8_t ^ w6_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w6_t);
505 w7_t = rotl32 ((w4_t ^ wf_t ^ w9_t ^ w7_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w7_t);
506 w8_t = rotl32 ((w5_t ^ w0_t ^ wa_t ^ w8_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w8_t);
507 w9_t = rotl32 ((w6_t ^ w1_t ^ wb_t ^ w9_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w9_t);
508 wa_t = rotl32 ((w7_t ^ w2_t ^ wc_t ^ wa_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, wa_t);
509 wb_t = rotl32 ((w8_t ^ w3_t ^ wd_t ^ wb_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, wb_t);
510 wc_t = rotl32 ((w9_t ^ w4_t ^ we_t ^ wc_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, wc_t);
511 wd_t = rotl32 ((wa_t ^ w5_t ^ wf_t ^ wd_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, wd_t);
512 we_t = rotl32 ((wb_t ^ w6_t ^ w0_t ^ we_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, we_t);
513 wf_t = rotl32 ((wc_t ^ w7_t ^ w1_t ^ wf_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, wf_t);
514 w0_t = rotl32 ((wd_t ^ w8_t ^ w2_t ^ w0_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w0_t);
515 w1_t = rotl32 ((we_t ^ w9_t ^ w3_t ^ w1_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w1_t);
516 w2_t = rotl32 ((wf_t ^ wa_t ^ w4_t ^ w2_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w2_t);
517 w3_t = rotl32 ((w0_t ^ wb_t ^ w5_t ^ w3_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w3_t);
518 w4_t = rotl32 ((w1_t ^ wc_t ^ w6_t ^ w4_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, w4_t);
519 w5_t = rotl32 ((w2_t ^ wd_t ^ w7_t ^ w5_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w5_t);
520 w6_t = rotl32 ((w3_t ^ we_t ^ w8_t ^ w6_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w6_t);
521 w7_t = rotl32 ((w4_t ^ wf_t ^ w9_t ^ w7_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w7_t);
/* Rounds 40..59 (F2o = Maj). */
526 w8_t = rotl32 ((w5_t ^ w0_t ^ wa_t ^ w8_t), 1u); SHA1_STEP (SHA1_F2o, a, b, c, d, e, w8_t);
527 w9_t = rotl32 ((w6_t ^ w1_t ^ wb_t ^ w9_t), 1u); SHA1_STEP (SHA1_F2o, e, a, b, c, d, w9_t);
528 wa_t = rotl32 ((w7_t ^ w2_t ^ wc_t ^ wa_t), 1u); SHA1_STEP (SHA1_F2o, d, e, a, b, c, wa_t);
529 wb_t = rotl32 ((w8_t ^ w3_t ^ wd_t ^ wb_t), 1u); SHA1_STEP (SHA1_F2o, c, d, e, a, b, wb_t);
530 wc_t = rotl32 ((w9_t ^ w4_t ^ we_t ^ wc_t), 1u); SHA1_STEP (SHA1_F2o, b, c, d, e, a, wc_t);
531 wd_t = rotl32 ((wa_t ^ w5_t ^ wf_t ^ wd_t), 1u); SHA1_STEP (SHA1_F2o, a, b, c, d, e, wd_t);
532 we_t = rotl32 ((wb_t ^ w6_t ^ w0_t ^ we_t), 1u); SHA1_STEP (SHA1_F2o, e, a, b, c, d, we_t);
533 wf_t = rotl32 ((wc_t ^ w7_t ^ w1_t ^ wf_t), 1u); SHA1_STEP (SHA1_F2o, d, e, a, b, c, wf_t);
534 w0_t = rotl32 ((wd_t ^ w8_t ^ w2_t ^ w0_t), 1u); SHA1_STEP (SHA1_F2o, c, d, e, a, b, w0_t);
535 w1_t = rotl32 ((we_t ^ w9_t ^ w3_t ^ w1_t), 1u); SHA1_STEP (SHA1_F2o, b, c, d, e, a, w1_t);
536 w2_t = rotl32 ((wf_t ^ wa_t ^ w4_t ^ w2_t), 1u); SHA1_STEP (SHA1_F2o, a, b, c, d, e, w2_t);
537 w3_t = rotl32 ((w0_t ^ wb_t ^ w5_t ^ w3_t), 1u); SHA1_STEP (SHA1_F2o, e, a, b, c, d, w3_t);
538 w4_t = rotl32 ((w1_t ^ wc_t ^ w6_t ^ w4_t), 1u); SHA1_STEP (SHA1_F2o, d, e, a, b, c, w4_t);
539 w5_t = rotl32 ((w2_t ^ wd_t ^ w7_t ^ w5_t), 1u); SHA1_STEP (SHA1_F2o, c, d, e, a, b, w5_t);
540 w6_t = rotl32 ((w3_t ^ we_t ^ w8_t ^ w6_t), 1u); SHA1_STEP (SHA1_F2o, b, c, d, e, a, w6_t);
541 w7_t = rotl32 ((w4_t ^ wf_t ^ w9_t ^ w7_t), 1u); SHA1_STEP (SHA1_F2o, a, b, c, d, e, w7_t);
542 w8_t = rotl32 ((w5_t ^ w0_t ^ wa_t ^ w8_t), 1u); SHA1_STEP (SHA1_F2o, e, a, b, c, d, w8_t);
543 w9_t = rotl32 ((w6_t ^ w1_t ^ wb_t ^ w9_t), 1u); SHA1_STEP (SHA1_F2o, d, e, a, b, c, w9_t);
544 wa_t = rotl32 ((w7_t ^ w2_t ^ wc_t ^ wa_t), 1u); SHA1_STEP (SHA1_F2o, c, d, e, a, b, wa_t);
545 wb_t = rotl32 ((w8_t ^ w3_t ^ wd_t ^ wb_t), 1u); SHA1_STEP (SHA1_F2o, b, c, d, e, a, wb_t);
/* Rounds 60..75 (F1 = Parity). */
550 wc_t = rotl32 ((w9_t ^ w4_t ^ we_t ^ wc_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, wc_t);
551 wd_t = rotl32 ((wa_t ^ w5_t ^ wf_t ^ wd_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, wd_t);
552 we_t = rotl32 ((wb_t ^ w6_t ^ w0_t ^ we_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, we_t);
553 wf_t = rotl32 ((wc_t ^ w7_t ^ w1_t ^ wf_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, wf_t);
554 w0_t = rotl32 ((wd_t ^ w8_t ^ w2_t ^ w0_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w0_t);
555 w1_t = rotl32 ((we_t ^ w9_t ^ w3_t ^ w1_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w1_t);
556 w2_t = rotl32 ((wf_t ^ wa_t ^ w4_t ^ w2_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, w2_t);
557 w3_t = rotl32 ((w0_t ^ wb_t ^ w5_t ^ w3_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w3_t);
558 w4_t = rotl32 ((w1_t ^ wc_t ^ w6_t ^ w4_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w4_t);
559 w5_t = rotl32 ((w2_t ^ wd_t ^ w7_t ^ w5_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, w5_t);
560 w6_t = rotl32 ((w3_t ^ we_t ^ w8_t ^ w6_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, w6_t);
561 w7_t = rotl32 ((w4_t ^ wf_t ^ w9_t ^ w7_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, w7_t);
562 w8_t = rotl32 ((w5_t ^ w0_t ^ wa_t ^ w8_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, w8_t);
563 w9_t = rotl32 ((w6_t ^ w1_t ^ wb_t ^ w9_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, w9_t);
564 wa_t = rotl32 ((w7_t ^ w2_t ^ wc_t ^ wa_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, wa_t);
565 wb_t = rotl32 ((w8_t ^ w3_t ^ wd_t ^ wb_t), 1u); SHA1_STEP (SHA1_F1, a, b, c, d, e, wb_t);
/* Early reject: if no SIMD lane's 'e' matches the pre-rotated target value,
   skip the last four rounds and the final comparison for this batch. */
567 if (MATCHES_NONE_VS (e, e_rev)) continue;
/* Remaining rounds 76..79, only for potentially-matching batches. */
569 wc_t = rotl32 ((w9_t ^ w4_t ^ we_t ^ wc_t), 1u); SHA1_STEP (SHA1_F1, e, a, b, c, d, wc_t);
570 wd_t = rotl32 ((wa_t ^ w5_t ^ wf_t ^ wd_t), 1u); SHA1_STEP (SHA1_F1, d, e, a, b, c, wd_t);
571 we_t = rotl32 ((wb_t ^ w6_t ^ w0_t ^ we_t), 1u); SHA1_STEP (SHA1_F1, c, d, e, a, b, we_t);
572 wf_t = rotl32 ((wc_t ^ w7_t ^ w1_t ^ wf_t), 1u); SHA1_STEP (SHA1_F1, b, c, d, e, a, wf_t);
/* Single-hash comparison against search[]. */
574 COMPARE_S_SIMD (d, e, c, b);
/**
 * Entry point: multi-hash kernel for passwords up to 4 u32 words (16 bytes).
 * Loads the base password words for this work-item, then delegates to the
 * shared inner body m04900m.
 * NOTE(review): the zeroing of w1..w3 and some loads are on lines elided
 * from this excerpt.
 */
578 __kernel void m04900_m04 (__global pw_t *pws, __global kernel_rule_t *rules_buf, __global comb_t *combs_buf, __global bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global u32 *bitmaps_buf_s1_a, __global u32 *bitmaps_buf_s1_b, __global u32 *bitmaps_buf_s1_c, __global u32 *bitmaps_buf_s1_d, __global u32 *bitmaps_buf_s2_a, __global u32 *bitmaps_buf_s2_b, __global u32 *bitmaps_buf_s2_c, __global u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global digest_t *digests_buf, __global u32 *hashes_shown, __global salt_t *salt_bufs, __global void *esalt_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset, const u32 combs_mode, const u32 gid_max)
584 const u32 gid = get_global_id (0);
/* Guard against excess work-items beyond the candidate count. */
586 if (gid >= gid_max) return;
592 const u32 lid = get_local_id (0);
/* Load this work-item's base password words from global memory. */
596 w0[0] = pws[gid].i[ 0];
597 w0[1] = pws[gid].i[ 1];
598 w0[2] = pws[gid].i[ 2];
599 w0[3] = pws[gid].i[ 3];
619 w3[2] = pws[gid].i[14];
622 const u32 pw_len = pws[gid].pw_len;
/* Delegate to the shared multi-hash inner body. */
628 m04900m (w0, w1, w2, w3, pw_len, pws, rules_buf, combs_buf, bfs_buf, tmps, hooks, bitmaps_buf_s1_a, bitmaps_buf_s1_b, bitmaps_buf_s1_c, bitmaps_buf_s1_d, bitmaps_buf_s2_a, bitmaps_buf_s2_b, bitmaps_buf_s2_c, bitmaps_buf_s2_d, plains_buf, digests_buf, hashes_shown, salt_bufs, esalt_bufs, d_return_buf, d_scryptV_buf, bitmap_mask, bitmap_shift1, bitmap_shift2, salt_pos, loop_pos, loop_cnt, il_cnt, digests_cnt, digests_offset);
/**
 * Entry point: multi-hash kernel for passwords up to 8 u32 words (32 bytes).
 * Same shape as m04900_m04 but also loads w1.
 * NOTE(review): zeroing of w2/w3 is on lines elided from this excerpt.
 */
631 __kernel void m04900_m08 (__global pw_t *pws, __global kernel_rule_t *rules_buf, __global comb_t *combs_buf, __global bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global u32 *bitmaps_buf_s1_a, __global u32 *bitmaps_buf_s1_b, __global u32 *bitmaps_buf_s1_c, __global u32 *bitmaps_buf_s1_d, __global u32 *bitmaps_buf_s2_a, __global u32 *bitmaps_buf_s2_b, __global u32 *bitmaps_buf_s2_c, __global u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global digest_t *digests_buf, __global u32 *hashes_shown, __global salt_t *salt_bufs, __global void *esalt_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset, const u32 combs_mode, const u32 gid_max)
637 const u32 gid = get_global_id (0);
/* Guard against excess work-items beyond the candidate count. */
639 if (gid >= gid_max) return;
645 const u32 lid = get_local_id (0);
/* Load this work-item's base password words from global memory. */
649 w0[0] = pws[gid].i[ 0];
650 w0[1] = pws[gid].i[ 1];
651 w0[2] = pws[gid].i[ 2];
652 w0[3] = pws[gid].i[ 3];
656 w1[0] = pws[gid].i[ 4];
657 w1[1] = pws[gid].i[ 5];
658 w1[2] = pws[gid].i[ 6];
659 w1[3] = pws[gid].i[ 7];
672 w3[2] = pws[gid].i[14];
675 const u32 pw_len = pws[gid].pw_len;
/* Delegate to the shared multi-hash inner body. */
681 m04900m (w0, w1, w2, w3, pw_len, pws, rules_buf, combs_buf, bfs_buf, tmps, hooks, bitmaps_buf_s1_a, bitmaps_buf_s1_b, bitmaps_buf_s1_c, bitmaps_buf_s1_d, bitmaps_buf_s2_a, bitmaps_buf_s2_b, bitmaps_buf_s2_c, bitmaps_buf_s2_d, plains_buf, digests_buf, hashes_shown, salt_bufs, esalt_bufs, d_return_buf, d_scryptV_buf, bitmap_mask, bitmap_shift1, bitmap_shift2, salt_pos, loop_pos, loop_cnt, il_cnt, digests_cnt, digests_offset);
/**
 * Entry point: multi-hash kernel for passwords up to 16 u32 words (64 bytes).
 * Loads all four word groups w0..w3, then delegates to m04900m.
 */
684 __kernel void m04900_m16 (__global pw_t *pws, __global kernel_rule_t *rules_buf, __global comb_t *combs_buf, __global bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global u32 *bitmaps_buf_s1_a, __global u32 *bitmaps_buf_s1_b, __global u32 *bitmaps_buf_s1_c, __global u32 *bitmaps_buf_s1_d, __global u32 *bitmaps_buf_s2_a, __global u32 *bitmaps_buf_s2_b, __global u32 *bitmaps_buf_s2_c, __global u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global digest_t *digests_buf, __global u32 *hashes_shown, __global salt_t *salt_bufs, __global void *esalt_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset, const u32 combs_mode, const u32 gid_max)
690 const u32 gid = get_global_id (0);
/* Guard against excess work-items beyond the candidate count. */
692 if (gid >= gid_max) return;
698 const u32 lid = get_local_id (0);
/* Load this work-item's full 64-byte password buffer from global memory. */
702 w0[0] = pws[gid].i[ 0];
703 w0[1] = pws[gid].i[ 1];
704 w0[2] = pws[gid].i[ 2];
705 w0[3] = pws[gid].i[ 3];
709 w1[0] = pws[gid].i[ 4];
710 w1[1] = pws[gid].i[ 5];
711 w1[2] = pws[gid].i[ 6];
712 w1[3] = pws[gid].i[ 7];
716 w2[0] = pws[gid].i[ 8];
717 w2[1] = pws[gid].i[ 9];
718 w2[2] = pws[gid].i[10];
719 w2[3] = pws[gid].i[11];
723 w3[0] = pws[gid].i[12];
724 w3[1] = pws[gid].i[13];
725 w3[2] = pws[gid].i[14];
726 w3[3] = pws[gid].i[15];
728 const u32 pw_len = pws[gid].pw_len;
/* Delegate to the shared multi-hash inner body. */
734 m04900m (w0, w1, w2, w3, pw_len, pws, rules_buf, combs_buf, bfs_buf, tmps, hooks, bitmaps_buf_s1_a, bitmaps_buf_s1_b, bitmaps_buf_s1_c, bitmaps_buf_s1_d, bitmaps_buf_s2_a, bitmaps_buf_s2_b, bitmaps_buf_s2_c, bitmaps_buf_s2_d, plains_buf, digests_buf, hashes_shown, salt_bufs, esalt_bufs, d_return_buf, d_scryptV_buf, bitmap_mask, bitmap_shift1, bitmap_shift2, salt_pos, loop_pos, loop_cnt, il_cnt, digests_cnt, digests_offset);
/**
 * Entry point: single-hash kernel for passwords up to 4 u32 words (16 bytes).
 * Mirrors m04900_m04 but delegates to the single-hash inner body m04900s.
 * NOTE(review): zeroing of w1..w3 and some loads are on elided lines.
 */
737 __kernel void m04900_s04 (__global pw_t *pws, __global kernel_rule_t *rules_buf, __global comb_t *combs_buf, __global bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global u32 *bitmaps_buf_s1_a, __global u32 *bitmaps_buf_s1_b, __global u32 *bitmaps_buf_s1_c, __global u32 *bitmaps_buf_s1_d, __global u32 *bitmaps_buf_s2_a, __global u32 *bitmaps_buf_s2_b, __global u32 *bitmaps_buf_s2_c, __global u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global digest_t *digests_buf, __global u32 *hashes_shown, __global salt_t *salt_bufs, __global void *esalt_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset, const u32 combs_mode, const u32 gid_max)
743 const u32 gid = get_global_id (0);
/* Guard against excess work-items beyond the candidate count. */
745 if (gid >= gid_max) return;
751 const u32 lid = get_local_id (0);
/* Load this work-item's base password words from global memory. */
755 w0[0] = pws[gid].i[ 0];
756 w0[1] = pws[gid].i[ 1];
757 w0[2] = pws[gid].i[ 2];
758 w0[3] = pws[gid].i[ 3];
778 w3[2] = pws[gid].i[14];
781 const u32 pw_len = pws[gid].pw_len;
/* Delegate to the shared single-hash inner body. */
787 m04900s (w0, w1, w2, w3, pw_len, pws, rules_buf, combs_buf, bfs_buf, tmps, hooks, bitmaps_buf_s1_a, bitmaps_buf_s1_b, bitmaps_buf_s1_c, bitmaps_buf_s1_d, bitmaps_buf_s2_a, bitmaps_buf_s2_b, bitmaps_buf_s2_c, bitmaps_buf_s2_d, plains_buf, digests_buf, hashes_shown, salt_bufs, esalt_bufs, d_return_buf, d_scryptV_buf, bitmap_mask, bitmap_shift1, bitmap_shift2, salt_pos, loop_pos, loop_cnt, il_cnt, digests_cnt, digests_offset);
/**
 * Entry point: single-hash kernel for passwords up to 8 u32 words (32 bytes).
 * Mirrors m04900_m08 but delegates to the single-hash inner body m04900s.
 * NOTE(review): zeroing of w2/w3 is on lines elided from this excerpt.
 */
790 __kernel void m04900_s08 (__global pw_t *pws, __global kernel_rule_t *rules_buf, __global comb_t *combs_buf, __global bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global u32 *bitmaps_buf_s1_a, __global u32 *bitmaps_buf_s1_b, __global u32 *bitmaps_buf_s1_c, __global u32 *bitmaps_buf_s1_d, __global u32 *bitmaps_buf_s2_a, __global u32 *bitmaps_buf_s2_b, __global u32 *bitmaps_buf_s2_c, __global u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global digest_t *digests_buf, __global u32 *hashes_shown, __global salt_t *salt_bufs, __global void *esalt_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset, const u32 combs_mode, const u32 gid_max)
796 const u32 gid = get_global_id (0);
/* Guard against excess work-items beyond the candidate count. */
798 if (gid >= gid_max) return;
804 const u32 lid = get_local_id (0);
/* Load this work-item's base password words from global memory. */
808 w0[0] = pws[gid].i[ 0];
809 w0[1] = pws[gid].i[ 1];
810 w0[2] = pws[gid].i[ 2];
811 w0[3] = pws[gid].i[ 3];
815 w1[0] = pws[gid].i[ 4];
816 w1[1] = pws[gid].i[ 5];
817 w1[2] = pws[gid].i[ 6];
818 w1[3] = pws[gid].i[ 7];
831 w3[2] = pws[gid].i[14];
834 const u32 pw_len = pws[gid].pw_len;
/* Delegate to the shared single-hash inner body. */
840 m04900s (w0, w1, w2, w3, pw_len, pws, rules_buf, combs_buf, bfs_buf, tmps, hooks, bitmaps_buf_s1_a, bitmaps_buf_s1_b, bitmaps_buf_s1_c, bitmaps_buf_s1_d, bitmaps_buf_s2_a, bitmaps_buf_s2_b, bitmaps_buf_s2_c, bitmaps_buf_s2_d, plains_buf, digests_buf, hashes_shown, salt_bufs, esalt_bufs, d_return_buf, d_scryptV_buf, bitmap_mask, bitmap_shift1, bitmap_shift2, salt_pos, loop_pos, loop_cnt, il_cnt, digests_cnt, digests_offset);
/**
 * Entry point: single-hash kernel for passwords up to 16 u32 words (64 bytes).
 * Loads all four word groups w0..w3, then delegates to m04900s.
 */
843 __kernel void m04900_s16 (__global pw_t *pws, __global kernel_rule_t *rules_buf, __global comb_t *combs_buf, __global bf_t *bfs_buf, __global void *tmps, __global void *hooks, __global u32 *bitmaps_buf_s1_a, __global u32 *bitmaps_buf_s1_b, __global u32 *bitmaps_buf_s1_c, __global u32 *bitmaps_buf_s1_d, __global u32 *bitmaps_buf_s2_a, __global u32 *bitmaps_buf_s2_b, __global u32 *bitmaps_buf_s2_c, __global u32 *bitmaps_buf_s2_d, __global plain_t *plains_buf, __global digest_t *digests_buf, __global u32 *hashes_shown, __global salt_t *salt_bufs, __global void *esalt_bufs, __global u32 *d_return_buf, __global u32 *d_scryptV_buf, const u32 bitmap_mask, const u32 bitmap_shift1, const u32 bitmap_shift2, const u32 salt_pos, const u32 loop_pos, const u32 loop_cnt, const u32 il_cnt, const u32 digests_cnt, const u32 digests_offset, const u32 combs_mode, const u32 gid_max)
849 const u32 gid = get_global_id (0);
/* Guard against excess work-items beyond the candidate count. */
851 if (gid >= gid_max) return;
857 const u32 lid = get_local_id (0);
/* Load this work-item's full 64-byte password buffer from global memory. */
861 w0[0] = pws[gid].i[ 0];
862 w0[1] = pws[gid].i[ 1];
863 w0[2] = pws[gid].i[ 2];
864 w0[3] = pws[gid].i[ 3];
868 w1[0] = pws[gid].i[ 4];
869 w1[1] = pws[gid].i[ 5];
870 w1[2] = pws[gid].i[ 6];
871 w1[3] = pws[gid].i[ 7];
875 w2[0] = pws[gid].i[ 8];
876 w2[1] = pws[gid].i[ 9];
877 w2[2] = pws[gid].i[10];
878 w2[3] = pws[gid].i[11];
882 w3[0] = pws[gid].i[12];
883 w3[1] = pws[gid].i[13];
884 w3[2] = pws[gid].i[14];
885 w3[3] = pws[gid].i[15];
887 const u32 pw_len = pws[gid].pw_len;
/* Delegate to the shared single-hash inner body. */
893 m04900s (w0, w1, w2, w3, pw_len, pws, rules_buf, combs_buf, bfs_buf, tmps, hooks, bitmaps_buf_s1_a, bitmaps_buf_s1_b, bitmaps_buf_s1_c, bitmaps_buf_s1_d, bitmaps_buf_s2_a, bitmaps_buf_s2_b, bitmaps_buf_s2_c, bitmaps_buf_s2_d, plains_buf, digests_buf, hashes_shown, salt_bufs, esalt_bufs, d_return_buf, d_scryptV_buf, bitmap_mask, bitmap_shift1, bitmap_shift2, salt_pos, loop_pos, loop_cnt, il_cnt, digests_cnt, digests_offset);