Lines Matching refs: p_scr
50 #define p_scr p6 // default register for same-cycle branches
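The alias just gives the scratch predicate a mnemonic name: p6 is an ordinary one-bit IA-64 predicate register. Each `cmp` below writes a pair of predicates (the result and its complement); naming the second target p0, which is hardwired to true and ignores writes, discards the complement. A `(p_scr)` prefix then executes the following instruction only when the compare succeeded.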
86 cmp.eq p_scr, p0 = cnt, r0
95 (p_scr) br.ret.dpnt.many rp // return immediately if count = 0
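A C sketch of this first pair (hedged: the memset-style signature and the names dest/value are assumptions from context, not shown in this listing):

    #include <stddef.h>

    /* Sketch of lines 86/95: bail out before any setup work.
     * ".dpnt" hints the branch predicted-not-taken: zero-length
     * calls are expected to be rare. */
    void *memset_sketch(void *dest, int value, size_t cnt)
    {
        (void)value;
        if (cnt == 0)       /* cmp.eq  p_scr, p0 = cnt, r0 */
            return dest;    /* (p_scr) br.ret.dpnt.many rp */
        /* ... rest of the routine ... */
        return dest;
    }

The later sketches are fragments meant to live inside the same hypothetical shell.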
101 cmp.gt p_scr, p0 = 16, cnt // is it a minimalistic task?
102 (p_scr) br.cond.dptk.many .move_bytes_unaligned // go move just a few (M_B_U)
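The next pair is the small-size cut-off; the label name is taken straight from the listing:

    /* Sketch of lines 101/102: under 16 bytes, skip the alignment
     * and cache-line machinery entirely.  ".dptk" (predicted taken)
     * bets that tiny memsets are the common case. */
    if (cnt < 16)                    /* cmp.gt p_scr, p0 = 16, cnt */
        goto move_bytes_unaligned;   /* (p_scr) br.cond.dptk.many  */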
135 cmp.gt p_scr, p0 = tmp, cnt // is it a minimalistic task?
138 (p_scr) br.cond.dpnt.many .fraction_of_line // go move just a few
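One level up, the same shape separates "a few words" from "whole cache lines". What tmp holds at line 135 is computed outside this listing; the sketch just treats it as that threshold:

    /* Sketch of lines 135/138: not enough bytes to justify the
     * cache-line loop, so take the simpler word loop instead. */
    if (cnt < tmp)               /* cmp.gt p_scr, p0 = tmp, cnt */
        goto fraction_of_line;   /* (p_scr) br.cond.dpnt.many   */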
154 cmp.gt p_scr, p0 = PREF_AHEAD, linecnt // check against actual value
157 (p_scr) add loopcnt = -1, linecnt
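Before the store-ahead loop, p_scr clamps the prefetch distance. Assuming, from the usual structure of this routine (the instruction is not in this listing), that loopcnt was preloaded with PREF_AHEAD - 1:

    /* Sketch of lines 154/157: if the region spans fewer than
     * PREF_AHEAD cache lines, shrink the store-ahead loop so it
     * never runs past the end. */
    loopcnt = PREF_AHEAD - 1;    /* assumed preload, not shown        */
    if (linecnt < PREF_AHEAD)    /* cmp.gt p_scr, p0 = PREF_AHEAD,... */
        loopcnt = linecnt - 1;   /* (p_scr) add loopcnt = -1, linecnt */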
187 cmp.lt p_scr, p0 = ptr9, ptr1 // do we need more prefetching?
191 (p_scr) stf.spill [ptr9] = f0, 128
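This pair is the store-ahead trick itself. stf.spill of f0 writes the 16-byte spill image of the always-zero register f0, and the ", 128" post-increment advances ptr9 one cache line, so each pass deposits zeros into a line the bulk stores have not reached yet and claims it in cache. A sketch, assuming (per the surrounding source, not shown here) that ptr1 at this point holds the first address past the region; store16_zero is a hypothetical stand-in for the 16-byte spill store:

    /* Sketch of lines 187/191, inside the zeroing loop. */
    if (ptr9 < ptr1) {       /* cmp.lt: lines still left ahead?     */
        store16_zero(ptr9);  /* stf.spill [ptr9] = f0 (16 zeros)    */
        ptr9 += 128;         /* ", 128" post-increment: next line   */
    }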
195 cmp.gt p_scr, p0 = 8, cnt // just a few bytes left ?
196 (p_scr) br.cond.dpnt.many .move_bytes_from_alignment
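After the line loop, a small-tail guard in the familiar shape:

    /* Sketch of lines 195/196: fewer than 8 bytes remain, so no
     * more word-sized stores are possible. */
    if (cnt < 8)                         /* cmp.gt p_scr, p0 = 8, cnt */
        goto move_bytes_from_alignment;  /* (p_scr) br.cond.dpnt.many */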
205 cmp.eq p_scr, p0 = loopcnt, r0
207 (p_scr) br.cond.dpnt.many .store_words
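Loop-exit test in the same style:

    /* Sketch of lines 205/207: prefetch counter exhausted; fall
     * through to plain word stores with no more store-ahead. */
    if (loopcnt == 0)      /* cmp.eq p_scr, p0 = loopcnt, r0 */
        goto store_words;  /* (p_scr) br.cond.dpnt.many      */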
225 cmp.gt p_scr, p0 = 8, cnt // just a few bytes left ?
226 (p_scr) br.cond.dpnt.many .move_bytes_from_alignment // Branch
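Lines 225/226 are the same cnt < 8 guard as lines 195/196, re-tested on a different exit path; the sketch after line 196 applies unchanged.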
246 cmp.eq p_scr, p0 = cnt, r0
248 (p_scr) br.cond.dpnt.few .restore_and_exit
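The final exit test ("restore" presumably refers to restoring saved state such as ar.lc before returning, which is outside this listing):

    /* Sketch of lines 246/248: everything stored, nothing left. */
    if (cnt == 0)               /* cmp.eq p_scr, p0 = cnt, r0 */
        goto restore_and_exit;  /* (p_scr) br.cond.dpnt.few   */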
293 tbit.nz p_scr, p0 = cnt, 1 // will there be a st2 at the end ?
308 (p_scr) st2 [ptr1] = r0 // fill 2 (aligned) bytes
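The tail decomposes the leftover count by its bits instead of looping: bit 1 of cnt says whether a 2-byte store is owed, and the test runs well before the store so the predicate is ready when needed. Sketch (r0 is the hardwired zero register, hence the stored value 0; the other remainder bits are presumably handled by sibling tbit/st pairs using other predicates, which this search would not match):

    #include <stdint.h>

    /* Sketch of lines 293/308: one predicated 2-byte store covers
     * the "cnt & 2" part of the remainder.  ptr1 is 2-byte aligned
     * here, per the "(aligned)" note in the source comment. */
    int p_scr = (cnt & 2) != 0;   /* tbit.nz p_scr, p0 = cnt, 1 */
    if (p_scr)
        *(uint16_t *)ptr1 = 0;    /* (p_scr) st2 [ptr1] = r0    */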