bb                109 arch/alpha/boot/tools/objstrip.c 	unsigned long bb[64], sum = 0;
bb                124 arch/alpha/boot/tools/objstrip.c 	memset(bb, 0, sizeof(bb));
bb                125 arch/alpha/boot/tools/objstrip.c 	strcpy((char *) bb, "Linux SRM bootblock");
bb                126 arch/alpha/boot/tools/objstrip.c 	bb[60] = size / BLOCK_SIZE;	/* count */
bb                127 arch/alpha/boot/tools/objstrip.c 	bb[61] = 1;			/* starting sector # */
bb                128 arch/alpha/boot/tools/objstrip.c 	bb[62] = 0;			/* flags---must be 0 */
bb                130 arch/alpha/boot/tools/objstrip.c 	    sum += bb[i];
bb                132 arch/alpha/boot/tools/objstrip.c 	bb[63] = sum;
bb                133 arch/alpha/boot/tools/objstrip.c 	if (write(ofd, bb, sizeof(bb)) != sizeof(bb)) {
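The objstrip.c hits above build the Alpha SRM bootblock: quadword 60 carries the sector count, 61 the starting sector, 62 the flags, and 63 a checksum over the preceding 63 quadwords. A minimal sketch of that checksum under the same 64-quadword layout (the helper name is hypothetical, not part of objstrip.c):

	static unsigned long srm_bootblock_sum(const unsigned long bb[64])
	{
		unsigned long sum = 0;
		int i;

		/* SRM expects bb[63] to equal the sum of bb[0..62]. */
		for (i = 0; i < 63; i++)
			sum += bb[i];
		return sum;
	}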
bb                 35 arch/mips/lib/multi3.c 	TWunion res, aa, bb;
bb                 38 arch/mips/lib/multi3.c 	bb.ti = b;
bb                 45 arch/mips/lib/multi3.c 	res.s.low = dmulu(aa.s.low, bb.s.low);
bb                 46 arch/mips/lib/multi3.c 	res.s.high = dmuhu(aa.s.low, bb.s.low);
bb                 47 arch/mips/lib/multi3.c 	res.s.high += dmulu(aa.s.high, bb.s.low);
bb                 48 arch/mips/lib/multi3.c 	res.s.high += dmulu(aa.s.low, bb.s.high);
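The multi3.c hits implement a 128-bit multiply from 64-bit halves: the low word is the low half of a.low * b.low (dmulu), and the high word is the high half of that product (dmuhu) plus the two cross terms. A portable sketch of the same decomposition, assuming a compiler that provides unsigned __int128 (the type and helper names are illustrative, not the kernel's TWunion):

	struct u128 { unsigned long long low, high; };

	static unsigned long long mul_hi64(unsigned long long a, unsigned long long b)
	{
		/* Stand-in for dmuhu: the high 64 bits of the full product. */
		return (unsigned long long)(((unsigned __int128)a * b) >> 64);
	}

	static struct u128 mul_u128(struct u128 a, struct u128 b)
	{
		struct u128 res;

		res.low   = a.low * b.low;		/* dmulu: low 64 bits */
		res.high  = mul_hi64(a.low, b.low);	/* dmuhu: carry into high word */
		res.high += a.high * b.low;		/* cross terms: only their low */
		res.high += a.low * b.high;		/* halves reach bits 64..127 */
		return res;
	}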
bb                154 arch/x86/kernel/cpu/centaur.c 	u32  aa, bb, cc, dd;
bb                238 arch/x86/kernel/cpu/centaur.c 			cpuid(0x80000005, &aa, &bb, &cc, &dd);
bb                 61 arch/x86/kernel/kprobes/core.c #define W(row, b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, ba, bb, bc, bd, be, bf)\
bb                 64 arch/x86/kernel/kprobes/core.c 	  (b8##UL << 0x8)|(b9##UL << 0x9)|(ba##UL << 0xa)|(bb##UL << 0xb) |   \
bb                 46 arch/x86/kernel/uprobes.c #define W(row, b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, ba, bb, bc, bd, be, bf)\
bb                 49 arch/x86/kernel/uprobes.c 	  (b8##UL << 0x8)|(b9##UL << 0x9)|(ba##UL << 0xa)|(bb##UL << 0xb) |   \
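In both kprobes and uprobes the W() macro packs sixteen per-opcode flags into half of a 32-bit row of an opcode bitmap; the bb argument is simply the flag for the opcode ending in nibble 0xb. A small sketch of querying such a table, assuming the bitmap is stored as eight 32-bit words covering 256 opcodes (the function name is illustrative; the kernel itself goes through test_bit()):

	static int opcode_flag_set(const unsigned int table[8], unsigned char op)
	{
		/* Word op >> 5 holds opcodes (op & ~31) .. (op | 31). */
		return (table[op >> 5] >> (op & 31)) & 1;
	}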
bb                 53 block/badblocks.c int badblocks_check(struct badblocks *bb, sector_t s, int sectors,
bb                 58 block/badblocks.c 	u64 *p = bb->page;
bb                 63 block/badblocks.c 	if (bb->shift > 0) {
bb                 65 block/badblocks.c 		s >>= bb->shift;
bb                 66 block/badblocks.c 		target += (1<<bb->shift) - 1;
bb                 67 block/badblocks.c 		target >>= bb->shift;
bb                 73 block/badblocks.c 	seq = read_seqbegin(&bb->lock);
bb                 76 block/badblocks.c 	hi = bb->count;
bb                121 block/badblocks.c 	if (read_seqretry(&bb->lock, seq))
bb                128 block/badblocks.c static void badblocks_update_acked(struct badblocks *bb)
bb                130 block/badblocks.c 	u64 *p = bb->page;
bb                134 block/badblocks.c 	if (!bb->unacked_exist)
bb                137 block/badblocks.c 	for (i = 0; i < bb->count ; i++) {
bb                145 block/badblocks.c 		bb->unacked_exist = 0;
bb                163 block/badblocks.c int badblocks_set(struct badblocks *bb, sector_t s, int sectors,
bb                171 block/badblocks.c 	if (bb->shift < 0)
bb                175 block/badblocks.c 	if (bb->shift) {
bb                179 block/badblocks.c 		s >>= bb->shift;
bb                180 block/badblocks.c 		next += (1<<bb->shift) - 1;
bb                181 block/badblocks.c 		next >>= bb->shift;
bb                185 block/badblocks.c 	write_seqlock_irqsave(&bb->lock, flags);
bb                187 block/badblocks.c 	p = bb->page;
bb                189 block/badblocks.c 	hi = bb->count;
bb                235 block/badblocks.c 	if (sectors && hi < bb->count) {
bb                265 block/badblocks.c 	if (sectors == 0 && hi < bb->count) {
bb                279 block/badblocks.c 				(bb->count - hi - 1) * 8);
bb                280 block/badblocks.c 			bb->count--;
bb                287 block/badblocks.c 		if (bb->count >= MAX_BADBLOCKS) {
bb                295 block/badblocks.c 				(bb->count - hi) * 8);
bb                296 block/badblocks.c 			bb->count++;
bb                306 block/badblocks.c 	bb->changed = 1;
bb                308 block/badblocks.c 		bb->unacked_exist = 1;
bb                310 block/badblocks.c 		badblocks_update_acked(bb);
bb                311 block/badblocks.c 	write_sequnlock_irqrestore(&bb->lock, flags);
bb                331 block/badblocks.c int badblocks_clear(struct badblocks *bb, sector_t s, int sectors)
bb                338 block/badblocks.c 	if (bb->shift > 0) {
bb                345 block/badblocks.c 		s += (1<<bb->shift) - 1;
bb                346 block/badblocks.c 		s >>= bb->shift;
bb                347 block/badblocks.c 		target >>= bb->shift;
bb                351 block/badblocks.c 	write_seqlock_irq(&bb->lock);
bb                353 block/badblocks.c 	p = bb->page;
bb                355 block/badblocks.c 	hi = bb->count;
bb                380 block/badblocks.c 				if (bb->count >= MAX_BADBLOCKS) {
bb                384 block/badblocks.c 				memmove(p+lo+1, p+lo, (bb->count - lo) * 8);
bb                385 block/badblocks.c 				bb->count++;
bb                413 block/badblocks.c 			memmove(p+lo+1, p+hi, (bb->count - hi) * 8);
bb                414 block/badblocks.c 			bb->count -= (hi - lo - 1);
bb                418 block/badblocks.c 	badblocks_update_acked(bb);
bb                419 block/badblocks.c 	bb->changed = 1;
bb                421 block/badblocks.c 	write_sequnlock_irq(&bb->lock);
bb                433 block/badblocks.c void ack_all_badblocks(struct badblocks *bb)
bb                435 block/badblocks.c 	if (bb->page == NULL || bb->changed)
bb                438 block/badblocks.c 	write_seqlock_irq(&bb->lock);
bb                440 block/badblocks.c 	if (bb->changed == 0 && bb->unacked_exist) {
bb                441 block/badblocks.c 		u64 *p = bb->page;
bb                444 block/badblocks.c 		for (i = 0; i < bb->count ; i++) {
bb                452 block/badblocks.c 		bb->unacked_exist = 0;
bb                454 block/badblocks.c 	write_sequnlock_irq(&bb->lock);
bb                467 block/badblocks.c ssize_t badblocks_show(struct badblocks *bb, char *page, int unack)
bb                471 block/badblocks.c 	u64 *p = bb->page;
bb                474 block/badblocks.c 	if (bb->shift < 0)
bb                478 block/badblocks.c 	seq = read_seqbegin(&bb->lock);
bb                483 block/badblocks.c 	while (len < PAGE_SIZE && i < bb->count) {
bb                494 block/badblocks.c 				(unsigned long long)s << bb->shift,
bb                495 block/badblocks.c 				length << bb->shift);
bb                498 block/badblocks.c 		bb->unacked_exist = 0;
bb                500 block/badblocks.c 	if (read_seqretry(&bb->lock, seq))
bb                517 block/badblocks.c ssize_t badblocks_store(struct badblocks *bb, const char *page, size_t len,
bb                537 block/badblocks.c 	if (badblocks_set(bb, sector, length, !unack))
bb                544 block/badblocks.c static int __badblocks_init(struct device *dev, struct badblocks *bb,
bb                547 block/badblocks.c 	bb->dev = dev;
bb                548 block/badblocks.c 	bb->count = 0;
bb                550 block/badblocks.c 		bb->shift = 0;
bb                552 block/badblocks.c 		bb->shift = -1;
bb                554 block/badblocks.c 		bb->page = devm_kzalloc(dev, PAGE_SIZE, GFP_KERNEL);
bb                556 block/badblocks.c 		bb->page = kzalloc(PAGE_SIZE, GFP_KERNEL);
bb                557 block/badblocks.c 	if (!bb->page) {
bb                558 block/badblocks.c 		bb->shift = -1;
bb                561 block/badblocks.c 	seqlock_init(&bb->lock);
bb                575 block/badblocks.c int badblocks_init(struct badblocks *bb, int enable)
bb                577 block/badblocks.c 	return __badblocks_init(NULL, bb, enable);
bb                581 block/badblocks.c int devm_init_badblocks(struct device *dev, struct badblocks *bb)
bb                583 block/badblocks.c 	if (!bb)
bb                585 block/badblocks.c 	return __badblocks_init(dev, bb, 1);
bb                593 block/badblocks.c void badblocks_exit(struct badblocks *bb)
bb                595 block/badblocks.c 	if (!bb)
bb                597 block/badblocks.c 	if (bb->dev)
bb                598 block/badblocks.c 		devm_kfree(bb->dev, bb->page);
bb                600 block/badblocks.c 		kfree(bb->page);
bb                601 block/badblocks.c 	bb->page = NULL;
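Taken together, the block/badblocks.c hits above span the whole badblocks API: init/exit, set, clear, check, and the sysfs show/store helpers used by genhd.c below. A minimal usage sketch for a driver that embeds its own struct badblocks (error handling abbreviated; the function name is hypothetical):

	#include <linux/badblocks.h>
	#include <linux/printk.h>

	static int example_badblocks_usage(struct badblocks *bb)
	{
		sector_t first_bad;
		int bad_sectors;
		int rc;

		rc = badblocks_init(bb, 1);		/* 1 = enabled */
		if (rc)
			return rc;

		/* Record 8 bad sectors starting at LBA 1024, already acknowledged. */
		badblocks_set(bb, 1024, 8, 1);

		/* Non-zero return: [1000, 1064) touches a recorded bad range. */
		if (badblocks_check(bb, 1000, 64, &first_bad, &bad_sectors))
			pr_info("bad range at %llu, %d sectors\n",
				(unsigned long long)first_bad, bad_sectors);

		badblocks_clear(bb, 1024, 8);
		badblocks_exit(bb);
		return 0;
	}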
bb                845 block/genhd.c  	if (!disk->bb)
bb                848 block/genhd.c  	return badblocks_show(disk->bb, page, 0);
bb                857 block/genhd.c  	if (!disk->bb)
bb                860 block/genhd.c  	return badblocks_store(disk->bb, page, len, 0);
bb               1231 block/genhd.c  	if (a == &dev_attr_badblocks.attr && !disk->bb)
bb                 47 crypto/rmd128.c 	u32 aa, bb, cc, dd, aaa, bbb, ccc, ddd;
bb                 51 crypto/rmd128.c 	bb = state[1];
bb                 62 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F1, K1, in[0],  11);
bb                 63 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F1, K1, in[1],  14);
bb                 64 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F1, K1, in[2],  15);
bb                 65 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F1, K1, in[3],  12);
bb                 66 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F1, K1, in[4],   5);
bb                 67 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F1, K1, in[5],   8);
bb                 68 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F1, K1, in[6],   7);
bb                 69 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F1, K1, in[7],   9);
bb                 70 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F1, K1, in[8],  11);
bb                 71 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F1, K1, in[9],  13);
bb                 72 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F1, K1, in[10], 14);
bb                 73 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F1, K1, in[11], 15);
bb                 74 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F1, K1, in[12],  6);
bb                 75 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F1, K1, in[13],  7);
bb                 76 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F1, K1, in[14],  9);
bb                 77 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F1, K1, in[15],  8);
bb                 80 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F2, K2, in[7],   7);
bb                 81 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F2, K2, in[4],   6);
bb                 82 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F2, K2, in[13],  8);
bb                 83 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F2, K2, in[1],  13);
bb                 84 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F2, K2, in[10], 11);
bb                 85 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F2, K2, in[6],   9);
bb                 86 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F2, K2, in[15],  7);
bb                 87 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F2, K2, in[3],  15);
bb                 88 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F2, K2, in[12],  7);
bb                 89 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F2, K2, in[0],  12);
bb                 90 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F2, K2, in[9],  15);
bb                 91 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F2, K2, in[5],   9);
bb                 92 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F2, K2, in[2],  11);
bb                 93 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F2, K2, in[14],  7);
bb                 94 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F2, K2, in[11], 13);
bb                 95 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F2, K2, in[8],  12);
bb                 98 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F3, K3, in[3],  11);
bb                 99 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F3, K3, in[10], 13);
bb                100 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F3, K3, in[14],  6);
bb                101 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F3, K3, in[4],   7);
bb                102 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F3, K3, in[9],  14);
bb                103 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F3, K3, in[15],  9);
bb                104 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F3, K3, in[8],  13);
bb                105 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F3, K3, in[1],  15);
bb                106 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F3, K3, in[2],  14);
bb                107 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F3, K3, in[7],   8);
bb                108 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F3, K3, in[0],  13);
bb                109 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F3, K3, in[6],   6);
bb                110 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F3, K3, in[13],  5);
bb                111 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F3, K3, in[11], 12);
bb                112 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F3, K3, in[5],   7);
bb                113 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F3, K3, in[12],  5);
bb                116 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F4, K4, in[1],  11);
bb                117 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F4, K4, in[9],  12);
bb                118 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F4, K4, in[11], 14);
bb                119 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F4, K4, in[10], 15);
bb                120 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F4, K4, in[0],  14);
bb                121 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F4, K4, in[8],  15);
bb                122 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F4, K4, in[12],  9);
bb                123 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F4, K4, in[4],   8);
bb                124 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F4, K4, in[13],  9);
bb                125 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F4, K4, in[3],  14);
bb                126 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F4, K4, in[7],   5);
bb                127 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F4, K4, in[15],  6);
bb                128 crypto/rmd128.c 	ROUND(aa, bb, cc, dd, F4, K4, in[14],  8);
bb                129 crypto/rmd128.c 	ROUND(dd, aa, bb, cc, F4, K4, in[5],   6);
bb                130 crypto/rmd128.c 	ROUND(cc, dd, aa, bb, F4, K4, in[6],   5);
bb                131 crypto/rmd128.c 	ROUND(bb, cc, dd, aa, F4, K4, in[2],  12);
bb                209 crypto/rmd128.c 	state[3] = state[0] + bb + ccc;
bb                 51 crypto/rmd160.c 	u32 aa, bb, cc, dd, ee, aaa, bbb, ccc, ddd, eee;
bb                 55 crypto/rmd160.c 	bb = state[1];
bb                 68 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F1, K1, in[0],  11);
bb                 69 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F1, K1, in[1],  14);
bb                 70 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F1, K1, in[2],  15);
bb                 71 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F1, K1, in[3],  12);
bb                 72 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F1, K1, in[4],   5);
bb                 73 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F1, K1, in[5],   8);
bb                 74 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F1, K1, in[6],   7);
bb                 75 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F1, K1, in[7],   9);
bb                 76 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F1, K1, in[8],  11);
bb                 77 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F1, K1, in[9],  13);
bb                 78 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F1, K1, in[10], 14);
bb                 79 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F1, K1, in[11], 15);
bb                 80 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F1, K1, in[12],  6);
bb                 81 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F1, K1, in[13],  7);
bb                 82 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F1, K1, in[14],  9);
bb                 83 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F1, K1, in[15],  8);
bb                 86 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F2, K2, in[7],   7);
bb                 87 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F2, K2, in[4],   6);
bb                 88 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F2, K2, in[13],  8);
bb                 89 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F2, K2, in[1],  13);
bb                 90 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11);
bb                 91 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F2, K2, in[6],   9);
bb                 92 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F2, K2, in[15],  7);
bb                 93 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F2, K2, in[3],  15);
bb                 94 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F2, K2, in[12],  7);
bb                 95 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F2, K2, in[0],  12);
bb                 96 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F2, K2, in[9],  15);
bb                 97 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F2, K2, in[5],   9);
bb                 98 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F2, K2, in[2],  11);
bb                 99 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F2, K2, in[14],  7);
bb                100 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F2, K2, in[11], 13);
bb                101 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F2, K2, in[8],  12);
bb                104 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F3, K3, in[3],  11);
bb                105 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F3, K3, in[10], 13);
bb                106 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F3, K3, in[14],  6);
bb                107 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F3, K3, in[4],   7);
bb                108 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F3, K3, in[9],  14);
bb                109 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F3, K3, in[15],  9);
bb                110 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F3, K3, in[8],  13);
bb                111 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F3, K3, in[1],  15);
bb                112 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F3, K3, in[2],  14);
bb                113 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F3, K3, in[7],   8);
bb                114 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F3, K3, in[0],  13);
bb                115 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F3, K3, in[6],   6);
bb                116 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F3, K3, in[13],  5);
bb                117 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F3, K3, in[11], 12);
bb                118 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F3, K3, in[5],   7);
bb                119 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F3, K3, in[12],  5);
bb                122 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F4, K4, in[1],  11);
bb                123 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F4, K4, in[9],  12);
bb                124 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F4, K4, in[11], 14);
bb                125 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F4, K4, in[10], 15);
bb                126 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F4, K4, in[0],  14);
bb                127 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F4, K4, in[8],  15);
bb                128 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F4, K4, in[12],  9);
bb                129 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F4, K4, in[4],   8);
bb                130 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F4, K4, in[13],  9);
bb                131 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F4, K4, in[3],  14);
bb                132 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F4, K4, in[7],   5);
bb                133 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F4, K4, in[15],  6);
bb                134 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F4, K4, in[14],  8);
bb                135 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F4, K4, in[5],   6);
bb                136 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F4, K4, in[6],   5);
bb                137 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F4, K4, in[2],  12);
bb                140 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F5, K5, in[4],   9);
bb                141 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F5, K5, in[0],  15);
bb                142 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F5, K5, in[5],   5);
bb                143 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F5, K5, in[9],  11);
bb                144 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F5, K5, in[7],   6);
bb                145 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F5, K5, in[12],  8);
bb                146 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F5, K5, in[2],  13);
bb                147 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F5, K5, in[10], 12);
bb                148 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F5, K5, in[14],  5);
bb                149 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F5, K5, in[1],  12);
bb                150 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F5, K5, in[3],  13);
bb                151 crypto/rmd160.c 	ROUND(aa, bb, cc, dd, ee, F5, K5, in[8],  14);
bb                152 crypto/rmd160.c 	ROUND(ee, aa, bb, cc, dd, F5, K5, in[11], 11);
bb                153 crypto/rmd160.c 	ROUND(dd, ee, aa, bb, cc, F5, K5, in[6],   8);
bb                154 crypto/rmd160.c 	ROUND(cc, dd, ee, aa, bb, F5, K5, in[15],  5);
bb                155 crypto/rmd160.c 	ROUND(bb, cc, dd, ee, aa, F5, K5, in[13],  6);
bb                252 crypto/rmd160.c 	state[4] = state[0] + bb + ccc;
bb                 47 crypto/rmd256.c 	u32 aa, bb, cc, dd, aaa, bbb, ccc, ddd;
bb                 51 crypto/rmd256.c 	bb = state[1];
bb                 62 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F1, K1, in[0],  11);
bb                 63 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F1, K1, in[1],  14);
bb                 64 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F1, K1, in[2],  15);
bb                 65 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F1, K1, in[3],  12);
bb                 66 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F1, K1, in[4],   5);
bb                 67 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F1, K1, in[5],   8);
bb                 68 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F1, K1, in[6],   7);
bb                 69 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F1, K1, in[7],   9);
bb                 70 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F1, K1, in[8],  11);
bb                 71 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F1, K1, in[9],  13);
bb                 72 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F1, K1, in[10], 14);
bb                 73 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F1, K1, in[11], 15);
bb                 74 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F1, K1, in[12],  6);
bb                 75 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F1, K1, in[13],  7);
bb                 76 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F1, K1, in[14],  9);
bb                 77 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F1, K1, in[15],  8);
bb                101 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F2, K2, in[7],   7);
bb                102 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F2, K2, in[4],   6);
bb                103 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F2, K2, in[13],  8);
bb                104 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F2, K2, in[1],  13);
bb                105 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F2, K2, in[10], 11);
bb                106 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F2, K2, in[6],   9);
bb                107 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F2, K2, in[15],  7);
bb                108 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F2, K2, in[3],  15);
bb                109 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F2, K2, in[12],  7);
bb                110 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F2, K2, in[0],  12);
bb                111 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F2, K2, in[9],  15);
bb                112 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F2, K2, in[5],   9);
bb                113 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F2, K2, in[2],  11);
bb                114 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F2, K2, in[14],  7);
bb                115 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F2, K2, in[11], 13);
bb                116 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F2, K2, in[8],  12);
bb                137 crypto/rmd256.c 	swap(bb, bbb);
bb                140 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F3, K3, in[3],  11);
bb                141 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F3, K3, in[10], 13);
bb                142 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F3, K3, in[14],  6);
bb                143 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F3, K3, in[4],   7);
bb                144 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F3, K3, in[9],  14);
bb                145 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F3, K3, in[15],  9);
bb                146 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F3, K3, in[8],  13);
bb                147 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F3, K3, in[1],  15);
bb                148 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F3, K3, in[2],  14);
bb                149 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F3, K3, in[7],   8);
bb                150 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F3, K3, in[0],  13);
bb                151 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F3, K3, in[6],   6);
bb                152 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F3, K3, in[13],  5);
bb                153 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F3, K3, in[11], 12);
bb                154 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F3, K3, in[5],   7);
bb                155 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F3, K3, in[12],  5);
bb                179 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F4, K4, in[1],  11);
bb                180 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F4, K4, in[9],  12);
bb                181 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F4, K4, in[11], 14);
bb                182 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F4, K4, in[10], 15);
bb                183 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F4, K4, in[0],  14);
bb                184 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F4, K4, in[8],  15);
bb                185 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F4, K4, in[12],  9);
bb                186 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F4, K4, in[4],   8);
bb                187 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F4, K4, in[13],  9);
bb                188 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F4, K4, in[3],  14);
bb                189 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F4, K4, in[7],   5);
bb                190 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F4, K4, in[15],  6);
bb                191 crypto/rmd256.c 	ROUND(aa, bb, cc, dd, F4, K4, in[14],  8);
bb                192 crypto/rmd256.c 	ROUND(dd, aa, bb, cc, F4, K4, in[5],   6);
bb                193 crypto/rmd256.c 	ROUND(cc, dd, aa, bb, F4, K4, in[6],   5);
bb                194 crypto/rmd256.c 	ROUND(bb, cc, dd, aa, F4, K4, in[2],  12);
bb                219 crypto/rmd256.c 	state[1] += bb;
bb                 51 crypto/rmd320.c 	u32 aa, bb, cc, dd, ee, aaa, bbb, ccc, ddd, eee;
bb                 55 crypto/rmd320.c 	bb = state[1];
bb                 68 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F1, K1, in[0],  11);
bb                 69 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F1, K1, in[1],  14);
bb                 70 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F1, K1, in[2],  15);
bb                 71 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F1, K1, in[3],  12);
bb                 72 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F1, K1, in[4],   5);
bb                 73 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F1, K1, in[5],   8);
bb                 74 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F1, K1, in[6],   7);
bb                 75 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F1, K1, in[7],   9);
bb                 76 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F1, K1, in[8],  11);
bb                 77 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F1, K1, in[9],  13);
bb                 78 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F1, K1, in[10], 14);
bb                 79 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F1, K1, in[11], 15);
bb                 80 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F1, K1, in[12],  6);
bb                 81 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F1, K1, in[13],  7);
bb                 82 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F1, K1, in[14],  9);
bb                 83 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F1, K1, in[15],  8);
bb                107 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F2, K2, in[7],   7);
bb                108 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F2, K2, in[4],   6);
bb                109 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F2, K2, in[13],  8);
bb                110 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F2, K2, in[1],  13);
bb                111 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F2, K2, in[10], 11);
bb                112 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F2, K2, in[6],   9);
bb                113 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F2, K2, in[15],  7);
bb                114 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F2, K2, in[3],  15);
bb                115 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F2, K2, in[12],  7);
bb                116 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F2, K2, in[0],  12);
bb                117 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F2, K2, in[9],  15);
bb                118 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F2, K2, in[5],   9);
bb                119 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F2, K2, in[2],  11);
bb                120 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F2, K2, in[14],  7);
bb                121 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F2, K2, in[11], 13);
bb                122 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F2, K2, in[8],  12);
bb                143 crypto/rmd320.c 	swap(bb, bbb);
bb                146 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F3, K3, in[3],  11);
bb                147 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F3, K3, in[10], 13);
bb                148 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F3, K3, in[14],  6);
bb                149 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F3, K3, in[4],   7);
bb                150 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F3, K3, in[9],  14);
bb                151 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F3, K3, in[15],  9);
bb                152 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F3, K3, in[8],  13);
bb                153 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F3, K3, in[1],  15);
bb                154 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F3, K3, in[2],  14);
bb                155 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F3, K3, in[7],   8);
bb                156 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F3, K3, in[0],  13);
bb                157 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F3, K3, in[6],   6);
bb                158 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F3, K3, in[13],  5);
bb                159 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F3, K3, in[11], 12);
bb                160 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F3, K3, in[5],   7);
bb                161 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F3, K3, in[12],  5);
bb                185 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F4, K4, in[1],  11);
bb                186 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F4, K4, in[9],  12);
bb                187 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F4, K4, in[11], 14);
bb                188 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F4, K4, in[10], 15);
bb                189 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F4, K4, in[0],  14);
bb                190 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F4, K4, in[8],  15);
bb                191 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F4, K4, in[12],  9);
bb                192 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F4, K4, in[4],   8);
bb                193 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F4, K4, in[13],  9);
bb                194 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F4, K4, in[3],  14);
bb                195 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F4, K4, in[7],   5);
bb                196 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F4, K4, in[15],  6);
bb                197 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F4, K4, in[14],  8);
bb                198 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F4, K4, in[5],   6);
bb                199 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F4, K4, in[6],   5);
bb                200 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F4, K4, in[2],  12);
bb                224 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F5, K5, in[4],   9);
bb                225 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F5, K5, in[0],  15);
bb                226 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F5, K5, in[5],   5);
bb                227 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F5, K5, in[9],  11);
bb                228 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F5, K5, in[7],   6);
bb                229 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F5, K5, in[12],  8);
bb                230 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F5, K5, in[2],  13);
bb                231 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F5, K5, in[10], 12);
bb                232 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F5, K5, in[14],  5);
bb                233 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F5, K5, in[1],  12);
bb                234 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F5, K5, in[3],  13);
bb                235 crypto/rmd320.c 	ROUND(aa, bb, cc, dd, ee, F5, K5, in[8],  14);
bb                236 crypto/rmd320.c 	ROUND(ee, aa, bb, cc, dd, F5, K5, in[11], 11);
bb                237 crypto/rmd320.c 	ROUND(dd, ee, aa, bb, cc, F5, K5, in[6],   8);
bb                238 crypto/rmd320.c 	ROUND(cc, dd, ee, aa, bb, F5, K5, in[15],  5);
bb                239 crypto/rmd320.c 	ROUND(bb, cc, dd, ee, aa, F5, K5, in[13],  6);
bb                264 crypto/rmd320.c 	state[1] += bb;
bb                464 crypto/tgr192.c 	u64 a, b, c, aa, bb, cc;
bb                473 crypto/tgr192.c 	b = bb = tctx->b;
bb                485 crypto/tgr192.c 	b -= bb;
bb               1162 drivers/block/null_blk_main.c 	struct badblocks *bb = &cmd->nq->dev->badblocks;
bb               1166 drivers/block/null_blk_main.c 	if (badblocks_check(bb, sector, nr_sectors, &first_bad, &bad_sectors))
bb               3093 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		struct _vcs_dpi_soc_bounding_box_st *bb,
bb               3099 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	for (i = 0; i < bb->num_states; i++) {
bb               3100 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if ((bb->clock_limits[i].dcfclk_mhz > (max_clocks.dcfClockInKhz / 1000))
bb               3102 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			bb->clock_limits[i].dcfclk_mhz = (max_clocks.dcfClockInKhz / 1000);
bb               3104 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if ((bb->clock_limits[i].dram_speed_mts > (max_clocks.uClockInKhz / 1000) * 16)
bb               3106 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			bb->clock_limits[i].dram_speed_mts = (max_clocks.uClockInKhz / 1000) * 16;
bb               3108 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if ((bb->clock_limits[i].fabricclk_mhz > (max_clocks.fabricClockInKhz / 1000))
bb               3110 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			bb->clock_limits[i].fabricclk_mhz = (max_clocks.fabricClockInKhz / 1000);
bb               3112 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if ((bb->clock_limits[i].dispclk_mhz > (max_clocks.displayClockInKhz / 1000))
bb               3114 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			bb->clock_limits[i].dispclk_mhz = (max_clocks.displayClockInKhz / 1000);
bb               3116 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if ((bb->clock_limits[i].dppclk_mhz > (max_clocks.dppClockInKhz / 1000))
bb               3118 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			bb->clock_limits[i].dppclk_mhz = (max_clocks.dppClockInKhz / 1000);
bb               3120 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if ((bb->clock_limits[i].phyclk_mhz > (max_clocks.phyClockInKhz / 1000))
bb               3122 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			bb->clock_limits[i].phyclk_mhz = (max_clocks.phyClockInKhz / 1000);
bb               3124 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if ((bb->clock_limits[i].socclk_mhz > (max_clocks.socClockInKhz / 1000))
bb               3126 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			bb->clock_limits[i].socclk_mhz = (max_clocks.socClockInKhz / 1000);
bb               3128 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if ((bb->clock_limits[i].dscclk_mhz > (max_clocks.dscClockInKhz / 1000))
bb               3130 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			bb->clock_limits[i].dscclk_mhz = (max_clocks.dscClockInKhz / 1000);
bb               3134 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	for (i = bb->num_states - 1; i > 1; i--) {
bb               3137 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if (bb->clock_limits[i-1].dcfclk_mhz != bb->clock_limits[i].dcfclk_mhz)
bb               3139 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if (bb->clock_limits[i-1].dispclk_mhz != bb->clock_limits[i].dispclk_mhz)
bb               3141 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if (bb->clock_limits[i-1].dppclk_mhz != bb->clock_limits[i].dppclk_mhz)
bb               3143 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if (bb->clock_limits[i-1].dram_speed_mts != bb->clock_limits[i].dram_speed_mts)
bb               3145 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if (bb->clock_limits[i-1].dscclk_mhz != bb->clock_limits[i].dscclk_mhz)
bb               3147 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if (bb->clock_limits[i-1].fabricclk_mhz != bb->clock_limits[i].fabricclk_mhz)
bb               3149 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if (bb->clock_limits[i-1].phyclk_mhz != bb->clock_limits[i].phyclk_mhz)
bb               3151 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		if (bb->clock_limits[i-1].socclk_mhz != bb->clock_limits[i].socclk_mhz)
bb               3155 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 			bb->num_states--;
bb               3159 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c static void update_bounding_box(struct dc *dc, struct _vcs_dpi_soc_bounding_box_st *bb,
bb               3209 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	memcpy(bb->clock_limits, calculated_states, sizeof(bb->clock_limits));
bb               3210 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	bb->num_states = num_calculated_states;
bb               3213 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	memcpy(&bb->clock_limits[num_calculated_states], &bb->clock_limits[num_calculated_states - 1], sizeof(struct _vcs_dpi_voltage_scaling_st));
bb               3214 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	bb->clock_limits[num_calculated_states].state = bb->num_states;
bb               3217 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c static void patch_bounding_box(struct dc *dc, struct _vcs_dpi_soc_bounding_box_st *bb)
bb               3220 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if ((int)(bb->sr_exit_time_us * 1000) != dc->bb_overrides.sr_exit_time_ns
bb               3222 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		bb->sr_exit_time_us = dc->bb_overrides.sr_exit_time_ns / 1000.0;
bb               3225 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if ((int)(bb->sr_enter_plus_exit_time_us * 1000)
bb               3228 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		bb->sr_enter_plus_exit_time_us =
bb               3232 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if ((int)(bb->urgent_latency_us * 1000) != dc->bb_overrides.urgent_latency_ns
bb               3234 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		bb->urgent_latency_us = dc->bb_overrides.urgent_latency_ns / 1000.0;
bb               3237 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if ((int)(bb->dram_clock_change_latency_us * 1000)
bb               3240 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 		bb->dram_clock_change_latency_us =
bb               3276 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	const struct gpu_info_soc_bounding_box_v1_0 *bb = dc->soc_bounding_box;
bb               3284 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (!bb && !SOC_BOUNDING_BOX_VALID) {
bb               3289 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 	if (bb && !SOC_BOUNDING_BOX_VALID) {
bb               3293 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->sr_exit_time_us);
bb               3295 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->sr_enter_plus_exit_time_us);
bb               3297 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->urgent_latency_us);
bb               3299 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->urgent_latency_pixel_data_only_us);
bb               3301 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->urgent_latency_pixel_mixed_with_vm_data_us);
bb               3303 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->urgent_latency_vm_data_only_us);
bb               3305 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->urgent_out_of_order_return_per_channel_pixel_only_bytes);
bb               3307 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->urgent_out_of_order_return_per_channel_pixel_and_vm_bytes);
bb               3309 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->urgent_out_of_order_return_per_channel_vm_only_bytes);
bb               3311 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->pct_ideal_dram_sdp_bw_after_urgent_pixel_only);
bb               3313 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->pct_ideal_dram_sdp_bw_after_urgent_pixel_and_vm);
bb               3315 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->pct_ideal_dram_sdp_bw_after_urgent_vm_only);
bb               3317 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->max_avg_sdp_bw_use_normal_percent);
bb               3319 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->max_avg_dram_bw_use_normal_percent);
bb               3321 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->writeback_latency_us);
bb               3323 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->ideal_dram_bw_after_urgent_percent);
bb               3325 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->max_request_size_bytes);
bb               3327 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->dram_channel_width_bytes);
bb               3329 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->fabric_datapath_to_dcn_data_return_bytes);
bb               3331 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->dcn_downspread_percent);
bb               3333 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->downspread_percent);
bb               3335 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->dram_page_open_time_ns);
bb               3337 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->dram_rw_turnaround_time_ns);
bb               3339 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->dram_return_buffer_per_channel_bytes);
bb               3341 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->round_trip_ping_latency_dcfclk_cycles);
bb               3343 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->urgent_out_of_order_return_per_channel_bytes);
bb               3345 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->channel_interleave_bytes);
bb               3347 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->num_banks);
bb               3349 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->num_chans);
bb               3351 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->vmm_page_size_bytes);
bb               3353 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->dram_clock_change_latency_us);
bb               3357 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				fixed16_to_double_to_cpu(bb->writeback_dram_clock_change_latency_us);
bb               3359 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->return_bus_width_bytes);
bb               3361 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->dispclk_dppclk_vco_speed_mhz);
bb               3363 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->xfc_bus_transport_time_us);
bb               3365 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->xfc_xbuf_latency_tolerance_us);
bb               3367 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->use_urgent_burst_bw);
bb               3369 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 				le32_to_cpu(bb->num_states);
bb               3373 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 					le32_to_cpu(bb->clock_limits[i].state);
bb               3375 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 					fixed16_to_double_to_cpu(bb->clock_limits[i].dcfclk_mhz);
bb               3377 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 					fixed16_to_double_to_cpu(bb->clock_limits[i].fabricclk_mhz);
bb               3379 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 					fixed16_to_double_to_cpu(bb->clock_limits[i].dispclk_mhz);
bb               3381 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 					fixed16_to_double_to_cpu(bb->clock_limits[i].dppclk_mhz);
bb               3383 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 					fixed16_to_double_to_cpu(bb->clock_limits[i].phyclk_mhz);
bb               3385 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 					fixed16_to_double_to_cpu(bb->clock_limits[i].socclk_mhz);
bb               3387 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 					fixed16_to_double_to_cpu(bb->clock_limits[i].dscclk_mhz);
bb               3389 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.c 					fixed16_to_double_to_cpu(bb->clock_limits[i].dram_speed_mts);
bb                139 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 		struct _vcs_dpi_soc_bounding_box_st *bb);
bb                141 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_resource.h 		struct _vcs_dpi_soc_bounding_box_st *bb,
bb               1820 drivers/gpu/drm/i915/gvt/cmd_parser.c 	struct intel_vgpu_shadow_bb *bb;
bb               1838 drivers/gpu/drm/i915/gvt/cmd_parser.c 	bb = kzalloc(sizeof(*bb), GFP_KERNEL);
bb               1839 drivers/gpu/drm/i915/gvt/cmd_parser.c 	if (!bb)
bb               1842 drivers/gpu/drm/i915/gvt/cmd_parser.c 	bb->ppgtt = (s->buf_addr_type == GTT_BUFFER) ? false : true;
bb               1855 drivers/gpu/drm/i915/gvt/cmd_parser.c 	if (bb->ppgtt)
bb               1858 drivers/gpu/drm/i915/gvt/cmd_parser.c 	bb->obj = i915_gem_object_create_shmem(s->vgpu->gvt->dev_priv,
bb               1861 drivers/gpu/drm/i915/gvt/cmd_parser.c 	if (IS_ERR(bb->obj)) {
bb               1862 drivers/gpu/drm/i915/gvt/cmd_parser.c 		ret = PTR_ERR(bb->obj);
bb               1866 drivers/gpu/drm/i915/gvt/cmd_parser.c 	ret = i915_gem_object_prepare_write(bb->obj, &bb->clflush);
bb               1870 drivers/gpu/drm/i915/gvt/cmd_parser.c 	bb->va = i915_gem_object_pin_map(bb->obj, I915_MAP_WB);
bb               1871 drivers/gpu/drm/i915/gvt/cmd_parser.c 	if (IS_ERR(bb->va)) {
bb               1872 drivers/gpu/drm/i915/gvt/cmd_parser.c 		ret = PTR_ERR(bb->va);
bb               1876 drivers/gpu/drm/i915/gvt/cmd_parser.c 	if (bb->clflush & CLFLUSH_BEFORE) {
bb               1877 drivers/gpu/drm/i915/gvt/cmd_parser.c 		drm_clflush_virt_range(bb->va, bb->obj->base.size);
bb               1878 drivers/gpu/drm/i915/gvt/cmd_parser.c 		bb->clflush &= ~CLFLUSH_BEFORE;
bb               1883 drivers/gpu/drm/i915/gvt/cmd_parser.c 			      bb->va + start_offset);
bb               1890 drivers/gpu/drm/i915/gvt/cmd_parser.c 	ret = audit_bb_end(s, bb->va + start_offset + bb_end_cmd_offset);
bb               1894 drivers/gpu/drm/i915/gvt/cmd_parser.c 	INIT_LIST_HEAD(&bb->list);
bb               1895 drivers/gpu/drm/i915/gvt/cmd_parser.c 	list_add(&bb->list, &s->workload->shadow_bb);
bb               1897 drivers/gpu/drm/i915/gvt/cmd_parser.c 	bb->accessing = true;
bb               1898 drivers/gpu/drm/i915/gvt/cmd_parser.c 	bb->bb_start_cmd_va = s->ip_va;
bb               1901 drivers/gpu/drm/i915/gvt/cmd_parser.c 		bb->bb_offset = s->ip_va - s->rb_va;
bb               1903 drivers/gpu/drm/i915/gvt/cmd_parser.c 		bb->bb_offset = 0;
bb               1913 drivers/gpu/drm/i915/gvt/cmd_parser.c 	s->ip_va = bb->va + start_offset;
bb               1917 drivers/gpu/drm/i915/gvt/cmd_parser.c 	i915_gem_object_unpin_map(bb->obj);
bb               1919 drivers/gpu/drm/i915/gvt/cmd_parser.c 	i915_gem_object_finish_access(bb->obj);
bb               1921 drivers/gpu/drm/i915/gvt/cmd_parser.c 	i915_gem_object_put(bb->obj);
bb               1923 drivers/gpu/drm/i915/gvt/cmd_parser.c 	kfree(bb);
bb                457 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_shadow_bb *bb;
bb                460 drivers/gpu/drm/i915/gvt/scheduler.c 	list_for_each_entry(bb, &workload->shadow_bb, list) {
bb                468 drivers/gpu/drm/i915/gvt/scheduler.c 		if (bb->bb_offset)
bb                469 drivers/gpu/drm/i915/gvt/scheduler.c 			bb->bb_start_cmd_va = workload->shadow_ring_buffer_va
bb                470 drivers/gpu/drm/i915/gvt/scheduler.c 				+ bb->bb_offset;
bb                472 drivers/gpu/drm/i915/gvt/scheduler.c 		if (bb->ppgtt) {
bb                481 drivers/gpu/drm/i915/gvt/scheduler.c 			if (bb->clflush & CLFLUSH_AFTER) {
bb                482 drivers/gpu/drm/i915/gvt/scheduler.c 				drm_clflush_virt_range(bb->va,
bb                483 drivers/gpu/drm/i915/gvt/scheduler.c 						bb->obj->base.size);
bb                484 drivers/gpu/drm/i915/gvt/scheduler.c 				bb->clflush &= ~CLFLUSH_AFTER;
bb                486 drivers/gpu/drm/i915/gvt/scheduler.c 			i915_gem_object_finish_access(bb->obj);
bb                487 drivers/gpu/drm/i915/gvt/scheduler.c 			bb->accessing = false;
bb                490 drivers/gpu/drm/i915/gvt/scheduler.c 			bb->vma = i915_gem_object_ggtt_pin(bb->obj,
bb                492 drivers/gpu/drm/i915/gvt/scheduler.c 			if (IS_ERR(bb->vma)) {
bb                493 drivers/gpu/drm/i915/gvt/scheduler.c 				ret = PTR_ERR(bb->vma);
bb                498 drivers/gpu/drm/i915/gvt/scheduler.c 			bb->bb_start_cmd_va[1] = i915_ggtt_offset(bb->vma);
bb                500 drivers/gpu/drm/i915/gvt/scheduler.c 				bb->bb_start_cmd_va[2] = 0;
bb                503 drivers/gpu/drm/i915/gvt/scheduler.c 			if (bb->clflush & CLFLUSH_AFTER) {
bb                504 drivers/gpu/drm/i915/gvt/scheduler.c 				drm_clflush_virt_range(bb->va,
bb                505 drivers/gpu/drm/i915/gvt/scheduler.c 						bb->obj->base.size);
bb                506 drivers/gpu/drm/i915/gvt/scheduler.c 				bb->clflush &= ~CLFLUSH_AFTER;
bb                509 drivers/gpu/drm/i915/gvt/scheduler.c 			ret = i915_gem_object_set_to_gtt_domain(bb->obj,
bb                514 drivers/gpu/drm/i915/gvt/scheduler.c 			ret = i915_vma_move_to_active(bb->vma,
bb                520 drivers/gpu/drm/i915/gvt/scheduler.c 			i915_gem_object_finish_access(bb->obj);
bb                521 drivers/gpu/drm/i915/gvt/scheduler.c 			bb->accessing = false;
bb                589 drivers/gpu/drm/i915/gvt/scheduler.c 	struct intel_vgpu_shadow_bb *bb, *pos;
bb                594 drivers/gpu/drm/i915/gvt/scheduler.c 	bb = list_first_entry(&workload->shadow_bb,
bb                599 drivers/gpu/drm/i915/gvt/scheduler.c 	list_for_each_entry_safe(bb, pos, &workload->shadow_bb, list) {
bb                600 drivers/gpu/drm/i915/gvt/scheduler.c 		if (bb->obj) {
bb                601 drivers/gpu/drm/i915/gvt/scheduler.c 			if (bb->accessing)
bb                602 drivers/gpu/drm/i915/gvt/scheduler.c 				i915_gem_object_finish_access(bb->obj);
bb                604 drivers/gpu/drm/i915/gvt/scheduler.c 			if (bb->va && !IS_ERR(bb->va))
bb                605 drivers/gpu/drm/i915/gvt/scheduler.c 				i915_gem_object_unpin_map(bb->obj);
bb                607 drivers/gpu/drm/i915/gvt/scheduler.c 			if (bb->vma && !IS_ERR(bb->vma)) {
bb                608 drivers/gpu/drm/i915/gvt/scheduler.c 				i915_vma_unpin(bb->vma);
bb                609 drivers/gpu/drm/i915/gvt/scheduler.c 				i915_vma_close(bb->vma);
bb                611 drivers/gpu/drm/i915/gvt/scheduler.c 			i915_gem_object_put(bb->obj);
bb                613 drivers/gpu/drm/i915/gvt/scheduler.c 		list_del(&bb->list);
bb                614 drivers/gpu/drm/i915/gvt/scheduler.c 		kfree(bb);
bb               1325 drivers/gpu/drm/omapdrm/dss/dispc.c 		FLD_VAL(coefs->bb, 9, 0);
bb                226 drivers/gpu/drm/omapdrm/dss/omapdss.h 	s16 br, bg, bb;
bb               2800 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	struct drm_rect bb;
bb               2864 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	bb.x1 = INT_MAX;
bb               2865 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	bb.y1 = INT_MAX;
bb               2866 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	bb.x2 = INT_MIN;
bb               2867 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	bb.y2 = INT_MIN;
bb               2881 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		bb.x1 = min_t(int, bb.x1, clip.x1);
bb               2882 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		bb.y1 = min_t(int, bb.y1, clip.y1);
bb               2883 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		bb.x2 = max_t(int, bb.x2, clip.x2);
bb               2884 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 		bb.y2 = max_t(int, bb.y2, clip.y2);
bb               2887 drivers/gpu/drm/vmwgfx/vmwgfx_kms.c 	curr_size = update->post_clip(update, cmd_next, &bb);
bb                119 drivers/gpu/drm/vmwgfx/vmwgfx_kms.h 				    struct drm_rect *bb);
bb                520 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 				      void *cmd, struct drm_rect *bb)
bb                636 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 					  void *cmd, struct drm_rect *bb)
bb                656 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	src_bb = *bb;
bb                669 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	blit->body.destRect.left = bb->x1;
bb                670 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	blit->body.destRect.top = bb->y1;
bb                671 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	blit->body.destRect.right = bb->x2;
bb                672 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 	blit->body.destRect.bottom = bb->y2;
bb                676 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		rect->left -= bb->x1;
bb                677 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		rect->top -= bb->y1;
bb                678 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		rect->right -= bb->x1;
bb                679 drivers/gpu/drm/vmwgfx/vmwgfx_scrn.c 		rect->bottom -= bb->y1;
bb               1243 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 					    void *cmd, struct drm_rect *bb)
bb               1255 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	vmw_stdu_populate_update(&suffix[1], stdu->base.unit, bb->x1, bb->x2,
bb               1256 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 				 bb->y1, bb->y2);
bb               1288 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 				struct drm_rect *bb)
bb               1305 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	width = bb->x2 - bb->x1;
bb               1306 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	height = bb->y2 - bb->y1;
bb               1312 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	dst_offset = bb->y1 * dst_pitch + bb->x1 * stdu->cpp;
bb               1517 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 				 struct drm_rect *bb)
bb               1519 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 	vmw_stdu_populate_update(cmd, update->du->unit, bb->x1, bb->x2, bb->y1,
bb               1520 drivers/gpu/drm/vmwgfx/vmwgfx_stdu.c 				 bb->y2);
bb               2206 drivers/hid/hid-wiimote-modules.c 	__u8 sx, sy, tb, wb, bd, bm, bp, bo, br, bb, bg, by, bu;
bb               2248 drivers/hid/hid-wiimote-modules.c 	bb = !(ext[5] & 0x20);
bb               2275 drivers/hid/hid-wiimote-modules.c 			 bb);
bb                 10 drivers/input/mouse/cypress_ps2.h #define ENCODE_CMD(aa, bb, cc, dd) \
bb                 11 drivers/input/mouse/cypress_ps2.h 	(COMPOSIT((aa), 6) | COMPOSIT((bb), 4) | COMPOSIT((cc), 2) | COMPOSIT((dd), 0))
bb                 19 drivers/md/dm-dust.c 	sector_t bb;
bb                 42 drivers/md/dm-dust.c 		if (bblk->bb > blk)
bb                 44 drivers/md/dm-dust.c 		else if (bblk->bb < blk)
bb                 57 drivers/md/dm-dust.c 	sector_t value = new->bb;
bb                 63 drivers/md/dm-dust.c 		if (bblk->bb > value)
bb                 65 drivers/md/dm-dust.c 		else if (bblk->bb < value)
bb                117 drivers/md/dm-dust.c 	bblock->bb = block;
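
dm-dust keys each bad-block entry by its sector (the bb field) and searches a tree by comparing bblk->bb against the requested block, descending one way when the node's key is larger and the other when it is smaller. The real code uses the kernel rb-tree API; below is a hedged standalone sketch of the same lookup over a plain binary search tree, with illustrative types:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint64_t sector_t;

struct badblock {
	sector_t bb;			/* bad block number, the tree key */
	struct badblock *left, *right;
};

/* Descend exactly as the dm-dust lookup does: left when the node's
 * key is larger than the target, right when it is smaller. */
static struct badblock *dust_find(struct badblock *root, sector_t blk)
{
	while (root) {
		if (root->bb > blk)
			root = root->left;
		else if (root->bb < blk)
			root = root->right;
		else
			return root;
	}
	return NULL;
}

int main(void)
{
	struct badblock low  = { 100, NULL, NULL };
	struct badblock high = { 300, NULL, NULL };
	struct badblock root = { 200, &low, &high };

	printf("300: %s\n", dust_find(&root, 300) ? "found" : "missing");
	printf("150: %s\n", dust_find(&root, 150) ? "found" : "missing");
	return 0;
}
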
bb               1658 drivers/md/md.c 			u64 bb = le64_to_cpu(*bbp);
bb               1659 drivers/md/md.c 			int count = bb & (0x3ff);
bb               1660 drivers/md/md.c 			u64 sector = bb >> 10;
bb               1663 drivers/md/md.c 			if (bb + 1 == 0)
bb               1992 drivers/md/md.c 		struct badblocks *bb = &rdev->badblocks;
bb               1994 drivers/md/md.c 		u64 *p = bb->page;
bb               1996 drivers/md/md.c 		if (bb->changed) {
bb               2000 drivers/md/md.c 			seq = read_seqbegin(&bb->lock);
bb               2004 drivers/md/md.c 			for (i = 0 ; i < bb->count ; i++) {
bb               2010 drivers/md/md.c 			bb->changed = 0;
bb               2011 drivers/md/md.c 			if (read_seqretry(&bb->lock, seq))
bb               2014 drivers/md/md.c 			bb->sector = (rdev->sb_start +
bb               2016 drivers/md/md.c 			bb->size = le16_to_cpu(sb->bblog_size);
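
In the md.c lines above, each entry of the on-disk bad-block log is a little-endian u64 packing a 10-bit sector count in the low bits and the start sector in the upper bits, with an all-ones word (bb + 1 == 0) ending the log; the write-out path later serializes bb->page back under the seqlock. A standalone sketch of the decode, reconstructed from those lines (helper names are illustrative):

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Unpack one log entry: low 10 bits = count, remaining bits = sector. */
static void decode_bb_entry(uint64_t bb, uint64_t *sector, int *count)
{
	*count = (int)(bb & 0x3ff);
	*sector = bb >> 10;
}

int main(void)
{
	/* Two example entries followed by the all-ones terminator. */
	uint64_t log[] = { (4096ULL << 10) | 8, (123456ULL << 10) | 1, ~0ULL };

	for (size_t i = 0; i < sizeof(log) / sizeof(log[0]); i++) {
		uint64_t sector;
		int count;

		if (log[i] + 1 == 0)	/* all-ones entry ends the log */
			break;
		decode_bb_entry(log[i], &sector, &count);
		printf("bad range: sector %llu, %d sectors\n",
		       (unsigned long long)sector, count);
	}
	return 0;
}
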
bb                918 drivers/media/dvb-frontends/dib0090.c 	u16 rf, bb, ref;
bb                956 drivers/media/dvb-frontends/dib0090.c 		bb = gain - rf;
bb                957 drivers/media/dvb-frontends/dib0090.c 		if (bb > state->bb_ramp[0])
bb                958 drivers/media/dvb-frontends/dib0090.c 			bb = state->bb_ramp[0];
bb                961 drivers/media/dvb-frontends/dib0090.c 		bb = 0;
bb                965 drivers/media/dvb-frontends/dib0090.c 	state->gain[1] = bb;
bb                999 drivers/media/dvb-frontends/dib0090.c 			ref = bb;
bb               1003 drivers/media/dvb-frontends/dib0090.c 	gain_reg[3] |= ((bb % 10) * 100) / 125;
bb               1006 drivers/media/dvb-frontends/dib0090.c 	dprintk("GA CALC: DB: %3d(rf) + %3d(bb) = %3d gain_reg[0]=%04x gain_reg[1]=%04x gain_reg[2]=%04x gain_reg[3]=%04x\n", rf, bb, rf + bb,
bb               1313 drivers/media/dvb-frontends/dib0090.c void dib0090_get_current_gain(struct dvb_frontend *fe, u16 * rf, u16 * bb, u16 * rf_gain_limit, u16 * rflt)
bb               1318 drivers/media/dvb-frontends/dib0090.c 	if (bb)
bb               1319 drivers/media/dvb-frontends/dib0090.c 		*bb = state->gain[1];
bb                 85 drivers/media/dvb-frontends/dib0090.h extern void dib0090_get_current_gain(struct dvb_frontend *fe, u16 * rf, u16 * bb, u16 * rf_gain_limit, u16 * rflt);
bb                146 drivers/media/dvb-frontends/dib0090.h static inline void dib0090_get_current_gain(struct dvb_frontend *fe, u16 * rf, u16 * bb, u16 * rf_gain_limit, u16 * rflt)
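
The dib0090 gain code above splits a requested total gain into an RF part and a baseband (bb) part: whatever the RF stage does not cover goes to bb, clamped to the top of the baseband ramp, and the result is cached in state->gain[1] where dib0090_get_current_gain() reads it back. A hedged standalone restatement of that split (the exact branch condition and ramp value are assumptions for the example):

#include <stdio.h>

typedef unsigned short u16;

/* Assign the remainder of the requested gain to the baseband stage,
 * clamped to the ramp maximum, as the dib0090 lines above do. */
static u16 split_bb_gain(u16 gain, u16 rf, u16 bb_ramp_max)
{
	u16 bb;

	if (gain > rf) {
		bb = gain - rf;
		if (bb > bb_ramp_max)
			bb = bb_ramp_max;
	} else {
		bb = 0;
	}
	return bb;
}

int main(void)
{
	printf("%u %u %u\n", split_bb_gain(100, 60, 30),
	       split_bb_gain(100, 80, 30), split_bb_gain(50, 60, 30));	/* 30 20 0 */
	return 0;
}
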
bb                268 drivers/message/fusion/mptfc.c 	FCDevicePage0_t **bb = (FCDevicePage0_t **)b;
bb                270 drivers/message/fusion/mptfc.c 	if ((*aa)->CurrentBus == (*bb)->CurrentBus) {
bb                271 drivers/message/fusion/mptfc.c 		if ((*aa)->CurrentTargetID == (*bb)->CurrentTargetID)
bb                273 drivers/message/fusion/mptfc.c 		if ((*aa)->CurrentTargetID < (*bb)->CurrentTargetID)
bb                277 drivers/message/fusion/mptfc.c 	if ((*aa)->CurrentBus < (*bb)->CurrentBus)
bb                360 drivers/misc/fastrpc.c #define CMP(aa, bb) ((aa) == (bb) ? 0 : (aa) < (bb) ? -1 : 1)
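
The fastrpc CMP(aa, bb) macro above is the standard three-way compare that sort callbacks expect: 0 on equality, -1 when the first argument is smaller, 1 otherwise (the mptfc comparator just before it spells out the same idiom by hand). A tiny standalone illustration using it as a qsort() comparator:

#include <stdio.h>
#include <stdlib.h>

#define CMP(aa, bb) ((aa) == (bb) ? 0 : (aa) < (bb) ? -1 : 1)

/* qsort comparator built on the three-way compare. */
static int cmp_int(const void *a, const void *b)
{
	return CMP(*(const int *)a, *(const int *)b);
}

int main(void)
{
	int v[] = { 42, 7, 19, 7 };

	qsort(v, sizeof(v) / sizeof(v[0]), sizeof(v[0]), cmp_int);
	for (size_t i = 0; i < sizeof(v) / sizeof(v[0]); i++)
		printf("%d ", v[i]);	/* 7 7 19 42 */
	printf("\n");
	return 0;
}
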
bb               1834 drivers/net/ethernet/qlogic/qed/qed_l2.c 		struct qed_eth_stats_bb *p_bb = &p_stats->bb;
bb                150 drivers/net/ethernet/qlogic/qede/qede.h 		struct qede_stats_bb bb;
bb                 95 drivers/net/ethernet/qlogic/qede/qede_ethtool.c 		   offsetof(struct qede_stats, bb), \
bb                396 drivers/net/ethernet/qlogic/qede/qede_main.c 		struct qede_stats_bb *p_bb = &edev->stats.bb;
bb                399 drivers/net/ethernet/qlogic/qede/qede_main.c 		    stats.bb.rx_1519_to_1522_byte_packets;
bb                401 drivers/net/ethernet/qlogic/qede/qede_main.c 		    stats.bb.rx_1519_to_2047_byte_packets;
bb                403 drivers/net/ethernet/qlogic/qede/qede_main.c 		    stats.bb.rx_2048_to_4095_byte_packets;
bb                405 drivers/net/ethernet/qlogic/qede/qede_main.c 		    stats.bb.rx_4096_to_9216_byte_packets;
bb                407 drivers/net/ethernet/qlogic/qede/qede_main.c 		    stats.bb.rx_9217_to_16383_byte_packets;
bb                409 drivers/net/ethernet/qlogic/qede/qede_main.c 		    stats.bb.tx_1519_to_2047_byte_packets;
bb                411 drivers/net/ethernet/qlogic/qede/qede_main.c 		    stats.bb.tx_2048_to_4095_byte_packets;
bb                413 drivers/net/ethernet/qlogic/qede/qede_main.c 		    stats.bb.tx_4096_to_9216_byte_packets;
bb                415 drivers/net/ethernet/qlogic/qede/qede_main.c 		    stats.bb.tx_9217_to_16383_byte_packets;
bb                416 drivers/net/ethernet/qlogic/qede/qede_main.c 		p_bb->tx_lpi_entry_count = stats.bb.tx_lpi_entry_count;
bb                417 drivers/net/ethernet/qlogic/qede/qede_main.c 		p_bb->tx_total_collisions = stats.bb.tx_total_collisions;
bb                453 drivers/net/ethernet/qlogic/qede/qede_main.c 		stats->collisions = edev->stats.bb.tx_total_collisions;
bb                214 drivers/net/ethernet/sun/sunbmac.c 	struct bmac_init_block *bb = bp->bmac_block;
bb                240 drivers/net/ethernet/sun/sunbmac.c 		bb->be_rxd[i].rx_addr =
bb                245 drivers/net/ethernet/sun/sunbmac.c 		bb->be_rxd[i].rx_flags =
bb                250 drivers/net/ethernet/sun/sunbmac.c 		bb->be_txd[i].tx_flags = bb->be_txd[i].tx_addr = 0;
bb                905 drivers/net/wireless/broadcom/b43/lo.c 	struct b43_bbatt bb;
bb                909 drivers/net/wireless/broadcom/b43/lo.c 	memset(&bb, 0, sizeof(bb));
bb                911 drivers/net/wireless/broadcom/b43/lo.c 	bb.att = bbatt;
bb                913 drivers/net/wireless/broadcom/b43/lo.c 	cal = b43_get_calib_lo_settings(dev, &bb, &rf);
bb                210 drivers/net/wireless/broadcom/b43/phy_g.c 	u16 bb, rf;
bb                213 drivers/net/wireless/broadcom/b43/phy_g.c 	bb = bbatt->att;
bb                231 drivers/net/wireless/broadcom/b43/phy_g.c 		       bb, rf, tx_control, tx_bias, tx_magn);
bb                234 drivers/net/wireless/broadcom/b43/phy_g.c 	b43_gphy_set_baseband_attenuation(dev, bb);
bb                277 drivers/net/wireless/broadcom/b43/phy_g.c 	u8 rf, bb;
bb                280 drivers/net/wireless/broadcom/b43/phy_g.c 		for (bb = 0; bb < lo->bbatt_list.len; bb++) {
bb                283 drivers/net/wireless/broadcom/b43/phy_g.c 			tmp = lo->bbatt_list.list[bb].att;
bb               2131 drivers/net/wireless/broadcom/b43/phy_g.c 					 struct b43_bbatt *bb)
bb               2136 drivers/net/wireless/broadcom/b43/phy_g.c 		bb->att = 0;
bb               2138 drivers/net/wireless/broadcom/b43/phy_g.c 		bb->att = 2;
bb                303 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 					int idx, bb;
bb                306 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 					bb = (agc & 0x1F) * 2;
bb                308 drivers/net/wireless/realtek/rtl818x/rtl8180/dev.c 					signal = 4 - bb - rtl8187se_lna_gain[idx];
bb                644 drivers/net/wireless/ti/wl1251/acx.c 	struct acx_beacon_broadcast *bb;
bb                649 drivers/net/wireless/ti/wl1251/acx.c 	bb = kzalloc(sizeof(*bb), GFP_KERNEL);
bb                650 drivers/net/wireless/ti/wl1251/acx.c 	if (!bb)
bb                653 drivers/net/wireless/ti/wl1251/acx.c 	bb->beacon_rx_timeout = BCN_RX_TIMEOUT_DEF_VALUE;
bb                654 drivers/net/wireless/ti/wl1251/acx.c 	bb->broadcast_timeout = BROADCAST_RX_TIMEOUT_DEF_VALUE;
bb                655 drivers/net/wireless/ti/wl1251/acx.c 	bb->rx_broadcast_in_ps = RX_BROADCAST_IN_PS_DEF_VALUE;
bb                656 drivers/net/wireless/ti/wl1251/acx.c 	bb->ps_poll_threshold = CONSECUTIVE_PS_POLL_FAILURE_DEF;
bb                658 drivers/net/wireless/ti/wl1251/acx.c 	ret = wl1251_cmd_configure(wl, ACX_BCN_DTIM_OPTIONS, bb, sizeof(*bb));
bb                665 drivers/net/wireless/ti/wl1251/acx.c 	kfree(bb);
bb                565 drivers/net/wireless/ti/wlcore/acx.c 	struct acx_beacon_broadcast *bb;
bb                570 drivers/net/wireless/ti/wlcore/acx.c 	bb = kzalloc(sizeof(*bb), GFP_KERNEL);
bb                571 drivers/net/wireless/ti/wlcore/acx.c 	if (!bb) {
bb                576 drivers/net/wireless/ti/wlcore/acx.c 	bb->role_id = wlvif->role_id;
bb                577 drivers/net/wireless/ti/wlcore/acx.c 	bb->beacon_rx_timeout = cpu_to_le16(wl->conf.conn.beacon_rx_timeout);
bb                578 drivers/net/wireless/ti/wlcore/acx.c 	bb->broadcast_timeout = cpu_to_le16(wl->conf.conn.broadcast_timeout);
bb                579 drivers/net/wireless/ti/wlcore/acx.c 	bb->rx_broadcast_in_ps = wl->conf.conn.rx_broadcast_in_ps;
bb                580 drivers/net/wireless/ti/wlcore/acx.c 	bb->ps_poll_threshold = wl->conf.conn.ps_poll_threshold;
bb                582 drivers/net/wireless/ti/wlcore/acx.c 	ret = wl1271_cmd_configure(wl, ACX_BCN_DTIM_OPTIONS, bb, sizeof(*bb));
bb                589 drivers/net/wireless/ti/wlcore/acx.c 	kfree(bb);
bb                165 drivers/nvdimm/badrange.c static void set_badblock(struct badblocks *bb, sector_t s, int num)
bb                167 drivers/nvdimm/badrange.c 	dev_dbg(bb->dev, "Found a bad range (0x%llx, 0x%llx)\n",
bb                170 drivers/nvdimm/badrange.c 	if (badblocks_set(bb, s, num, 1))
bb                171 drivers/nvdimm/badrange.c 		dev_info_once(bb->dev, "%s: failed for sector %llx\n",
bb                185 drivers/nvdimm/badrange.c static void __add_badblock_range(struct badblocks *bb, u64 ns_offset, u64 len)
bb                205 drivers/nvdimm/badrange.c 			set_badblock(bb, s, done);
bb                210 drivers/nvdimm/badrange.c 		set_badblock(bb, start_sector, num_sectors);
bb                214 drivers/nvdimm/badrange.c 		struct badblocks *bb, const struct resource *res)
bb                239 drivers/nvdimm/badrange.c 			__add_badblock_range(bb, start - res->start, len);
bb                253 drivers/nvdimm/badrange.c 			__add_badblock_range(bb, 0, len);
bb                270 drivers/nvdimm/badrange.c 		struct badblocks *bb, const struct resource *res)
bb                282 drivers/nvdimm/badrange.c 	badblocks_populate(&nvdimm_bus->badrange, bb, res);
bb               1611 drivers/nvdimm/btt.c 	btt->phys_bb = &nsio->bb;
bb                195 drivers/nvdimm/bus.c 	badblocks_clear(&nd_region->bb, sector, ctx->cleared / 512);
bb                269 drivers/nvdimm/claim.c 		if (unlikely(is_bad_pmem(&nsio->bb, sector, sz_align)))
bb                276 drivers/nvdimm/claim.c 	if (unlikely(is_bad_pmem(&nsio->bb, sector, sz_align))) {
bb                288 drivers/nvdimm/claim.c 				badblocks_clear(&nsio->bb, sector, cleared);
bb                316 drivers/nvdimm/claim.c 	if (devm_init_badblocks(dev, &nsio->bb))
bb                318 drivers/nvdimm/claim.c 	nvdimm_badblocks_populate(to_nd_region(ndns->dev.parent), &nsio->bb,
bb                333 drivers/nvdimm/claim.c 	devm_exit_badblocks(dev, &nsio->bb);
bb                155 drivers/nvdimm/nd.h 	struct badblocks bb;
bb                372 drivers/nvdimm/nd.h 		struct badblocks *bb, const struct resource *res);
bb                418 drivers/nvdimm/nd.h static inline bool is_bad_pmem(struct badblocks *bb, sector_t sector,
bb                421 drivers/nvdimm/nd.h 	if (bb->count) {
bb                425 drivers/nvdimm/nd.h 		return !!badblocks_check(bb, sector, len / 512, &first_bad,
bb                389 drivers/nvdimm/pfn_devs.c 		bb_present = badblocks_check(&nd_region->bb, meta_start,
bb                 39 drivers/nvdimm/pmem.c 	return pmem->bb.dev;
bb                 90 drivers/nvdimm/pmem.c 		badblocks_clear(&pmem->bb, sector, cleared);
bb                149 drivers/nvdimm/pmem.c 	if (unlikely(is_bad_pmem(&pmem->bb, sector, len)))
bb                248 drivers/nvdimm/pmem.c 	if (unlikely(is_bad_pmem(&pmem->bb, PFN_PHYS(pgoff) / 512,
bb                261 drivers/nvdimm/pmem.c 	if (unlikely(pmem->bb.count))
bb                460 drivers/nvdimm/pmem.c 	if (devm_init_badblocks(dev, &pmem->bb))
bb                462 drivers/nvdimm/pmem.c 	nvdimm_badblocks_populate(nd_region, &pmem->bb, &bb_res);
bb                463 drivers/nvdimm/pmem.c 	disk->bb = &pmem->bb;
bb                569 drivers/nvdimm/pmem.c 	struct badblocks *bb;
bb                581 drivers/nvdimm/pmem.c 		bb = &nsio->bb;
bb                587 drivers/nvdimm/pmem.c 		bb = &pmem->bb;
bb                607 drivers/nvdimm/pmem.c 	nvdimm_badblocks_populate(nd_region, bb, &res);
bb                 23 drivers/nvdimm/pmem.h 	struct badblocks	bb;
bb                 40 drivers/nvdimm/region.c 		if (devm_init_badblocks(dev, &nd_region->bb))
bb                 49 drivers/nvdimm/region.c 		nvdimm_badblocks_populate(nd_region, &nd_region->bb, &ndr_res);
bb                131 drivers/nvdimm/region.c 					&nd_region->bb, &res);
bb                570 drivers/nvdimm/region_devs.c 		rc = badblocks_show(&nd_region->bb, buf, 0);
bb                344 drivers/pci/p2pdma.c 	struct pci_dev *a = provider, *b = client, *bb;
bb                366 drivers/pci/p2pdma.c 		bb = b;
bb                368 drivers/pci/p2pdma.c 		while (bb) {
bb                369 drivers/pci/p2pdma.c 			if (a == bb)
bb                372 drivers/pci/p2pdma.c 			bb = pci_upstream_bridge(bb);
bb                386 drivers/pci/p2pdma.c 	bb = b;
bb                388 drivers/pci/p2pdma.c 	while (bb) {
bb                389 drivers/pci/p2pdma.c 		if (a == bb)
bb                392 drivers/pci/p2pdma.c 		if (pci_bridge_has_acs_redir(bb)) {
bb                393 drivers/pci/p2pdma.c 			seq_buf_print_bus_devfn(acs_list, bb);
bb                397 drivers/pci/p2pdma.c 		bb = pci_upstream_bridge(bb);
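
The p2pdma lines walk upstream from the client (bb = b, then bb = pci_upstream_bridge(bb)) to test whether the provider lies on the client's path to the root, and a second walk flags bridges with ACS redirection along that path. Stripped of the PCI specifics, the first walk is an ancestor check over parent pointers; a small standalone sketch with illustrative types (not the kernel's struct pci_dev):

#include <stdbool.h>
#include <stdio.h>

struct node {
	const char *name;
	struct node *parent;	/* stands in for pci_upstream_bridge() */
};

/* True if 'a' is met while walking from 'b' toward the root,
 * mirroring the loop over pci_upstream_bridge() above. */
static bool on_upstream_path(struct node *a, struct node *b)
{
	for (struct node *bb = b; bb; bb = bb->parent)
		if (bb == a)
			return true;
	return false;
}

int main(void)
{
	struct node root   = { "root",   NULL };
	struct node bridge = { "bridge", &root };
	struct node dev    = { "dev",    &bridge };

	printf("%d\n", on_upstream_path(&bridge, &dev));	/* 1 */
	printf("%d\n", on_upstream_path(&dev, &bridge));	/* 0 */
	return 0;
}
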
bb                256 drivers/regulator/ltc3589.c 	LTC3589_FIXED_REG(BB_OUT, bb-out),
bb                 81 drivers/s390/cio/qdio_main.c 				 unsigned int *bb, unsigned int fc,
bb                 97 drivers/s390/cio/qdio_main.c 	*bb = __fc >> 31;
bb                364 drivers/spi/spi-gpio.c 	struct spi_bitbang		*bb;
bb                410 drivers/spi/spi-gpio.c 	bb = &spi_gpio->bitbang;
bb                411 drivers/spi/spi-gpio.c 	bb->master = master;
bb                418 drivers/spi/spi-gpio.c 	bb->chipselect = spi_gpio_chipselect;
bb                419 drivers/spi/spi-gpio.c 	bb->set_line_direction = spi_gpio_set_direction;
bb                422 drivers/spi/spi-gpio.c 		bb->txrx_word[SPI_MODE_0] = spi_gpio_spec_txrx_word_mode0;
bb                423 drivers/spi/spi-gpio.c 		bb->txrx_word[SPI_MODE_1] = spi_gpio_spec_txrx_word_mode1;
bb                424 drivers/spi/spi-gpio.c 		bb->txrx_word[SPI_MODE_2] = spi_gpio_spec_txrx_word_mode2;
bb                425 drivers/spi/spi-gpio.c 		bb->txrx_word[SPI_MODE_3] = spi_gpio_spec_txrx_word_mode3;
bb                427 drivers/spi/spi-gpio.c 		bb->txrx_word[SPI_MODE_0] = spi_gpio_txrx_word_mode0;
bb                428 drivers/spi/spi-gpio.c 		bb->txrx_word[SPI_MODE_1] = spi_gpio_txrx_word_mode1;
bb                429 drivers/spi/spi-gpio.c 		bb->txrx_word[SPI_MODE_2] = spi_gpio_txrx_word_mode2;
bb                430 drivers/spi/spi-gpio.c 		bb->txrx_word[SPI_MODE_3] = spi_gpio_txrx_word_mode3;
bb                432 drivers/spi/spi-gpio.c 	bb->setup_transfer = spi_bitbang_setup_transfer;
bb               1102 drivers/video/fbdev/omap2/omapfb/dss/dispc.c 		FLD_VAL(coefs->bb, 9, 0);
bb                371 drivers/video/fbdev/omap2/omapfb/dss/manager-sysfs.c 			info.cpr_coefs.bb);
bb                388 drivers/video/fbdev/omap2/omapfb/dss/manager-sysfs.c 				&coefs.br, &coefs.bg, &coefs.bb) != 9)
bb                393 drivers/video/fbdev/omap2/omapfb/dss/manager-sysfs.c 		coefs.br, coefs.bg, coefs.bb };
bb                430 fs/ext4/mballoc.c 	char *bb;
bb                446 fs/ext4/mballoc.c 	bb = e4b->bd_buddy + EXT4_SB(e4b->bd_sb)->s_mb_offsets[order];
bb                449 fs/ext4/mballoc.c 	return bb;
bb               1257 fs/ext4/mballoc.c 	void *bb;
bb               1262 fs/ext4/mballoc.c 	bb = e4b->bd_buddy;
bb               1265 fs/ext4/mballoc.c 		if (!mb_test_bit(block, bb)) {
bb               1269 fs/ext4/mballoc.c 		bb += bb_incr;
bb                 64 fs/xfs/libxfs/xfs_bmap_btree.h #define XFS_BMAP_BROOT_PTR_ADDR(mp, bb, i, sz) \
bb                 65 fs/xfs/libxfs/xfs_bmap_btree.h 	XFS_BMBT_PTR_ADDR(mp, bb, i, xfs_bmbt_maxrecs(mp, sz, 0))
bb                 71 fs/xfs/libxfs/xfs_bmap_btree.h #define XFS_BMAP_BROOT_SPACE(mp, bb) \
bb                 72 fs/xfs/libxfs/xfs_bmap_btree.h 	(XFS_BMAP_BROOT_SPACE_CALC(mp, be16_to_cpu((bb)->bb_numrecs)))
bb                 76 fs/xfs/libxfs/xfs_bmap_btree.h #define XFS_BMAP_BMDR_SPACE(bb) \
bb                 77 fs/xfs/libxfs/xfs_bmap_btree.h 	(XFS_BMDR_SPACE_CALC(be16_to_cpu((bb)->bb_numrecs)))
bb                580 fs/xfs/libxfs/xfs_format.h #define	XFS_BB_TO_FSB(mp,bb)	\
bb                581 fs/xfs/libxfs/xfs_format.h 	(((bb) + (XFS_FSB_TO_BB(mp,1) - 1)) >> (mp)->m_blkbb_log)
bb                582 fs/xfs/libxfs/xfs_format.h #define	XFS_BB_TO_FSBT(mp,bb)	((bb) >> (mp)->m_blkbb_log)
bb               1119 fs/xfs/libxfs/xfs_format.h #define	XFS_SUMOFFS(mp,ls,bb)	((int)((ls) * (mp)->m_sb.sb_rbmblocks + (bb)))
bb               1127 fs/xfs/libxfs/xfs_format.h #define	XFS_BLOCKTOBIT(mp,bb)	((bb) << (mp)->m_blkbit_log)
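
XFS_BB_TO_FSB above converts a count of 512-byte basic blocks (bb) to filesystem blocks, rounding up, while XFS_BB_TO_FSBT truncates. With 4 KiB filesystem blocks there are 8 basic blocks per FSB and m_blkbb_log is 3, so 9 basic blocks round up to 2 FSBs. A standalone restatement of the arithmetic (the macros below are simplified stand-ins, not the mount-parameterized kernel ones):

#include <stdio.h>

#define BLKBB_LOG	3			/* log2(basic blocks per fs block), 4 KiB case */
#define FSB_TO_BB(fsb)	((fsb) << BLKBB_LOG)

/* Round up ... */
#define BB_TO_FSB(bb)	(((bb) + FSB_TO_BB(1) - 1) >> BLKBB_LOG)
/* ... and truncate. */
#define BB_TO_FSBT(bb)	((bb) >> BLKBB_LOG)

int main(void)
{
	printf("%d %d\n", BB_TO_FSB(9), BB_TO_FSBT(9));	/* 2 1 */
	printf("%d %d\n", BB_TO_FSB(8), BB_TO_FSBT(8));	/* 1 1 */
	return 0;
}
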
bb                274 fs/xfs/xfs_bmap_item.c 	struct xfs_bmap_intent		*bb;
bb                277 fs/xfs/xfs_bmap_item.c 	bb = container_of(b, struct xfs_bmap_intent, bi_list);
bb                278 fs/xfs/xfs_bmap_item.c 	return ba->bi_owner->i_ino - bb->bi_owner->i_ino;
bb                 44 include/linux/badblocks.h int badblocks_check(struct badblocks *bb, sector_t s, int sectors,
bb                 46 include/linux/badblocks.h int badblocks_set(struct badblocks *bb, sector_t s, int sectors,
bb                 48 include/linux/badblocks.h int badblocks_clear(struct badblocks *bb, sector_t s, int sectors);
bb                 49 include/linux/badblocks.h void ack_all_badblocks(struct badblocks *bb);
bb                 50 include/linux/badblocks.h ssize_t badblocks_show(struct badblocks *bb, char *page, int unack);
bb                 51 include/linux/badblocks.h ssize_t badblocks_store(struct badblocks *bb, const char *page, size_t len,
bb                 53 include/linux/badblocks.h int badblocks_init(struct badblocks *bb, int enable);
bb                 54 include/linux/badblocks.h void badblocks_exit(struct badblocks *bb);
bb                 56 include/linux/badblocks.h int devm_init_badblocks(struct device *dev, struct badblocks *bb);
bb                 57 include/linux/badblocks.h static inline void devm_exit_badblocks(struct device *dev, struct badblocks *bb)
bb                 59 include/linux/badblocks.h 	if (bb->dev != dev) {
bb                 64 include/linux/badblocks.h 	badblocks_exit(bb);
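
Taken together, the badblocks.h declarations above are the API the md and nvdimm entries in this listing call into: badblocks_init()/badblocks_exit() manage the table, badblocks_set() records a range with an "acknowledged" flag, and badblocks_check() reports whether a sector range overlaps a recorded one via first_bad/bad_sectors. A hedged kernel-context sketch of the call sequence (not standalone code; error handling is trimmed and the demo function is hypothetical):

#include <linux/types.h>
#include <linux/printk.h>
#include <linux/badblocks.h>

/* Record a bad range and query it again, roughly as the md/nvdimm
 * callers above do. */
static int badblocks_demo(struct badblocks *bb)
{
	sector_t first_bad;
	int bad_sectors;
	int rc;

	rc = badblocks_init(bb, 1);	/* enabled table */
	if (rc)
		return rc;

	/* Mark 8 sectors starting at 4096 as bad, already acknowledged. */
	badblocks_set(bb, 4096, 8, 1);

	/* A non-zero return means the queried range overlaps a bad range;
	 * first_bad/bad_sectors describe the overlap. */
	if (badblocks_check(bb, 4090, 16, &first_bad, &bad_sectors))
		pr_info("bad range at %llu (+%d)\n",
			(unsigned long long)first_bad, bad_sectors);

	badblocks_exit(bb);
	return 0;
}
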
bb                220 include/linux/genhd.h 	struct badblocks *bb;
bb                 75 include/linux/nd.h 	struct badblocks bb;
bb               1343 include/linux/qed/qed_if.h 		struct qed_eth_stats_bb bb;
bb                291 include/video/omapfb_dss.h 	s16 br, bg, bb;
bb                224 lib/inflate.c  STATIC ulg bb;                         /* bit buffer */
bb                608 lib/inflate.c    b = bb;                       /* initialize bit buffer */
bb                692 lib/inflate.c    bb = b;                       /* restore global bit buffer */
bb                715 lib/inflate.c    b = bb;                       /* initialize bit buffer */
bb                751 lib/inflate.c    bb = b;                       /* restore global bit buffer */
bb                861 lib/inflate.c    b = bb;
bb                971 lib/inflate.c    bb = b;
bb               1046 lib/inflate.c    b = bb;
bb               1063 lib/inflate.c    bb = b;
bb               1095 lib/inflate.c    bb = 0;
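
In lib/inflate.c, bb is the global bit buffer: each routine copies it into a local (b = bb), consumes bits from the low end as it decodes, and writes the remainder back (bb = b) before returning, with the final bb = 0 resetting it for the next stream. A standalone sketch of that consume-from-the-low-end idiom (the refill/take helpers are illustrative, not the inflate macros themselves):

#include <stdint.h>
#include <stdio.h>

static uint32_t bb;	/* bit buffer */
static unsigned int bk;	/* number of bits currently in bb */

/* Append one input byte above the bits already buffered. */
static void refill(uint8_t byte)
{
	bb |= (uint32_t)byte << bk;
	bk += 8;
}

/* Take n bits from the low end of the buffer. */
static unsigned int take(unsigned int n)
{
	unsigned int v = bb & ((1u << n) - 1);

	bb >>= n;
	bk -= n;
	return v;
}

int main(void)
{
	unsigned int first, second;

	refill(0xb5);			/* 1011 0101 */
	first = take(3);		/* low three bits: 5 */
	second = take(5);		/* remaining bits: 22 */
	printf("%u %u\n", first, second);
	return 0;
}
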
bb                 97 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                100 net/tipc/bcast.c 	bb->bc_threshold = 1 + (cluster_size * bb->rc_ratio / 100);
bb                108 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                109 net/tipc/bcast.c 	int all_dests =  tipc_link_bc_peers(bb->link);
bb                112 net/tipc/bcast.c 	bb->primary_bearer = INVALID_BEARER_ID;
bb                113 net/tipc/bcast.c 	bb->bcast_support = true;
bb                119 net/tipc/bcast.c 		if (!bb->dests[i])
bb                123 net/tipc/bcast.c 		if (mtu < tipc_link_mtu(bb->link))
bb                124 net/tipc/bcast.c 			tipc_link_set_mtu(bb->link, mtu);
bb                125 net/tipc/bcast.c 		bb->bcast_support &= tipc_bearer_bcast_support(net, i);
bb                126 net/tipc/bcast.c 		if (bb->dests[i] < all_dests)
bb                129 net/tipc/bcast.c 		bb->primary_bearer = i;
bb                135 net/tipc/bcast.c 	prim = bb->primary_bearer;
bb                137 net/tipc/bcast.c 		bb->bcast_support = tipc_bearer_bcast_support(net, prim);
bb                142 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                145 net/tipc/bcast.c 	bb->dests[bearer_id]++;
bb                152 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                155 net/tipc/bcast.c 	bb->dests[bearer_id]--;
bb                173 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                181 net/tipc/bcast.c 	bearer_id = bb->primary_bearer;
bb                190 net/tipc/bcast.c 		if (!bb->dests[bearer_id])
bb                208 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                212 net/tipc/bcast.c 	if (!bb->bcast_support) {
bb                217 net/tipc/bcast.c 	if (!bb->rcast_support) {
bb                231 net/tipc/bcast.c 	if (bb->force_bcast) {
bb                236 net/tipc/bcast.c 	if (bb->force_rcast) {
bb                242 net/tipc/bcast.c 	method->rcast = dests <= bb->bc_threshold;
bb                589 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                593 net/tipc/bcast.c 		if (!bb->bcast_support)
bb                596 net/tipc/bcast.c 		bb->force_bcast = true;
bb                597 net/tipc/bcast.c 		bb->force_rcast = false;
bb                600 net/tipc/bcast.c 		if (!bb->rcast_support)
bb                603 net/tipc/bcast.c 		bb->force_bcast = false;
bb                604 net/tipc/bcast.c 		bb->force_rcast = true;
bb                607 net/tipc/bcast.c 		if (!bb->bcast_support || !bb->rcast_support)
bb                610 net/tipc/bcast.c 		bb->force_bcast = false;
bb                611 net/tipc/bcast.c 		bb->force_rcast = false;
bb                622 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                624 net/tipc/bcast.c 	if (!bb->bcast_support || !bb->rcast_support)
bb                630 net/tipc/bcast.c 	bb->rc_ratio = bc_ratio;
bb                680 net/tipc/bcast.c 	struct tipc_bc_base *bb = NULL;
bb                683 net/tipc/bcast.c 	bb = kzalloc(sizeof(*bb), GFP_KERNEL);
bb                684 net/tipc/bcast.c 	if (!bb)
bb                686 net/tipc/bcast.c 	tn->bcbase = bb;
bb                693 net/tipc/bcast.c 				 &bb->inputq,
bb                698 net/tipc/bcast.c 	bb->link = l;
bb                700 net/tipc/bcast.c 	bb->rc_ratio = 10;
bb                701 net/tipc/bcast.c 	bb->rcast_support = true;
bb                704 net/tipc/bcast.c 	kfree(bb);
bb                750 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                752 net/tipc/bcast.c 	if (bb->force_bcast)
bb                755 net/tipc/bcast.c 	if (bb->force_rcast)
bb                758 net/tipc/bcast.c 	if (bb->bcast_support && bb->rcast_support)
bb                766 net/tipc/bcast.c 	struct tipc_bc_base *bb = tipc_bc_base(net);
bb                768 net/tipc/bcast.c 	return bb->rc_ratio;
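
The tipc broadcast code above derives bc_threshold as 1 + cluster_size * rc_ratio / 100 (rc_ratio defaults to 10) and, unless one method is forced, chooses replicast whenever the destination count is at or below that threshold. A tiny standalone restatement of the selection (names here are illustrative, not the kernel functions):

#include <stdbool.h>
#include <stdio.h>

/* Replicast when the destination count fits within the threshold,
 * as the selection logic above does. */
static bool use_replicast(int cluster_size, int rc_ratio, int dests)
{
	int bc_threshold = 1 + cluster_size * rc_ratio / 100;

	return dests <= bc_threshold;
}

int main(void)
{
	/* With the default 10% ratio, a 40-node cluster replicates to up
	 * to 5 destinations and falls back to broadcast beyond that. */
	printf("%d %d\n", use_replicast(40, 10, 5), use_replicast(40, 10, 6));	/* 1 0 */
	return 0;
}
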
bb                377 scripts/gcc-plugins/gcc-common.h static inline int bb_loop_depth(const_basic_block bb)
bb                379 scripts/gcc-plugins/gcc-common.h 	return bb->loop_father ? loop_depth(bb->loop_father) : 0;
bb                346 scripts/gcc-plugins/latent_entropy_plugin.c static void perturb_local_entropy(basic_block bb, tree local_entropy)
bb                355 scripts/gcc-plugins/latent_entropy_plugin.c 	gsi = gsi_after_labels(bb);
bb                389 scripts/gcc-plugins/latent_entropy_plugin.c static bool handle_tail_calls(basic_block bb, tree local_entropy)
bb                393 scripts/gcc-plugins/latent_entropy_plugin.c 	for (gsi = gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi)) {
bb                437 scripts/gcc-plugins/latent_entropy_plugin.c static void init_local_entropy(basic_block bb, tree local_entropy)
bb                443 scripts/gcc-plugins/latent_entropy_plugin.c 	gimple_stmt_iterator gsi = gsi_after_labels(bb);
bb                509 scripts/gcc-plugins/latent_entropy_plugin.c 	basic_block bb;
bb                517 scripts/gcc-plugins/latent_entropy_plugin.c 	bb = single_succ(ENTRY_BLOCK_PTR_FOR_FN(cfun));
bb                518 scripts/gcc-plugins/latent_entropy_plugin.c 	if (!single_pred_p(bb)) {
bb                521 scripts/gcc-plugins/latent_entropy_plugin.c 		bb = single_succ(ENTRY_BLOCK_PTR_FOR_FN(cfun));
bb                528 scripts/gcc-plugins/latent_entropy_plugin.c 	init_local_entropy(bb, local_entropy);
bb                530 scripts/gcc-plugins/latent_entropy_plugin.c 	bb = bb->next_bb;
bb                536 scripts/gcc-plugins/latent_entropy_plugin.c 	while (bb != EXIT_BLOCK_PTR_FOR_FN(cfun)) {
bb                537 scripts/gcc-plugins/latent_entropy_plugin.c 		perturb_local_entropy(bb, local_entropy);
bb                538 scripts/gcc-plugins/latent_entropy_plugin.c 		bb = bb->next_bb;
bb                678 scripts/gcc-plugins/randomize_layout_plugin.c static bool dominated_by_is_err(const_tree rhs, basic_block bb)
bb                688 scripts/gcc-plugins/randomize_layout_plugin.c 	dom = get_immediate_dominator(CDI_DOMINATORS, bb);
bb                811 scripts/gcc-plugins/randomize_layout_plugin.c 	basic_block bb;
bb                815 scripts/gcc-plugins/randomize_layout_plugin.c 	FOR_EACH_BB_FN(bb, cfun) {
bb                818 scripts/gcc-plugins/randomize_layout_plugin.c 		for (gsi = gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi)) {
bb                872 scripts/gcc-plugins/randomize_layout_plugin.c 			if (dominated_by_is_err(rhs1, bb))
bb                 35 scripts/gcc-plugins/sancov_plugin.c 	basic_block bb;
bb                 42 scripts/gcc-plugins/sancov_plugin.c 	FOR_EACH_BB_FN(bb, cfun) {
bb                 45 scripts/gcc-plugins/sancov_plugin.c 		gimple_stmt_iterator gsi = gsi_after_labels(bb);
bb                 55 scripts/gcc-plugins/stackleak_plugin.c 	basic_block bb;
bb                 68 scripts/gcc-plugins/stackleak_plugin.c 	bb = gimple_bb(stackleak_track_stack);
bb                 71 scripts/gcc-plugins/stackleak_plugin.c 	frequency = compute_call_stmt_bb_frequency(current_function_decl, bb);
bb                 73 scripts/gcc-plugins/stackleak_plugin.c 			stackleak_track_stack, bb->count, frequency);
bb                 96 scripts/gcc-plugins/stackleak_plugin.c 	basic_block bb, entry_bb;
bb                113 scripts/gcc-plugins/stackleak_plugin.c 	FOR_EACH_BB_FN(bb, cfun) {
bb                114 scripts/gcc-plugins/stackleak_plugin.c 		for (gsi = gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi)) {
bb                128 scripts/gcc-plugins/stackleak_plugin.c 			if (bb == entry_bb)
bb                163 scripts/gcc-plugins/stackleak_plugin.c 	bb = entry_bb;
bb                164 scripts/gcc-plugins/stackleak_plugin.c 	if (!single_pred_p(bb)) {
bb                169 scripts/gcc-plugins/stackleak_plugin.c 		bb = single_succ(ENTRY_BLOCK_PTR_FOR_FN(cfun));
bb                171 scripts/gcc-plugins/stackleak_plugin.c 	gsi = gsi_after_labels(bb);
bb                122 scripts/gcc-plugins/structleak_plugin.c 	basic_block bb;
bb                129 scripts/gcc-plugins/structleak_plugin.c 	bb = single_succ(ENTRY_BLOCK_PTR_FOR_FN(cfun));
bb                132 scripts/gcc-plugins/structleak_plugin.c 	for (gsi = gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi)) {
bb                176 scripts/gcc-plugins/structleak_plugin.c 	basic_block bb;
bb                183 scripts/gcc-plugins/structleak_plugin.c 	bb = single_succ(ENTRY_BLOCK_PTR_FOR_FN(cfun));
bb                184 scripts/gcc-plugins/structleak_plugin.c 	if (!single_pred_p(bb)) {
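
Several of the gcc-plugin passes above share one shape: iterate every basic block of the current function with FOR_EACH_BB_FN(bb, cfun) and walk each block's statements through a gimple_stmt_iterator. A hedged skeleton of that loop (it builds only against the GCC plugin headers, via the gcc-common.h wrapper listed above; the count_stmts name is made up):

#include "gcc-common.h"

/* Count the gimple statements in the current function by visiting
 * every basic block, the same loop shape as the plugins above. */
static unsigned int count_stmts(void)
{
	basic_block bb;
	unsigned int n = 0;

	FOR_EACH_BB_FN(bb, cfun) {
		gimple_stmt_iterator gsi;

		for (gsi = gsi_start_bb(bb); !gsi_end_p(gsi); gsi_next(&gsi))
			n++;
	}
	return n;
}
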
bb                 50 tools/bpf/bpftool/cfg.c #define bb_prev(bb)		list_prev_entry(bb, l)
bb                 51 tools/bpf/bpftool/cfg.c #define bb_next(bb)		list_next_entry(bb, l)
bb                 91 tools/bpf/bpftool/cfg.c 	struct bb_node *new_bb, *bb;
bb                 93 tools/bpf/bpftool/cfg.c 	list_for_each_entry(bb, &func->bbs, l) {
bb                 94 tools/bpf/bpftool/cfg.c 		if (bb->head == insn)
bb                 95 tools/bpf/bpftool/cfg.c 			return bb;
bb                 96 tools/bpf/bpftool/cfg.c 		else if (bb->head > insn)
bb                100 tools/bpf/bpftool/cfg.c 	bb = bb_prev(bb);
bb                109 tools/bpf/bpftool/cfg.c 	list_add(&new_bb->l, &bb->l);
bb                116 tools/bpf/bpftool/cfg.c 	struct bb_node *bb;
bb                118 tools/bpf/bpftool/cfg.c 	bb = calloc(1, sizeof(*bb));
bb                119 tools/bpf/bpftool/cfg.c 	if (!bb) {
bb                124 tools/bpf/bpftool/cfg.c 	INIT_LIST_HEAD(&bb->e_prevs);
bb                125 tools/bpf/bpftool/cfg.c 	INIT_LIST_HEAD(&bb->e_succs);
bb                126 tools/bpf/bpftool/cfg.c 	list_add(&bb->l, after);
bb                128 tools/bpf/bpftool/cfg.c 	return bb;
bb                168 tools/bpf/bpftool/cfg.c 	struct bb_node *bb;
bb                173 tools/bpf/bpftool/cfg.c 	bb = func_append_bb(func, cur);
bb                174 tools/bpf/bpftool/cfg.c 	if (!bb)
bb                184 tools/bpf/bpftool/cfg.c 			bb = func_append_bb(func, cur + cur->off + 1);
bb                185 tools/bpf/bpftool/cfg.c 			if (!bb)
bb                189 tools/bpf/bpftool/cfg.c 				bb = func_append_bb(func, cur + 1);
bb                190 tools/bpf/bpftool/cfg.c 				if (!bb)
bb                202 tools/bpf/bpftool/cfg.c 	struct bb_node *bb, *last;
bb                206 tools/bpf/bpftool/cfg.c 	bb = func_first_bb(func);
bb                207 tools/bpf/bpftool/cfg.c 	list_for_each_entry_from(bb, &last->l, l) {
bb                208 tools/bpf/bpftool/cfg.c 		bb->tail = bb_next(bb)->head - 1;
bb                209 tools/bpf/bpftool/cfg.c 		bb->idx = bb_idx++;
bb                218 tools/bpf/bpftool/cfg.c 	struct bb_node *bb;
bb                220 tools/bpf/bpftool/cfg.c 	bb = func_insert_dummy_bb(&func->bbs);
bb                221 tools/bpf/bpftool/cfg.c 	if (!bb)
bb                223 tools/bpf/bpftool/cfg.c 	bb->idx = ENTRY_BLOCK_INDEX;
bb                225 tools/bpf/bpftool/cfg.c 	bb = func_insert_dummy_bb(&func_last_bb(func)->l);
bb                226 tools/bpf/bpftool/cfg.c 	if (!bb)
bb                228 tools/bpf/bpftool/cfg.c 	bb->idx = EXIT_BLOCK_INDEX;
bb                246 tools/bpf/bpftool/cfg.c 	struct bb_node *bb;
bb                248 tools/bpf/bpftool/cfg.c 	list_for_each_entry(bb, &func->bbs, l) {
bb                249 tools/bpf/bpftool/cfg.c 		if (bb->head == insn)
bb                250 tools/bpf/bpftool/cfg.c 			return bb;
bb                281 tools/bpf/bpftool/cfg.c 	struct bb_node *bb;
bb                283 tools/bpf/bpftool/cfg.c 	bb = entry_bb(func);
bb                284 tools/bpf/bpftool/cfg.c 	e = new_edge(bb, bb_next(bb), EDGE_FLAG_FALLTHROUGH);
bb                287 tools/bpf/bpftool/cfg.c 	list_add_tail(&e->l, &bb->e_succs);
bb                289 tools/bpf/bpftool/cfg.c 	bb = exit_bb(func);
bb                290 tools/bpf/bpftool/cfg.c 	e = new_edge(bb_prev(bb), bb, EDGE_FLAG_FALLTHROUGH);
bb                293 tools/bpf/bpftool/cfg.c 	list_add_tail(&e->l, &bb->e_prevs);
bb                295 tools/bpf/bpftool/cfg.c 	bb = entry_bb(func);
bb                296 tools/bpf/bpftool/cfg.c 	bb = bb_next(bb);
bb                297 tools/bpf/bpftool/cfg.c 	list_for_each_entry_from(bb, &exit_bb(func)->l, l) {
bb                298 tools/bpf/bpftool/cfg.c 		e = new_edge(bb, NULL, EDGE_FLAG_EMPTY);
bb                301 tools/bpf/bpftool/cfg.c 		e->src = bb;
bb                303 tools/bpf/bpftool/cfg.c 		insn = bb->tail;
bb                306 tools/bpf/bpftool/cfg.c 			e->dst = bb_next(bb);
bb                308 tools/bpf/bpftool/cfg.c 			list_add_tail(&e->l, &bb->e_succs);
bb                314 tools/bpf/bpftool/cfg.c 			list_add_tail(&e->l, &bb->e_succs);
bb                318 tools/bpf/bpftool/cfg.c 		e->dst = bb_next(bb);
bb                320 tools/bpf/bpftool/cfg.c 		list_add_tail(&e->l, &bb->e_succs);
bb                322 tools/bpf/bpftool/cfg.c 		e = new_edge(bb, NULL, EDGE_FLAG_JUMP);
bb                325 tools/bpf/bpftool/cfg.c 		e->src = bb;
bb                327 tools/bpf/bpftool/cfg.c 		list_add_tail(&e->l, &bb->e_succs);
bb                359 tools/bpf/bpftool/cfg.c 		struct bb_node *bb, *bb2;
bb                361 tools/bpf/bpftool/cfg.c 		list_for_each_entry_safe(bb, bb2, &func->bbs, l) {
bb                364 tools/bpf/bpftool/cfg.c 			list_for_each_entry_safe(e, e2, &bb->e_prevs, l) {
bb                369 tools/bpf/bpftool/cfg.c 			list_for_each_entry_safe(e, e2, &bb->e_succs, l) {
bb                374 tools/bpf/bpftool/cfg.c 			list_del(&bb->l);
bb                375 tools/bpf/bpftool/cfg.c 			free(bb);
bb                383 tools/bpf/bpftool/cfg.c static void draw_bb_node(struct func_node *func, struct bb_node *bb)
bb                387 tools/bpf/bpftool/cfg.c 	if (bb->idx == ENTRY_BLOCK_INDEX || bb->idx == EXIT_BLOCK_INDEX)
bb                393 tools/bpf/bpftool/cfg.c 	       func->idx, bb->idx, shape);
bb                395 tools/bpf/bpftool/cfg.c 	if (bb->idx == ENTRY_BLOCK_INDEX) {
bb                397 tools/bpf/bpftool/cfg.c 	} else if (bb->idx == EXIT_BLOCK_INDEX) {
bb                405 tools/bpf/bpftool/cfg.c 		start_idx = bb->head - func->start;
bb                406 tools/bpf/bpftool/cfg.c 		dump_xlated_for_graph(&dd, bb->head, bb->tail, start_idx);
bb                414 tools/bpf/bpftool/cfg.c static void draw_bb_succ_edges(struct func_node *func, struct bb_node *bb)
bb                422 tools/bpf/bpftool/cfg.c 	if (list_empty(&bb->e_succs))
bb                425 tools/bpf/bpftool/cfg.c 	list_for_each_entry(e, &bb->e_succs, l) {
bb                435 tools/bpf/bpftool/cfg.c 	struct bb_node *bb;
bb                437 tools/bpf/bpftool/cfg.c 	list_for_each_entry(bb, &func->bbs, l) {
bb                438 tools/bpf/bpftool/cfg.c 		draw_bb_node(func, bb);
bb                445 tools/bpf/bpftool/cfg.c 	struct bb_node *bb;
bb                447 tools/bpf/bpftool/cfg.c 	list_for_each_entry(bb, &func->bbs, l) {
bb                448 tools/bpf/bpftool/cfg.c 		draw_bb_succ_edges(func, bb);
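
The bpftool cfg.c lines build a control-flow graph whose nodes carry an instruction range (head/tail), predecessor and successor edge lists, and an index, with dummy entry and exit blocks inserted before edges are drawn. A hedged standalone sketch of that node layout and its allocation, modeled on func_insert_dummy_bb() above (the list helpers and index values are simplified placeholders, not the tool's):

#include <stdio.h>
#include <stdlib.h>

/* Minimal circular-list stand-in for the kernel list API cfg.c uses. */
struct list_head { struct list_head *next, *prev; };

static void INIT_LIST_HEAD(struct list_head *h)
{
	h->next = h->prev = h;
}

/* Basic-block node shaped like bpftool's bb_node. */
struct bb_node {
	struct list_head l;		/* position in the function's bb list */
	struct list_head e_prevs;	/* incoming edges */
	struct list_head e_succs;	/* outgoing edges */
	void *head, *tail;		/* first and last insn of the block */
	int idx;
};

static struct bb_node *new_dummy_bb(int idx)
{
	struct bb_node *bb = calloc(1, sizeof(*bb));

	if (!bb)
		return NULL;
	INIT_LIST_HEAD(&bb->e_prevs);
	INIT_LIST_HEAD(&bb->e_succs);
	bb->idx = idx;
	return bb;
}

int main(void)
{
	/* Placeholder indices for the dummy entry/exit blocks. */
	struct bb_node *entry = new_dummy_bb(-1);
	struct bb_node *exit_blk = new_dummy_bb(-2);

	if (!entry || !exit_blk)
		return 1;
	printf("entry %d, exit %d\n", entry->idx, exit_blk->idx);
	free(entry);
	free(exit_blk);
	return 0;
}
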
bb                 15 tools/testing/nvdimm/pmem-dax.c 	if (unlikely(is_bad_pmem(&pmem->bb, PFN_PHYS(pgoff) / 512,
bb                 46 tools/testing/nvdimm/pmem-dax.c 	if (unlikely(pmem->bb.count))