Lines matching refs: tx_sa
142 struct macsec_tx_sa *tx_sa; member
455 struct macsec_tx_sa *tx_sa) in __macsec_pn_wrapped() argument
458 tx_sa->active = false; in __macsec_pn_wrapped()
463 void macsec_pn_wrapped(struct macsec_secy *secy, struct macsec_tx_sa *tx_sa) in macsec_pn_wrapped() argument
465 spin_lock_bh(&tx_sa->lock); in macsec_pn_wrapped()
466 __macsec_pn_wrapped(secy, tx_sa); in macsec_pn_wrapped()
467 spin_unlock_bh(&tx_sa->lock); in macsec_pn_wrapped()
471 static pn_t tx_sa_update_pn(struct macsec_tx_sa *tx_sa, in tx_sa_update_pn() argument
476 spin_lock_bh(&tx_sa->lock); in tx_sa_update_pn()
478 pn = tx_sa->next_pn_halves; in tx_sa_update_pn()
480 tx_sa->next_pn++; in tx_sa_update_pn()
482 tx_sa->next_pn_halves.lower++; in tx_sa_update_pn()
484 if (tx_sa->next_pn == 0) in tx_sa_update_pn()
485 __macsec_pn_wrapped(secy, tx_sa); in tx_sa_update_pn()
486 spin_unlock_bh(&tx_sa->lock); in tx_sa_update_pn()
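The entries around lines 455-486 are the packet-number bookkeeping: __macsec_pn_wrapped() retires an SA whose PN space ran out, macsec_pn_wrapped() is the locked wrapper around it, and tx_sa_update_pn() hands out the next PN under tx_sa->lock, bumping the full 64-bit counter for XPN but only the lower 32-bit half otherwise. Below is a minimal userspace sketch of that logic, with a pthread mutex standing in for spin_lock_bh() and illustrative model_* types in place of the kernel structs; the protect_frames/operational handling is an assumption about the surrounding driver, not something the listing itself shows.

#include <pthread.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct model_secy {
	bool xpn;             /* 64-bit extended packet numbering? */
	bool protect_frames;  /* assumed from the wider driver */
	bool operational;
};

struct model_tx_sa {
	pthread_mutex_t lock; /* stand-in for tx_sa->lock (spin_lock_bh) */
	uint64_t next_pn;     /* low 32 bits are the classic PN */
	bool active;
};

/* PN space exhausted: retire the SA; if frames must be protected,
 * the SecY can no longer transmit at all (assumed behaviour). */
static void model_pn_wrapped(struct model_secy *secy, struct model_tx_sa *sa)
{
	sa->active = false;
	if (secy->protect_frames)
		secy->operational = false;
}

/* Return the PN to use for this frame and advance the counter:
 * all 64 bits for XPN, only the lower 32 bits otherwise. */
static uint64_t model_update_pn(struct model_tx_sa *sa, struct model_secy *secy)
{
	uint64_t pn;

	pthread_mutex_lock(&sa->lock);
	pn = sa->next_pn;
	if (secy->xpn)
		sa->next_pn++;
	else
		sa->next_pn = (sa->next_pn & ~0xffffffffULL) |
			      (uint32_t)(sa->next_pn + 1);
	if (sa->next_pn == 0)
		model_pn_wrapped(secy, sa);
	pthread_mutex_unlock(&sa->lock);

	return pn;
}

int main(void)
{
	struct model_secy secy = { .protect_frames = true, .operational = true };
	struct model_tx_sa sa = { .next_pn = 0xffffffffULL, .active = true };

	pthread_mutex_init(&sa.lock, NULL);
	printf("used PN %llu\n", (unsigned long long)model_update_pn(&sa, &secy));
	printf("sa.active=%d secy.operational=%d\n", sa.active, secy.operational);
	return 0;
}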
501 struct macsec_tx_sa *tx_sa) in macsec_count_tx() argument
509 this_cpu_inc(tx_sa->stats->OutPktsEncrypted); in macsec_count_tx()
513 this_cpu_inc(tx_sa->stats->OutPktsProtected); in macsec_count_tx()
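Lines 501-513 show macsec_count_tx() incrementing the per-SA counters: OutPktsEncrypted when the transmit SC encrypts, OutPktsProtected when it only adds an ICV. In the kernel these are per-CPU counters (this_cpu_inc); the sketch below collapses that into one plain struct and assumes the choice is driven by an SC-level encrypt flag, which the listing itself does not show.

#include <stdbool.h>
#include <stdint.h>

/* Illustrative stand-ins for the per-SA counters named in the listing.
 * A single shared struct is enough to show the accounting split. */
struct model_tx_sa_stats {
	uint64_t OutPktsEncrypted;
	uint64_t OutPktsProtected;
};

/* Confidentiality vs. integrity-only accounting: one counter per frame,
 * chosen by whether the transmit SC encrypts (assumed flag). */
void model_count_tx(struct model_tx_sa_stats *stats, bool sc_encrypts)
{
	if (sc_encrypts)
		stats->OutPktsEncrypted++;
	else
		stats->OutPktsProtected++;
}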
535 struct macsec_tx_sa *sa = macsec_skb_cb(skb)->tx_sa; in macsec_encrypt_done()
542 macsec_count_tx(skb, &macsec->secy.tx_sc, macsec_skb_cb(skb)->tx_sa); in macsec_encrypt_done()
595 struct macsec_tx_sa *tx_sa; in macsec_encrypt() local
604 tx_sa = macsec_txsa_get(tx_sc->sa[tx_sc->encoding_sa]); in macsec_encrypt()
605 if (!tx_sa) { in macsec_encrypt()
621 macsec_txsa_put(tx_sa); in macsec_encrypt()
628 macsec_txsa_put(tx_sa); in macsec_encrypt()
639 pn = tx_sa_update_pn(tx_sa, secy); in macsec_encrypt()
641 macsec_txsa_put(tx_sa); in macsec_encrypt()
657 macsec_txsa_put(tx_sa); in macsec_encrypt()
664 macsec_txsa_put(tx_sa); in macsec_encrypt()
669 req = macsec_alloc_req(tx_sa->key.tfm, &iv, &sg, ret); in macsec_encrypt()
671 macsec_txsa_put(tx_sa); in macsec_encrypt()
677 macsec_fill_iv_xpn(iv, tx_sa->ssci, pn.full64, tx_sa->key.salt); in macsec_encrypt()
685 macsec_txsa_put(tx_sa); in macsec_encrypt()
701 macsec_skb_cb(skb)->tx_sa = tx_sa; in macsec_encrypt()
712 macsec_txsa_put(tx_sa); in macsec_encrypt()
718 macsec_txsa_put(tx_sa); in macsec_encrypt()
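Lines 595-718 are the software encrypt path: macsec_encrypt() takes a reference on the encoding SA (line 604), drops it with macsec_txsa_put() on every exit path, fetches a PN, builds the AES-GCM IV, and stashes the SA in the skb control block (line 701) so the asynchronous completion in macsec_encrypt_done() can count the frame and release the reference. For XPN the IV comes from the SSCI, the 64-bit PN and the key's salt (line 677). Here is a hedged sketch of that derivation, assuming the IEEE 802.1AEbw layout of salt XOR (SSCI || PN) in network byte order; model_fill_iv_xpn and its types are illustrative, not the kernel definitions.

#include <stdint.h>

#define MODEL_SALT_LEN   12
#define MODEL_GCM_IV_LEN 12

/* Sketch of the XPN IV derivation used at listing line 677: a 96-bit
 * GCM IV formed by XORing the key's 12-byte salt with the 32-bit SSCI
 * followed by the 64-bit PN, both big-endian (layout assumed from the
 * standard, not from the listing). */
void model_fill_iv_xpn(uint8_t iv[MODEL_GCM_IV_LEN], uint32_t ssci,
		       uint64_t pn, const uint8_t salt[MODEL_SALT_LEN])
{
	int i;

	/* bytes 0..3: SSCI, network byte order */
	for (i = 0; i < 4; i++)
		iv[i] = (uint8_t)(ssci >> (8 * (3 - i)));
	/* bytes 4..11: PN, network byte order */
	for (i = 0; i < 8; i++)
		iv[4 + i] = (uint8_t)(pn >> (8 * (7 - i)));
	/* whiten with the per-key salt */
	for (i = 0; i < MODEL_GCM_IV_LEN; i++)
		iv[i] ^= salt[i];
}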
1430 static int init_tx_sa(struct macsec_tx_sa *tx_sa, char *sak, int key_len, in init_tx_sa() argument
1433 tx_sa->stats = alloc_percpu(struct macsec_tx_sa_stats); in init_tx_sa()
1434 if (!tx_sa->stats) in init_tx_sa()
1437 tx_sa->key.tfm = macsec_alloc_tfm(sak, key_len, icv_len); in init_tx_sa()
1438 if (IS_ERR(tx_sa->key.tfm)) { in init_tx_sa()
1439 free_percpu(tx_sa->stats); in init_tx_sa()
1440 return PTR_ERR(tx_sa->key.tfm); in init_tx_sa()
1443 tx_sa->ssci = MACSEC_UNDEF_SSCI; in init_tx_sa()
1444 tx_sa->active = false; in init_tx_sa()
1445 refcount_set(&tx_sa->refcnt, 1); in init_tx_sa()
1446 spin_lock_init(&tx_sa->lock); in init_tx_sa()
1451 static void clear_tx_sa(struct macsec_tx_sa *tx_sa) in clear_tx_sa() argument
1453 tx_sa->active = false; in clear_tx_sa()
1455 macsec_txsa_put(tx_sa); in clear_tx_sa()
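Lines 1430-1455 cover the SA lifecycle helpers: init_tx_sa() allocates the per-CPU stats and then the crypto transform keyed with the SAK, freeing the stats again if the transform cannot be created, while clear_tx_sa() marks the SA inactive and drops the initial reference. The sketch below models only that allocate/unwind ordering with plain heap objects; model_alloc_tfm is a hypothetical stand-in for the real key setup.

#include <errno.h>
#include <stdbool.h>
#include <stdlib.h>

/* Illustrative stand-ins: the kernel allocates per-CPU stats and an
 * AEAD transform keyed with the SAK; here both are opaque heap objects
 * so only the error-unwind order is visible. */
struct model_tx_sa {
	void *stats;
	void *tfm;
	bool active;
	int refcnt;
};

static void *model_alloc_tfm(const char *sak, int key_len, int icv_len)
{
	(void)sak; (void)key_len; (void)icv_len;
	return malloc(1);           /* pretend key setup succeeded */
}

/* Mirrors the ordering in the listing: stats first, tfm second,
 * and stats are released if the tfm cannot be set up. */
int model_init_tx_sa(struct model_tx_sa *sa, const char *sak,
		     int key_len, int icv_len)
{
	sa->stats = malloc(64);
	if (!sa->stats)
		return -ENOMEM;

	sa->tfm = model_alloc_tfm(sak, key_len, icv_len);
	if (!sa->tfm) {
		free(sa->stats);    /* unwind the earlier allocation */
		return -ENOMEM;
	}

	sa->active = false;         /* enabled later via netlink */
	sa->refcnt = 1;             /* initial reference, dropped on clear */
	return 0;
}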
1513 struct macsec_tx_sa *tx_sa; in get_txsa_from_nl() local
1530 tx_sa = rtnl_dereference(tx_sc->sa[*assoc_num]); in get_txsa_from_nl()
1531 if (!tx_sa) in get_txsa_from_nl()
1537 return tx_sa; in get_txsa_from_nl()
1960 struct macsec_tx_sa *tx_sa; in macsec_add_txsa() local
2018 tx_sa = rtnl_dereference(tx_sc->sa[assoc_num]); in macsec_add_txsa()
2019 if (tx_sa) { in macsec_add_txsa()
2024 tx_sa = kmalloc(sizeof(*tx_sa), GFP_KERNEL); in macsec_add_txsa()
2025 if (!tx_sa) { in macsec_add_txsa()
2030 err = init_tx_sa(tx_sa, nla_data(tb_sa[MACSEC_SA_ATTR_KEY]), in macsec_add_txsa()
2033 kfree(tx_sa); in macsec_add_txsa()
2038 spin_lock_bh(&tx_sa->lock); in macsec_add_txsa()
2039 tx_sa->next_pn = nla_get_u64(tb_sa[MACSEC_SA_ATTR_PN]); in macsec_add_txsa()
2040 spin_unlock_bh(&tx_sa->lock); in macsec_add_txsa()
2043 tx_sa->active = !!nla_get_u8(tb_sa[MACSEC_SA_ATTR_ACTIVE]); in macsec_add_txsa()
2046 if (assoc_num == tx_sc->encoding_sa && tx_sa->active) in macsec_add_txsa()
2061 ctx.sa.tx_sa = tx_sa; in macsec_add_txsa()
2072 tx_sa->ssci = nla_get_ssci(tb_sa[MACSEC_SA_ATTR_SSCI]); in macsec_add_txsa()
2073 nla_memcpy(tx_sa->key.salt.bytes, tb_sa[MACSEC_SA_ATTR_SALT], in macsec_add_txsa()
2077 nla_memcpy(tx_sa->key.id, tb_sa[MACSEC_SA_ATTR_KEYID], MACSEC_KEYID_LEN); in macsec_add_txsa()
2078 rcu_assign_pointer(tx_sc->sa[assoc_num], tx_sa); in macsec_add_txsa()
2086 kfree(tx_sa); in macsec_add_txsa()
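Lines 1960-2086 are the netlink add path: macsec_add_txsa() refuses an already-populated association number, allocates and fully initializes the new SA (PN, active flag, key id, and SSCI/salt for XPN), and only then publishes it into tx_sc->sa[assoc_num] with rcu_assign_pointer() (line 2078); if anything fails before that point the SA is simply kfree()d, since no reader ever saw it. The sketch below mimics that publication order in userspace with a C11 release store in place of rcu_assign_pointer(); names, error values and the slot count are illustrative, and the kernel additionally serializes writers under RTNL.

#include <stdatomic.h>
#include <stdlib.h>

struct model_tx_sa {
	unsigned long long next_pn;
	int active;
};

/* One slot per association number, read locklessly on the TX path. */
#define MODEL_AN_COUNT 4
static _Atomic(struct model_tx_sa *) model_slots[MODEL_AN_COUNT];

/* Publication order as in macsec_add_txsa(): fully initialize the SA,
 * then make it visible with a release store (the userspace analogue of
 * rcu_assign_pointer()), so readers never observe a half-built SA. */
int model_add_txsa(int assoc_num, unsigned long long pn, int active)
{
	struct model_tx_sa *sa;

	if (atomic_load_explicit(&model_slots[assoc_num],
				 memory_order_acquire))
		return -1;                      /* slot already in use */

	sa = malloc(sizeof(*sa));
	if (!sa)
		return -1;

	sa->next_pn = pn;                       /* init before publish */
	sa->active = active;

	atomic_store_explicit(&model_slots[assoc_num], sa,
			      memory_order_release);
	return 0;
}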
2226 struct macsec_tx_sa *tx_sa; in macsec_del_txsa() local
2238 tx_sa = get_txsa_from_nl(genl_info_net(info), attrs, tb_sa, in macsec_del_txsa()
2240 if (IS_ERR(tx_sa)) { in macsec_del_txsa()
2242 return PTR_ERR(tx_sa); in macsec_del_txsa()
2245 if (tx_sa->active) { in macsec_del_txsa()
2262 ctx.sa.tx_sa = tx_sa; in macsec_del_txsa()
2271 clear_tx_sa(tx_sa); in macsec_del_txsa()
2311 struct macsec_tx_sa *tx_sa; in macsec_upd_txsa() local
2330 tx_sa = get_txsa_from_nl(genl_info_net(info), attrs, tb_sa, in macsec_upd_txsa()
2332 if (IS_ERR(tx_sa)) { in macsec_upd_txsa()
2334 return PTR_ERR(tx_sa); in macsec_upd_txsa()
2348 spin_lock_bh(&tx_sa->lock); in macsec_upd_txsa()
2349 prev_pn = tx_sa->next_pn_halves; in macsec_upd_txsa()
2350 tx_sa->next_pn = nla_get_u64(tb_sa[MACSEC_SA_ATTR_PN]); in macsec_upd_txsa()
2351 spin_unlock_bh(&tx_sa->lock); in macsec_upd_txsa()
2354 was_active = tx_sa->active; in macsec_upd_txsa()
2356 tx_sa->active = nla_get_u8(tb_sa[MACSEC_SA_ATTR_ACTIVE]); in macsec_upd_txsa()
2360 secy->operational = tx_sa->active; in macsec_upd_txsa()
2374 ctx.sa.tx_sa = tx_sa; in macsec_upd_txsa()
2388 spin_lock_bh(&tx_sa->lock); in macsec_upd_txsa()
2389 tx_sa->next_pn_halves = prev_pn; in macsec_upd_txsa()
2390 spin_unlock_bh(&tx_sa->lock); in macsec_upd_txsa()
2392 tx_sa->active = was_active; in macsec_upd_txsa()
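Lines 2311-2392 show the update path: macsec_upd_txsa() records prev_pn and was_active before applying the new values, and if handing the change to an offloading driver fails it restores both, taking tx_sa->lock around the PN rollback (lines 2388-2390); when the SA being updated is the encoding SA, secy->operational follows its active bit (line 2360). A small sketch of that apply/rollback shape, with a pthread mutex for the lock and a hypothetical offload callback:

#include <pthread.h>
#include <stdbool.h>
#include <stdint.h>

struct model_tx_sa {
	pthread_mutex_t lock;
	uint64_t next_pn;
	bool active;
};

/* Hypothetical offload hook: returns 0 if the hardware accepted the
 * updated SA, nonzero otherwise. */
typedef int (*model_offload_fn)(const struct model_tx_sa *sa);

/* Remember the old PN and active state, apply the new ones, and restore
 * the old values if offloading the change fails. */
int model_upd_txsa(struct model_tx_sa *sa, uint64_t new_pn, bool new_active,
		   model_offload_fn offload)
{
	uint64_t prev_pn;
	bool was_active;
	int err;

	pthread_mutex_lock(&sa->lock);
	prev_pn = sa->next_pn;
	sa->next_pn = new_pn;
	pthread_mutex_unlock(&sa->lock);

	was_active = sa->active;
	sa->active = new_active;

	err = offload ? offload(sa) : 0;
	if (err) {
		pthread_mutex_lock(&sa->lock);
		sa->next_pn = prev_pn;      /* undo the PN change */
		pthread_mutex_unlock(&sa->lock);
		sa->active = was_active;    /* undo the state change */
	}
	return err;
}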
2663 struct macsec_tx_sa *tx_sa, in get_tx_sa_stats() argument
2677 ctx.sa.tx_sa = tx_sa; in get_tx_sa_stats()
2687 per_cpu_ptr(tx_sa->stats, cpu); in get_tx_sa_stats()
3083 struct macsec_tx_sa *tx_sa = rtnl_dereference(tx_sc->sa[i]); in dump_secy() local
3088 if (!tx_sa) in dump_secy()
3104 get_tx_sa_stats(dev, i, tx_sa, &tx_sa_stats); in dump_secy()
3114 pn = tx_sa->next_pn; in dump_secy()
3117 pn = tx_sa->next_pn_halves.lower; in dump_secy()
3123 nla_put(skb, MACSEC_SA_ATTR_KEYID, MACSEC_KEYID_LEN, tx_sa->key.id) || in dump_secy()
3124 (secy->xpn && nla_put_ssci(skb, MACSEC_SA_ATTR_SSCI, tx_sa->ssci)) || in dump_secy()
3125 nla_put_u8(skb, MACSEC_SA_ATTR_ACTIVE, tx_sa->active)) { in dump_secy()
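In dump_secy() (lines 3083-3125) the PN reported over netlink is the full 64-bit next_pn under XPN but only the lower 32-bit half otherwise, and the SSCI attribute is emitted only for XPN SecYs. A one-line sketch of the PN choice, assuming the lower half of the 64-bit counter is the classic PN:

#include <stdbool.h>
#include <stdint.h>

/* Which PN value a dump reports (listing lines 3114-3117): the full
 * 64-bit counter under XPN, the classic 32-bit PN otherwise. */
uint64_t model_dump_pn(uint64_t next_pn, bool xpn)
{
	return xpn ? next_pn : (uint32_t)next_pn;
}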
3410 macsec_count_tx(skb, &macsec->secy.tx_sc, macsec_skb_cb(skb)->tx_sa); in macsec_start_xmit()
3733 struct macsec_tx_sa *tx_sa; in macsec_changelink_common() local
3736 tx_sa = rtnl_dereference(tx_sc->sa[tx_sc->encoding_sa]); in macsec_changelink_common()
3738 secy->operational = tx_sa && tx_sa->active; in macsec_changelink_common()
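Finally, lines 3733-3738 recompute secy->operational when the link configuration changes: the SecY is operational only if the currently selected encoding SA exists and is active. As a sketch:

#include <stdbool.h>

struct model_tx_sa {
	bool active;
};

/* The SecY can transmit protected frames only if the encoding SA
 * exists and is active (listing lines 3736-3738). */
bool model_secy_operational(const struct model_tx_sa *encoding_sa)
{
	return encoding_sa && encoding_sa->active;
}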