Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 2 | #ifndef __NET_GEN_STATS_H |
| 3 | #define __NET_GEN_STATS_H |
| 4 | |
| 5 | #include <linux/gen_stats.h> |
| 6 | #include <linux/socket.h> |
| 7 | #include <linux/rtnetlink.h> |
| 8 | #include <linux/pkt_sched.h> |
| 9 | |
John Fastabend | 22e0f8b | 2014-09-28 11:52:56 -0700 | [diff] [blame] | 10 | struct gnet_stats_basic_cpu { |
| 11 | struct gnet_stats_basic_packed bstats; |
| 12 | struct u64_stats_sync syncp; |
| 13 | }; |
| 14 | |
Eric Dumazet | 1c0d32f | 2016-12-04 09:48:16 -0800 | [diff] [blame] | 15 | struct net_rate_estimator; |
| 16 | |
Eric Dumazet | fd2c3ef | 2009-11-03 03:26:03 +0000 | [diff] [blame] | 17 | struct gnet_dump { |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 18 | spinlock_t * lock; |
| 19 | struct sk_buff * skb; |
Patrick McHardy | 1e90474 | 2008-01-22 22:11:17 -0800 | [diff] [blame] | 20 | struct nlattr * tail; |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 21 | |
Lucas De Marchi | 25985ed | 2011-03-30 22:57:33 -0300 | [diff] [blame] | 22 | /* Backward compatibility */ |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 23 | int compat_tc_stats; |
| 24 | int compat_xstats; |
Nicolas Dichtel | 9854518 | 2016-04-26 10:06:18 +0200 | [diff] [blame] | 25 | int padattr; |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 26 | void * xstats; |
| 27 | int xstats_len; |
| 28 | struct tc_stats tc_stats; |
| 29 | }; |
| 30 | |
Joe Perches | 8aae218 | 2013-09-20 11:23:26 -0700 | [diff] [blame] | 31 | int gnet_stats_start_copy(struct sk_buff *skb, int type, spinlock_t *lock, |
Nicolas Dichtel | 9854518 | 2016-04-26 10:06:18 +0200 | [diff] [blame] | 32 | struct gnet_dump *d, int padattr); |
Joe Perches | 8aae218 | 2013-09-20 11:23:26 -0700 | [diff] [blame] | 33 | |
| 34 | int gnet_stats_start_copy_compat(struct sk_buff *skb, int type, |
| 35 | int tc_stats_type, int xstats_type, |
Nicolas Dichtel | 9854518 | 2016-04-26 10:06:18 +0200 | [diff] [blame] | 36 | spinlock_t *lock, struct gnet_dump *d, |
| 37 | int padattr); |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 38 | |
Eric Dumazet | edb09eb | 2016-06-06 09:37:16 -0700 | [diff] [blame] | 39 | int gnet_stats_copy_basic(const seqcount_t *running, |
| 40 | struct gnet_dump *d, |
John Fastabend | 22e0f8b | 2014-09-28 11:52:56 -0700 | [diff] [blame] | 41 | struct gnet_stats_basic_cpu __percpu *cpu, |
Joe Perches | 8aae218 | 2013-09-20 11:23:26 -0700 | [diff] [blame] | 42 | struct gnet_stats_basic_packed *b); |
Eric Dumazet | edb09eb | 2016-06-06 09:37:16 -0700 | [diff] [blame] | 43 | void __gnet_stats_copy_basic(const seqcount_t *running, |
| 44 | struct gnet_stats_basic_packed *bstats, |
John Fastabend | 22e0f8b | 2014-09-28 11:52:56 -0700 | [diff] [blame] | 45 | struct gnet_stats_basic_cpu __percpu *cpu, |
| 46 | struct gnet_stats_basic_packed *b); |
Joe Perches | 8aae218 | 2013-09-20 11:23:26 -0700 | [diff] [blame] | 47 | int gnet_stats_copy_rate_est(struct gnet_dump *d, |
Eric Dumazet | 1c0d32f | 2016-12-04 09:48:16 -0800 | [diff] [blame] | 48 | struct net_rate_estimator __rcu **ptr); |
John Fastabend | 6401585 | 2014-09-28 11:53:57 -0700 | [diff] [blame] | 49 | int gnet_stats_copy_queue(struct gnet_dump *d, |
John Fastabend | b0ab6f9 | 2014-09-28 11:54:24 -0700 | [diff] [blame] | 50 | struct gnet_stats_queue __percpu *cpu_q, |
| 51 | struct gnet_stats_queue *q, __u32 qlen); |
John Fastabend | b01ac09 | 2017-12-07 09:57:20 -0800 | [diff] [blame] | 52 | void __gnet_stats_copy_queue(struct gnet_stats_queue *qstats, |
| 53 | const struct gnet_stats_queue __percpu *cpu_q, |
| 54 | const struct gnet_stats_queue *q, __u32 qlen); |
Joe Perches | 8aae218 | 2013-09-20 11:23:26 -0700 | [diff] [blame] | 55 | int gnet_stats_copy_app(struct gnet_dump *d, void *st, int len); |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 56 | |
/* Close a dump opened by gnet_stats_start_copy*() and finalize the
 * attributes recorded in @d.
 */
int gnet_stats_finish_copy(struct gnet_dump *d);
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 58 | |
Joe Perches | 8aae218 | 2013-09-20 11:23:26 -0700 | [diff] [blame] | 59 | int gen_new_estimator(struct gnet_stats_basic_packed *bstats, |
John Fastabend | 22e0f8b | 2014-09-28 11:52:56 -0700 | [diff] [blame] | 60 | struct gnet_stats_basic_cpu __percpu *cpu_bstats, |
Eric Dumazet | 1c0d32f | 2016-12-04 09:48:16 -0800 | [diff] [blame] | 61 | struct net_rate_estimator __rcu **rate_est, |
Vlad Buslov | 51a9f5a | 2018-08-10 20:51:54 +0300 | [diff] [blame] | 62 | spinlock_t *lock, |
Eric Dumazet | edb09eb | 2016-06-06 09:37:16 -0700 | [diff] [blame] | 63 | seqcount_t *running, struct nlattr *opt); |
Eric Dumazet | 1c0d32f | 2016-12-04 09:48:16 -0800 | [diff] [blame] | 64 | void gen_kill_estimator(struct net_rate_estimator __rcu **ptr); |
Joe Perches | 8aae218 | 2013-09-20 11:23:26 -0700 | [diff] [blame] | 65 | int gen_replace_estimator(struct gnet_stats_basic_packed *bstats, |
John Fastabend | 22e0f8b | 2014-09-28 11:52:56 -0700 | [diff] [blame] | 66 | struct gnet_stats_basic_cpu __percpu *cpu_bstats, |
Eric Dumazet | 1c0d32f | 2016-12-04 09:48:16 -0800 | [diff] [blame] | 67 | struct net_rate_estimator __rcu **ptr, |
Vlad Buslov | 51a9f5a | 2018-08-10 20:51:54 +0300 | [diff] [blame] | 68 | spinlock_t *lock, |
Eric Dumazet | edb09eb | 2016-06-06 09:37:16 -0700 | [diff] [blame] | 69 | seqcount_t *running, struct nlattr *opt); |
Eric Dumazet | 1c0d32f | 2016-12-04 09:48:16 -0800 | [diff] [blame] | 70 | bool gen_estimator_active(struct net_rate_estimator __rcu **ptr); |
| 71 | bool gen_estimator_read(struct net_rate_estimator __rcu **ptr, |
| 72 | struct gnet_stats_rate_est64 *sample); |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 73 | #endif |