#ifndef __NET_GEN_STATS_H
#define __NET_GEN_STATS_H

#include <linux/gen_stats.h>
#include <linux/socket.h>
#include <linux/rtnetlink.h>
#include <linux/pkt_sched.h>

/* Per-CPU basic byte/packet counters; syncp lets readers fetch a consistent
 * 64-bit snapshot on 32-bit hosts.
 */
struct gnet_stats_basic_cpu {
	struct gnet_stats_basic_packed bstats;
	struct u64_stats_sync syncp;
};
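
/*
 * Usage sketch (illustrative, not part of this header): the writer side
 * updates its per-CPU counters under the sync point, roughly as below.
 * 'cpu_bstats' is a placeholder for the caller's __percpu pointer, and the
 * packet accounting is simplified (GSO segments are ignored here).
 *
 *	struct gnet_stats_basic_cpu *b = this_cpu_ptr(cpu_bstats);
 *
 *	u64_stats_update_begin(&b->syncp);
 *	b->bstats.bytes   += skb->len;
 *	b->bstats.packets += 1;
 *	u64_stats_update_end(&b->syncp);
 */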

/* Dump state used while copying statistics into a netlink message. */
struct gnet_dump {
	spinlock_t *      lock;		/* statistics lock, held for the whole dump */
	struct sk_buff *  skb;		/* netlink message being filled in */
	struct nlattr *   tail;		/* stats attribute; its length is fixed up on finish */

	/* Backward compatibility */
	int               compat_tc_stats;	/* attribute type for legacy struct tc_stats, 0 if unused */
	int               compat_xstats;	/* attribute type for legacy xstats, 0 if unused */
	int               padattr;		/* attribute type used to pad for 64-bit alignment */
	void *            xstats;		/* saved application-specific stats for the compat dump */
	int               xstats_len;
	struct tc_stats   tc_stats;		/* accumulated legacy counters */
};

int gnet_stats_start_copy(struct sk_buff *skb, int type, spinlock_t *lock,
			  struct gnet_dump *d, int padattr);

int gnet_stats_start_copy_compat(struct sk_buff *skb, int type,
				 int tc_stats_type, int xstats_type,
				 spinlock_t *lock, struct gnet_dump *d,
				 int padattr);

int gnet_stats_copy_basic(struct gnet_dump *d,
			  struct gnet_stats_basic_cpu __percpu *cpu,
			  struct gnet_stats_basic_packed *b);
void __gnet_stats_copy_basic(struct gnet_stats_basic_packed *bstats,
			     struct gnet_stats_basic_cpu __percpu *cpu,
			     struct gnet_stats_basic_packed *b);
int gnet_stats_copy_rate_est(struct gnet_dump *d,
			     const struct gnet_stats_basic_packed *b,
			     struct gnet_stats_rate_est64 *r);
int gnet_stats_copy_queue(struct gnet_dump *d,
			  struct gnet_stats_queue __percpu *cpu_q,
			  struct gnet_stats_queue *q, __u32 qlen);
int gnet_stats_copy_app(struct gnet_dump *d, void *st, int len);

int gnet_stats_finish_copy(struct gnet_dump *d);
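
/*
 * Usage sketch (illustrative, not part of this header): a qdisc dump path
 * typically fills the statistics attributes in this order.  Names such as
 * 'q', 'cpu_bstats', 'cpu_qstats', 'qlen' and 'stats_lock' are placeholders
 * for the caller's own state.
 *
 *	struct gnet_dump d;
 *
 *	if (gnet_stats_start_copy_compat(skb, TCA_STATS2, TCA_STATS,
 *					 TCA_XSTATS, stats_lock, &d,
 *					 TCA_PAD) < 0)
 *		goto nla_put_failure;
 *
 *	if (gnet_stats_copy_basic(&d, cpu_bstats, &q->bstats) < 0 ||
 *	    gnet_stats_copy_rate_est(&d, &q->bstats, &q->rate_est) < 0 ||
 *	    gnet_stats_copy_queue(&d, cpu_qstats, &q->qstats, qlen) < 0)
 *		goto nla_put_failure;
 *
 *	if (gnet_stats_finish_copy(&d) < 0)
 *		goto nla_put_failure;
 */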

int gen_new_estimator(struct gnet_stats_basic_packed *bstats,
		      struct gnet_stats_basic_cpu __percpu *cpu_bstats,
		      struct gnet_stats_rate_est64 *rate_est,
		      spinlock_t *stats_lock, struct nlattr *opt);
void gen_kill_estimator(struct gnet_stats_basic_packed *bstats,
			struct gnet_stats_rate_est64 *rate_est);
int gen_replace_estimator(struct gnet_stats_basic_packed *bstats,
			  struct gnet_stats_basic_cpu __percpu *cpu_bstats,
			  struct gnet_stats_rate_est64 *rate_est,
			  spinlock_t *stats_lock, struct nlattr *opt);
bool gen_estimator_active(const struct gnet_stats_basic_packed *bstats,
			  const struct gnet_stats_rate_est64 *rate_est);
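
/*
 * Usage sketch (illustrative): attaching a rate estimator from a TCA_RATE
 * attribute, as a qdisc setup path might do.  'sch', 'tca' and 'stats_lock'
 * are placeholders for the caller's own objects.
 *
 *	if (tca[TCA_RATE]) {
 *		err = gen_new_estimator(&sch->bstats, sch->cpu_bstats,
 *					&sch->rate_est, stats_lock,
 *					tca[TCA_RATE]);
 *		if (err)
 *			return err;
 *	}
 *
 * gen_replace_estimator() reconfigures (or creates) the estimator, and
 * gen_kill_estimator() removes it when the object is destroyed.
 */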
#endif