/*
 * net/sched/cls_matchall.c		Match-all classifier
 *
 * Copyright (c) 2016 Jiri Pirko <jiri@mellanox.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
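
/*
 * Example usage (illustrative only; interface names are placeholders):
 *
 *   tc qdisc add dev eth0 clsact
 *   tc filter add dev eth0 ingress matchall \
 *           action mirred egress mirror dev eth1
 *
 * installs a single rule that matches every packet received on eth0 and
 * mirrors it to eth1; the skip_sw/skip_hw flags control hardware offload.
 */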

#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/module.h>

#include <net/sch_generic.h>
#include <net/pkt_cls.h>

struct cls_mall_head {
	struct tcf_exts exts;
	struct tcf_result res;
	u32 handle;
	u32 flags;
	union {
		struct work_struct work;
		struct rcu_head rcu;
	};
};

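/*
 * A matchall instance matches every packet that reaches it: classification
 * simply returns the configured classid and runs the attached actions,
 * unless the filter was installed with skip_sw.
 */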
static int mall_classify(struct sk_buff *skb, const struct tcf_proto *tp,
			 struct tcf_result *res)
{
	struct cls_mall_head *head = rcu_dereference_bh(tp->root);

	if (tc_skip_sw(head->flags))
		return -1;

	*res = head->res;
	return tcf_exts_exec(skb, &head->exts, res);
}

static int mall_init(struct tcf_proto *tp)
{
	return 0;
}

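/*
 * Teardown happens in two stages: an RCU grace period lets concurrent
 * classifiers drain, then the actual freeing is deferred to a workqueue
 * so that tcf_exts_destroy() can run with the RTNL lock held.
 */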
static void __mall_destroy(struct cls_mall_head *head)
{
	tcf_exts_destroy(&head->exts);
	tcf_exts_put_net(&head->exts);
	kfree(head);
}

static void mall_destroy_work(struct work_struct *work)
{
	struct cls_mall_head *head = container_of(work, struct cls_mall_head,
						  work);
	rtnl_lock();
	__mall_destroy(head);
	rtnl_unlock();
}

static void mall_destroy_rcu(struct rcu_head *rcu)
{
	struct cls_mall_head *head = container_of(rcu, struct cls_mall_head,
						  rcu);

	INIT_WORK(&head->work, mall_destroy_work);
	tcf_queue_work(&head->work);
}

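/*
 * Hardware offload: the rule is pushed to (or removed from) the drivers
 * bound to this tcf_block via tc_setup_cb_call(); the head pointer serves
 * as the cookie that identifies the rule towards the driver.
 */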
static void mall_destroy_hw_filter(struct tcf_proto *tp,
				   struct cls_mall_head *head,
				   unsigned long cookie)
{
	struct tc_cls_matchall_offload cls_mall = {};
	struct tcf_block *block = tp->chain->block;

	tc_cls_common_offload_init(&cls_mall.common, tp);
	cls_mall.command = TC_CLSMATCHALL_DESTROY;
	cls_mall.cookie = cookie;

	tc_setup_cb_call(block, NULL, TC_SETUP_CLSMATCHALL, &cls_mall, false);
}

static int mall_replace_hw_filter(struct tcf_proto *tp,
				  struct cls_mall_head *head,
				  unsigned long cookie)
{
	struct tc_cls_matchall_offload cls_mall = {};
	struct tcf_block *block = tp->chain->block;
	bool skip_sw = tc_skip_sw(head->flags);
	int err;

	tc_cls_common_offload_init(&cls_mall.common, tp);
	cls_mall.command = TC_CLSMATCHALL_REPLACE;
	cls_mall.exts = &head->exts;
	cls_mall.cookie = cookie;

	err = tc_setup_cb_call(block, NULL, TC_SETUP_CLSMATCHALL,
			       &cls_mall, skip_sw);
	if (err < 0) {
		mall_destroy_hw_filter(tp, head, cookie);
		return err;
	} else if (err > 0) {
		head->flags |= TCA_CLS_FLAGS_IN_HW;
	}

	if (skip_sw && !(head->flags & TCA_CLS_FLAGS_IN_HW))
		return -EINVAL;

	return 0;
}

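/*
 * Tear down the single matchall instance: remove the hardware rule unless
 * skip_hw was set, then free the software state via the deferred RCU path
 * when tcf_exts_get_net() succeeds, or synchronously during netns dismantle.
 */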
static void mall_destroy(struct tcf_proto *tp)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);

	if (!head)
		return;

	if (!tc_skip_hw(head->flags))
		mall_destroy_hw_filter(tp, head, (unsigned long) head);

	if (tcf_exts_get_net(&head->exts))
		call_rcu(&head->rcu, mall_destroy_rcu);
	else
		__mall_destroy(head);
}

static void *mall_get(struct tcf_proto *tp, u32 handle)
{
	return NULL;
}

static const struct nla_policy mall_policy[TCA_MATCHALL_MAX + 1] = {
	[TCA_MATCHALL_UNSPEC]		= { .type = NLA_UNSPEC },
	[TCA_MATCHALL_CLASSID]		= { .type = NLA_U32 },
};

static int mall_set_parms(struct net *net, struct tcf_proto *tp,
			  struct cls_mall_head *head,
			  unsigned long base, struct nlattr **tb,
			  struct nlattr *est, bool ovr)
{
	int err;

	err = tcf_exts_validate(net, tp, tb, est, &head->exts, ovr);
	if (err < 0)
		return err;

	if (tb[TCA_MATCHALL_CLASSID]) {
		head->res.classid = nla_get_u32(tb[TCA_MATCHALL_CLASSID]);
		tcf_bind_filter(tp, &head->res, base);
	}
	return 0;
}

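/*
 * matchall keeps at most one filter per tcf_proto instance, so "change"
 * only ever creates the head: adding a second rule to the same instance
 * fails with -EEXIST, and the handle defaults to 1 when none is given.
 */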
static int mall_change(struct net *net, struct sk_buff *in_skb,
		       struct tcf_proto *tp, unsigned long base,
		       u32 handle, struct nlattr **tca,
		       void **arg, bool ovr)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);
	struct nlattr *tb[TCA_MATCHALL_MAX + 1];
	struct cls_mall_head *new;
	u32 flags = 0;
	int err;

	if (!tca[TCA_OPTIONS])
		return -EINVAL;

	if (head)
		return -EEXIST;

	err = nla_parse_nested(tb, TCA_MATCHALL_MAX, tca[TCA_OPTIONS],
			       mall_policy, NULL);
	if (err < 0)
		return err;

	if (tb[TCA_MATCHALL_FLAGS]) {
		flags = nla_get_u32(tb[TCA_MATCHALL_FLAGS]);
		if (!tc_flags_valid(flags))
			return -EINVAL;
	}

	new = kzalloc(sizeof(*new), GFP_KERNEL);
	if (!new)
		return -ENOBUFS;

	err = tcf_exts_init(&new->exts, TCA_MATCHALL_ACT, 0);
	if (err)
		goto err_exts_init;

	if (!handle)
		handle = 1;
	new->handle = handle;
	new->flags = flags;

	err = mall_set_parms(net, tp, new, base, tb, tca[TCA_RATE], ovr);
	if (err)
		goto err_set_parms;

	if (!tc_skip_hw(new->flags)) {
		err = mall_replace_hw_filter(tp, new, (unsigned long) new);
		if (err)
			goto err_replace_hw_filter;
	}

	if (!tc_in_hw(new->flags))
		new->flags |= TCA_CLS_FLAGS_NOT_IN_HW;

	*arg = head;
	rcu_assign_pointer(tp->root, new);
	return 0;

err_replace_hw_filter:
err_set_parms:
	tcf_exts_destroy(&new->exts);
err_exts_init:
	kfree(new);
	return err;
}

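/*
 * Deleting the individual filter is not supported; the single rule goes
 * away when the tcf_proto itself is destroyed (see mall_destroy()).
 */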
static int mall_delete(struct tcf_proto *tp, void *arg, bool *last)
{
	return -EOPNOTSUPP;
}

static void mall_walk(struct tcf_proto *tp, struct tcf_walker *arg)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);

	if (arg->count < arg->skip)
		goto skip;
	if (arg->fn(tp, head, arg) < 0)
		arg->stop = 1;
skip:
	arg->count++;
}

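/*
 * Dump the single head back to user space: classid, flags and actions go
 * inside the nested TCA_OPTIONS attribute, action statistics follow it.
 */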
static int mall_dump(struct net *net, struct tcf_proto *tp, void *fh,
		     struct sk_buff *skb, struct tcmsg *t)
{
	struct cls_mall_head *head = fh;
	struct nlattr *nest;

	if (!head)
		return skb->len;

	t->tcm_handle = head->handle;

	nest = nla_nest_start(skb, TCA_OPTIONS);
	if (!nest)
		goto nla_put_failure;

	if (head->res.classid &&
	    nla_put_u32(skb, TCA_MATCHALL_CLASSID, head->res.classid))
		goto nla_put_failure;

	if (head->flags && nla_put_u32(skb, TCA_MATCHALL_FLAGS, head->flags))
		goto nla_put_failure;

	if (tcf_exts_dump(skb, &head->exts))
		goto nla_put_failure;

	nla_nest_end(skb, nest);

	if (tcf_exts_dump_stats(skb, &head->exts) < 0)
		goto nla_put_failure;

	return skb->len;

nla_put_failure:
	nla_nest_cancel(skb, nest);
	return -1;
}

static void mall_bind_class(void *fh, u32 classid, unsigned long cl)
{
	struct cls_mall_head *head = fh;

	if (head && head->res.classid == classid)
		head->res.class = cl;
}

static struct tcf_proto_ops cls_mall_ops __read_mostly = {
	.kind		= "matchall",
	.classify	= mall_classify,
	.init		= mall_init,
	.destroy	= mall_destroy,
	.get		= mall_get,
	.change		= mall_change,
	.delete		= mall_delete,
	.walk		= mall_walk,
	.dump		= mall_dump,
	.bind_class	= mall_bind_class,
	.owner		= THIS_MODULE,
};

static int __init cls_mall_init(void)
{
	return register_tcf_proto_ops(&cls_mall_ops);
}

static void __exit cls_mall_exit(void)
{
	unregister_tcf_proto_ops(&cls_mall_ops);
}

module_init(cls_mall_init);
module_exit(cls_mall_exit);

MODULE_AUTHOR("Jiri Pirko <jiri@mellanox.com>");
MODULE_DESCRIPTION("Match-all classifier");
MODULE_LICENSE("GPL v2");