int fall_through; /* 0: only classify if explicit match */
};
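+/* Attribute types consumed by the generic tcf_exts_*() helpers when
+ * validating and dumping this classifier's attached actions.
+ */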
-static struct tcf_ext_map tcindex_ext_map = {
+static const struct tcf_ext_map tcindex_ext_map = {
.police = TCA_TCINDEX_POLICE,
.action = TCA_TCINDEX_ACT
};
return p->hash > (p->mask >> p->shift);
}
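+/* Minimum payload length per attribute type; nla_parse_nested()
+ * checks incoming attributes against this table, replacing the
+ * open-coded RTA_PAYLOAD() length checks removed below.
+ */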
+static const struct nla_policy tcindex_policy[TCA_TCINDEX_MAX + 1] = {
+ [TCA_TCINDEX_HASH] = { .type = NLA_U32 },
+ [TCA_TCINDEX_MASK] = { .type = NLA_U16 },
+ [TCA_TCINDEX_SHIFT] = { .type = NLA_U32 },
+ [TCA_TCINDEX_FALL_THROUGH] = { .type = NLA_U32 },
+ [TCA_TCINDEX_CLASSID] = { .type = NLA_U32 },
+};
+
static int
tcindex_set_parms(struct tcf_proto *tp, unsigned long base, u32 handle,
struct tcindex_data *p, struct tcindex_filter_result *r,
- struct rtattr **tb, struct rtattr *est)
+ struct nlattr **tb, struct nlattr *est)
{
int err, balloc = 0;
struct tcindex_filter_result new_filter_result, *old_r = r;
else
memset(&cr, 0, sizeof(cr));
- err = -EINVAL;
- if (tb[TCA_TCINDEX_HASH-1]) {
- if (RTA_PAYLOAD(tb[TCA_TCINDEX_HASH-1]) < sizeof(u32))
- goto errout;
- cp.hash = *(u32 *) RTA_DATA(tb[TCA_TCINDEX_HASH-1]);
- }
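+ /* Payload sizes were already validated against tcindex_policy in
+ * tcindex_change(), so the typed nla_get_*() helpers can be used
+ * without further length checks.
+ */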
+ if (tb[TCA_TCINDEX_HASH])
+ cp.hash = nla_get_u32(tb[TCA_TCINDEX_HASH]);
- if (tb[TCA_TCINDEX_MASK-1]) {
- if (RTA_PAYLOAD(tb[TCA_TCINDEX_MASK-1]) < sizeof(u16))
- goto errout;
- cp.mask = *(u16 *) RTA_DATA(tb[TCA_TCINDEX_MASK-1]);
- }
+ if (tb[TCA_TCINDEX_MASK])
+ cp.mask = nla_get_u16(tb[TCA_TCINDEX_MASK]);
- if (tb[TCA_TCINDEX_SHIFT-1]) {
- if (RTA_PAYLOAD(tb[TCA_TCINDEX_SHIFT-1]) < sizeof(int))
- goto errout;
- cp.shift = *(int *) RTA_DATA(tb[TCA_TCINDEX_SHIFT-1]);
- }
+ if (tb[TCA_TCINDEX_SHIFT])
+ cp.shift = nla_get_u32(tb[TCA_TCINDEX_SHIFT]);
err = -EBUSY;
/* Hash already allocated, make sure that we still meet the
goto errout;
err = -EINVAL;
- if (tb[TCA_TCINDEX_FALL_THROUGH-1]) {
- if (RTA_PAYLOAD(tb[TCA_TCINDEX_FALL_THROUGH-1]) < sizeof(u32))
- goto errout;
- cp.fall_through =
- *(u32 *) RTA_DATA(tb[TCA_TCINDEX_FALL_THROUGH-1]);
- }
+ if (tb[TCA_TCINDEX_FALL_THROUGH])
+ cp.fall_through = nla_get_u32(tb[TCA_TCINDEX_FALL_THROUGH]);
if (!cp.hash) {
/* Hash not specified, use perfect hash if the upper limit
goto errout_alloc;
}
- if (tb[TCA_TCINDEX_CLASSID-1]) {
- cr.res.classid = *(u32 *) RTA_DATA(tb[TCA_TCINDEX_CLASSID-1]);
+ if (tb[TCA_TCINDEX_CLASSID]) {
+ cr.res.classid = nla_get_u32(tb[TCA_TCINDEX_CLASSID]);
tcf_bind_filter(tp, &cr.res, base);
}
static int
tcindex_change(struct tcf_proto *tp, unsigned long base, u32 handle,
- struct rtattr **tca, unsigned long *arg)
+ struct nlattr **tca, unsigned long *arg)
{
- struct rtattr *opt = tca[TCA_OPTIONS-1];
- struct rtattr *tb[TCA_TCINDEX_MAX];
+ struct nlattr *opt = tca[TCA_OPTIONS];
+ struct nlattr *tb[TCA_TCINDEX_MAX + 1];
struct tcindex_data *p = PRIV(tp);
struct tcindex_filter_result *r = (struct tcindex_filter_result *) *arg;
+ int err;
pr_debug("tcindex_change(tp %p,handle 0x%08x,tca %p,arg %p),opt %p,"
"p %p,r %p,*arg 0x%lx\n",
if (!opt)
return 0;
- if (rtattr_parse_nested(tb, TCA_TCINDEX_MAX, opt) < 0)
- return -EINVAL;
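+ /* The nlattr API indexes tb[] by attribute type directly, which is
+ * why tb[] is sized TCA_TCINDEX_MAX + 1 and the old "-1" offsets
+ * disappear throughout.
+ */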
+ err = nla_parse_nested(tb, TCA_TCINDEX_MAX, opt, tcindex_policy);
+ if (err < 0)
+ return err;
- return tcindex_set_parms(tp, base, handle, p, r, tb, tca[TCA_RATE-1]);
+ return tcindex_set_parms(tp, base, handle, p, r, tb, tca[TCA_RATE]);
}
struct tcindex_data *p = PRIV(tp);
struct tcindex_filter_result *r = (struct tcindex_filter_result *) fh;
unsigned char *b = skb_tail_pointer(skb);
- struct rtattr *rta;
+ struct nlattr *nest;
pr_debug("tcindex_dump(tp %p,fh 0x%lx,skb %p,t %p),p %p,r %p,b %p\n",
tp, fh, skb, t, p, r, b);
pr_debug("p->perfect %p p->h %p\n", p->perfect, p->h);
- rta = (struct rtattr *) b;
- RTA_PUT(skb, TCA_OPTIONS, 0, NULL);
+
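+ /* nla_nest_start() emits a zero-length TCA_OPTIONS header and
+ * returns its position; nla_nest_end() below fixes up the length
+ * once all nested attributes have been added.
+ */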
+ nest = nla_nest_start(skb, TCA_OPTIONS);
+ if (nest == NULL)
+ goto nla_put_failure;
+
if (!fh) {
t->tcm_handle = ~0; /* whatever ... */
- RTA_PUT(skb, TCA_TCINDEX_HASH, sizeof(p->hash), &p->hash);
- RTA_PUT(skb, TCA_TCINDEX_MASK, sizeof(p->mask), &p->mask);
- RTA_PUT(skb, TCA_TCINDEX_SHIFT, sizeof(p->shift), &p->shift);
- RTA_PUT(skb, TCA_TCINDEX_FALL_THROUGH, sizeof(p->fall_through),
- &p->fall_through);
- rta->rta_len = skb_tail_pointer(skb) - b;
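+ /* The NLA_PUT_*() macros jump to the nla_put_failure label when
+ * the skb runs out of tailroom, mirroring the old RTA_PUT()/
+ * rtattr_failure convention.
+ */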
+ NLA_PUT_U32(skb, TCA_TCINDEX_HASH, p->hash);
+ NLA_PUT_U16(skb, TCA_TCINDEX_MASK, p->mask);
+ NLA_PUT_U32(skb, TCA_TCINDEX_SHIFT, p->shift);
+ NLA_PUT_U32(skb, TCA_TCINDEX_FALL_THROUGH, p->fall_through);
+ nla_nest_end(skb, nest);
} else {
if (p->perfect) {
t->tcm_handle = r - p->perfect;
}
pr_debug("handle = %d\n", t->tcm_handle);
if (r->res.class)
- RTA_PUT(skb, TCA_TCINDEX_CLASSID, 4, &r->res.classid);
+ NLA_PUT_U32(skb, TCA_TCINDEX_CLASSID, r->res.classid);
if (tcf_exts_dump(skb, &r->exts, &tcindex_ext_map) < 0)
- goto rtattr_failure;
- rta->rta_len = skb_tail_pointer(skb) - b;
+ goto nla_put_failure;
+ nla_nest_end(skb, nest);
if (tcf_exts_dump_stats(skb, &r->exts, &tcindex_ext_map) < 0)
- goto rtattr_failure;
+ goto nla_put_failure;
}
return skb->len;
-rtattr_failure:
+nla_put_failure:
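+ /* Undo any partial dump by trimming the skb back to the saved
+ * tail pointer.
+ */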
nlmsg_trim(skb, b);
return -1;
}