2 "check bpf_perf_event_data->sample_period byte load permitted",
4 BPF_MOV64_IMM(BPF_REG_0
, 0),
5 #if __BYTE_ORDER == __LITTLE_ENDIAN
6 BPF_LDX_MEM(BPF_B
, BPF_REG_0
, BPF_REG_1
,
7 offsetof(struct bpf_perf_event_data
, sample_period
)),
9 BPF_LDX_MEM(BPF_B
, BPF_REG_0
, BPF_REG_1
,
10 offsetof(struct bpf_perf_event_data
, sample_period
) + 7),
15 .prog_type
= BPF_PROG_TYPE_PERF_EVENT
,
18 "check bpf_perf_event_data->sample_period half load permitted",
20 BPF_MOV64_IMM(BPF_REG_0
, 0),
21 #if __BYTE_ORDER == __LITTLE_ENDIAN
22 BPF_LDX_MEM(BPF_H
, BPF_REG_0
, BPF_REG_1
,
23 offsetof(struct bpf_perf_event_data
, sample_period
)),
25 BPF_LDX_MEM(BPF_H
, BPF_REG_0
, BPF_REG_1
,
26 offsetof(struct bpf_perf_event_data
, sample_period
) + 6),
31 .prog_type
= BPF_PROG_TYPE_PERF_EVENT
,
34 "check bpf_perf_event_data->sample_period word load permitted",
36 BPF_MOV64_IMM(BPF_REG_0
, 0),
37 #if __BYTE_ORDER == __LITTLE_ENDIAN
38 BPF_LDX_MEM(BPF_W
, BPF_REG_0
, BPF_REG_1
,
39 offsetof(struct bpf_perf_event_data
, sample_period
)),
41 BPF_LDX_MEM(BPF_W
, BPF_REG_0
, BPF_REG_1
,
42 offsetof(struct bpf_perf_event_data
, sample_period
) + 4),
47 .prog_type
= BPF_PROG_TYPE_PERF_EVENT
,
50 "check bpf_perf_event_data->sample_period dword load permitted",
52 BPF_MOV64_IMM(BPF_REG_0
, 0),
53 BPF_LDX_MEM(BPF_DW
, BPF_REG_0
, BPF_REG_1
,
54 offsetof(struct bpf_perf_event_data
, sample_period
)),
58 .prog_type
= BPF_PROG_TYPE_PERF_EVENT
,
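/*
 * Illustrative sketch, not part of this selftest: the loads exercised
 * above mirror what a BPF_PROG_TYPE_PERF_EVENT program does when it
 * reads sample_period directly from its context. A minimal libbpf-style
 * program (the function name and threshold are hypothetical) would be:
 *
 *	#include <linux/bpf_perf_event.h>
 *	#include <bpf/bpf_helpers.h>
 *
 *	SEC("perf_event")
 *	int sample_period_read(struct bpf_perf_event_data *ctx)
 *	{
 *		// full 64-bit (dword) read of ctx->sample_period
 *		return ctx->sample_period > 1000000 ? 1 : 0;
 *	}
 *
 *	char _license[] SEC("license") = "GPL";
 */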