// SPDX-License-Identifier: GPL-2.0-or-later
//
// phy-packet-definitions.h - The definitions of phy packet for IEEE 1394.
//
// Copyright (c) 2024 Takashi Sakamoto
7 #ifndef _FIREWIRE_PHY_PACKET_DEFINITIONS_H
8 #define _FIREWIRE_PHY_PACKET_DEFINITIONS_H
// Packet identifier field (bits 31:30) common to every PHY packet quadlet.
#define PACKET_IDENTIFIER_MASK		0xc0000000
#define PACKET_IDENTIFIER_SHIFT		30

// Read the packet identifier field of a PHY packet quadlet.
static inline unsigned int phy_packet_get_packet_identifier(u32 quadlet)
{
	u32 field = quadlet & PACKET_IDENTIFIER_MASK;

	return field >> PACKET_IDENTIFIER_SHIFT;
}

// Write the packet identifier field of the quadlet, preserving all other bits.
static inline void phy_packet_set_packet_identifier(u32 *quadlet, unsigned int packet_identifier)
{
	u32 field = ((u32)packet_identifier << PACKET_IDENTIFIER_SHIFT) & PACKET_IDENTIFIER_MASK;

	*quadlet = (*quadlet & ~PACKET_IDENTIFIER_MASK) | field;
}
#define PHY_PACKET_PACKET_IDENTIFIER_PHY_CONFIG	0

// Field layout of the PHY configuration packet quadlet.
#define PHY_CONFIG_ROOT_ID_MASK				0x3f000000
#define PHY_CONFIG_ROOT_ID_SHIFT			24
#define PHY_CONFIG_FORCE_ROOT_NODE_MASK			0x00800000
#define PHY_CONFIG_FORCE_ROOT_NODE_SHIFT		23
#define PHY_CONFIG_GAP_COUNT_OPTIMIZATION_MASK		0x00400000
#define PHY_CONFIG_GAP_COUNT_OPTIMIZATION_SHIFT		22
#define PHY_CONFIG_GAP_COUNT_MASK			0x003f0000
#define PHY_CONFIG_GAP_COUNT_SHIFT			16

// Read the root ID field of a PHY configuration packet.
static inline unsigned int phy_packet_phy_config_get_root_id(u32 quadlet)
{
	u32 field = quadlet & PHY_CONFIG_ROOT_ID_MASK;

	return field >> PHY_CONFIG_ROOT_ID_SHIFT;
}

// Write the root ID field, preserving all other bits.
static inline void phy_packet_phy_config_set_root_id(u32 *quadlet, unsigned int root_id)
{
	u32 field = ((u32)root_id << PHY_CONFIG_ROOT_ID_SHIFT) & PHY_CONFIG_ROOT_ID_MASK;

	*quadlet = (*quadlet & ~PHY_CONFIG_ROOT_ID_MASK) | field;
}

// Whether the force-root-node flag is set in the quadlet.
static inline bool phy_packet_phy_config_get_force_root_node(u32 quadlet)
{
	return (quadlet & PHY_CONFIG_FORCE_ROOT_NODE_MASK) != 0;
}

// Set or clear the force-root-node flag, preserving all other bits.
static inline void phy_packet_phy_config_set_force_root_node(u32 *quadlet, bool has_force_root_node)
{
	u32 field = ((u32)has_force_root_node << PHY_CONFIG_FORCE_ROOT_NODE_SHIFT) &
		    PHY_CONFIG_FORCE_ROOT_NODE_MASK;

	*quadlet = (*quadlet & ~PHY_CONFIG_FORCE_ROOT_NODE_MASK) | field;
}

// Whether the gap-count-optimization flag is set in the quadlet.
static inline bool phy_packet_phy_config_get_gap_count_optimization(u32 quadlet)
{
	return (quadlet & PHY_CONFIG_GAP_COUNT_OPTIMIZATION_MASK) != 0;
}

// Set or clear the gap-count-optimization flag, preserving all other bits.
static inline void phy_packet_phy_config_set_gap_count_optimization(u32 *quadlet,
								    bool has_gap_count_optimization)
{
	u32 field = ((u32)has_gap_count_optimization << PHY_CONFIG_GAP_COUNT_OPTIMIZATION_SHIFT) &
		    PHY_CONFIG_GAP_COUNT_OPTIMIZATION_MASK;

	*quadlet = (*quadlet & ~PHY_CONFIG_GAP_COUNT_OPTIMIZATION_MASK) | field;
}

// Read the gap count field of a PHY configuration packet.
static inline unsigned int phy_packet_phy_config_get_gap_count(u32 quadlet)
{
	u32 field = quadlet & PHY_CONFIG_GAP_COUNT_MASK;

	return field >> PHY_CONFIG_GAP_COUNT_SHIFT;
}

// Write the gap count field, preserving all other bits.
static inline void phy_packet_phy_config_set_gap_count(u32 *quadlet, unsigned int gap_count)
{
	u32 field = ((u32)gap_count << PHY_CONFIG_GAP_COUNT_SHIFT) & PHY_CONFIG_GAP_COUNT_MASK;

	*quadlet = (*quadlet & ~PHY_CONFIG_GAP_COUNT_MASK) | field;
}
#define PHY_PACKET_PACKET_IDENTIFIER_SELF_ID	2

// Fields common to every self ID packet quadlet.
#define SELF_ID_PHY_ID_MASK			0x3f000000
#define SELF_ID_PHY_ID_SHIFT			24
#define SELF_ID_EXTENDED_MASK			0x00800000
#define SELF_ID_EXTENDED_SHIFT			23
#define SELF_ID_MORE_PACKETS_MASK		0x00000001
#define SELF_ID_MORE_PACKETS_SHIFT		0

// Fields specific to self ID zero packets.
#define SELF_ID_ZERO_LINK_ACTIVE_MASK		0x00400000
#define SELF_ID_ZERO_LINK_ACTIVE_SHIFT		22
#define SELF_ID_ZERO_GAP_COUNT_MASK		0x003f0000
#define SELF_ID_ZERO_GAP_COUNT_SHIFT		16
#define SELF_ID_ZERO_SCODE_MASK			0x0000c000
#define SELF_ID_ZERO_SCODE_SHIFT		14
#define SELF_ID_ZERO_CONTENDER_MASK		0x00000800
#define SELF_ID_ZERO_CONTENDER_SHIFT		11
#define SELF_ID_ZERO_POWER_CLASS_MASK		0x00000700
#define SELF_ID_ZERO_POWER_CLASS_SHIFT		8
#define SELF_ID_ZERO_INITIATED_RESET_MASK	0x00000002
#define SELF_ID_ZERO_INITIATED_RESET_SHIFT	1

// Fields specific to extended self ID packets.
#define SELF_ID_EXTENDED_SEQUENCE_MASK		0x00700000
#define SELF_ID_EXTENDED_SEQUENCE_SHIFT		20

// Each port status is a two-bit field within a self ID quadlet.
#define SELF_ID_PORT_STATUS_MASK		0x3

#define SELF_ID_SEQUENCE_MAXIMUM_QUADLET_COUNT	4

// Read the PHY ID field of a self ID packet.
static inline unsigned int phy_packet_self_id_get_phy_id(u32 quadlet)
{
	return (quadlet & SELF_ID_PHY_ID_MASK) >> SELF_ID_PHY_ID_SHIFT;
}

// Write the PHY ID field, preserving all other bits.
static inline void phy_packet_self_id_set_phy_id(u32 *quadlet, unsigned int phy_id)
{
	*quadlet &= ~SELF_ID_PHY_ID_MASK;
	*quadlet |= (phy_id << SELF_ID_PHY_ID_SHIFT) & SELF_ID_PHY_ID_MASK;
}

// Whether the quadlet is an extended self ID packet (true) or self ID zero (false).
static inline bool phy_packet_self_id_get_extended(u32 quadlet)
{
	return (quadlet & SELF_ID_EXTENDED_MASK) >> SELF_ID_EXTENDED_SHIFT;
}

// Set or clear the extended flag, preserving all other bits.
static inline void phy_packet_self_id_set_extended(u32 *quadlet, bool extended)
{
	*quadlet &= ~SELF_ID_EXTENDED_MASK;
	*quadlet |= (extended << SELF_ID_EXTENDED_SHIFT) & SELF_ID_EXTENDED_MASK;
}

// Whether the link-active flag of a self ID zero packet is set.
static inline bool phy_packet_self_id_zero_get_link_active(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_LINK_ACTIVE_MASK) >> SELF_ID_ZERO_LINK_ACTIVE_SHIFT;
}

// Set or clear the link-active flag, preserving all other bits.
static inline void phy_packet_self_id_zero_set_link_active(u32 *quadlet, bool is_active)
{
	*quadlet &= ~SELF_ID_ZERO_LINK_ACTIVE_MASK;
	*quadlet |= (is_active << SELF_ID_ZERO_LINK_ACTIVE_SHIFT) & SELF_ID_ZERO_LINK_ACTIVE_MASK;
}

// Read the gap count field of a self ID zero packet.
static inline unsigned int phy_packet_self_id_zero_get_gap_count(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_GAP_COUNT_MASK) >> SELF_ID_ZERO_GAP_COUNT_SHIFT;
}

// Write the gap count field, preserving all other bits.
static inline void phy_packet_self_id_zero_set_gap_count(u32 *quadlet, unsigned int gap_count)
{
	*quadlet &= ~SELF_ID_ZERO_GAP_COUNT_MASK;
	*quadlet |= (gap_count << SELF_ID_ZERO_GAP_COUNT_SHIFT) & SELF_ID_ZERO_GAP_COUNT_MASK;
}

// Read the speed code field of a self ID zero packet.
static inline unsigned int phy_packet_self_id_zero_get_scode(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_SCODE_MASK) >> SELF_ID_ZERO_SCODE_SHIFT;
}

// Write the speed code field, preserving all other bits.
static inline void phy_packet_self_id_zero_set_scode(u32 *quadlet, unsigned int speed)
{
	*quadlet &= ~SELF_ID_ZERO_SCODE_MASK;
	*quadlet |= (speed << SELF_ID_ZERO_SCODE_SHIFT) & SELF_ID_ZERO_SCODE_MASK;
}

// Whether the contender flag of a self ID zero packet is set.
static inline bool phy_packet_self_id_zero_get_contender(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_CONTENDER_MASK) >> SELF_ID_ZERO_CONTENDER_SHIFT;
}

// Set or clear the contender flag, preserving all other bits.
static inline void phy_packet_self_id_zero_set_contender(u32 *quadlet, bool is_contender)
{
	*quadlet &= ~SELF_ID_ZERO_CONTENDER_MASK;
	*quadlet |= (is_contender << SELF_ID_ZERO_CONTENDER_SHIFT) & SELF_ID_ZERO_CONTENDER_MASK;
}

// Read the power class field of a self ID zero packet.
static inline unsigned int phy_packet_self_id_zero_get_power_class(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_POWER_CLASS_MASK) >> SELF_ID_ZERO_POWER_CLASS_SHIFT;
}

// Write the power class field, preserving all other bits.
static inline void phy_packet_self_id_zero_set_power_class(u32 *quadlet, unsigned int power_class)
{
	*quadlet &= ~SELF_ID_ZERO_POWER_CLASS_MASK;
	*quadlet |= (power_class << SELF_ID_ZERO_POWER_CLASS_SHIFT) & SELF_ID_ZERO_POWER_CLASS_MASK;
}

// Whether the initiated-reset flag of a self ID zero packet is set.
static inline bool phy_packet_self_id_zero_get_initiated_reset(u32 quadlet)
{
	return (quadlet & SELF_ID_ZERO_INITIATED_RESET_MASK) >> SELF_ID_ZERO_INITIATED_RESET_SHIFT;
}

// Set or clear the initiated-reset flag, preserving all other bits.
static inline void phy_packet_self_id_zero_set_initiated_reset(u32 *quadlet, bool is_initiated_reset)
{
	*quadlet &= ~SELF_ID_ZERO_INITIATED_RESET_MASK;
	*quadlet |= (is_initiated_reset << SELF_ID_ZERO_INITIATED_RESET_SHIFT) &
		    SELF_ID_ZERO_INITIATED_RESET_MASK;
}

// Whether more self ID packets follow this one in the sequence.
static inline bool phy_packet_self_id_get_more_packets(u32 quadlet)
{
	return (quadlet & SELF_ID_MORE_PACKETS_MASK) >> SELF_ID_MORE_PACKETS_SHIFT;
}

// Set or clear the more-packets flag, preserving all other bits.
static inline void phy_packet_self_id_set_more_packets(u32 *quadlet, bool is_more_packets)
{
	*quadlet &= ~SELF_ID_MORE_PACKETS_MASK;
	*quadlet |= (is_more_packets << SELF_ID_MORE_PACKETS_SHIFT) & SELF_ID_MORE_PACKETS_MASK;
}

// Read the sequence number field of an extended self ID packet.
static inline unsigned int phy_packet_self_id_extended_get_sequence(u32 quadlet)
{
	return (quadlet & SELF_ID_EXTENDED_SEQUENCE_MASK) >> SELF_ID_EXTENDED_SEQUENCE_SHIFT;
}

// Write the sequence number field, preserving all other bits.
static inline void phy_packet_self_id_extended_set_sequence(u32 *quadlet, unsigned int sequence)
{
	*quadlet &= ~SELF_ID_EXTENDED_SEQUENCE_MASK;
	// Fix: shift by SELF_ID_EXTENDED_SEQUENCE_SHIFT (20), not SELF_ID_EXTENDED_SHIFT (23).
	// The old shift pushed every non-zero sequence value outside the 0x00700000 mask,
	// so the stored field was always zero.
	*quadlet |= (sequence << SELF_ID_EXTENDED_SEQUENCE_SHIFT) & SELF_ID_EXTENDED_SEQUENCE_MASK;
}
// Cursor over a series of self ID packet quadlets, consumed one sequence at a
// time by self_id_sequence_enumerator_next().
// Fix: extraction dropped the cursor member, which the enumerator function
// dereferences (enumerator->cursor), and the closing brace of the struct.
struct self_id_sequence_enumerator {
	const u32 *cursor;		// next quadlet to consume; NULL when invalid
	unsigned int quadlet_count;	// quadlets remaining from cursor onward
};
223 static inline const u32
*self_id_sequence_enumerator_next(
224 struct self_id_sequence_enumerator
*enumerator
, unsigned int *quadlet_count
)
226 const u32
*self_id_sequence
, *cursor
;
229 unsigned int sequence
;
231 if (enumerator
->cursor
== NULL
|| enumerator
->quadlet_count
== 0)
232 return ERR_PTR(-ENODATA
);
233 cursor
= enumerator
->cursor
;
238 while (phy_packet_self_id_get_more_packets(quadlet
)) {
239 if (count
>= enumerator
->quadlet_count
||
240 count
>= SELF_ID_SEQUENCE_MAXIMUM_QUADLET_COUNT
)
241 return ERR_PTR(-EPROTO
);
246 if (!phy_packet_self_id_get_extended(quadlet
) ||
247 sequence
!= phy_packet_self_id_extended_get_sequence(quadlet
))
248 return ERR_PTR(-EPROTO
);
252 *quadlet_count
= count
;
253 self_id_sequence
= enumerator
->cursor
;
255 enumerator
->cursor
+= count
;
256 enumerator
->quadlet_count
-= count
;
258 return self_id_sequence
;
// Possible values of one two-bit port status field in a self ID sequence.
enum phy_packet_self_id_port_status {
	PHY_PACKET_SELF_ID_PORT_STATUS_NONE = 0,
	PHY_PACKET_SELF_ID_PORT_STATUS_NCONN = 1,
	PHY_PACKET_SELF_ID_PORT_STATUS_PARENT = 2,
	PHY_PACKET_SELF_ID_PORT_STATUS_CHILD = 3,
};
// Number of two-bit port status fields a self ID sequence of the given quadlet
// length can hold: 8 fields per quadlet, minus the 5 leading fields of the
// first quadlet that encode other information.
static inline unsigned int self_id_sequence_get_port_capacity(unsigned int quadlet_count)
{
	unsigned int total_fields = quadlet_count * 8;

	return total_fields - 5;
}
273 static inline enum phy_packet_self_id_port_status
self_id_sequence_get_port_status(
274 const u32
*self_id_sequence
, unsigned int quadlet_count
, unsigned int port_index
)
276 unsigned int index
, shift
;
278 index
= (port_index
+ 5) / 8;
279 shift
= 16 - ((port_index
+ 5) % 8) * 2;
281 if (index
< quadlet_count
&& index
< SELF_ID_SEQUENCE_MAXIMUM_QUADLET_COUNT
)
282 return (self_id_sequence
[index
] >> shift
) & SELF_ID_PORT_STATUS_MASK
;
284 return PHY_PACKET_SELF_ID_PORT_STATUS_NONE
;
287 static inline void self_id_sequence_set_port_status(u32
*self_id_sequence
, unsigned int quadlet_count
,
288 unsigned int port_index
,
289 enum phy_packet_self_id_port_status status
)
291 unsigned int index
, shift
;
293 index
= (port_index
+ 5) / 8;
294 shift
= 16 - ((port_index
+ 5) % 8) * 2;
296 if (index
< quadlet_count
) {
297 self_id_sequence
[index
] &= ~(SELF_ID_PORT_STATUS_MASK
<< shift
);
298 self_id_sequence
[index
] |= status
<< shift
;
302 #endif // _FIREWIRE_PHY_PACKET_DEFINITIONS_H