/*
 * Copyright (c) 1999-2000, Eric Moon.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions, and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions, and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
 * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

// FlangerNode.h
// * PURPOSE
//   - implements a basic audio filter
//   - eventually abstract -> 'SimpleAudioFilter'?
//
// * HISTORY
//   e.moon   15jun99   Begun

#ifndef __FlangerNode_H__
#define __FlangerNode_H__

#include <BufferProducer.h>
#include <BufferConsumer.h>
#include <Controllable.h>
#include <MediaEventLooper.h>

// forwards
class BBufferGroup;
class BMediaAddOn;

class AudioBuffer;

class FlangerNode :
	public BBufferConsumer,
	public BBufferProducer,
	public BControllable,
	public BMediaEventLooper {

public: // *** ctor/dtor
	virtual ~FlangerNode();
	FlangerNode(BMediaAddOn* pAddOn=0);

public: // *** BMediaNode

	virtual status_t HandleMessage(
		int32 code,
		const void* pData,
		size_t size);

	virtual BMediaAddOn* AddOn(
		int32* poID) const;

	virtual void SetRunMode(
		run_mode mode);

protected: // *** BMediaEventLooper

	virtual void HandleEvent(
		const media_timed_event* pEvent,
		bigtime_t howLate,
		bool realTimeEvent=false);

protected:
	// "The Media Server calls this hook function after the node has
	// been registered. This is derived from BMediaNode; BMediaEventLooper
	// implements it to call Run() automatically when the node is registered;
	// if you implement NodeRegistered() you should call through to
	// BMediaEventLooper::NodeRegistered() after you've done your custom
	// operations."
	virtual void NodeRegistered();
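
	// (Illustrative sketch only, not part of the original header: under the
	// guidance quoted above, an override for this node might initialize its
	// template format and parameter web, then call through. The exact setup
	// steps are an assumption.)
	//
	//   void FlangerNode::NodeRegistered() {
	//       getPreferredFormat(m_preferredFormat);
	//       initParameterValues();
	//       initParameterWeb();
	//       BMediaEventLooper::NodeRegistered();
	//   }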
95 // "Augment OfflineTime() to compute the node's current time; it's called
96 // by the Media Kit when it's in offline mode. Update any appropriate
97 // internal information as well, then call through to the BMediaEventLooper
98 // implementation."
100 virtual bigtime_t OfflineTime(); //nyi
102 public: // *** BBufferConsumer
104 virtual status_t AcceptFormat(
105 const media_destination& destination,
106 media_format* pioFormat);
108 // "If you're writing a node, and receive a buffer with the B_SMALL_BUFFER
109 // flag set, you must recycle the buffer before returning."
111 virtual void BufferReceived(
112 BBuffer* pBuffer);
114 // * make sure to fill in poInput->format with the contents of
115 // pFormat; as of R4.5 the Media Kit passes poInput->format to
116 // the producer in BBufferProducer::Connect().
118 virtual status_t Connected(
119 const media_source& source,
120 const media_destination& destination,
121 const media_format& format,
122 media_input* poInput);
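
	// (Illustrative sketch only, not from the original source: one way to
	// satisfy the note above inside Connected() -- record the connection and
	// copy the negotiated format into the media_input handed back to the
	// Media Kit.)
	//
	//   m_input.source = source;
	//   m_input.destination = destination;
	//   m_input.format = format;   // the format passed in by the producer
	//   *poInput = m_input;
	//   return B_OK;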
	virtual void Disconnected(
		const media_source& source,
		const media_destination& destination);

	virtual void DisposeInputCookie(
		int32 cookie);

	// "You should implement this function so your node will know that the data
	// format is going to change. Note that this may be called in response to
	// your AcceptFormat() call, if your AcceptFormat() call alters any wildcard
	// fields in the specified format.
	//
	// Because FormatChanged() is called by the producer, you don't need to (and
	// shouldn't) ask it if the new format is acceptable.
	//
	// If the format change isn't possible, return an appropriate error from
	// FormatChanged(); this error will be passed back to the producer that
	// initiated the new format negotiation in the first place."
	virtual status_t FormatChanged(
		const media_source& source,
		const media_destination& destination,
		int32 changeTag,
		const media_format& newFormat);

	virtual status_t GetLatencyFor(
		const media_destination& destination,
		bigtime_t* poLatency,
		media_node_id* poTimeSource);

	virtual status_t GetNextInput(
		int32* pioCookie,
		media_input* poInput);

	virtual void ProducerDataStatus(
		const media_destination& destination,
		int32 status,
		bigtime_t tpWhen);

	// "This function is provided to aid in supporting media formats in which the
	// outer encapsulation layer doesn't supply timing information. Producers will
	// tag the buffers they generate with seek tags; these tags can be used to
	// locate key frames in the media data."
	virtual status_t SeekTagRequested(
		const media_destination& destination,
		bigtime_t targetTime,
		uint32 flags,
		media_seek_tag* poSeekTag,
		bigtime_t* poTaggedTime,
		uint32* poFlags);

public: // *** BBufferProducer

	// "When a consumer calls BBufferConsumer::RequestAdditionalBuffer(), this
	// function is called as a result. Its job is to call SendBuffer() to
	// immediately send the next buffer to the consumer. The previousBufferID,
	// previousTime, and previousTag arguments identify the last buffer the
	// consumer received. Your node should respond by sending the next buffer
	// after the one described.
	//
	// The previousTag may be NULL.
	// Return B_OK if all is well; otherwise return an appropriate error code."
	virtual void AdditionalBufferRequested(
		const media_source& source,
		media_buffer_id previousBufferID,
		bigtime_t previousTime,
		const media_seek_tag* pPreviousTag); //nyi

	virtual void Connect(
		status_t status,
		const media_source& source,
		const media_destination& destination,
		const media_format& format,
		char* pioName); //nyi

	virtual void Disconnect(
		const media_source& source,
		const media_destination& destination); //nyi

	virtual status_t DisposeOutputCookie(
		int32 cookie); //nyi

	virtual void EnableOutput(
		const media_source& source,
		bool enabled,
		int32* _deprecated_); //nyi

	virtual status_t FormatChangeRequested(
		const media_source& source,
		const media_destination& destination,
		media_format* pioFormat,
		int32* _deprecated_); //nyi

	virtual status_t FormatProposal(
		const media_source& source,
		media_format* pioFormat); //nyi

	virtual status_t FormatSuggestionRequested(
		media_type type,
		int32 quality,
		media_format* poFormat); //nyi

	virtual status_t GetLatency(
		bigtime_t* poLatency); //nyi

	virtual status_t GetNextOutput(
		int32* pioCookie,
		media_output* poOutput); //nyi

	// "This hook function is called when a BBufferConsumer that's receiving data
	// from you determines that its latency has changed. It will call its
	// BBufferConsumer::SendLatencyChange() function, and in response, the Media
	// Server will call your LatencyChanged() function. The source argument
	// indicates your output that's involved in the connection, and destination
	// specifies the input on the consumer to which the connection is linked.
	// newLatency is the consumer's new latency. The flags are currently unused."
	virtual void LatencyChanged(
		const media_source& source,
		const media_destination& destination,
		bigtime_t newLatency,
		uint32 flags); //nyi
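
	// (Illustrative sketch only, not from the original source: a typical
	// LatencyChanged() body for a node like this records the consumer's new
	// latency and republishes its total event latency via
	// BMediaEventLooper::SetEventLatency().)
	//
	//   m_downstreamLatency = newLatency;
	//   SetEventLatency(m_downstreamLatency + m_processingLatency);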
	virtual void LateNoticeReceived(
		const media_source& source,
		bigtime_t howLate,
		bigtime_t tpWhen); //nyi

	// PrepareToConnect() is the second stage of format negotiations that happens
	// inside BMediaRoster::Connect(). At this point, the consumer's AcceptFormat()
	// method has been called, and that node has potentially changed the proposed
	// format. It may also have left wildcards in the format. PrepareToConnect()
	// *must* fully specialize the format before returning!
	virtual status_t PrepareToConnect(
		const media_source& source,
		const media_destination& destination,
		media_format* pioFormat,
		media_source* poSource,
		char* poName); //nyi
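
	// (Illustrative sketch only, not from the original source: the core of a
	// PrepareToConnect() implementation for this node -- validate the proposed
	// format against the template, then replace any remaining wildcards so the
	// format handed back to the consumer is fully specified.)
	//
	//   status_t err = validateProposedFormat(m_preferredFormat, *pioFormat);
	//   if (err < B_OK)
	//       return err;
	//   specializeOutputFormat(*pioFormat);
	//   *poSource = m_output.source;
	//   strncpy(poName, m_output.name, B_MEDIA_NAME_LENGTH);
	//   return B_OK;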
	virtual status_t SetBufferGroup(
		const media_source& source,
		BBufferGroup* pGroup); //nyi

	virtual status_t SetPlayRate(
		int32 numerator,
		int32 denominator); //nyi

	virtual status_t VideoClippingChanged(
		const media_source& source,
		int16 numShorts,
		int16* pClipData,
		const media_video_display_info& display,
		int32* poFromChangeTag); //nyi

public: // *** BControllable

	virtual status_t GetParameterValue(
		int32 id,
		bigtime_t* poLastChangeTime,
		void* poValue,
		size_t* pioSize); //nyi

	virtual void SetParameterValue(
		int32 id,
		bigtime_t changeTime,
		const void* pValue,
		size_t size); //nyi

protected: // HandleEvent() impl.

	void handleParameterEvent(
		const media_timed_event* pEvent);

	void handleStartEvent(
		const media_timed_event* pEvent);

	void handleStopEvent(
		const media_timed_event* pEvent);

	void ignoreEvent(
		const media_timed_event* pEvent);

protected: // *** internal operations

	// figure the preferred format: any fields left as wildcards
	// are negotiable
	virtual void getPreferredFormat(
		media_format& ioFormat);

	// test the given template format against a proposed format.
	// specialize wildcards for fields where the template contains
	// non-wildcard data; write required fields into proposed format
	// if they mismatch.
	// Returns B_OK if the proposed format doesn't conflict with the
	// template, or B_MEDIA_BAD_FORMAT otherwise.
	status_t validateProposedFormat(
		const media_format& preferredFormat,
		media_format& ioProposedFormat);
	// fill in wildcards in the given format.
	// (assumes the format passes validateProposedFormat().)
	void specializeOutputFormat(
		media_format& ioFormat);

	// set parameters to their default settings
	virtual void initParameterValues();

	// create and register a parameter web
	virtual void initParameterWeb();

	// construct delay line if necessary, reset filter state
	virtual void initFilter();

	virtual void startFilter();
	virtual void stopFilter();

	// figure processing latency by doing 'dry runs' of filterBuffer()
	virtual bigtime_t calcProcessingLatency();
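
	// (Illustrative sketch only, not from the original source: one way to time
	// a 'dry run' -- request a scratch buffer at the negotiated buffer size,
	// push it through filterBuffer(), and measure the wall-clock cost with
	// system_time(). The use of a temporary BBufferGroup here is an assumption.)
	//
	//   BBufferGroup group(m_format.u.raw_audio.buffer_size, 1);
	//   BBuffer* pBuffer = group.RequestBuffer(m_format.u.raw_audio.buffer_size);
	//   pBuffer->Header()->size_used = m_format.u.raw_audio.buffer_size;
	//   bigtime_t start = system_time();
	//   filterBuffer(pBuffer);
	//   bigtime_t elapsed = system_time() - start;
	//   pBuffer->Recycle();
	//   return elapsed;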
	// filter buffer data in place
	virtual void filterBuffer(
		BBuffer* pBuffer); //nyi
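
	// (Illustrative sketch only, not from the original source: the inner loop of
	// a mono flanger over float samples, phrased in terms of the members declared
	// below. delayedFrameAt() and storeFrame() are hypothetical helpers standing
	// in for reads/writes on the AudioBuffer delay line; a real implementation
	// must also honor the channel count and sample format in m_format and wrap
	// positions at the delay-line length.)
	//
	//   float* pData = (float*)pBuffer->Data();
	//   uint32 frames = pBuffer->Header()->size_used / sizeof(float);
	//   for (uint32 frame = 0; frame < frames; ++frame) {
	//       // swept delay, in frames, for this sample
	//       float delayFrames = m_fSweepBase + m_fSweepFactor * sin(m_fTheta);
	//       m_fTheta += m_fThetaInc;
	//       // read the delayed sample (interpolation omitted for brevity)
	//       float delayed = delayedFrameAt(m_delayWriteFrame - delayFrames);
	//       // write input plus feedback into the delay line, then mix
	//       storeFrame(m_delayWriteFrame++, pData[frame] + m_fFeedback * delayed);
	//       pData[frame] = (1.0f - m_fMixRatio) * pData[frame]
	//           + m_fMixRatio * delayed;
	//   }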
private: // *** connection/format members

	// The 'template' format
	// +++++ init in NodeRegistered()
	media_format m_preferredFormat;

	// The current input/output format (this filter doesn't do any
	// on-the-fly conversion.) Any fields that are not wildcards
	// are mandatory; the first connection (input or output) decides
	// the node's format. If both input and output are disconnected,
	// m_format.u.raw_audio should revert to media_raw_audio_format::wildcard.
	media_format m_format;

	// Connections & associated state variables
	media_input m_input;

	media_output m_output;
	bool m_outputEnabled;

	// [16jun99] buffers are generated by the upstream producer; this
	// node processes them in-place and forwards them downstream.

//	// The outbound buffer group
//	BBufferGroup* m_pBufferGroup;

	// Time required by downstream consumer(s) to properly deliver a buffer
	bigtime_t m_downstreamLatency;

	// Worst-case time needed to fill a buffer
	bigtime_t m_processingLatency;

private: // *** filter state

	// Frames sent since the filter started
	uint64 m_framesSent;

	// the buffer
	AudioBuffer* m_pDelayBuffer;

	// write position (buffer offset at which the next
	// incoming frame will be stored)
	uint32 m_delayWriteFrame;

	// radial counter (for sweep 'LFO')
	float m_fTheta;
	float m_fThetaInc;

	// sweep LFO state
	float m_fSweepBase;
	float m_fSweepFactor;
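
	// (Illustrative note, not from the original source: with a sinusoidal
	// sweep, the per-frame increment would typically be derived from the
	// sweep-rate parameter and the connection's frame rate, e.g.
	//
	//   m_fThetaInc = 2 * M_PI * m_fSweepRate / m_format.u.raw_audio.frame_rate;
	//
	// so that m_fTheta completes one full LFO cycle per sweep period.)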
//	// position (relative to m_delayWriteFrame) from which
//	// delayed frames are read. varies between -m_fSweepMax and
//	// -m_fSweepMin.
//	float m_fDelayReadOffset;

//	// rate at which m_fDelayReadOffset currently varies.
//	// [16jun99: a triangle-shaped sweep for now]
//	float m_fDelayReadDelta;

	// maximum delay (buffer length) in milliseconds
	static const float s_fMaxDelay;
private: // *** filter parameter data

	// ratio of dry-to-processed signal
	float m_fMixRatio;
	bigtime_t m_tpMixRatioChanged;

	// rate of sweep (Hz)
	float m_fSweepRate;
	bigtime_t m_tpSweepRateChanged;

	// minimum delay (low bound of sweep) (ms)
	float m_fDelay;
	bigtime_t m_tpDelayChanged;

	// range of sweep (ms)
	float m_fDepth;
	bigtime_t m_tpDepthChanged;

	// feedback (0.0 - 1.0)
	float m_fFeedback;
	bigtime_t m_tpFeedbackChanged;

private: // *** add-on stuff

	// host add-on
	BMediaAddOn* m_pAddOn;

	static const char* const s_nodeName;
};

#endif /*__FlangerNode_H__*/