From 873910f0b8162a207267b2eebaf96086f8f1a35c Mon Sep 17 00:00:00 2001 From: Ichthyostega Date: Sun, 28 Sep 2008 04:05:10 +0200 Subject: [PATCH] WIP considerations about querying --- src/common/streamtype.hpp | 18 +++---- src/lib/external/libgavl.hpp | 1 + src/proc/control/stypemanager.hpp | 1 + src/tool/try.cpp | 80 ------------------------------- wiki/renderengine.html | 25 +++++++--- 5 files changed, 30 insertions(+), 95 deletions(-) diff --git a/src/common/streamtype.hpp b/src/common/streamtype.hpp index dfebfca95..28c87bfa4 100644 --- a/src/common/streamtype.hpp +++ b/src/common/streamtype.hpp @@ -70,7 +70,6 @@ namespace lumiera { class ImplConstraint; - MediaKind kind; Prototype const& prototype; ImplFacade * implType; Usage intentionTag; @@ -85,13 +84,14 @@ namespace lumiera { struct StreamType::Prototype { Symbol id; + MediaKind kind; bool subsumes (Prototype const& other) const; - bool canConvert (Prototype const& other) const; + bool canRender (Prototype const& other) const; }; - + /** * A (more or less) concrete implementation type, wired up @@ -106,7 +106,7 @@ namespace lumiera { /** placeholder definition for the contents of a data buffer */ struct DataBuffer { }; - + virtual bool operator== (ImplFacade const& other) const =0; virtual bool operator== (StreamType const& other) const =0; @@ -115,6 +115,7 @@ namespace lumiera { virtual bool canConvert (StreamType const& other) const =0; virtual DataBuffer* createFrame () const =0; + virtual MediaKind getKind() const =0; virtual ~ImplFacade() {}; @@ -144,7 +145,7 @@ namespace lumiera { /** create a default impl type in accordance to this constraint * and use it to create a new framebuffer */ virtual DataBuffer* createFrame () const =0; - + /** similarily create a impl type which complies to this constraint * as well as to the additional constraints (e.g. frame size). 
* Create a new framebuffer of the resutling type */ @@ -174,10 +175,9 @@ namespace lumiera { libraryID(lID) { } }; - - - - + + + } // namespace lumiera #endif diff --git a/src/lib/external/libgavl.hpp b/src/lib/external/libgavl.hpp index 945aec2b7..d8d7bc412 100644 --- a/src/lib/external/libgavl.hpp +++ b/src/lib/external/libgavl.hpp @@ -66,6 +66,7 @@ namespace lib { virtual bool canConvert (ImplFacade const& other) const; virtual bool canConvert (StreamType const& other) const; + virtual StreamType::MediaKind getKind() const; virtual DataBuffer* createFrame () const; }; diff --git a/src/proc/control/stypemanager.hpp b/src/proc/control/stypemanager.hpp index 48445a427..dcefe74f3 100644 --- a/src/proc/control/stypemanager.hpp +++ b/src/proc/control/stypemanager.hpp @@ -48,6 +48,7 @@ namespace control { typedef StreamType::ImplFacade ImplFacade; + /** (re)-access a media stream type using * just a symbolic ID. Effectively this queries a default */ StreamType const& getType (Symbol sTypeID) ; diff --git a/src/tool/try.cpp b/src/tool/try.cpp index 3f51bcaf6..167946552 100644 --- a/src/tool/try.cpp +++ b/src/tool/try.cpp @@ -20,101 +20,21 @@ using std::string; using std::cout; -using std::ostream; using boost::format; -#include -using boost::enable_if; -#include -using boost::is_base_of; -#include -#include - - -#include "common/meta/generator.hpp" -using lumiera::typelist::NullType; -using lumiera::typelist::Node; -using lumiera::typelist::Types; - - namespace { boost::format fmt ("<%2i>"); - /** constant-wrapper type for debugging purposes, - * usable for generating lists of distinghishable types - */ - template - struct Num - { - enum{ VAL=I }; - static string str () { return boost::str (fmt % I); } - Num() - { - cout << Num::str(); - } - }; - - - - template class _CandidateTemplate_> - class Instantiation - { - template - struct If_possibleArgument : _CandidateTemplate_ - { - typedef void Type; - }; - - public: - - template - struct Test - : boost::false_type {}; - 
- template - struct Test::Type > - : boost::true_type {}; - - }; - } - struct Boing { typedef boost::true_type is_defined; }; - - template struct Zoing ; - - template<> struct Zoing<2> : Boing { enum{wau = 2}; }; - template<> struct Zoing<5> : Boing { enum{wau = 5}; }; - - typedef char yes_type; - struct no_type - { - char padding[8]; - }; - - template - yes_type check(typename U::is_defined *); - template - no_type check(...); - - int main (int argc, char* argv[]) { NOBUG_INIT; - typedef Zoing<2> Z2; - typedef Zoing<3> Z3; - typedef Zoing<5> Z5; - - cout << sizeof(check(0)) << " / " - << sizeof(check(0)) << " / " - << sizeof(check(0)) ; - - cout << "\ngulp\n"; diff --git a/wiki/renderengine.html b/wiki/renderengine.html index c1681b9f6..d3b2b7c47 100644 --- a/wiki/renderengine.html +++ b/wiki/renderengine.html @@ -3354,7 +3354,7 @@ Consequently, as we can't get away with an fixed Enum of all stream prototypes, NTSC and PAL video, video versus digitized film, HD video versus SD video, 3D versus flat video, cinemascope versus 4:3, stereophonic versus monaural, periphonic versus panoramic sound, Ambisonics versus 5.1, dolby versus linear PCM... -
+
//how to classify and describe media streams//
 Media data is understood to appear structured as stream(s) over time. While there may be an inherent internal structuring, at a given perspective ''any stream is a unit and homogeneous''. In the context of digital media data processing, streams are always ''quantized'', which means they appear as a temporal sequence of data chunks called ''frames''.
 
@@ -3376,8 +3376,8 @@ A stream type is denoted by a StreamTypeID, which is an identifier, acting as an
 !! Classification
 Within the Proc-Layer, media streams are treated largely in a similar manner. But, looking closer, note everything can be connected together, while on the other hand there may be some classes of media streams which can be considered //equivalent// in most respects. Thus, it seems reasonable to separate the distinction between various media streams into several levels
 * Each media belongs to a fundamental ''kind'' of media, examples being __Video__, __Image__, __Audio__, __MIDI__,... Media streams of different kind can be considered somewhat "completely separate" &mdash; just the handling of each of those media kinds follows a common //generic pattern// augmented with specialisations. Basically, it is //impossible to connect// media streams of different kind. Under some circumstances there may be the possibility of a //transformation// though. For example, a still image can be incorporated into video, sound may be visualized, MIDI may control a sound synthesizer.
-* Below the level of distinct kinds of media streams, within every kind we have an open ended collection of ''prototypes'', which, when compared directly may each be quite distinct and different, but which may be //rendered//&nbsp; into each other. For example, we have stereoscopic (3D) video and we have the common flat video lacking depth information, we have several spatial audio systems (Ambisonics, Wave Field Synthesis), we have panorama simulating sound systems (5.1, 7.1,...), we have common stereophonic and monaural audio. It is considered important to retain some openness and configurability within this level of distinction, which means this classification should better be done by rules then by setting up a fixed property table. For example, it may be desirable for some production to distinguish between digitized film and video NTSC and PAL, while in another production everything is just "video" and can be converted automatically. The most noticeable consequence of such a distinction is that any Bus or [[Pipe]] is always limited to a media stream of a single prototype. (&rarr; [[more|StreamPrototype]])
-* Besides the distinction by prototypes, there are the various media ''implementation types''. This classification is not necessarily hierarchically related to the prototype classification, while in practice commonly there will be some sort of dependency. For example, both stereophonic and monaural audio may be implemented as 96kHz 24bit PCM with just a different number of channel streams, as well we may have a dedicated stereo audio stream with two channels multiplexed into a single stream. For dealing with media streams of various implementation type, we need //library// routines, which also yield a //type classification system.// Most notably, for raw sound and video data we use the GAVL library, which defines a classification system for buffers and streams.
+* Below the level of distinct kinds of media streams, within every kind we have an open ended collection of ''prototypes'', which, when compared directly, may each be quite distinct and different, but which may be //rendered//&nbsp; into each other. For example, we have stereoscopic (3D) video and we have the common flat video lacking depth information, we have several spatial audio systems (Ambisonics, Wave Field Synthesis), we have panorama simulating sound systems (5.1, 7.1,...), we have common stereophonic and monaural audio. It is considered important to retain some openness and configurability within this level of distinction, which means this classification should better be done by rules than by setting up a fixed property table. For example, it may be desirable for some production to distinguish between digitized film and video NTSC and PAL, while in another production everything is just "video" and can be converted automatically. The most noticeable consequence of such a distinction is that any Bus or [[Pipe]] is always limited to a media stream of a single prototype. (&rarr; [[more|StreamPrototype]])
+* Besides the distinction by prototypes, there are the various media ''implementation types''. This classification is not necessarily hierarchically related to the prototype classification, while in practice commonly there will be some sort of dependency. For example, both stereophonic and monaural audio may be implemented as 96kHz 24bit PCM with just a different number of channel streams, as well we may have a dedicated stereo audio stream with two channels multiplexed into a single stream. For dealing with media streams of various implementation type, we need //library// routines, which also yield a //type classification system.// Most notably, for raw sound and video data we use the [[GAVL]] library, which defines a classification system for buffers and streams.
 * Besides the type classification detailed thus far, we introduce an ''intention tag''. This is a synthetic classification owned by Lumiera and used for internal wiring decisions. Currently (8/08), we recognize the following intention tags: __Source__, __Raw__, __Intermediary__ and __Target__. Only media streams tagged as __Raw__ can be processed.
 
 !! Media handling requirements involving stream type classification
@@ -3385,7 +3385,9 @@ Within the Proc-Layer, media streams are treated largely in a similar manner. Bu
 * determine if a given media data source and sink can be connected, and how.
 * determine and enumerate the internal structure of a stream.
 * discover processing facilities
-&rarr; see StreamTypeUse
+&rarr; see StreamTypeUse +&rarr; [[querying types|StreamTypeQuery]] +
A description and classification record usable to find out about the properties of a media stream. The stream type descriptor can be accessed using an unique StreamTypeID. The information contained in this descriptor record can intentionally be //incomplete,// in which case the descriptor captures a class of matching media stream types. The following information is maintained:
@@ -3417,13 +3419,24 @@ An implementation constraint can //stand-in// for a completely specified impleme
 //Note:// there is a sort-of "degraded" variant just requiring some &rarr; [[implementation constraint|StreamTypeImplConstraint]] to hold
 
-
+
+
Querying for media stream type information comes in various flavours
+* you may find a structural object (pipe, output, processing pattern) associated with / able to deal with a certain stream type
+* you may need a StreamTypeDescriptor for an existing stream given as implementation data
+* you may want to build or complete type information from partial specification.
+Mostly, those queries involve the ConfigRules system in some way or the other. The [[prototype-|StreamPrototype]] and [[implementation type|StreamTypeImplFacade]]-interfaces themselves are mostly a facade for issuing appropriate queries. Some objects (especially [[pipes|Pipe]]) are tied to a certain stream type and thus store a direct link to type information. Others are just associated with a type by virtue of the DefaultsManagement.
+
+
+
Questions regarding the use of StreamType within the Proc-Layer.
-
 * what is the relation between Buffer and Frame?
 * how to get the required size of a Buffer?
 * who does buffer allocations and how?
 
+Mostly, stream types are used for querying, either to decide if they can be connected, or to find usable processing modules.
+Even building a stream type from partial information involves some sort of query.
+&rarr; more on [[media stream type queries|StreamTypeQuery]]
+
 !creating stream types
 seemingly stream types are created based on an already existing media stream (or a Frame of media data?). {{red{really?}}}
 The other use case seems to be that of an //incomplete// stream type based on a [[Prototype|StreamPrototype]]