1. How the training functions (traingdm, trainlm, trainbr) of MATLAB's BP neural network training algorithms are implemented, and the corresponding VC source code
2. How to design a custom transform filter
How the training functions (traingdm, trainlm, trainbr) of MATLAB's BP neural network training algorithms are implemented, and the corresponding VC source code
VC source code? You must be joking...
Here is the m-code of trainlm:
function [out1,out2] = trainlm(varargin)
%TRAINLM Levenberg-Marquardt backpropagation.
%
% <a href="matlab:doc trainlm">trainlm</a> is a network training function that updates weight and
% bias states according to Levenberg-Marquardt optimization.
%
% <a href="matlab:doc trainlm">trainlm</a> is often the fastest backpropagation algorithm in the toolbox,
% and is highly recommended as a first choice supervised algorithm,
% although it does require more memory than other algorithms.
%
% [NET,TR] = <a href="matlab:doc trainlm">trainlm</a>(NET,X,T) takes a network NET, input data X
% and target data T and returns the network after training it, and a
% training record TR.
%
% [NET,TR] = <a href="matlab:doc trainlm">trainlm</a>(NET,X,T,Xi,Ai,EW) takes additional optional
% arguments suitable for training dynamic networks and training with
% error weights. Xi and Ai are the initial input and layer delays states
% respectively and EW defines error weights used to indicate
% the relative importance of each target value.
%
% Training occurs according to training parameters, with default values.
% Any or all of these can be overridden with parameter name/value argument
% pairs appended to the input argument list, or by appending a structure
% argument with fields having one or more of these names.
% show 25 Epochs between displays
% showCommandLine 0 generate command line output
% showWindow 1 show training GUI
% epochs 1000 Maximum number of epochs to train
% goal 0 Performance goal
% max_fail 6 Maximum validation failures
% min_grad 1e-5 Minimum performance gradient
% mu 0.001 Initial Mu
% mu_dec 0.1 Mu decrease factor
% mu_inc 10 Mu increase factor
% mu_max 1e10 Maximum Mu
% time inf Maximum time to train in seconds
%
% To make this the default training function for a network, and view
% and/or change parameter settings, use these two properties:
%
% net.<a href="matlab:doc nnproperty.net_trainFcn">trainFcn</a> = 'trainlm';
% net.<a href="matlab:doc nnproperty.net_trainParam">trainParam</a>
%
% See also trainscg, feedforwardnet, narxnet.
% Mark Beale, ODJ
% Updated by Orlando De Jesús, Martin Hagan, Dynamic Training
% Copyright The MathWorks, Inc.
%% =======================================================
% BOILERPLATE_START
% This code is the same for all Training Functions.
persistent INFO;
if isempty(INFO), INFO = get_info; end
nnassert.minargs(nargin,1);
in1 = varargin{ 1};
if ischar(in1)
switch (in1)
case 'info'
out1 = INFO;
case 'check_param'
nnassert.minargs(nargin,2);
param = varargin{ 2};
err = nntest.param(INFO.parameters,param);
if isempty(err)
err = check_param(param);
end
if nargout > 0
out1 = err;
elseif ~isempty(err)
nnerr.throw('Type',err);
end
otherwise,
try
out1 = eval(['INFO.' in1]);
catch me, nnerr.throw(['Unrecognized first argument: ''' in1 ''''])
end
end
return
end
nnassert.minargs(nargin,2);
net = nn.hints(nntype.network('format',in1,'NET'));
oldTrainFcn = net.trainFcn;
oldTrainParam = net.trainParam;
if ~strcmp(net.trainFcn,mfilename)
net.trainFcn = mfilename;
net.trainParam = INFO.defaultParam;
end
[args,param] = nnparam.extract_param(varargin(2:end),net.trainParam);
err = nntest.param(INFO.parameters,param);
if ~isempty(err), nnerr.throw(nnerr.value(err,'NET.trainParam')); end
if INFO.isSupervised && isempty(net.performFcn) % TODO - fill in MSE
nnerr.throw('Training function is supervised but NET.performFcn is undefined.');
end
if INFO.usesGradient && isempty(net.derivFcn) % TODO - fill in
nnerr.throw('Training function uses derivatives but NET.derivFcn is undefined.');
end
if net.hint.zeroDelay, nnerr.throw('NET contains a zero-delay loop.'); end
[X,T,Xi,Ai,EW] = nnmisc.defaults(args,{ },{ },{ },{ },{ 1});
X = nntype.data('format',X,'Inputs X');
T = nntype.data('format',T,'Targets T');
Xi = nntype.data('format',Xi,'Input states Xi');
Ai = nntype.data('format',Ai,'Layer states Ai');
EW = nntype.nndata_pos('format',EW,'Error weights EW');
% Prepare Data
[net,data,tr,~,err] = nntraining.setup(net,mfilename,X,Xi,Ai,T,EW);
if ~isempty(err), nnerr.throw('Args',err), end
% Train
net = struct(net);
fcns = nn.subfcns(net);
[net,tr] = train_network(net,tr,data,fcns,param);
tr = nntraining.tr_clip(tr);
if isfield(tr,'perf')
tr.best_perf = tr.perf(tr.best_epoch+1);
end
if isfield(tr,'vperf')
tr.best_vperf = tr.vperf(tr.best_epoch+1);
end
if isfield(tr,'tperf')
tr.best_tperf = tr.tperf(tr.best_epoch+1);
end
net.trainFcn = oldTrainFcn;
net.trainParam = oldTrainParam;
out1 = network(net);
out2 = tr;
end
% BOILERPLATE_END
%% =======================================================
% TODO - MU => MU_START
% TODO - alternate parameter names (i.e. MU for MU_START)
function info = get_info()
info = nnfcnTraining(mfilename,'Levenberg-Marquardt',7.0,true,true,...
[ ...
nnetParamInfo('showWindow','Show Training Window Feedback','nntype.bool_scalar',true,...
'Display training window during training.'), ...
nnetParamInfo('showCommandLine','Show Command Line Feedback','nntype.bool_scalar',false,...
'Generate command line output during training.'), ...
nnetParamInfo('show','Command Line Frequency','nntype.strict_pos_int_inf_scalar',25,...
'Frequency to update command line.'), ...
...
nnetParamInfo('epochs','Maximum Epochs','nntype.pos_int_scalar',1000,...
'Maximum number of training iterations before training is stopped.'), ...
nnetParamInfo('time','Maximum Training Time','nntype.pos_inf_scalar',inf,...
'Maximum time in seconds before training is stopped.'), ...
...
nnetParamInfo('goal','Performance Goal','nntype.pos_scalar',0,...
'Performance goal.'), ...
nnetParamInfo('min_grad','Minimum Gradient','nntype.pos_scalar',1e-5,...
'Minimum performance gradient before training is stopped.'), ...
nnetParamInfo('max_fail','Maximum Validation Checks','nntype.strict_pos_int_scalar',6,...
'Maximum number of validation checks before training is stopped.'), ...
...
nnetParamInfo('mu','Mu','nntype.pos_scalar',0.001,...
'Mu.'), ...
nnetParamInfo('mu_dec','Mu Decrease Ratio','nntype.real_0_to_1',0.1,...
'Ratio to decrease mu.'), ...
nnetParamInfo('mu_inc','Mu Increase Ratio','nntype.over1',10,...
'Ratio to increase mu.'), ...
nnetParamInfo('mu_max','Maximum mu','nntype.strict_pos_scalar',1e10,...
'Maximum mu before training is stopped.'), ...
], ...
[ ...
nntraining.state_info('gradient','Gradient','continuous','log') ...
nntraining.state_info('mu','Mu','continuous','log') ...
nntraining.state_info('val_fail','Validation Checks','discrete','linear') ...
]);
end
function err = check_param(param)
err = '';
end
function [net,tr] = train_network(net,tr,data,fcns,param)
% Checks
if isempty(net.performFcn)
warning('nnet:trainlm:Performance',nnwarning.empty_performfcn_corrected);
net.performFcn = 'mse';
net.performParam = mse('defaultParam');
tr.performFcn = net.performFcn;
tr.performParam = net.performParam;
end
if isempty(strmatch(net.performFcn,{ 'sse','mse'},'exact'))
warning('nnet:trainlm:Performance',nnwarning.nonjacobian_performfcn_replaced);
net.performFcn = 'mse';
net.performParam = mse('defaultParam');
tr.performFcn = net.performFcn;
tr.performParam = net.performParam;
end
% Initialize
startTime = clock;
original_net = net;
[perf,vperf,tperf,je,jj,gradient] = nntraining.perfs_jejj(net,data,fcns);
[best,val_fail] = nntraining.validation_start(net,perf,vperf);
WB = getwb(net);
lengthWB = length(WB);
ii = sparse(1:lengthWB,1:lengthWB,ones(1,lengthWB));
mu = param.mu;
% Training Record
tr.best_epoch = 0;
tr.goal = param.goal;
tr.states = { 'epoch','time','perf','vperf','tperf','mu','gradient','val_fail'};
% Status
status = ...
[ ...
nntraining.status('Epoch','iterations','linear','discrete',0,param.epochs,0), ...
nntraining.status('Time','seconds','linear','discrete',0,param.time,0), ...
nntraining.status('Performance','','log','continuous',perf,param.goal,perf) ...
nntraining.status('Gradient','','log','continuous',gradient,param.min_grad,gradient) ...
nntraining.status('Mu','','log','continuous',mu,param.mu_max,mu) ...
nntraining.status('Validation Checks','','linear','discrete',0,param.max_fail,0) ...
];
nn_train_feedback('start',net,status);
% Train
for epoch = 0:param.epochs
% Stopping Criteria
current_time = etime(clock,startTime);
[userStop,userCancel] = nntraintool('check');
if userStop, tr.stop = 'User stop.'; net = best.net;
elseif userCancel, tr.stop = 'User cancel.'; net = original_net;
elseif (perf <= param.goal), tr.stop = 'Performance goal met.'; net = best.net;
elseif (epoch == param.epochs), tr.stop = 'Maximum epoch reached.'; net = best.net;
elseif (current_time >= param.time), tr.stop = 'Maximum time elapsed.'; net = best.net;
elseif (gradient <= param.min_grad), tr.stop = 'Minimum gradient reached.'; net = best.net;
elseif (mu >= param.mu_max), tr.stop = 'Maximum MU reached.'; net = best.net;
elseif (val_fail >= param.max_fail), tr.stop = 'Validation stop.'; net = best.net;
end
% Feedback
tr = nntraining.tr_update(tr,[epoch current_time perf vperf tperf mu gradient val_fail]);
nn_train_feedback('update',net,status,tr,data, ...
[epoch,current_time,best.perf,gradient,mu,val_fail]);
% Stop
if ~isempty(tr.stop), break, end
% Levenberg Marquardt
while (mu <= param.mu_max)
% CHECK FOR SINGULAR MATRIX
[msgstr,msgid] = lastwarn;
lastwarn('MATLAB:nothing','MATLAB:nothing')
warnstate = warning('off','all');
dWB = -(jj+ii*mu) \ je;
[~,msgid1] = lastwarn;
flag_inv = isequal(msgid1,'MATLAB:nothing');
if flag_inv, lastwarn(msgstr,msgid); end;
warning(warnstate)
WB2 = WB + dWB;
net2 = setwb(net,WB2);
perf2 = nntraining.train_perf(net2,data,fcns);
% TODO - possible speed enhancement
% - retain intermediate variables for Memory Reduction = 1
if (perf2 < perf) && flag_inv
WB = WB2; net = net2;
mu = max(mu*param.mu_dec,1e-20);
break
end
mu = mu * param.mu_inc;
end
% Validation
[perf,vperf,tperf,je,jj,gradient] = nntraining.perfs_jejj(net,data,fcns);
[best,tr,val_fail] = nntraining.validation(best,tr,val_fail,net,perf,vperf,epoch);
end
end
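Since the original question also asked for VC code: below is a minimal, self-contained C++ sketch of the core step that trainlm performs in its inner while-loop above (dWB = -(jj+ii*mu)\je, keep the step only if performance improves, otherwise raise mu). This is not the MathWorks implementation; the Jacobian J, the error vector e and the sse() performance callback are assumptions supplied by the caller, and the dense Gaussian elimination is only meant to make the update rule explicit.

// Minimal Levenberg-Marquardt update sketch (not the MathWorks code).
// Mirrors trainlm's inner loop: solve (J'J + mu*I)*dW = -J'e, accept the step
// only when the sum-squared error decreases, otherwise increase mu.
#include <vector>
#include <cmath>
#include <algorithm>

// Solve A*x = b (A is n*n, row-major) by Gaussian elimination with partial
// pivoting. Returns false when the matrix is numerically singular, which
// plays the role of the flag_inv check in trainlm.
static bool solveLinear(std::vector<double> A, std::vector<double> b, std::vector<double>& x)
{
    const size_t n = b.size();
    for (size_t k = 0; k < n; ++k) {
        size_t piv = k;
        for (size_t i = k + 1; i < n; ++i)
            if (std::fabs(A[i*n + k]) > std::fabs(A[piv*n + k])) piv = i;
        if (std::fabs(A[piv*n + k]) < 1e-12) return false;
        if (piv != k) {
            for (size_t j = 0; j < n; ++j) std::swap(A[k*n + j], A[piv*n + j]);
            std::swap(b[k], b[piv]);
        }
        for (size_t i = k + 1; i < n; ++i) {
            double f = A[i*n + k] / A[k*n + k];
            for (size_t j = k; j < n; ++j) A[i*n + j] -= f * A[k*n + j];
            b[i] -= f * b[k];
        }
    }
    x.assign(n, 0.0);
    for (size_t i = n; i-- > 0; ) {
        double s = b[i];
        for (size_t j = i + 1; j < n; ++j) s -= A[i*n + j] * x[j];
        x[i] = s / A[i*n + i];
    }
    return true;
}

// One epoch of the LM weight update. J is m*n (row-major: m error terms, n
// weights), e is the m*1 error vector, w the current weights, mu the damping
// factor. sse(w) is a hypothetical caller-supplied routine that evaluates the
// network and returns the sum-squared error for a given weight vector.
bool lmStep(const std::vector<double>& J, const std::vector<double>& e,
            std::vector<double>& w, double& mu,
            double muInc, double muDec, double muMax,
            double (*sse)(const std::vector<double>&))
{
    const size_t m = e.size(), n = w.size();
    std::vector<double> JtJ(n * n, 0.0), Jte(n, 0.0);
    for (size_t i = 0; i < m; ++i)
        for (size_t a = 0; a < n; ++a) {
            Jte[a] += J[i*n + a] * e[i];
            for (size_t b = 0; b < n; ++b) JtJ[a*n + b] += J[i*n + a] * J[i*n + b];
        }
    const double perf = sse(w);
    while (mu <= muMax) {
        std::vector<double> A(JtJ), rhs(Jte), dw;
        for (size_t a = 0; a < n; ++a) A[a*n + a] += mu;   // jj + ii*mu
        for (double& v : rhs) v = -v;                      // -je
        if (solveLinear(A, rhs, dw)) {
            std::vector<double> w2(w);
            for (size_t a = 0; a < n; ++a) w2[a] += dw[a];
            if (sse(w2) < perf) {                          // better: keep the step, relax mu
                w = w2;
                mu = std::max(mu * muDec, 1e-20);
                return true;
            }
        }
        mu *= muInc;                                       // worse or singular: stiffen mu and retry
    }
    return false;                                          // mu exceeded mu_max
}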
How to design a custom transform filter
For DirectShow beginners, the biggest hurdle is probably the first attempt at designing a custom filter.
Designing a custom transform filter is hard:
First, a filter is a kind of DLL (with the extension .ax), and writing a DLL project requires some VC background, so it is a good idea to brush up on DLL basics first.
Second, registering the DLL, generating GUIDs and configuring the project are all rather tedious.
Third, there are few ready-made transform filter examples on the web. The classic DirectShow samples such as NULLINPLACE and CONTRAST are too complex; they come with dialog boxes and property pages, which makes them unsuitable for beginners, and none of them involves image format conversion, even though the main use of a transform filter is converting media types, so those samples do not really apply here.
As a beginner I was deeply troubled by all of these problems. After a lot of hard digging I finally climbed out of the mire and everything became clear, so I am writing it down in the hope that it helps others, and as a small summary for myself.
My example is a transform filter that converts YUY2 (16-bit) media into the RGB (24-bit) type.
The reason is that my camera only supports the YUY2 format and I wanted to obtain bitmaps; it was also a chance to learn filter design along the way.
The concrete steps are as follows:
I. Configure the development environment
1. In VC, set the DirectX SDK include and library paths under Tools->Options->Directories.
2. Build the base class sources to produce strmbasd.lib (debug) and strmbase.lib (release).
3. Create a new Win32 DLL (empty) project with the VC wizard.
4. Setting->Link->Output file name: YUV2RGBfilter.ax
5. In Setting->Link add strmbasd.lib winmm.lib quartz.lib vfw32.lib (mind the paths).
6. Define a .def file with the same name, add it to the project, with the following content:
LIBRARY YUV2RGBfilter.ax
EXPORTS
DllMain PRIVATE
DllGetClassObject PRIVATE
DllCanUnloadNow PRIVATE
DllRegisterServer PRIVATE
DllUnregisterServer PRIVATE
7. Create a class YUV2RGBfilter, with its own .cpp and .h files.
8. In YUV2RGBfilter.cpp, define the DLL entry point and the registration functions, placed at the end of the cpp file:
//
// DllEntryPoint
//
extern "C" BOOL WINAPI DllEntryPoint(HINSTANCE, ULONG, LPVOID);
BOOL APIENTRY DllMain(HANDLE hModule,
DWORD dwReason,
LPVOID lpReserved)
{
return DllEntryPoint((HINSTANCE)(hModule), dwReason, lpReserved);
}
////////////////////////////////////////////////////////////////////////
//
// Exported entry points for registration and unregistration
// (in this case they only call through to default implementations).
//
////////////////////////////////////////////////////////////////////////
STDAPI DllRegisterServer()
{
return AMovieDllRegisterServer2( TRUE );
}
STDAPI DllUnregisterServer()
{
return AMovieDllRegisterServer2( FALSE );
}
9. Header files to include in the cpp file:
#include <streams.h>
#include <windows.h>
#include <initguid.h>
#include <olectl.h>
#if (1100 > _MSC_VER)
#include <olectlid.h>
#endif
#include "Y2Ruids.h" // our own public guids
#include "YUV2RGBfilter.h"
II. Develop the filter
1. Generate a GUID (run the guidgen tool from the command line) and put it in a separate file, Y2Ruids.h, referenced on its own:
#include <initguid.h>
// YUV2toRGB Filter Object
// { FFC8FD-B1A6-b0-A-D6EDEAFDA}
DEFINE_GUID(CLSID_YUV2toRGB,
0xffc8fd, 0xb1a6, 0xb0, 0xa3, 0x8, 0xd6, 0xed, 0xea, 0xf4, 0x5, 0xda);
2. Build the CYUV2RGBfilter class, derived from CTransformFilter, placed in TransformFilter.h:
// ----------------------------------------------------------------------------
// Class definitions of CYUV2RGBfilter
// ----------------------------------------------------------------------------
//
//
class CYUV2RGBfilter : public CTransformFilter
{
public:
static CUnknown * WINAPI CreateInstance(LPUNKNOWN punk, HRESULT *phr);
STDMETHODIMP NonDelegatingQueryInterface(REFIID riid, void ** ppv);
DECLARE_IUNKNOWN;
// override pure virtual function
HRESULT CheckInputType(const CMediaType *mtIn);
HRESULT CheckTransform(const CMediaType *mtIn, const CMediaType *mtOut);
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProp);
HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);
HRESULT Transform(IMediaSample *pIn, IMediaSample *pOut);
private:
//Constructor
CYUV2RGBfilter(TCHAR *tszName, LPUNKNOWN punk, HRESULT *phr);
// member function
VOID ChangeFormat(AM_MEDIA_TYPE* pAdjustedType);
DWORD ConvertYUV2toRGB(BYTE* yuv, BYTE* rgb, DWORD dsize);
// member variable
const long m_lBufferRequest;
CCritSec m_Y2RLock; // To serialise access.
};
3. Write the constructor in the required form:
//
// CYUV2RGBfilter::Constructor
//
CYUV2RGBfilter::CYUV2RGBfilter(TCHAR *tszName,LPUNKNOWN punk,HRESULT *phr) :
CTransformFilter(tszName, punk, CLSID_YUV2toRGB),
m_lBufferRequest(1)
{
ASSERT(tszName);
ASSERT(phr);
} // CYUV2RGBfilter
4. Override the five pure virtual functions of CTransformFilter (the most important part):
HRESULT CheckInputType(const CMediaType *mtIn);
HRESULT CheckTransform(const CMediaType *mtIn, const CMediaType *mtOut);
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProp);
HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);
HRESULT Transform(IMediaSample *pIn, IMediaSample *pOut);
5. Design your own private functions to do the actual work (an example sketch of ConvertYUV2toRGB is given after the Transform implementation below).
6. Register the filter information:
//setup data
const AMOVIESETUP_MEDIATYPE
sudPinTypes = { &MEDIATYPE_Video // clsMajorType
, &MEDIASUBTYPE_NULL } ; // clsMinorType
const AMOVIESETUP_PIN
psudPins[] = { { L"Input" // strName
, FALSE // bRendered
, FALSE // bOutput
, FALSE // bZero
, FALSE // bMany
, &CLSID_NULL // clsConnectsToFilter
, L"Output" // strConnectsToPin
, 1 // nTypes
, &sudPinTypes } // lpTypes
, { L"Output" // strName
, FALSE // bRendered
, TRUE // bOutput
, FALSE // bZero
, FALSE // bMany
, &CLSID_NULL // clsConnectsToFilter
, L"Input" // strConnectsToPin
, 1 // nTypes
, &sudPinTypes } }; // lpTypes
const AMOVIESETUP_FILTER
sudYUV2RGB = { &CLSID_YUV2toRGB // clsID
, L"YUV2RGB" // strName
, MERIT_DO_NOT_USE // dwMerit
, 2 // nPins
, psudPins }; // lpPin
//
// Needed for the CreateInstance mechanism
//
CFactoryTemplate g_Templates[1]=
{ { L"YUV2RGB"
, &CLSID_YUV2toRGB
, CYUV2RGBfilter::CreateInstance
, NULL
, &sudYUV2RGB }
};
int g_cTemplates = sizeof(g_Templates)/sizeof(g_Templates[0]);
After a successful build you get YUV2RGBfilter.ax.
Register it once from the command line with regsvr32 YUV2RGBfilter.ax; there is no need to register repeatedly. If you modify the filter later, just overwrite the old .ax with the newly built one.
Debugging is most convenient in GraphEdit.
Those are the overall steps for designing a filter.
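Besides opening the .ax in GraphEdit, you can also exercise the registered filter from a small host program. The sketch below is my own illustration, not part of the original project; it assumes the filter has already been registered with regsvr32 so that CLSID_YUV2toRGB (from Y2Ruids.h) can be created with CoCreateInstance, and it leaves out the capture source, pin connections and most error handling. Link against strmiids.lib and ole32.lib.

// Minimal DirectShow host sketch: create a filter graph, create the registered
// YUV2RGB filter by its CLSID and add it to the graph.
#include <dshow.h>
#include "Y2Ruids.h"   // defines CLSID_YUV2toRGB

int main()
{
    CoInitialize(NULL);

    IGraphBuilder *pGraph = NULL;
    IBaseFilter   *pYuv2Rgb = NULL;

    // The standard filter graph manager
    HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL, CLSCTX_INPROC_SERVER,
                                  IID_IGraphBuilder, (void**)&pGraph);
    if (SUCCEEDED(hr))
    {
        // Our transform filter; this only succeeds after regsvr32 YUV2RGBfilter.ax
        hr = CoCreateInstance(CLSID_YUV2toRGB, NULL, CLSCTX_INPROC_SERVER,
                              IID_IBaseFilter, (void**)&pYuv2Rgb);
    }
    if (SUCCEEDED(hr))
        hr = pGraph->AddFilter(pYuv2Rgb, L"YUV2RGB");

    // ... add the capture source, connect the pins, run the graph ...

    if (pYuv2Rgb) pYuv2Rgb->Release();
    if (pGraph)   pGraph->Release();
    CoUninitialize();
    return SUCCEEDED(hr) ? 0 : 1;
}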
III. Now the key part: a detailed look at the five overridden pure virtual functions. This is where it really matters.
HRESULT CheckInputType(const CMediaType *mtIn);
HRESULT CheckTransform(const CMediaType *mtIn, const CMediaType *mtOut);
HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProp);
HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);
HRESULT Transform(IMediaSample *pIn, IMediaSample *pOut);
All five of these are pure virtual functions; they are the interface CTransformFilter provides, and we must override them before the class can be instantiated.
The biggest source of confusion for a beginner is who actually calls these functions, and where their arguments come from when they are called. I was stuck on exactly these questions at first. In fact the DX help documentation explains it quite clearly; I simply had not read it carefully:
CheckInputType is called by the transform filter's input pin to check whether the media type offered to this filter's input is acceptable;
CheckTransform is called by the transform filter's output pin to check whether this filter's proposed output is acceptable;
GetMediaType is called by the transform filter's output pin to provide the media formats that the output pin supports, for the downstream filter to enumerate;
DecideBufferSize is called by the transform filter's output pin to decide the number and size of buffers;
The upstream filter delivers samples to this filter by calling IMemInputPin::Receive on the filter's input pin, and the filter then calls CTransformFilter::Transform to process the data.
The whole process is:
The input pin calls CheckInputType to examine the media type offered by the upstream connection. If it can be accepted, the output pin's GetMediaType enumerates the output media types, and the output pin's CheckTransform is then used to find, and select, an output media type compatible with the input media type. Next, DecideBufferSize determines the properties of the output buffer. Once all checks and negotiations succeed, the pins can connect, and Transform carries the samples from the input pin to the output pin. The output media type is determined by GetMediaType; as long as the media types match, the connection succeeds, but the actual data transfer still happens in Transform. In theory, for uncompressed video one sample is exactly one frame, so frames can be processed precisely one by one.
To change the media format on the output pin, you must set the new media format inside GetMediaType, and then confirm in CheckTransform that the output media format is really the one we produce. For example, to turn a YUY2 (16-bit) media format into an RGB8 (8-bit) media format, make the following changes:
In GetMediaType:
CheckPointer(pMediaType,E_POINTER);
VIDEOINFO vih;
memset(&vih, 0, sizeof(vih));
vih.bmiHeader.biCompression = 0;
vih.bmiHeader.biBitCount = 8;
vih.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
vih.bmiHeader.biWidth = 320; // frame width (the original value was lost; use your own)
vih.bmiHeader.biHeight = 240; // frame height (the original value was lost; use your own)
vih.bmiHeader.biPlanes = 1;
vih.bmiHeader.biSizeImage = DIBSIZE(vih.bmiHeader);
vih.bmiHeader.biClrImportant = 0;
vih.bmiHeader.biClrUsed = 256;
// set up a 256-entry grayscale palette
for (UINT i=0; i<256; i++)
{
vih.bmiColors[i].rgbBlue=(BYTE)i;
vih.bmiColors[i].rgbRed=(BYTE)i;
vih.bmiColors[i].rgbGreen=(BYTE)i;
vih.bmiColors[i].rgbReserved=(BYTE)0;
}
pMediaType->SetType(&MEDIATYPE_Video);
pMediaType->SetFormatType(&FORMAT_VideoInfo);
pMediaType->SetFormat((BYTE*)&vih, sizeof(vih));
pMediaType->SetSubtype(&MEDIASUBTYPE_RGB8);
pMediaType->SetSampleSize(vih.bmiHeader.biSizeImage);
return NOERROR;
Then confirm in CheckTransform that this is the output we support:
BITMAPINFOHEADER *pNewType = HEADER(mtOut->Format());
if ((pNewType->biPlanes==1)
&&(pNewType->biBitCount==8)
&&(pNewType->biWidth==320) // same placeholder width as in GetMediaType above (original value lost)
&&(pNewType->biHeight==240) // same placeholder height as in GetMediaType above (original value lost)
&&(pNewType->biClrUsed==256)
&&(pNewType->biSizeImage==DIBSIZE(*pNewType)))
{
return S_OK;
}
My own implementation is as follows:
// GetMediaType
//
// I support one type, namely the type of the input pin
// We must be connected to support the single output type
//
HRESULT CYUV2RGBfilter::GetMediaType(int iPosition, CMediaType *pMediaType)
{
// Is the input pin connected
if(m_pInput->IsConnected() == FALSE)
{
return E_UNEXPECTED;
}
// This should never happen
if(iPosition < 0)
{
return E_INVALIDARG;
}
// Do we have more items to offer
if(iPosition > 0)
{
return VFW_S_NO_MORE_ITEMS;
}
CheckPointer(pMediaType,E_POINTER);
if (iPosition == 0)
{
HRESULT hr = m_pInput->ConnectionMediaType(pMediaType);
if (FAILED(hr))
{
return hr;
}
}
// make some appropriate change
ASSERT(pMediaType->formattype == FORMAT_VideoInfo);
pMediaType->subtype = MEDIASUBTYPE_RGB24;
VIDEOINFOHEADER *pVih =
reinterpret_cast<VIDEOINFOHEADER*>(pMediaType->pbFormat);
pVih->bmiHeader.biCompression = 0;
pVih->bmiHeader.biBitCount = 24; // RGB24 output
// width and height stay as copied from the input connection
pVih->bmiHeader.biSizeImage = DIBSIZE(pVih->bmiHeader);
return S_OK;
} // GetMediaType
//
// CheckInputType
//
// Check the input type is OK, return an error otherwise
//
HRESULT CYUV2RGBfilter::CheckInputType(const CMediaType *mtIn)
{
CheckPointer(mtIn,E_POINTER);
// Check this is a VIDEOINFO type
if(*mtIn->FormatType() != FORMAT_VideoInfo)
{
return E_INVALIDARG;
}
if((IsEqualGUID(*mtIn->Type(), MEDIATYPE_Video)) &&
(IsEqualGUID(*mtIn->Subtype(), MEDIASUBTYPE_YUY2)))
{
VIDEOINFO *pvi = (VIDEOINFO *) mtIn->Format();
if ((pvi->bmiHeader.biBitCount == 16) // YUY2 is a 16 bits-per-pixel format
&&(pvi->bmiHeader.biCompression==0))
return S_OK;
else
return VFW_E_TYPE_NOT_ACCEPTED; // returning FALSE here would equal S_OK and silently accept the type
}
else
{
return VFW_E_TYPE_NOT_ACCEPTED;
}
} // CheckInputType
// CheckTransform
//
// To be able to transform the formats must be compatible
// mtIn: YUY2, 16-bit
// mtOut: RGB24, 24-bit
HRESULT CYUV2RGBfilter::CheckTransform(const CMediaType *mtIn, const CMediaType *mtOut)
{
CheckPointer(mtIn,E_POINTER);
CheckPointer(mtOut,E_POINTER);
HRESULT hr;
if(FAILED(hr = CheckInputType(mtIn)))
{
return hr;
}
// format must be a VIDEOINFOHEADER
if((*mtOut->FormatType() != FORMAT_VideoInfo)
||(mtOut->cbFormat<sizeof(VIDEOINFOHEADER ))
||(mtOut->subtype!=MEDIASUBTYPE_RGB24))
{
return E_INVALIDARG;
}
BITMAPINFOHEADER *pBmiOut = HEADER(mtOut->pbFormat);
if ((pBmiOut->biPlanes!=1)
||(pBmiOut->biBitCount!=24)
||(pBmiOut->biCompression!=0)
||(pBmiOut->biWidth!=HEADER(mtIn->Format())->biWidth)   // original explicit width lost; match the input instead
||(pBmiOut->biHeight!=HEADER(mtIn->Format())->biHeight)) // original explicit height lost; match the input instead
{
return E_INVALIDARG;
}
return S_OK;
}
// CheckTransform
HRESULT CYUV2RGBfilter::DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProperties)
{
CheckPointer(pAlloc,E_POINTER);
CheckPointer(pProperties,E_POINTER);
// Is the input pin connected
if(m_pInput->IsConnected() == FALSE)
{
return E_UNEXPECTED;
}
HRESULT hr = NOERROR;
pProperties->cBuffers = 1;
pProperties->cbBuffer = m_pInput->CurrentMediaType().GetSampleSize()*2; // twice the input sample size, more than enough for the 16bpp -> 24bpp output
ASSERT(pProperties->cbBuffer);
// If we don't have fixed sized samples we must guess some size
if(!m_pInput->CurrentMediaType().bFixedSizeSamples)
{
if(pProperties->cbBuffer < 100000)
{
// nothing more than a guess!! (fallback value as in the DirectShow NullIP sample)
pProperties->cbBuffer = 100000;
}
}
// Ask the allocator to reserve us some sample memory, NOTE the function
// can succeed (that is return NOERROR) but still not have allocated the
// memory that we requested, so we must check we got whatever we wanted
ALLOCATOR_PROPERTIES Actual;
hr = pAlloc->SetProperties(pProperties,&Actual);
if(FAILED(hr))
{
return hr;
}
ASSERT(Actual.cBuffers == 1);
if(pProperties->cBuffers > Actual.cBuffers ||
pProperties->cbBuffer > Actual.cbBuffer)
{
return E_FAIL;
}
return NOERROR;
} // DecideBufferSize
//
// Transform
//
// Copy the input sample into the output sample
//
//
HRESULT CYUV2RGBfilter::Transform(IMediaSample *pIn, IMediaSample *pOut)
{
CheckPointer(pIn,E_POINTER);
CheckPointer(pOut,E_POINTER);
// Copy the sample data
BYTE *pSourceBuffer, *pDestBuffer;
long lSourceSize = pIn->GetActualDataLength();
long lDestSize = (long)(lSourceSize*1.5);
pIn->GetPointer(&pSourceBuffer);
pOut->GetPointer(&pDestBuffer);
//change data
ConvertYUV2toRGB(pSourceBuffer,pDestBuffer,lSourceSize);
//memset(pDestBuffer, 0, lDestSize);
REFERENCE_TIME TimeStart, TimeEnd;
if(NOERROR == pIn->GetTime(&TimeStart, &TimeEnd))
{
pOut->SetTime(&TimeStart, &TimeEnd);
}
LONGLONG MediaStart, MediaEnd;
if(pIn->GetMediaTime(&MediaStart,&MediaEnd) == NOERROR)
{
pOut->SetMediaTime(&MediaStart,&MediaEnd);
}
// Copy the Sync point property
HRESULT hr = pIn->IsSyncPoint();
if(hr == S_OK)
{
pOut->SetSyncPoint(TRUE);
}
else if(hr == S_FALSE)
{
pOut->SetSyncPoint(FALSE);
}
else
{ // an unexpected error has occurred...
return E_UNEXPECTED;
}
//
AM_MEDIA_TYPE* pMediaType;
pIn->GetMediaType(&pMediaType);
ChangeFormat(pMediaType);
// Copy the media type
pOut->SetMediaType(pMediaType);
// Copy the preroll property
hr = pIn->IsPreroll();
if(hr == S_OK)
{
pOut->SetPreroll(TRUE);
}
else if(hr == S_FALSE)
{
pOut->SetPreroll(FALSE);
}
else
{ // an unexpected error has occurred...
return E_UNEXPECTED;
}
// Copy the discontinuity property
hr = pIn->IsDiscontinuity();
if(hr == S_OK)
{
pOut->SetDiscontinuity(TRUE);
}
else if(hr == S_FALSE)
{
pOut->SetDiscontinuity(FALSE);
}
else
{ // an unexpected error has occurred...
return E_UNEXPECTED;
}
// Copy the actual data length
//KASSERT((long)lDestSize <= pOut->GetSize());
pOut->SetActualDataLength(lDestSize);
return S_OK;
} // Transform
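The private helper ConvertYUV2toRGB that Transform calls above (and that step 5 referred to) is not shown in the original post. Below is one possible sketch of such a helper: it assumes packed YUY2 input (Y0 U Y1 V for every two pixels) and BGR-ordered 24-bit output as used by RGB24 DIBs, and the integer conversion coefficients and clamping are my own choice rather than the author's. A real implementation also has to respect DWORD row alignment and the bottom-up row order of RGB24 bitmaps.

// One possible YUY2 (packed 4:2:2, 16 bpp) -> RGB24 (BGR byte order) conversion.
// Every 4 input bytes (Y0 U Y1 V) produce two output pixels, i.e. 6 output
// bytes, which matches lDestSize = lSourceSize * 1.5 in Transform above.
static inline BYTE Clamp255(int v)
{
    return (BYTE)(v < 0 ? 0 : (v > 255 ? 255 : v));
}

DWORD CYUV2RGBfilter::ConvertYUV2toRGB(BYTE* yuv, BYTE* rgb, DWORD dsize)
{
    DWORD out = 0;
    for (DWORD i = 0; i + 3 < dsize; i += 4)
    {
        int y0 = yuv[i];
        int u  = yuv[i + 1] - 128;
        int y1 = yuv[i + 2];
        int v  = yuv[i + 3] - 128;

        // Integer approximation (scaled by 256) of
        // R = Y + 1.402*V', G = Y - 0.344*U' - 0.714*V', B = Y + 1.772*U'
        int rd =            359 * v;
        int gd = -88 * u  - 183 * v;
        int bd = 454 * u;

        // first pixel (B, G, R byte order)
        rgb[out++] = Clamp255(((y0 << 8) + bd + 128) >> 8);
        rgb[out++] = Clamp255(((y0 << 8) + gd + 128) >> 8);
        rgb[out++] = Clamp255(((y0 << 8) + rd + 128) >> 8);
        // second pixel shares the same U and V
        rgb[out++] = Clamp255(((y1 << 8) + bd + 128) >> 8);
        rgb[out++] = Clamp255(((y1 << 8) + gd + 128) >> 8);
        rgb[out++] = Clamp255(((y1 << 8) + rd + 128) >> 8);
    }
    return out;   // number of bytes written
}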
Following these steps gives you a transform filter that meets the functional requirements.
Working through them also gives a reasonable overall picture of filter development.
Source: /s/blog_vusf.html